diff --git a/README.md b/README.md
index ea3d95d..e3363ff 100644
--- a/README.md
+++ b/README.md
@@ -1,3 +1,3 @@
 to update you need:
-apt install python3-pip python3.4 python3.5 python3.4-dev python3.5-dev
+apt install python3-pip python3.4 python3.5 python3.4-dev python3.5-dev python3.6 python3.6-dev
diff --git a/lib/python3.4/site-packages/SQLAlchemy-1.0.12.dist-info/METADATA b/lib/python3.4/site-packages/SQLAlchemy-1.0.12.dist-info/METADATA
index f993bb0..e5ec428 100644
--- a/lib/python3.4/site-packages/SQLAlchemy-1.0.12.dist-info/METADATA
+++ b/lib/python3.4/site-packages/SQLAlchemy-1.0.12.dist-info/METADATA
@@ -6,6 +6,7 @@ Home-page: http://www.sqlalchemy.org
 Author: Mike Bayer
 Author-email: mike_mp@zzzcomputing.com
 License: MIT License
+Description-Content-Type: UNKNOWN
 Platform: UNKNOWN
 Classifier: Development Status :: 5 - Production/Stable
 Classifier: Intended Audience :: Developers
diff --git a/lib/python3.4/site-packages/SQLAlchemy-1.0.12.dist-info/RECORD b/lib/python3.4/site-packages/SQLAlchemy-1.0.12.dist-info/RECORD
index 2c799dd..057fad3 100644
--- a/lib/python3.4/site-packages/SQLAlchemy-1.0.12.dist-info/RECORD
+++ b/lib/python3.4/site-packages/SQLAlchemy-1.0.12.dist-info/RECORD
@@ -1,13 +1,13 @@
 SQLAlchemy-1.0.12.dist-info/DESCRIPTION.rst,sha256=ZN8fj2owI_rw0Emr3_RXqoNfTFkThjiZy7xcCzg1W_g,5013
-SQLAlchemy-1.0.12.dist-info/METADATA,sha256=fntYBelbmQAxIrj5_YGLpIGPzwQBxiA_6kJwVdrwMF4,5786
+SQLAlchemy-1.0.12.dist-info/METADATA,sha256=xCBLJSNub29eg_Bm-fHTUT_al-Sr8jh38ztUF4_s1so,5820
 SQLAlchemy-1.0.12.dist-info/RECORD,,
-SQLAlchemy-1.0.12.dist-info/WHEEL,sha256=HslHw5cSLCuyOLxj8duGAooHNvXnupcmoBU1NzRPr2w,104
-SQLAlchemy-1.0.12.dist-info/metadata.json,sha256=JpEwQiqyWE4fnbo3thN5jZ_9e7jYut521x-nYPY27A4,965
+SQLAlchemy-1.0.12.dist-info/WHEEL,sha256=AEztX7vHDtcgysb-4-5-DyIKMLIPg6NMxY9dXTRdoXQ,104
+SQLAlchemy-1.0.12.dist-info/metadata.json,sha256=QT7EcApgL9QrRqR1YIngngveBNd13H8h-oNK9fsxj0U,1004
 SQLAlchemy-1.0.12.dist-info/top_level.txt,sha256=rp-ZgB7D8G11ivXON5VGPjupT1voYmWqkciDt5Uaw_Q,11
 sqlalchemy/__init__.py,sha256=fTurvwmGkoRt_zdwxoZNWTHg6VdzvBpeHyPmUnexOK4,2112
-sqlalchemy/cprocessors.cpython-34m.so,sha256=9c-YOtWdmfG-k2sVd7Z2mpBiNFXtvi29UgfOxNPOOPM,42824
-sqlalchemy/cresultproxy.cpython-34m.so,sha256=NW56qP26QzyMYvdFfLv15pXoJRQSlDDoxIBzfcMzmQI,54056
-sqlalchemy/cutils.cpython-34m.so,sha256=Nr_Lwl_q1cujcQksdw2oLYMsxh7lbgn06TUhjfNZNZQ,25280
+sqlalchemy/cprocessors.cpython-34m.so,sha256=hvG3A0r4VO9gevdsLGZYRdqNfG2rahDIFUqJ-fUxAB4,52136
+sqlalchemy/cresultproxy.cpython-34m.so,sha256=piAFu3JE3mOaKpNSg6vcu8jGTl_-X6elUDWS2h_YOfQ,61504
+sqlalchemy/cutils.cpython-34m.so,sha256=-ARQsTXx0XDzghnRNCwdaxm2eeIn2TuEqoU_Wb18h6E,34312
 sqlalchemy/events.py,sha256=j8yref-XfuJxkPKbvnZmB4jeUAIujPcbLAzD2cKV4f4,43944
 sqlalchemy/exc.py,sha256=NhA5R5nDdducWkp0MXtlQ0-Q6iF_rhqkHWblIfuSYGk,11706
 sqlalchemy/inspection.py,sha256=zMa-2nt-OQ0Op1dqq0Z2XCnpdAFSTkqif5Kdi8Wz8AU,3093
@@ -191,186 +191,186 @@ sqlalchemy/util/langhelpers.py,sha256=Nhe3Y9ieK6JaFYejjYosVOjOSSIBT2V385Hu6HGcyZ
 sqlalchemy/util/queue.py,sha256=rs3W0LDhKt7M_dlQEjYpI9KS-bzQmmwN38LE_-RRVvU,6548
 sqlalchemy/util/topological.py,sha256=xKsYjjAat4p8cdqRHKwibLzr6WONbPTC0X8Mqg7jYno,2794
 SQLAlchemy-1.0.12.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
-sqlalchemy/testing/__pycache__/replay_fixture.cpython-34.pyc,,
-sqlalchemy/engine/__pycache__/strategies.cpython-34.pyc,,
-sqlalchemy/connectors/__pycache__/mxodbc.cpython-34.pyc,,
-sqlalchemy/ext/__pycache__/hybrid.cpython-34.pyc,,
-sqlalchemy/dialects/firebird/__pycache__/kinterbasdb.cpython-34.pyc,, -sqlalchemy/engine/__pycache__/__init__.cpython-34.pyc,, -sqlalchemy/sql/__pycache__/expression.cpython-34.pyc,, -sqlalchemy/orm/__pycache__/relationships.cpython-34.pyc,, -sqlalchemy/ext/declarative/__pycache__/__init__.cpython-34.pyc,, -sqlalchemy/testing/__pycache__/fixtures.cpython-34.pyc,, -sqlalchemy/testing/plugin/__pycache__/plugin_base.cpython-34.pyc,, -sqlalchemy/testing/__pycache__/assertions.cpython-34.pyc,, -sqlalchemy/util/__pycache__/__init__.cpython-34.pyc,, -sqlalchemy/testing/suite/__pycache__/test_select.cpython-34.pyc,, -sqlalchemy/orm/__pycache__/unitofwork.cpython-34.pyc,, -sqlalchemy/testing/suite/__pycache__/test_insert.cpython-34.pyc,, -sqlalchemy/dialects/mysql/__pycache__/oursql.cpython-34.pyc,, -sqlalchemy/orm/__pycache__/descriptor_props.cpython-34.pyc,, -sqlalchemy/sql/__pycache__/selectable.cpython-34.pyc,, -sqlalchemy/__pycache__/exc.cpython-34.pyc,, -sqlalchemy/dialects/mysql/__pycache__/pymysql.cpython-34.pyc,, -sqlalchemy/orm/__pycache__/interfaces.cpython-34.pyc,, -sqlalchemy/ext/declarative/__pycache__/clsregistry.cpython-34.pyc,, -sqlalchemy/connectors/__pycache__/__init__.cpython-34.pyc,, -sqlalchemy/engine/__pycache__/url.cpython-34.pyc,, -sqlalchemy/dialects/sqlite/__pycache__/base.cpython-34.pyc,, -sqlalchemy/sql/__pycache__/base.cpython-34.pyc,, -sqlalchemy/testing/__pycache__/util.cpython-34.pyc,, -sqlalchemy/ext/__pycache__/serializer.cpython-34.pyc,, -sqlalchemy/sql/__pycache__/dml.cpython-34.pyc,, -sqlalchemy/sql/__pycache__/functions.cpython-34.pyc,, -sqlalchemy/orm/__pycache__/attributes.cpython-34.pyc,, -sqlalchemy/testing/suite/__pycache__/test_sequence.cpython-34.pyc,, -sqlalchemy/orm/__pycache__/strategy_options.cpython-34.pyc,, -sqlalchemy/testing/suite/__pycache__/__init__.cpython-34.pyc,, -sqlalchemy/sql/__pycache__/elements.cpython-34.pyc,, -sqlalchemy/orm/__pycache__/deprecated_interfaces.cpython-34.pyc,, -sqlalchemy/__pycache__/interfaces.cpython-34.pyc,, -sqlalchemy/engine/__pycache__/interfaces.cpython-34.pyc,, -sqlalchemy/dialects/mysql/__pycache__/mysqlconnector.cpython-34.pyc,, -sqlalchemy/dialects/mssql/__pycache__/pyodbc.cpython-34.pyc,, -sqlalchemy/dialects/mssql/__pycache__/pymssql.cpython-34.pyc,, -sqlalchemy/__pycache__/pool.cpython-34.pyc,, -sqlalchemy/dialects/sybase/__pycache__/__init__.cpython-34.pyc,, -sqlalchemy/event/__pycache__/attr.cpython-34.pyc,, -sqlalchemy/dialects/firebird/__pycache__/__init__.cpython-34.pyc,, -sqlalchemy/sql/__pycache__/visitors.cpython-34.pyc,, -sqlalchemy/dialects/mysql/__pycache__/mysqldb.cpython-34.pyc,, -sqlalchemy/sql/__pycache__/util.cpython-34.pyc,, -sqlalchemy/__pycache__/inspection.cpython-34.pyc,, -sqlalchemy/testing/plugin/__pycache__/pytestplugin.cpython-34.pyc,, -sqlalchemy/dialects/mssql/__pycache__/__init__.cpython-34.pyc,, -sqlalchemy/orm/__pycache__/events.cpython-34.pyc,, -sqlalchemy/orm/__pycache__/__init__.cpython-34.pyc,, -sqlalchemy/connectors/__pycache__/pyodbc.cpython-34.pyc,, -sqlalchemy/testing/suite/__pycache__/test_dialect.cpython-34.pyc,, -sqlalchemy/dialects/mysql/__pycache__/cymysql.cpython-34.pyc,, -sqlalchemy/dialects/postgresql/__pycache__/ranges.cpython-34.pyc,, -sqlalchemy/dialects/postgresql/__pycache__/psycopg2cffi.cpython-34.pyc,, -sqlalchemy/dialects/mssql/__pycache__/mxodbc.cpython-34.pyc,, -sqlalchemy/sql/__pycache__/schema.cpython-34.pyc,, -sqlalchemy/util/__pycache__/queue.cpython-34.pyc,, -sqlalchemy/sql/__pycache__/sqltypes.cpython-34.pyc,, 
-sqlalchemy/testing/__pycache__/assertsql.cpython-34.pyc,, -sqlalchemy/orm/__pycache__/session.cpython-34.pyc,, -sqlalchemy/orm/__pycache__/evaluator.cpython-34.pyc,, -sqlalchemy/testing/suite/__pycache__/test_reflection.cpython-34.pyc,, -sqlalchemy/testing/plugin/__pycache__/__init__.cpython-34.pyc,, -sqlalchemy/util/__pycache__/compat.cpython-34.pyc,, -sqlalchemy/testing/__pycache__/entities.cpython-34.pyc,, -sqlalchemy/event/__pycache__/__init__.cpython-34.pyc,, -sqlalchemy/dialects/mssql/__pycache__/information_schema.cpython-34.pyc,, -sqlalchemy/orm/__pycache__/scoping.cpython-34.pyc,, -sqlalchemy/testing/suite/__pycache__/test_update_delete.cpython-34.pyc,, -sqlalchemy/dialects/oracle/__pycache__/cx_oracle.cpython-34.pyc,, -sqlalchemy/testing/__pycache__/mock.cpython-34.pyc,, -sqlalchemy/sql/__pycache__/annotation.cpython-34.pyc,, -sqlalchemy/__pycache__/processors.cpython-34.pyc,, -sqlalchemy/engine/__pycache__/reflection.cpython-34.pyc,, -sqlalchemy/ext/__pycache__/baked.cpython-34.pyc,, -sqlalchemy/ext/declarative/__pycache__/api.cpython-34.pyc,, -sqlalchemy/connectors/__pycache__/zxJDBC.cpython-34.pyc,, -sqlalchemy/__pycache__/events.cpython-34.pyc,, sqlalchemy/orm/__pycache__/path_registry.cpython-34.pyc,, -sqlalchemy/orm/__pycache__/dynamic.cpython-34.pyc,, -sqlalchemy/sql/__pycache__/__init__.cpython-34.pyc,, -sqlalchemy/sql/__pycache__/default_comparator.cpython-34.pyc,, -sqlalchemy/dialects/firebird/__pycache__/base.cpython-34.pyc,, -sqlalchemy/dialects/mysql/__pycache__/gaerdbms.cpython-34.pyc,, -sqlalchemy/dialects/postgresql/__pycache__/__init__.cpython-34.pyc,, -sqlalchemy/ext/__pycache__/horizontal_shard.cpython-34.pyc,, -sqlalchemy/engine/__pycache__/base.cpython-34.pyc,, -sqlalchemy/ext/__pycache__/orderinglist.cpython-34.pyc,, -sqlalchemy/dialects/mssql/__pycache__/base.cpython-34.pyc,, -sqlalchemy/sql/__pycache__/ddl.cpython-34.pyc,, -sqlalchemy/testing/suite/__pycache__/test_ddl.cpython-34.pyc,, -sqlalchemy/util/__pycache__/_collections.cpython-34.pyc,, -sqlalchemy/orm/__pycache__/strategies.cpython-34.pyc,, -sqlalchemy/dialects/mysql/__pycache__/__init__.cpython-34.pyc,, -sqlalchemy/sql/__pycache__/crud.cpython-34.pyc,, -sqlalchemy/testing/__pycache__/profiling.cpython-34.pyc,, -sqlalchemy/ext/__pycache__/mutable.cpython-34.pyc,, -sqlalchemy/engine/__pycache__/threadlocal.cpython-34.pyc,, -sqlalchemy/ext/__pycache__/compiler.cpython-34.pyc,, -sqlalchemy/ext/__pycache__/instrumentation.cpython-34.pyc,, -sqlalchemy/sql/__pycache__/compiler.cpython-34.pyc,, -sqlalchemy/engine/__pycache__/result.cpython-34.pyc,, -sqlalchemy/orm/__pycache__/query.cpython-34.pyc,, -sqlalchemy/dialects/sqlite/__pycache__/pysqlite.cpython-34.pyc,, -sqlalchemy/event/__pycache__/api.cpython-34.pyc,, -sqlalchemy/orm/__pycache__/identity.cpython-34.pyc,, -sqlalchemy/orm/__pycache__/sync.cpython-34.pyc,, -sqlalchemy/dialects/__pycache__/__init__.cpython-34.pyc,, -sqlalchemy/util/__pycache__/deprecations.cpython-34.pyc,, -sqlalchemy/orm/__pycache__/exc.cpython-34.pyc,, -sqlalchemy/testing/__pycache__/distutils_run.cpython-34.pyc,, -sqlalchemy/sql/__pycache__/naming.cpython-34.pyc,, -sqlalchemy/dialects/postgresql/__pycache__/pypostgresql.cpython-34.pyc,, -sqlalchemy/ext/__pycache__/__init__.cpython-34.pyc,, -sqlalchemy/util/__pycache__/topological.cpython-34.pyc,, -sqlalchemy/dialects/mssql/__pycache__/zxjdbc.cpython-34.pyc,, -sqlalchemy/databases/__pycache__/__init__.cpython-34.pyc,, -sqlalchemy/testing/__pycache__/engines.cpython-34.pyc,, 
sqlalchemy/ext/__pycache__/associationproxy.cpython-34.pyc,, -sqlalchemy/orm/__pycache__/properties.cpython-34.pyc,, -sqlalchemy/sql/__pycache__/operators.cpython-34.pyc,, -sqlalchemy/dialects/oracle/__pycache__/__init__.cpython-34.pyc,, -sqlalchemy/testing/__pycache__/warnings.cpython-34.pyc,, -sqlalchemy/sql/__pycache__/type_api.cpython-34.pyc,, -sqlalchemy/util/__pycache__/langhelpers.cpython-34.pyc,, -sqlalchemy/testing/suite/__pycache__/test_types.cpython-34.pyc,, -sqlalchemy/event/__pycache__/base.cpython-34.pyc,, -sqlalchemy/ext/__pycache__/automap.cpython-34.pyc,, -sqlalchemy/event/__pycache__/legacy.cpython-34.pyc,, -sqlalchemy/dialects/postgresql/__pycache__/psycopg2.cpython-34.pyc,, -sqlalchemy/dialects/postgresql/__pycache__/zxjdbc.cpython-34.pyc,, -sqlalchemy/testing/__pycache__/runner.cpython-34.pyc,, -sqlalchemy/dialects/mssql/__pycache__/adodbapi.cpython-34.pyc,, -sqlalchemy/event/__pycache__/registry.cpython-34.pyc,, -sqlalchemy/testing/suite/__pycache__/test_results.cpython-34.pyc,, sqlalchemy/__pycache__/__init__.cpython-34.pyc,, -sqlalchemy/__pycache__/schema.cpython-34.pyc,, -sqlalchemy/orm/__pycache__/mapper.cpython-34.pyc,, -sqlalchemy/dialects/sybase/__pycache__/pyodbc.cpython-34.pyc,, -sqlalchemy/testing/__pycache__/pickleable.cpython-34.pyc,, -sqlalchemy/dialects/sybase/__pycache__/pysybase.cpython-34.pyc,, -sqlalchemy/dialects/oracle/__pycache__/zxjdbc.cpython-34.pyc,, -sqlalchemy/testing/__pycache__/__init__.cpython-34.pyc,, -sqlalchemy/testing/plugin/__pycache__/bootstrap.cpython-34.pyc,, -sqlalchemy/orm/__pycache__/dependency.cpython-34.pyc,, -sqlalchemy/dialects/sqlite/__pycache__/__init__.cpython-34.pyc,, -sqlalchemy/__pycache__/types.cpython-34.pyc,, -sqlalchemy/dialects/postgresql/__pycache__/hstore.cpython-34.pyc,, -sqlalchemy/testing/__pycache__/config.cpython-34.pyc,, -sqlalchemy/dialects/postgresql/__pycache__/base.cpython-34.pyc,, -sqlalchemy/testing/plugin/__pycache__/noseplugin.cpython-34.pyc,, -sqlalchemy/dialects/postgresql/__pycache__/pg8000.cpython-34.pyc,, -sqlalchemy/testing/__pycache__/exclusions.cpython-34.pyc,, -sqlalchemy/testing/__pycache__/schema.cpython-34.pyc,, -sqlalchemy/engine/__pycache__/default.cpython-34.pyc,, -sqlalchemy/engine/__pycache__/util.cpython-34.pyc,, -sqlalchemy/dialects/postgresql/__pycache__/constraints.cpython-34.pyc,, -sqlalchemy/dialects/mysql/__pycache__/zxjdbc.cpython-34.pyc,, -sqlalchemy/orm/__pycache__/persistence.cpython-34.pyc,, -sqlalchemy/dialects/firebird/__pycache__/fdb.cpython-34.pyc,, -sqlalchemy/dialects/sybase/__pycache__/mxodbc.cpython-34.pyc,, -sqlalchemy/testing/__pycache__/provision.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/deprecated_interfaces.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/dynamic.cpython-34.pyc,, +sqlalchemy/event/__pycache__/legacy.cpython-34.pyc,, +sqlalchemy/event/__pycache__/api.cpython-34.pyc,, sqlalchemy/dialects/__pycache__/postgres.cpython-34.pyc,, -sqlalchemy/orm/__pycache__/collections.cpython-34.pyc,, -sqlalchemy/testing/__pycache__/requirements.cpython-34.pyc,, -sqlalchemy/ext/declarative/__pycache__/base.cpython-34.pyc,, +sqlalchemy/testing/__pycache__/profiling.cpython-34.pyc,, +sqlalchemy/dialects/firebird/__pycache__/__init__.cpython-34.pyc,, +sqlalchemy/dialects/mysql/__pycache__/gaerdbms.cpython-34.pyc,, +sqlalchemy/dialects/mysql/__pycache__/oursql.cpython-34.pyc,, +sqlalchemy/testing/suite/__pycache__/test_types.cpython-34.pyc,, +sqlalchemy/event/__pycache__/registry.cpython-34.pyc,, +sqlalchemy/testing/__pycache__/distutils_run.cpython-34.pyc,, 
+sqlalchemy/dialects/mssql/__pycache__/mxodbc.cpython-34.pyc,, +sqlalchemy/engine/__pycache__/base.cpython-34.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/pypostgresql.cpython-34.pyc,, +sqlalchemy/util/__pycache__/__init__.cpython-34.pyc,, +sqlalchemy/util/__pycache__/topological.cpython-34.pyc,, +sqlalchemy/dialects/firebird/__pycache__/kinterbasdb.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/persistence.cpython-34.pyc,, +sqlalchemy/connectors/__pycache__/mxodbc.cpython-34.pyc,, +sqlalchemy/dialects/mssql/__pycache__/base.cpython-34.pyc,, +sqlalchemy/connectors/__pycache__/__init__.cpython-34.pyc,, +sqlalchemy/sql/__pycache__/elements.cpython-34.pyc,, +sqlalchemy/testing/suite/__pycache__/__init__.cpython-34.pyc,, +sqlalchemy/dialects/__pycache__/__init__.cpython-34.pyc,, +sqlalchemy/util/__pycache__/langhelpers.cpython-34.pyc,, +sqlalchemy/testing/__pycache__/entities.cpython-34.pyc,, +sqlalchemy/engine/__pycache__/interfaces.cpython-34.pyc,, +sqlalchemy/sql/__pycache__/schema.cpython-34.pyc,, +sqlalchemy/ext/__pycache__/baked.cpython-34.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/zxjdbc.cpython-34.pyc,, sqlalchemy/orm/__pycache__/base.cpython-34.pyc,, -sqlalchemy/orm/__pycache__/instrumentation.cpython-34.pyc,, -sqlalchemy/__pycache__/log.cpython-34.pyc,, -sqlalchemy/dialects/sqlite/__pycache__/pysqlcipher.cpython-34.pyc,, -sqlalchemy/orm/__pycache__/loading.cpython-34.pyc,, -sqlalchemy/orm/__pycache__/state.cpython-34.pyc,, -sqlalchemy/dialects/mysql/__pycache__/pyodbc.cpython-34.pyc,, -sqlalchemy/dialects/oracle/__pycache__/base.cpython-34.pyc,, -sqlalchemy/orm/__pycache__/util.cpython-34.pyc,, +sqlalchemy/connectors/__pycache__/pyodbc.cpython-34.pyc,, +sqlalchemy/sql/__pycache__/annotation.cpython-34.pyc,, +sqlalchemy/dialects/oracle/__pycache__/zxjdbc.cpython-34.pyc,, +sqlalchemy/testing/__pycache__/runner.cpython-34.pyc,, +sqlalchemy/testing/__pycache__/schema.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/relationships.cpython-34.pyc,, +sqlalchemy/__pycache__/pool.cpython-34.pyc,, +sqlalchemy/testing/suite/__pycache__/test_sequence.cpython-34.pyc,, +sqlalchemy/dialects/sqlite/__pycache__/__init__.cpython-34.pyc,, +sqlalchemy/sql/__pycache__/ddl.cpython-34.pyc,, +sqlalchemy/dialects/sybase/__pycache__/pyodbc.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/dependency.cpython-34.pyc,, +sqlalchemy/sql/__pycache__/visitors.cpython-34.pyc,, +sqlalchemy/testing/__pycache__/provision.cpython-34.pyc,, sqlalchemy/dialects/postgresql/__pycache__/json.cpython-34.pyc,, -sqlalchemy/dialects/mysql/__pycache__/base.cpython-34.pyc,, +sqlalchemy/sql/__pycache__/selectable.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/exc.cpython-34.pyc,, +sqlalchemy/ext/declarative/__pycache__/clsregistry.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/interfaces.cpython-34.pyc,, +sqlalchemy/testing/__pycache__/assertions.cpython-34.pyc,, +sqlalchemy/ext/__pycache__/compiler.cpython-34.pyc,, +sqlalchemy/dialects/oracle/__pycache__/cx_oracle.cpython-34.pyc,, +sqlalchemy/testing/suite/__pycache__/test_select.cpython-34.pyc,, +sqlalchemy/dialects/firebird/__pycache__/fdb.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/unitofwork.cpython-34.pyc,, +sqlalchemy/testing/__pycache__/util.cpython-34.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/psycopg2cffi.cpython-34.pyc,, +sqlalchemy/__pycache__/interfaces.cpython-34.pyc,, +sqlalchemy/engine/__pycache__/util.cpython-34.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/__init__.cpython-34.pyc,, +sqlalchemy/__pycache__/schema.cpython-34.pyc,, 
+sqlalchemy/orm/__pycache__/sync.cpython-34.pyc,, +sqlalchemy/__pycache__/processors.cpython-34.pyc,, +sqlalchemy/dialects/firebird/__pycache__/base.cpython-34.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/psycopg2.cpython-34.pyc,, +sqlalchemy/databases/__pycache__/__init__.cpython-34.pyc,, +sqlalchemy/sql/__pycache__/sqltypes.cpython-34.pyc,, +sqlalchemy/dialects/oracle/__pycache__/base.cpython-34.pyc,, +sqlalchemy/sql/__pycache__/functions.cpython-34.pyc,, +sqlalchemy/dialects/sqlite/__pycache__/pysqlcipher.cpython-34.pyc,, +sqlalchemy/testing/suite/__pycache__/test_dialect.cpython-34.pyc,, +sqlalchemy/ext/__pycache__/automap.cpython-34.pyc,, +sqlalchemy/testing/__pycache__/mock.cpython-34.pyc,, +sqlalchemy/testing/__pycache__/requirements.cpython-34.pyc,, +sqlalchemy/testing/suite/__pycache__/test_results.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/__init__.cpython-34.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/base.cpython-34.pyc,, +sqlalchemy/util/__pycache__/deprecations.cpython-34.pyc,, +sqlalchemy/dialects/mssql/__pycache__/pyodbc.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/state.cpython-34.pyc,, +sqlalchemy/event/__pycache__/base.cpython-34.pyc,, +sqlalchemy/__pycache__/log.cpython-34.pyc,, +sqlalchemy/connectors/__pycache__/zxJDBC.cpython-34.pyc,, +sqlalchemy/testing/plugin/__pycache__/plugin_base.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/identity.cpython-34.pyc,, +sqlalchemy/dialects/mysql/__pycache__/mysqlconnector.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/attributes.cpython-34.pyc,, +sqlalchemy/ext/declarative/__pycache__/__init__.cpython-34.pyc,, +sqlalchemy/dialects/sqlite/__pycache__/base.cpython-34.pyc,, +sqlalchemy/ext/__pycache__/serializer.cpython-34.pyc,, +sqlalchemy/testing/plugin/__pycache__/pytestplugin.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/properties.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/mapper.cpython-34.pyc,, +sqlalchemy/testing/__pycache__/fixtures.cpython-34.pyc,, +sqlalchemy/sql/__pycache__/base.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/events.cpython-34.pyc,, +sqlalchemy/dialects/mysql/__pycache__/zxjdbc.cpython-34.pyc,, +sqlalchemy/dialects/mysql/__pycache__/__init__.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/strategy_options.cpython-34.pyc,, +sqlalchemy/dialects/sybase/__pycache__/mxodbc.cpython-34.pyc,, +sqlalchemy/util/__pycache__/compat.cpython-34.pyc,, +sqlalchemy/testing/plugin/__pycache__/bootstrap.cpython-34.pyc,, +sqlalchemy/sql/__pycache__/compiler.cpython-34.pyc,, +sqlalchemy/dialects/mysql/__pycache__/mysqldb.cpython-34.pyc,, +sqlalchemy/__pycache__/inspection.cpython-34.pyc,, +sqlalchemy/testing/plugin/__pycache__/__init__.cpython-34.pyc,, +sqlalchemy/dialects/mssql/__pycache__/adodbapi.cpython-34.pyc,, +sqlalchemy/engine/__pycache__/url.cpython-34.pyc,, +sqlalchemy/dialects/oracle/__pycache__/__init__.cpython-34.pyc,, +sqlalchemy/engine/__pycache__/result.cpython-34.pyc,, +sqlalchemy/testing/suite/__pycache__/test_insert.cpython-34.pyc,, +sqlalchemy/event/__pycache__/__init__.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/scoping.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/instrumentation.cpython-34.pyc,, sqlalchemy/dialects/sybase/__pycache__/base.cpython-34.pyc,, +sqlalchemy/dialects/mysql/__pycache__/pyodbc.cpython-34.pyc,, +sqlalchemy/testing/plugin/__pycache__/noseplugin.cpython-34.pyc,, +sqlalchemy/dialects/mysql/__pycache__/cymysql.cpython-34.pyc,, +sqlalchemy/testing/__pycache__/exclusions.cpython-34.pyc,, +sqlalchemy/ext/__pycache__/mutable.cpython-34.pyc,, 
+sqlalchemy/sql/__pycache__/default_comparator.cpython-34.pyc,, +sqlalchemy/engine/__pycache__/default.cpython-34.pyc,, +sqlalchemy/__pycache__/types.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/session.cpython-34.pyc,, +sqlalchemy/util/__pycache__/_collections.cpython-34.pyc,, +sqlalchemy/engine/__pycache__/reflection.cpython-34.pyc,, +sqlalchemy/dialects/sybase/__pycache__/pysybase.cpython-34.pyc,, +sqlalchemy/testing/__pycache__/assertsql.cpython-34.pyc,, +sqlalchemy/testing/__pycache__/replay_fixture.cpython-34.pyc,, +sqlalchemy/dialects/mysql/__pycache__/pymysql.cpython-34.pyc,, +sqlalchemy/testing/__pycache__/config.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/strategies.cpython-34.pyc,, +sqlalchemy/dialects/sqlite/__pycache__/pysqlite.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/util.cpython-34.pyc,, +sqlalchemy/dialects/mysql/__pycache__/base.cpython-34.pyc,, +sqlalchemy/sql/__pycache__/crud.cpython-34.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/pg8000.cpython-34.pyc,, +sqlalchemy/engine/__pycache__/__init__.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/loading.cpython-34.pyc,, +sqlalchemy/testing/__pycache__/__init__.cpython-34.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/ranges.cpython-34.pyc,, +sqlalchemy/sql/__pycache__/operators.cpython-34.pyc,, +sqlalchemy/dialects/mssql/__pycache__/__init__.cpython-34.pyc,, +sqlalchemy/testing/__pycache__/pickleable.cpython-34.pyc,, +sqlalchemy/sql/__pycache__/expression.cpython-34.pyc,, +sqlalchemy/dialects/mssql/__pycache__/pymssql.cpython-34.pyc,, +sqlalchemy/sql/__pycache__/naming.cpython-34.pyc,, +sqlalchemy/ext/__pycache__/horizontal_shard.cpython-34.pyc,, +sqlalchemy/dialects/sybase/__pycache__/__init__.cpython-34.pyc,, +sqlalchemy/engine/__pycache__/threadlocal.cpython-34.pyc,, +sqlalchemy/ext/declarative/__pycache__/api.cpython-34.pyc,, +sqlalchemy/testing/__pycache__/warnings.cpython-34.pyc,, +sqlalchemy/sql/__pycache__/util.cpython-34.pyc,, +sqlalchemy/sql/__pycache__/dml.cpython-34.pyc,, +sqlalchemy/ext/__pycache__/__init__.cpython-34.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/hstore.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/collections.cpython-34.pyc,, +sqlalchemy/sql/__pycache__/__init__.cpython-34.pyc,, +sqlalchemy/testing/suite/__pycache__/test_ddl.cpython-34.pyc,, +sqlalchemy/ext/__pycache__/orderinglist.cpython-34.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/constraints.cpython-34.pyc,, +sqlalchemy/__pycache__/exc.cpython-34.pyc,, +sqlalchemy/testing/suite/__pycache__/test_update_delete.cpython-34.pyc,, +sqlalchemy/engine/__pycache__/strategies.cpython-34.pyc,, +sqlalchemy/ext/declarative/__pycache__/base.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/evaluator.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/query.cpython-34.pyc,, +sqlalchemy/dialects/mssql/__pycache__/zxjdbc.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/descriptor_props.cpython-34.pyc,, +sqlalchemy/__pycache__/events.cpython-34.pyc,, +sqlalchemy/sql/__pycache__/type_api.cpython-34.pyc,, +sqlalchemy/util/__pycache__/queue.cpython-34.pyc,, +sqlalchemy/ext/__pycache__/hybrid.cpython-34.pyc,, +sqlalchemy/event/__pycache__/attr.cpython-34.pyc,, +sqlalchemy/testing/suite/__pycache__/test_reflection.cpython-34.pyc,, +sqlalchemy/dialects/mssql/__pycache__/information_schema.cpython-34.pyc,, +sqlalchemy/ext/__pycache__/instrumentation.cpython-34.pyc,, +sqlalchemy/testing/__pycache__/engines.cpython-34.pyc,, diff --git a/lib/python3.4/site-packages/SQLAlchemy-1.0.12.dist-info/WHEEL b/lib/python3.4/site-packages/SQLAlchemy-1.0.12.dist-info/WHEEL 
index db40973..1fdf70f 100644 --- a/lib/python3.4/site-packages/SQLAlchemy-1.0.12.dist-info/WHEEL +++ b/lib/python3.4/site-packages/SQLAlchemy-1.0.12.dist-info/WHEEL @@ -1,5 +1,5 @@ Wheel-Version: 1.0 -Generator: bdist_wheel (0.29.0) +Generator: bdist_wheel (0.30.0) Root-Is-Purelib: false Tag: cp34-cp34m-linux_x86_64 diff --git a/lib/python3.4/site-packages/SQLAlchemy-1.0.12.dist-info/metadata.json b/lib/python3.4/site-packages/SQLAlchemy-1.0.12.dist-info/metadata.json index ef57ea4..a446465 100644 --- a/lib/python3.4/site-packages/SQLAlchemy-1.0.12.dist-info/metadata.json +++ b/lib/python3.4/site-packages/SQLAlchemy-1.0.12.dist-info/metadata.json @@ -1 +1 @@ -{"classifiers": ["Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Programming Language :: Python", "Programming Language :: Python :: 3", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: Jython", "Programming Language :: Python :: Implementation :: PyPy", "Topic :: Database :: Front-Ends", "Operating System :: OS Independent"], "extensions": {"python.details": {"contacts": [{"email": "mike_mp@zzzcomputing.com", "name": "Mike Bayer", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "http://www.sqlalchemy.org"}}}, "generator": "bdist_wheel (0.29.0)", "license": "MIT License", "metadata_version": "2.0", "name": "SQLAlchemy", "summary": "Database Abstraction Library", "test_requires": [{"requires": ["mock", "pytest (>=2.5.2)", "pytest-xdist"]}], "version": "1.0.12"} \ No newline at end of file +{"classifiers": ["Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Programming Language :: Python", "Programming Language :: Python :: 3", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: Jython", "Programming Language :: Python :: Implementation :: PyPy", "Topic :: Database :: Front-Ends", "Operating System :: OS Independent"], "description_content_type": "UNKNOWN", "extensions": {"python.details": {"contacts": [{"email": "mike_mp@zzzcomputing.com", "name": "Mike Bayer", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "http://www.sqlalchemy.org"}}}, "generator": "bdist_wheel (0.30.0)", "license": "MIT License", "metadata_version": "2.0", "name": "SQLAlchemy", "summary": "Database Abstraction Library", "test_requires": [{"requires": ["mock", "pytest (>=2.5.2)", "pytest-xdist"]}], "version": "1.0.12"} \ No newline at end of file diff --git a/lib/python3.4/site-packages/_markerlib-0.0.0.dist-info/DESCRIPTION.rst b/lib/python3.4/site-packages/_markerlib-0.0.0.dist-info/DESCRIPTION.rst deleted file mode 100644 index c6b6a1c..0000000 --- a/lib/python3.4/site-packages/_markerlib-0.0.0.dist-info/DESCRIPTION.rst +++ /dev/null @@ -1,238 +0,0 @@ -=============================== -Installing and Using Setuptools -=============================== - -.. contents:: **Table of Contents** - - -`Change History `_. - -------------------------- -Installation Instructions -------------------------- - -The recommended way to bootstrap setuptools on any system is to download -`ez_setup.py`_ and run it using the target Python environment. Different -operating systems have different recommended techniques to accomplish this -basic routine, so below are some examples to get you started. 
- -Setuptools requires Python 2.6 or later. To install setuptools -on Python 2.4 or Python 2.5, use the `bootstrap script for Setuptools 1.x -`_. - -The link provided to ez_setup.py is a bookmark to bootstrap script for the -latest known stable release. - -.. _ez_setup.py: https://bootstrap.pypa.io/ez_setup.py - -Windows (Powershell 3 or later) -=============================== - -For best results, uninstall previous versions FIRST (see `Uninstalling`_). - -Using Windows 8 (which includes PowerShell 3) or earlier versions of Windows -with PowerShell 3 installed, it's possible to install with one simple -Powershell command. Start up Powershell and paste this command:: - - > (Invoke-WebRequest https://bootstrap.pypa.io/ez_setup.py).Content | python - - -You must start the Powershell with Administrative privileges or you may choose -to install a user-local installation:: - - > (Invoke-WebRequest https://bootstrap.pypa.io/ez_setup.py).Content | python - --user - -If you have Python 3.3 or later, you can use the ``py`` command to install to -different Python versions. For example, to install to Python 3.3 if you have -Python 2.7 installed:: - - > (Invoke-WebRequest https://bootstrap.pypa.io/ez_setup.py).Content | py -3 - - -The recommended way to install setuptools on Windows is to download -`ez_setup.py`_ and run it. The script will download the appropriate -distribution file and install it for you. - -Once installation is complete, you will find an ``easy_install`` program in -your Python ``Scripts`` subdirectory. For simple invocation and best results, -add this directory to your ``PATH`` environment variable, if it is not already -present. If you did a user-local install, the ``Scripts`` subdirectory is -``$env:APPDATA\Python\Scripts``. - - -Windows (simplified) -==================== - -For Windows without PowerShell 3 or for installation without a command-line, -download `ez_setup.py`_ using your preferred web browser or other technique -and "run" that file. - - -Unix (wget) -=========== - -Most Linux distributions come with wget. - -Download `ez_setup.py`_ and run it using the target Python version. The script -will download the appropriate version and install it for you:: - - > wget https://bootstrap.pypa.io/ez_setup.py -O - | python - -Note that you will may need to invoke the command with superuser privileges to -install to the system Python:: - - > wget https://bootstrap.pypa.io/ez_setup.py -O - | sudo python - -Alternatively, Setuptools may be installed to a user-local path:: - - > wget https://bootstrap.pypa.io/ez_setup.py -O - | python - --user - -Note that on some older systems (noted on Debian 6 and CentOS 5 installations), -`wget` may refuse to download `ez_setup.py`, complaining that the certificate common name `*.c.ssl.fastly.net` -does not match the host name `bootstrap.pypa.io`. In addition, the `ez_setup.py` script may then encounter similar problems using -`wget` internally to download `setuptools-x.y.zip`, complaining that the certificate common name of `www.python.org` does not match the -host name `pypi.python.org`. Those are known issues, related to a bug in the older versions of `wget` -(see `Issue 59 `_). 
If you happen to encounter them, -install Setuptools as follows:: - - > wget --no-check-certificate https://bootstrap.pypa.io/ez_setup.py - > python ez_setup.py --insecure - - -Unix including Mac OS X (curl) -============================== - -If your system has curl installed, follow the ``wget`` instructions but -replace ``wget`` with ``curl`` and ``-O`` with ``-o``. For example:: - - > curl https://bootstrap.pypa.io/ez_setup.py -o - | python - - -Advanced Installation -===================== - -For more advanced installation options, such as installing to custom -locations or prefixes, download and extract the source -tarball from `Setuptools on PyPI `_ -and run setup.py with any supported distutils and Setuptools options. -For example:: - - setuptools-x.x$ python setup.py install --prefix=/opt/setuptools - -Use ``--help`` to get a full options list, but we recommend consulting -the `EasyInstall manual`_ for detailed instructions, especially `the section -on custom installation locations`_. - -.. _EasyInstall manual: https://pythonhosted.org/setuptools/EasyInstall -.. _the section on custom installation locations: https://pythonhosted.org/setuptools/EasyInstall#custom-installation-locations - - -Downloads -========= - -All setuptools downloads can be found at `the project's home page in the Python -Package Index`_. Scroll to the very bottom of the page to find the links. - -.. _the project's home page in the Python Package Index: https://pypi.python.org/pypi/setuptools - -In addition to the PyPI downloads, the development version of ``setuptools`` -is available from the `Bitbucket repo`_, and in-development versions of the -`0.6 branch`_ are available as well. - -.. _Bitbucket repo: https://bitbucket.org/pypa/setuptools/get/default.tar.gz#egg=setuptools-dev -.. _0.6 branch: http://svn.python.org/projects/sandbox/branches/setuptools-0.6/#egg=setuptools-dev06 - -Uninstalling -============ - -On Windows, if Setuptools was installed using an ``.exe`` or ``.msi`` -installer, simply use the uninstall feature of "Add/Remove Programs" in the -Control Panel. - -Otherwise, to uninstall Setuptools or Distribute, regardless of the Python -version, delete all ``setuptools*`` and ``distribute*`` files and -directories from your system's ``site-packages`` directory -(and any other ``sys.path`` directories) FIRST. - -If you are upgrading or otherwise plan to re-install Setuptools or Distribute, -nothing further needs to be done. If you want to completely remove Setuptools, -you may also want to remove the 'easy_install' and 'easy_install-x.x' scripts -and associated executables installed to the Python scripts directory. - --------------------------------- -Using Setuptools and EasyInstall --------------------------------- - -Here are some of the available manuals, tutorials, and other resources for -learning about Setuptools, Python Eggs, and EasyInstall: - -* `The EasyInstall user's guide and reference manual`_ -* `The setuptools Developer's Guide`_ -* `The pkg_resources API reference`_ -* `The Internal Structure of Python Eggs`_ - -Questions, comments, and bug reports should be directed to the `distutils-sig -mailing list`_. If you have written (or know of) any tutorials, documentation, -plug-ins, or other resources for setuptools users, please let us know about -them there, so this reference list can be updated. If you have working, -*tested* patches to correct problems or add features, you may submit them to -the `setuptools bug tracker`_. - -.. 
_setuptools bug tracker: https://bitbucket.org/pypa/setuptools/issues -.. _The Internal Structure of Python Eggs: https://pythonhosted.org/setuptools/formats.html -.. _The setuptools Developer's Guide: https://pythonhosted.org/setuptools/setuptools.html -.. _The pkg_resources API reference: https://pythonhosted.org/setuptools/pkg_resources.html -.. _The EasyInstall user's guide and reference manual: https://pythonhosted.org/setuptools/easy_install.html -.. _distutils-sig mailing list: http://mail.python.org/pipermail/distutils-sig/ - - -------- -Credits -------- - -* The original design for the ``.egg`` format and the ``pkg_resources`` API was - co-created by Phillip Eby and Bob Ippolito. Bob also implemented the first - version of ``pkg_resources``, and supplied the OS X operating system version - compatibility algorithm. - -* Ian Bicking implemented many early "creature comfort" features of - easy_install, including support for downloading via Sourceforge and - Subversion repositories. Ian's comments on the Web-SIG about WSGI - application deployment also inspired the concept of "entry points" in eggs, - and he has given talks at PyCon and elsewhere to inform and educate the - community about eggs and setuptools. - -* Jim Fulton contributed time and effort to build automated tests of various - aspects of ``easy_install``, and supplied the doctests for the command-line - ``.exe`` wrappers on Windows. - -* Phillip J. Eby is the seminal author of setuptools, and - first proposed the idea of an importable binary distribution format for - Python application plug-ins. - -* Significant parts of the implementation of setuptools were funded by the Open - Source Applications Foundation, to provide a plug-in infrastructure for the - Chandler PIM application. In addition, many OSAF staffers (such as Mike - "Code Bear" Taylor) contributed their time and stress as guinea pigs for the - use of eggs and setuptools, even before eggs were "cool". (Thanks, guys!) - -* Tarek Ziadé is the principal author of the Distribute fork, which - re-invigorated the community on the project, encouraged renewed innovation, - and addressed many defects. - -* Since the merge with Distribute, Jason R. Coombs is the - maintainer of setuptools. The project is maintained in coordination with - the Python Packaging Authority (PyPA) and the larger Python community. - -.. _files: - - ---------------- -Code of Conduct ---------------- - -Everyone interacting in the setuptools project's codebases, issue trackers, -chat rooms, and mailing lists is expected to follow the -`PyPA Code of Conduct`_. - -.. 
_PyPA Code of Conduct: https://www.pypa.io/en/latest/code-of-conduct/ - - diff --git a/lib/python3.4/site-packages/_markerlib-0.0.0.dist-info/METADATA b/lib/python3.4/site-packages/_markerlib-0.0.0.dist-info/METADATA deleted file mode 100644 index 2a37585..0000000 --- a/lib/python3.4/site-packages/_markerlib-0.0.0.dist-info/METADATA +++ /dev/null @@ -1,263 +0,0 @@ -Metadata-Version: 2.0 -Name: setuptools -Version: 20.1.1 -Summary: Easily download, build, install, upgrade, and uninstall Python packages -Home-page: https://bitbucket.org/pypa/setuptools -Author: Python Packaging Authority -Author-email: distutils-sig@python.org -License: UNKNOWN -Keywords: CPAN PyPI distutils eggs package management -Platform: UNKNOWN -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python :: 2.6 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: System :: Archiving :: Packaging -Classifier: Topic :: System :: Systems Administration -Classifier: Topic :: Utilities - -=============================== -Installing and Using Setuptools -=============================== - -.. contents:: **Table of Contents** - - -`Change History `_. - -------------------------- -Installation Instructions -------------------------- - -The recommended way to bootstrap setuptools on any system is to download -`ez_setup.py`_ and run it using the target Python environment. Different -operating systems have different recommended techniques to accomplish this -basic routine, so below are some examples to get you started. - -Setuptools requires Python 2.6 or later. To install setuptools -on Python 2.4 or Python 2.5, use the `bootstrap script for Setuptools 1.x -`_. - -The link provided to ez_setup.py is a bookmark to bootstrap script for the -latest known stable release. - -.. _ez_setup.py: https://bootstrap.pypa.io/ez_setup.py - -Windows (Powershell 3 or later) -=============================== - -For best results, uninstall previous versions FIRST (see `Uninstalling`_). - -Using Windows 8 (which includes PowerShell 3) or earlier versions of Windows -with PowerShell 3 installed, it's possible to install with one simple -Powershell command. Start up Powershell and paste this command:: - - > (Invoke-WebRequest https://bootstrap.pypa.io/ez_setup.py).Content | python - - -You must start the Powershell with Administrative privileges or you may choose -to install a user-local installation:: - - > (Invoke-WebRequest https://bootstrap.pypa.io/ez_setup.py).Content | python - --user - -If you have Python 3.3 or later, you can use the ``py`` command to install to -different Python versions. For example, to install to Python 3.3 if you have -Python 2.7 installed:: - - > (Invoke-WebRequest https://bootstrap.pypa.io/ez_setup.py).Content | py -3 - - -The recommended way to install setuptools on Windows is to download -`ez_setup.py`_ and run it. The script will download the appropriate -distribution file and install it for you. - -Once installation is complete, you will find an ``easy_install`` program in -your Python ``Scripts`` subdirectory. 
For simple invocation and best results, -add this directory to your ``PATH`` environment variable, if it is not already -present. If you did a user-local install, the ``Scripts`` subdirectory is -``$env:APPDATA\Python\Scripts``. - - -Windows (simplified) -==================== - -For Windows without PowerShell 3 or for installation without a command-line, -download `ez_setup.py`_ using your preferred web browser or other technique -and "run" that file. - - -Unix (wget) -=========== - -Most Linux distributions come with wget. - -Download `ez_setup.py`_ and run it using the target Python version. The script -will download the appropriate version and install it for you:: - - > wget https://bootstrap.pypa.io/ez_setup.py -O - | python - -Note that you will may need to invoke the command with superuser privileges to -install to the system Python:: - - > wget https://bootstrap.pypa.io/ez_setup.py -O - | sudo python - -Alternatively, Setuptools may be installed to a user-local path:: - - > wget https://bootstrap.pypa.io/ez_setup.py -O - | python - --user - -Note that on some older systems (noted on Debian 6 and CentOS 5 installations), -`wget` may refuse to download `ez_setup.py`, complaining that the certificate common name `*.c.ssl.fastly.net` -does not match the host name `bootstrap.pypa.io`. In addition, the `ez_setup.py` script may then encounter similar problems using -`wget` internally to download `setuptools-x.y.zip`, complaining that the certificate common name of `www.python.org` does not match the -host name `pypi.python.org`. Those are known issues, related to a bug in the older versions of `wget` -(see `Issue 59 `_). If you happen to encounter them, -install Setuptools as follows:: - - > wget --no-check-certificate https://bootstrap.pypa.io/ez_setup.py - > python ez_setup.py --insecure - - -Unix including Mac OS X (curl) -============================== - -If your system has curl installed, follow the ``wget`` instructions but -replace ``wget`` with ``curl`` and ``-O`` with ``-o``. For example:: - - > curl https://bootstrap.pypa.io/ez_setup.py -o - | python - - -Advanced Installation -===================== - -For more advanced installation options, such as installing to custom -locations or prefixes, download and extract the source -tarball from `Setuptools on PyPI `_ -and run setup.py with any supported distutils and Setuptools options. -For example:: - - setuptools-x.x$ python setup.py install --prefix=/opt/setuptools - -Use ``--help`` to get a full options list, but we recommend consulting -the `EasyInstall manual`_ for detailed instructions, especially `the section -on custom installation locations`_. - -.. _EasyInstall manual: https://pythonhosted.org/setuptools/EasyInstall -.. _the section on custom installation locations: https://pythonhosted.org/setuptools/EasyInstall#custom-installation-locations - - -Downloads -========= - -All setuptools downloads can be found at `the project's home page in the Python -Package Index`_. Scroll to the very bottom of the page to find the links. - -.. _the project's home page in the Python Package Index: https://pypi.python.org/pypi/setuptools - -In addition to the PyPI downloads, the development version of ``setuptools`` -is available from the `Bitbucket repo`_, and in-development versions of the -`0.6 branch`_ are available as well. - -.. _Bitbucket repo: https://bitbucket.org/pypa/setuptools/get/default.tar.gz#egg=setuptools-dev -.. 
_0.6 branch: http://svn.python.org/projects/sandbox/branches/setuptools-0.6/#egg=setuptools-dev06 - -Uninstalling -============ - -On Windows, if Setuptools was installed using an ``.exe`` or ``.msi`` -installer, simply use the uninstall feature of "Add/Remove Programs" in the -Control Panel. - -Otherwise, to uninstall Setuptools or Distribute, regardless of the Python -version, delete all ``setuptools*`` and ``distribute*`` files and -directories from your system's ``site-packages`` directory -(and any other ``sys.path`` directories) FIRST. - -If you are upgrading or otherwise plan to re-install Setuptools or Distribute, -nothing further needs to be done. If you want to completely remove Setuptools, -you may also want to remove the 'easy_install' and 'easy_install-x.x' scripts -and associated executables installed to the Python scripts directory. - --------------------------------- -Using Setuptools and EasyInstall --------------------------------- - -Here are some of the available manuals, tutorials, and other resources for -learning about Setuptools, Python Eggs, and EasyInstall: - -* `The EasyInstall user's guide and reference manual`_ -* `The setuptools Developer's Guide`_ -* `The pkg_resources API reference`_ -* `The Internal Structure of Python Eggs`_ - -Questions, comments, and bug reports should be directed to the `distutils-sig -mailing list`_. If you have written (or know of) any tutorials, documentation, -plug-ins, or other resources for setuptools users, please let us know about -them there, so this reference list can be updated. If you have working, -*tested* patches to correct problems or add features, you may submit them to -the `setuptools bug tracker`_. - -.. _setuptools bug tracker: https://bitbucket.org/pypa/setuptools/issues -.. _The Internal Structure of Python Eggs: https://pythonhosted.org/setuptools/formats.html -.. _The setuptools Developer's Guide: https://pythonhosted.org/setuptools/setuptools.html -.. _The pkg_resources API reference: https://pythonhosted.org/setuptools/pkg_resources.html -.. _The EasyInstall user's guide and reference manual: https://pythonhosted.org/setuptools/easy_install.html -.. _distutils-sig mailing list: http://mail.python.org/pipermail/distutils-sig/ - - -------- -Credits -------- - -* The original design for the ``.egg`` format and the ``pkg_resources`` API was - co-created by Phillip Eby and Bob Ippolito. Bob also implemented the first - version of ``pkg_resources``, and supplied the OS X operating system version - compatibility algorithm. - -* Ian Bicking implemented many early "creature comfort" features of - easy_install, including support for downloading via Sourceforge and - Subversion repositories. Ian's comments on the Web-SIG about WSGI - application deployment also inspired the concept of "entry points" in eggs, - and he has given talks at PyCon and elsewhere to inform and educate the - community about eggs and setuptools. - -* Jim Fulton contributed time and effort to build automated tests of various - aspects of ``easy_install``, and supplied the doctests for the command-line - ``.exe`` wrappers on Windows. - -* Phillip J. Eby is the seminal author of setuptools, and - first proposed the idea of an importable binary distribution format for - Python application plug-ins. - -* Significant parts of the implementation of setuptools were funded by the Open - Source Applications Foundation, to provide a plug-in infrastructure for the - Chandler PIM application. 
In addition, many OSAF staffers (such as Mike - "Code Bear" Taylor) contributed their time and stress as guinea pigs for the - use of eggs and setuptools, even before eggs were "cool". (Thanks, guys!) - -* Tarek Ziadé is the principal author of the Distribute fork, which - re-invigorated the community on the project, encouraged renewed innovation, - and addressed many defects. - -* Since the merge with Distribute, Jason R. Coombs is the - maintainer of setuptools. The project is maintained in coordination with - the Python Packaging Authority (PyPA) and the larger Python community. - -.. _files: - - ---------------- -Code of Conduct ---------------- - -Everyone interacting in the setuptools project's codebases, issue trackers, -chat rooms, and mailing lists is expected to follow the -`PyPA Code of Conduct`_. - -.. _PyPA Code of Conduct: https://www.pypa.io/en/latest/code-of-conduct/ - - diff --git a/lib/python3.4/site-packages/_markerlib-0.0.0.dist-info/RECORD b/lib/python3.4/site-packages/_markerlib-0.0.0.dist-info/RECORD deleted file mode 100644 index 044e982..0000000 --- a/lib/python3.4/site-packages/_markerlib-0.0.0.dist-info/RECORD +++ /dev/null @@ -1,104 +0,0 @@ -easy_install.py,sha256=MDC9vt5AxDsXX5qcKlBz2TnW6Tpuv_AobnfhCJ9X3PM,126 -_markerlib/__init__.py,sha256=GSmhZqvAitLJHhSgtqqusfq2nJ_ClP3oy3Lm0uZLIsU,552 -_markerlib/markers.py,sha256=YuFp0-osufFIoqnzG3L0Z2fDCx4Vln3VUDeXJ2DA_1I,3979 -_markerlib-0.0.0.dist-info/DESCRIPTION.rst,sha256=MDsJej8DPV2OKpAKpu74g-2xksRd-uGTeZn4W7D1dnI,9940 -_markerlib-0.0.0.dist-info/METADATA,sha256=l8LCWR8HLdKmOz1QMU2JQREbM9o4dCsMPkBdBSi_Jgo,10997 -_markerlib-0.0.0.dist-info/RECORD,, -_markerlib-0.0.0.dist-info/WHEEL,sha256=o2k-Qa-RMNIJmUdIc7KU6VWR_ErNRbWNlxDIpl7lm34,110 -_markerlib-0.0.0.dist-info/dependency_links.txt,sha256=oUNXJEArClXFiSSvfFwUKY8TYjeIXhuFfCpXn5K0DCE,226 -_markerlib-0.0.0.dist-info/entry_points.txt,sha256=S6yRfyEABPIKq4cNMNO_7LHXzFVZW-exLSrKSI6kgNU,2779 -_markerlib-0.0.0.dist-info/metadata.json,sha256=OwUAZgU-PBMGwfXh2QKg7ec1Kh9aGVfWnOB5mrc48HA,4242 -_markerlib-0.0.0.dist-info/top_level.txt,sha256=7780fzudMJkykiTcIrAQ8m8Lll6kot3EEePye3VJgEE,49 -_markerlib-0.0.0.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1 -setuptools/__init__.py,sha256=WEGb6BRGN2dz3eJTbNRUfInUAhb6_OZJyYAndPGJm6w,5440 -setuptools/archive_util.py,sha256=N30WE5ZQjkytzhAodAXw4FkK-9J5AP1ChrClHnZthOA,6609 -setuptools/depends.py,sha256=WyJIhjIX7D5-JpGSnMAPHEoDcVPQxaO0405keTQT6jM,6418 -setuptools/dist.py,sha256=txOleyyt2xCSTkUjCGW4MYZB8a1xsbC8MulDhSnoivQ,35701 -setuptools/extension.py,sha256=YvsyGHWVWzhNOXMHU239FR14wxw2WwdMLLzWsRP6_IY,1694 -setuptools/launch.py,sha256=hP3qZxDNu5Hf9C-VAkEP4IC_YYfR1XfxMTj6EguxxCg,730 -setuptools/lib2to3_ex.py,sha256=6jPF9sJuHiz0cyg4cwIBLl2VMAxcl3GYSZwWAOuJplU,1998 -setuptools/msvc9_support.py,sha256=fo2vjb-dna1SEuHezQCTuelCo6XFBv5cqaI56ABJ1vw,2187 -setuptools/package_index.py,sha256=T6tZGPHApup6Gl3kz1sCLtY7kmMUXLBKweSAORYS2Qc,39490 -setuptools/py26compat.py,sha256=1Vvuf-hj5bTM3OAXv6vgJQImulne12ann053caOgikU,481 -setuptools/py27compat.py,sha256=CGj-jZcFgHUkrEdLvArkxHj96tAaMbG2-yJtUVU7QVI,306 -setuptools/py31compat.py,sha256=cqYSVBd2pxvKl75185z40htfEr6EKC29KvSBiSoqHOA,1636 -setuptools/sandbox.py,sha256=tuMRu_8R0_w6Qer9VqDiOTqKy1qr_GjHi-2QAg7TMz0,14210 -setuptools/script (dev).tmpl,sha256=f7MR17dTkzaqkCMSVseyOCMVrPVSMdmTQsaB8cZzfuI,201 -setuptools/script.tmpl,sha256=WGTt5piezO27c-Dbx6l5Q4T3Ff20A5z7872hv3aAhYY,138 -setuptools/site-patch.py,sha256=K-0-cAx36mX_PG-qPZwosG9ZLCliRjquKQ4nHiJvvzg,2389 
-setuptools/ssl_support.py,sha256=tAFeeyFPVle_GgarPkNrdfnCJgP9PyN_QYGXTgypoyc,8119 -setuptools/unicode_utils.py,sha256=8zVyrL_MFc6P5AmErs21rr7z-3N1pZ_NkOcDC7BPElU,995 -setuptools/utils.py,sha256=08Z7mt-9mvrx-XvmS5EyKoRn2lxNTlgFsUwBU3Eq9JQ,293 -setuptools/version.py,sha256=E3F8rAlTgCNpmTTY2YGy4T_1iQn3gKsePB7TVIcObu0,23 -setuptools/windows_support.py,sha256=5GrfqSP2-dLGJoZTq2g6dCKkyQxxa2n5IQiXlJCoYEE,714 -setuptools/command/__init__.py,sha256=1AM3hv_zCixE7kTXA-onWfK_2KF8GC8fUw3WSxzi5Fg,564 -setuptools/command/alias.py,sha256=KjpE0sz_SDIHv3fpZcIQK-sCkJz-SrC6Gmug6b9Nkc8,2426 -setuptools/command/bdist_egg.py,sha256=Km4CsGbevhvej6kKEfvTYxfkPoQijUyXmImNifrO4Tg,17184 -setuptools/command/bdist_rpm.py,sha256=B7l0TnzCGb-0nLlm6rS00jWLkojASwVmdhW2w5Qz_Ak,1508 -setuptools/command/bdist_wininst.py,sha256=_6dz3lpB1tY200LxKPLM7qgwTCceOMgaWFF-jW2-pm0,637 -setuptools/command/build_ext.py,sha256=pkQ8xp3YPVGGLkGv-SvfxC_GqFpboph1AFEoMFOgQMo,11964 -setuptools/command/build_py.py,sha256=HvJ88JuougDccaowYlfMV12kYtd0GLahg2DR2vQRqL4,7983 -setuptools/command/develop.py,sha256=VxSYbpM2jQqtRBn5klIjPVBo3sWKNZMlSbHHiRLUlZo,7383 -setuptools/command/easy_install.py,sha256=WDidYAhIEWCT-63bVvoazy8HcITEWDn4Xzgrj3YZgz0,88492 -setuptools/command/egg_info.py,sha256=0_8eI8hgLAlGt8Xk5kiodY_d9lxG6_RSescJISKBJgA,16890 -setuptools/command/install.py,sha256=QwaFiZRU3ytIHoPh8uJ9EqV3Fu9C4ca4B7UGAo95tws,4685 -setuptools/command/install_egg_info.py,sha256=fEqU1EplTs_vUjAzwiEB7LrtdZBQ3BefwuUZLZBDEQ0,5027 -setuptools/command/install_lib.py,sha256=5IZM251t4DzOdZAXCezdROr3X0SeeE41eyV059RNgZ4,5011 -setuptools/command/install_scripts.py,sha256=vX2JC6v7l090N7CrTfihWBklNbPvfNKAY2LRtukM9XE,2231 -setuptools/command/register.py,sha256=bHlMm1qmBbSdahTOT8w6UhA-EgeQIz7p6cD-qOauaiI,270 -setuptools/command/rotate.py,sha256=QGZS2t4CmBl7t79KQijNCjRMU50lu3nRhu4FXWB5LIE,2038 -setuptools/command/saveopts.py,sha256=za7QCBcQimKKriWcoCcbhxPjUz30gSB74zuTL47xpP4,658 -setuptools/command/sdist.py,sha256=kQetnPMw6ao3nurWGJZgS4HkOH4AknzMOSvqbVA6jGA,7050 -setuptools/command/setopt.py,sha256=cygJaJWJmiVhR0e_Uh_0_fWyCxMJIqK-Bu6K0LyYUtU,5086 -setuptools/command/test.py,sha256=N2f5RwxkjwU3YQzFYHtzHr636-pdX9XJDuPg5Y92kSo,6888 -setuptools/command/upload.py,sha256=OjAryq4ZoARZiaTN_MpuG1X8Pu9CJNCKmmbMg-gab5I,649 -setuptools/command/upload_docs.py,sha256=htXpASci5gKP0RIrGZRRmbll7RnTRuwvKWZkYsBlDMM,6815 -setuptools/extern/__init__.py,sha256=mTrrj4yLMdFeEwwnqKnSuvZM5RM-HPZ1iXLgaYDlB9o,132 -../../../bin/easy_install,sha256=LopKneUXvnG_dqCSZe6amF-Bf4WEh1ny0uo28w2_nQ0,299 -../../../bin/easy_install-3.4,sha256=LopKneUXvnG_dqCSZe6amF-Bf4WEh1ny0uo28w2_nQ0,299 -_markerlib-0.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -setuptools/extern/__pycache__/__init__.cpython-34.pyc,, -setuptools/command/__pycache__/bdist_wininst.cpython-34.pyc,, -setuptools/command/__pycache__/install_egg_info.cpython-34.pyc,, -setuptools/command/__pycache__/egg_info.cpython-34.pyc,, -setuptools/command/__pycache__/sdist.cpython-34.pyc,, -setuptools/__pycache__/launch.cpython-34.pyc,, -setuptools/__pycache__/version.cpython-34.pyc,, -setuptools/__pycache__/site-patch.cpython-34.pyc,, -setuptools/command/__pycache__/saveopts.cpython-34.pyc,, -setuptools/command/__pycache__/easy_install.cpython-34.pyc,, -setuptools/command/__pycache__/rotate.cpython-34.pyc,, -setuptools/command/__pycache__/build_ext.cpython-34.pyc,, -setuptools/__pycache__/utils.cpython-34.pyc,, -setuptools/__pycache__/windows_support.cpython-34.pyc,, -setuptools/__pycache__/py26compat.cpython-34.pyc,, 
-setuptools/__pycache__/__init__.cpython-34.pyc,, -setuptools/__pycache__/extension.cpython-34.pyc,, -setuptools/command/__pycache__/__init__.cpython-34.pyc,, -setuptools/__pycache__/msvc9_support.cpython-34.pyc,, -setuptools/__pycache__/py27compat.cpython-34.pyc,, -setuptools/command/__pycache__/upload_docs.cpython-34.pyc,, -setuptools/command/__pycache__/install_scripts.cpython-34.pyc,, -setuptools/command/__pycache__/build_py.cpython-34.pyc,, -setuptools/__pycache__/sandbox.cpython-34.pyc,, -_markerlib/__pycache__/markers.cpython-34.pyc,, -setuptools/__pycache__/py31compat.cpython-34.pyc,, -setuptools/command/__pycache__/alias.cpython-34.pyc,, -setuptools/command/__pycache__/setopt.cpython-34.pyc,, -setuptools/__pycache__/dist.cpython-34.pyc,, -setuptools/__pycache__/depends.cpython-34.pyc,, -setuptools/__pycache__/ssl_support.cpython-34.pyc,, -setuptools/command/__pycache__/register.cpython-34.pyc,, -_markerlib/__pycache__/__init__.cpython-34.pyc,, -setuptools/__pycache__/unicode_utils.cpython-34.pyc,, -setuptools/command/__pycache__/install_lib.cpython-34.pyc,, -setuptools/command/__pycache__/install.cpython-34.pyc,, -setuptools/command/__pycache__/bdist_rpm.cpython-34.pyc,, -__pycache__/easy_install.cpython-34.pyc,, -setuptools/command/__pycache__/upload.cpython-34.pyc,, -setuptools/__pycache__/lib2to3_ex.cpython-34.pyc,, -setuptools/command/__pycache__/bdist_egg.cpython-34.pyc,, -setuptools/command/__pycache__/test.cpython-34.pyc,, -setuptools/__pycache__/archive_util.cpython-34.pyc,, -setuptools/__pycache__/package_index.cpython-34.pyc,, -setuptools/command/__pycache__/develop.cpython-34.pyc,, diff --git a/lib/python3.4/site-packages/_markerlib-0.0.0.dist-info/dependency_links.txt b/lib/python3.4/site-packages/_markerlib-0.0.0.dist-info/dependency_links.txt deleted file mode 100644 index 47d1e81..0000000 --- a/lib/python3.4/site-packages/_markerlib-0.0.0.dist-info/dependency_links.txt +++ /dev/null @@ -1,2 +0,0 @@ -https://pypi.python.org/packages/source/c/certifi/certifi-2015.11.20.tar.gz#md5=25134646672c695c1ff1593c2dd75d08 -https://pypi.python.org/packages/source/w/wincertstore/wincertstore-0.2.zip#md5=ae728f2f007185648d0c7a8679b361e2 diff --git a/lib/python3.4/site-packages/_markerlib-0.0.0.dist-info/metadata.json b/lib/python3.4/site-packages/_markerlib-0.0.0.dist-info/metadata.json deleted file mode 100644 index 885b5f3..0000000 --- a/lib/python3.4/site-packages/_markerlib-0.0.0.dist-info/metadata.json +++ /dev/null @@ -1 +0,0 @@ -{"classifiers": ["Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Topic :: Software Development :: Libraries :: Python Modules", "Topic :: System :: Archiving :: Packaging", "Topic :: System :: Systems Administration", "Topic :: Utilities"], "extensions": {"python.commands": {"wrap_console": {"easy_install": "setuptools.command.easy_install:main"}}, "python.details": {"contacts": [{"email": "distutils-sig@python.org", "name": "Python Packaging Authority", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "https://bitbucket.org/pypa/setuptools"}}, "python.exports": {"console_scripts": {"easy_install": "setuptools.command.easy_install:main"}, 
"distutils.commands": {"alias": "setuptools.command.alias:alias", "bdist_egg": "setuptools.command.bdist_egg:bdist_egg", "bdist_rpm": "setuptools.command.bdist_rpm:bdist_rpm", "bdist_wininst": "setuptools.command.bdist_wininst:bdist_wininst", "build_ext": "setuptools.command.build_ext:build_ext", "build_py": "setuptools.command.build_py:build_py", "develop": "setuptools.command.develop:develop", "easy_install": "setuptools.command.easy_install:easy_install", "egg_info": "setuptools.command.egg_info:egg_info", "install": "setuptools.command.install:install", "install_egg_info": "setuptools.command.install_egg_info:install_egg_info", "install_lib": "setuptools.command.install_lib:install_lib", "install_scripts": "setuptools.command.install_scripts:install_scripts", "register": "setuptools.command.register:register", "rotate": "setuptools.command.rotate:rotate", "saveopts": "setuptools.command.saveopts:saveopts", "sdist": "setuptools.command.sdist:sdist", "setopt": "setuptools.command.setopt:setopt", "test": "setuptools.command.test:test", "upload": "setuptools.command.upload:upload", "upload_docs": "setuptools.command.upload_docs:upload_docs"}, "distutils.setup_keywords": {"convert_2to3_doctests": "setuptools.dist:assert_string_list", "dependency_links": "setuptools.dist:assert_string_list", "eager_resources": "setuptools.dist:assert_string_list", "entry_points": "setuptools.dist:check_entry_points", "exclude_package_data": "setuptools.dist:check_package_data", "extras_require": "setuptools.dist:check_extras", "include_package_data": "setuptools.dist:assert_bool", "install_requires": "setuptools.dist:check_requirements", "namespace_packages": "setuptools.dist:check_nsp", "package_data": "setuptools.dist:check_package_data", "packages": "setuptools.dist:check_packages", "setup_requires": "setuptools.dist:check_requirements", "test_loader": "setuptools.dist:check_importable", "test_runner": "setuptools.dist:check_importable", "test_suite": "setuptools.dist:check_test_suite", "tests_require": "setuptools.dist:check_requirements", "use_2to3": "setuptools.dist:assert_bool", "use_2to3_exclude_fixers": "setuptools.dist:assert_string_list", "use_2to3_fixers": "setuptools.dist:assert_string_list", "zip_safe": "setuptools.dist:assert_bool"}, "egg_info.writers": {"PKG-INFO": "setuptools.command.egg_info:write_pkg_info", "dependency_links.txt": "setuptools.command.egg_info:overwrite_arg", "depends.txt": "setuptools.command.egg_info:warn_depends_obsolete", "eager_resources.txt": "setuptools.command.egg_info:overwrite_arg", "entry_points.txt": "setuptools.command.egg_info:write_entries", "namespace_packages.txt": "setuptools.command.egg_info:overwrite_arg", "requires.txt": "setuptools.command.egg_info:write_requirements", "top_level.txt": "setuptools.command.egg_info:write_toplevel_names"}, "setuptools.installation": {"eggsecutable": "setuptools.command.easy_install:bootstrap"}}}, "generator": "bdist_wheel (0.29.0)", "keywords": ["CPAN", "PyPI", "distutils", "eggs", "package", "management"], "metadata_version": "2.0", "name": "setuptools", "summary": "Easily download, build, install, upgrade, and uninstall Python packages", "version": "20.1.1"} \ No newline at end of file diff --git a/lib/python3.4/site-packages/_markerlib-0.0.0.dist-info/top_level.txt b/lib/python3.4/site-packages/_markerlib-0.0.0.dist-info/top_level.txt deleted file mode 100644 index 5fe9a7e..0000000 --- a/lib/python3.4/site-packages/_markerlib-0.0.0.dist-info/top_level.txt +++ /dev/null @@ -1,4 +0,0 @@ -_markerlib -easy_install 
-pkg_resources -setuptools diff --git a/lib/python3.4/site-packages/_markerlib/__init__.py b/lib/python3.4/site-packages/_markerlib/__init__.py deleted file mode 100644 index e2b237b..0000000 --- a/lib/python3.4/site-packages/_markerlib/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -try: - import ast - from _markerlib.markers import default_environment, compile, interpret -except ImportError: - if 'ast' in globals(): - raise - def default_environment(): - return {} - def compile(marker): - def marker_fn(environment=None, override=None): - # 'empty markers are True' heuristic won't install extra deps. - return not marker.strip() - marker_fn.__doc__ = marker - return marker_fn - def interpret(marker, environment=None, override=None): - return compile(marker)() diff --git a/lib/python3.4/site-packages/_markerlib/markers.py b/lib/python3.4/site-packages/_markerlib/markers.py deleted file mode 100644 index fa83706..0000000 --- a/lib/python3.4/site-packages/_markerlib/markers.py +++ /dev/null @@ -1,119 +0,0 @@ -# -*- coding: utf-8 -*- -"""Interpret PEP 345 environment markers. - -EXPR [in|==|!=|not in] EXPR [or|and] ... - -where EXPR belongs to any of those: - - python_version = '%s.%s' % (sys.version_info[0], sys.version_info[1]) - python_full_version = sys.version.split()[0] - os.name = os.name - sys.platform = sys.platform - platform.version = platform.version() - platform.machine = platform.machine() - platform.python_implementation = platform.python_implementation() - a free string, like '2.6', or 'win32' -""" - -__all__ = ['default_environment', 'compile', 'interpret'] - -import ast -import os -import platform -import sys -import weakref - -_builtin_compile = compile - -try: - from platform import python_implementation -except ImportError: - if os.name == "java": - # Jython 2.5 has ast module, but not platform.python_implementation() function. - def python_implementation(): - return "Jython" - else: - raise - - -# restricted set of variables -_VARS = {'sys.platform': sys.platform, - 'python_version': '%s.%s' % sys.version_info[:2], - # FIXME parsing sys.platform is not reliable, but there is no other - # way to get e.g. 2.7.2+, and the PEP is defined with sys.version - 'python_full_version': sys.version.split(' ', 1)[0], - 'os.name': os.name, - 'platform.version': platform.version(), - 'platform.machine': platform.machine(), - 'platform.python_implementation': python_implementation(), - 'extra': None # wheel extension - } - -for var in list(_VARS.keys()): - if '.' 
in var: - _VARS[var.replace('.', '_')] = _VARS[var] - -def default_environment(): - """Return copy of default PEP 385 globals dictionary.""" - return dict(_VARS) - -class ASTWhitelist(ast.NodeTransformer): - def __init__(self, statement): - self.statement = statement # for error messages - - ALLOWED = (ast.Compare, ast.BoolOp, ast.Attribute, ast.Name, ast.Load, ast.Str) - # Bool operations - ALLOWED += (ast.And, ast.Or) - # Comparison operations - ALLOWED += (ast.Eq, ast.Gt, ast.GtE, ast.In, ast.Is, ast.IsNot, ast.Lt, ast.LtE, ast.NotEq, ast.NotIn) - - def visit(self, node): - """Ensure statement only contains allowed nodes.""" - if not isinstance(node, self.ALLOWED): - raise SyntaxError('Not allowed in environment markers.\n%s\n%s' % - (self.statement, - (' ' * node.col_offset) + '^')) - return ast.NodeTransformer.visit(self, node) - - def visit_Attribute(self, node): - """Flatten one level of attribute access.""" - new_node = ast.Name("%s.%s" % (node.value.id, node.attr), node.ctx) - return ast.copy_location(new_node, node) - -def parse_marker(marker): - tree = ast.parse(marker, mode='eval') - new_tree = ASTWhitelist(marker).generic_visit(tree) - return new_tree - -def compile_marker(parsed_marker): - return _builtin_compile(parsed_marker, '', 'eval', - dont_inherit=True) - -_cache = weakref.WeakValueDictionary() - -def compile(marker): - """Return compiled marker as a function accepting an environment dict.""" - try: - return _cache[marker] - except KeyError: - pass - if not marker.strip(): - def marker_fn(environment=None, override=None): - """""" - return True - else: - compiled_marker = compile_marker(parse_marker(marker)) - def marker_fn(environment=None, override=None): - """override updates environment""" - if override is None: - override = {} - if environment is None: - environment = default_environment() - environment.update(override) - return eval(compiled_marker, environment) - marker_fn.__doc__ = marker - _cache[marker] = marker_fn - return _cache[marker] - -def interpret(marker, environment=None): - return compile(marker)(environment) diff --git a/lib/python3.4/site-packages/ed25519-1.4.dist-info/METADATA b/lib/python3.4/site-packages/ed25519-1.4.dist-info/METADATA index 10628e3..1aee179 100644 --- a/lib/python3.4/site-packages/ed25519-1.4.dist-info/METADATA +++ b/lib/python3.4/site-packages/ed25519-1.4.dist-info/METADATA @@ -6,6 +6,7 @@ Home-page: https://github.com/warner/python-ed25519 Author: Brian Warner Author-email: warner-python-ed25519@lothar.com License: MIT +Description-Content-Type: UNKNOWN Platform: UNKNOWN Classifier: Development Status :: 5 - Production/Stable Classifier: Intended Audience :: Developers diff --git a/lib/python3.4/site-packages/ed25519-1.4.dist-info/RECORD b/lib/python3.4/site-packages/ed25519-1.4.dist-info/RECORD index a65f958..7b7cbd4 100644 --- a/lib/python3.4/site-packages/ed25519-1.4.dist-info/RECORD +++ b/lib/python3.4/site-packages/ed25519-1.4.dist-info/RECORD @@ -1,17 +1,17 @@ ed25519/__init__.py,sha256=0AicD1xQAforRdrUWwmmURJkZ3Gi1lqaifukwZNYJos,401 -ed25519/_ed25519.cpython-34m.so,sha256=lQsx9-Rms4glBHDzWgcSZ4kgoWp-fVN9O3lGztOiNO8,255624 +ed25519/_ed25519.cpython-34m.so,sha256=-qvpNKMbtiJoFhWHlvH83lGmJEntE9ISrt8hYZE4zig,262968 ed25519/_version.py,sha256=yb119RosJrH_RO02_o3o12GWQvkxx3xD4X7UrJW9vTY,469 ed25519/keys.py,sha256=AbMFsbxn0qbwmQ6HntpNURsOGq_y4puwFxs6U7Of2eo,7123 ed25519/test_ed25519.py,sha256=IG8ot-yARHi6PoyJY6ixS1l2L23hE1lCXbSH-XQPCCM,12389 ../../../bin/edsig,sha256=SA1mUUWCjAAaSEe6MKSpVWg-2qXwuiuK3PodCAUwCN0,2853 
ed25519-1.4.dist-info/DESCRIPTION.rst,sha256=8UWGEqjPrB7zPyxLA5Ep6JL58ANbe0Wybqth188exdc,434 -ed25519-1.4.dist-info/METADATA,sha256=5SfqBgerND9vMg8dq81VUTwDclNFkXr30DkD9K95gZU,1114 +ed25519-1.4.dist-info/METADATA,sha256=8xAIfsJS4nw5H1ui1jHsVntmwcMjIzm4j_LHEaW3wNQ,1148 ed25519-1.4.dist-info/RECORD,, -ed25519-1.4.dist-info/WHEEL,sha256=HslHw5cSLCuyOLxj8duGAooHNvXnupcmoBU1NzRPr2w,104 -ed25519-1.4.dist-info/metadata.json,sha256=LyRoPQ8zyOxjJH1CoRteHtukVr0HLA_z_rRyigiJl5c,802 +ed25519-1.4.dist-info/WHEEL,sha256=AEztX7vHDtcgysb-4-5-DyIKMLIPg6NMxY9dXTRdoXQ,104 +ed25519-1.4.dist-info/metadata.json,sha256=6X6ChTS1aIj99pNHtLNerEBCuO-F-P2Z1GgSMt2svQw,841 ed25519-1.4.dist-info/top_level.txt,sha256=U3-N9ZJMBO9MUuZLwoiMbsWSkxsd0TfkNSuzO6O_gYY,8 ed25519-1.4.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -ed25519/__pycache__/test_ed25519.cpython-34.pyc,, ed25519/__pycache__/keys.cpython-34.pyc,, ed25519/__pycache__/_version.cpython-34.pyc,, ed25519/__pycache__/__init__.cpython-34.pyc,, +ed25519/__pycache__/test_ed25519.cpython-34.pyc,, diff --git a/lib/python3.4/site-packages/ed25519-1.4.dist-info/WHEEL b/lib/python3.4/site-packages/ed25519-1.4.dist-info/WHEEL index db40973..1fdf70f 100644 --- a/lib/python3.4/site-packages/ed25519-1.4.dist-info/WHEEL +++ b/lib/python3.4/site-packages/ed25519-1.4.dist-info/WHEEL @@ -1,5 +1,5 @@ Wheel-Version: 1.0 -Generator: bdist_wheel (0.29.0) +Generator: bdist_wheel (0.30.0) Root-Is-Purelib: false Tag: cp34-cp34m-linux_x86_64 diff --git a/lib/python3.4/site-packages/ed25519-1.4.dist-info/metadata.json b/lib/python3.4/site-packages/ed25519-1.4.dist-info/metadata.json index 6a558b5..12a665f 100644 --- a/lib/python3.4/site-packages/ed25519-1.4.dist-info/metadata.json +++ b/lib/python3.4/site-packages/ed25519-1.4.dist-info/metadata.json @@ -1 +1 @@ -{"classifiers": ["Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Programming Language :: Python", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Topic :: Security :: Cryptography"], "extensions": {"python.details": {"contacts": [{"email": "warner-python-ed25519@lothar.com", "name": "Brian Warner", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "https://github.com/warner/python-ed25519"}}}, "generator": "bdist_wheel (0.29.0)", "license": "MIT", "metadata_version": "2.0", "name": "ed25519", "summary": "Ed25519 public-key signatures", "version": "1.4"} \ No newline at end of file +{"classifiers": ["Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Programming Language :: Python", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Topic :: Security :: Cryptography"], "description_content_type": "UNKNOWN", "extensions": {"python.details": {"contacts": [{"email": "warner-python-ed25519@lothar.com", "name": "Brian Warner", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "https://github.com/warner/python-ed25519"}}}, "generator": "bdist_wheel (0.30.0)", "license": "MIT", "metadata_version": "2.0", "name": "ed25519", "summary": "Ed25519 public-key signatures", "version": "1.4"} \ No newline at end of file diff --git 
a/lib/python3.4/site-packages/ed25519/_ed25519.cpython-34m.so b/lib/python3.4/site-packages/ed25519/_ed25519.cpython-34m.so index 1f12c9b..a07ea54 100755 Binary files a/lib/python3.4/site-packages/ed25519/_ed25519.cpython-34m.so and b/lib/python3.4/site-packages/ed25519/_ed25519.cpython-34m.so differ diff --git a/lib/python3.4/site-packages/ed25519/_ed25519.cpython-35m-x86_64-linux-gnu.so b/lib/python3.4/site-packages/ed25519/_ed25519.cpython-35m-x86_64-linux-gnu.so index 56dafc8..ea40ec0 100755 Binary files a/lib/python3.4/site-packages/ed25519/_ed25519.cpython-35m-x86_64-linux-gnu.so and b/lib/python3.4/site-packages/ed25519/_ed25519.cpython-35m-x86_64-linux-gnu.so differ diff --git a/lib/python3.4/site-packages/ed25519/_ed25519.cpython-36m-x86_64-linux-gnu.so b/lib/python3.4/site-packages/ed25519/_ed25519.cpython-36m-x86_64-linux-gnu.so new file mode 100755 index 0000000..15b7049 Binary files /dev/null and b/lib/python3.4/site-packages/ed25519/_ed25519.cpython-36m-x86_64-linux-gnu.so differ diff --git a/lib/python3.4/site-packages/netifaces-0.10.4.dist-info/RECORD b/lib/python3.4/site-packages/netifaces-0.10.4.dist-info/RECORD deleted file mode 100644 index 4a5b9b6..0000000 --- a/lib/python3.4/site-packages/netifaces-0.10.4.dist-info/RECORD +++ /dev/null @@ -1,9 +0,0 @@ -netifaces.cpython-34m.so,sha256=EeBiWGLxMMMKQaD6B1EoqpC_NIiyqj5dW8sxPOffQNY,61520 -netifaces-0.10.4.dist-info/DESCRIPTION.rst,sha256=EyJf6yFbUVkw5TpZ0M61ZeVbtCZdHYAqSdfTXT4lo6w,8397 -netifaces-0.10.4.dist-info/METADATA,sha256=nrFMGFClFWGJIsVo8nC9bLS6iermD9vl6cCQS56eG50,9130 -netifaces-0.10.4.dist-info/RECORD,, -netifaces-0.10.4.dist-info/WHEEL,sha256=HslHw5cSLCuyOLxj8duGAooHNvXnupcmoBU1NzRPr2w,104 -netifaces-0.10.4.dist-info/metadata.json,sha256=FHsfPfcnSFKu412WegZiPVzBB-sHxrG_wCDSiaB6SHQ,846 -netifaces-0.10.4.dist-info/top_level.txt,sha256=PqMTaIuWtSjkdQHX6lH1Lmpv2aqBUYAGqATB8z3A6TQ,10 -netifaces-0.10.4.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1 -netifaces-0.10.4.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 diff --git a/lib/python3.4/site-packages/netifaces-0.10.4.dist-info/metadata.json b/lib/python3.4/site-packages/netifaces-0.10.4.dist-info/metadata.json deleted file mode 100644 index c7b0939..0000000 --- a/lib/python3.4/site-packages/netifaces-0.10.4.dist-info/metadata.json +++ /dev/null @@ -1 +0,0 @@ -{"classifiers": ["Development Status :: 4 - Beta", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Topic :: System :: Networking", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.5", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3"], "extensions": {"python.details": {"contacts": [{"email": "alastair@alastairs-place.net", "name": "Alastair Houghton", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "https://bitbucket.org/al45tair/netifaces"}}}, "generator": "bdist_wheel (0.29.0)", "license": "MIT License", "metadata_version": "2.0", "name": "netifaces", "summary": "Portable network interface information.", "version": "0.10.4"} \ No newline at end of file diff --git a/lib/python3.4/site-packages/netifaces-0.10.4.dist-info/DESCRIPTION.rst b/lib/python3.4/site-packages/netifaces-0.10.6.dist-info/DESCRIPTION.rst similarity index 96% rename from lib/python3.4/site-packages/netifaces-0.10.4.dist-info/DESCRIPTION.rst rename to 
lib/python3.4/site-packages/netifaces-0.10.6.dist-info/DESCRIPTION.rst index 3b79ac6..27fd20f 100644 --- a/lib/python3.4/site-packages/netifaces-0.10.4.dist-info/DESCRIPTION.rst +++ b/lib/python3.4/site-packages/netifaces-0.10.6.dist-info/DESCRIPTION.rst @@ -1,4 +1,4 @@ -netifaces 0.10.4 +netifaces 0.10.6 ================ .. image:: https://drone.io/bitbucket.org/al45tair/netifaces/status.png @@ -24,10 +24,13 @@ This package attempts to solve that problem. First you need to install it, which you can do by typing:: - tar xvzf netifaces-0.10.4.tar.gz - cd netifaces-0.10.4 + tar xvzf netifaces-0.10.6.tar.gz + cd netifaces-0.10.6 python setup.py install +**Note that you will need the relevant developer tools for your platform**, +as netifaces is written in C and installing this way will compile the extension. + Once that's done, you'll need to start Python and do something like the following:: @@ -164,7 +167,7 @@ wish to contribute a patch, please use BitBucket to send a pull request. It's an MIT-style license. Here goes: -Copyright (c) 2007-2014 Alastair Houghton +Copyright (c) 2007-2017 Alastair Houghton Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal diff --git a/lib/python3.4/site-packages/_markerlib-0.0.0.dist-info/INSTALLER b/lib/python3.4/site-packages/netifaces-0.10.6.dist-info/INSTALLER similarity index 100% rename from lib/python3.4/site-packages/_markerlib-0.0.0.dist-info/INSTALLER rename to lib/python3.4/site-packages/netifaces-0.10.6.dist-info/INSTALLER diff --git a/lib/python3.4/site-packages/netifaces-0.10.4.dist-info/METADATA b/lib/python3.4/site-packages/netifaces-0.10.6.dist-info/METADATA similarity index 96% rename from lib/python3.4/site-packages/netifaces-0.10.4.dist-info/METADATA rename to lib/python3.4/site-packages/netifaces-0.10.6.dist-info/METADATA index 7ce084b..59b98bf 100644 --- a/lib/python3.4/site-packages/netifaces-0.10.4.dist-info/METADATA +++ b/lib/python3.4/site-packages/netifaces-0.10.6.dist-info/METADATA @@ -1,11 +1,12 @@ Metadata-Version: 2.0 Name: netifaces -Version: 0.10.4 +Version: 0.10.6 Summary: Portable network interface information. Home-page: https://bitbucket.org/al45tair/netifaces Author: Alastair Houghton Author-email: alastair@alastairs-place.net License: MIT License +Description-Content-Type: UNKNOWN Platform: UNKNOWN Classifier: Development Status :: 4 - Beta Classifier: Intended Audience :: Developers @@ -18,7 +19,7 @@ Classifier: Programming Language :: Python :: 2.6 Classifier: Programming Language :: Python :: 2.7 Classifier: Programming Language :: Python :: 3 -netifaces 0.10.4 +netifaces 0.10.6 ================ .. image:: https://drone.io/bitbucket.org/al45tair/netifaces/status.png @@ -44,10 +45,13 @@ This package attempts to solve that problem. First you need to install it, which you can do by typing:: - tar xvzf netifaces-0.10.4.tar.gz - cd netifaces-0.10.4 + tar xvzf netifaces-0.10.6.tar.gz + cd netifaces-0.10.6 python setup.py install +**Note that you will need the relevant developer tools for your platform**, +as netifaces is written in C and installing this way will compile the extension. + Once that's done, you'll need to start Python and do something like the following:: @@ -184,7 +188,7 @@ wish to contribute a patch, please use BitBucket to send a pull request. It's an MIT-style license. 
Here goes: -Copyright (c) 2007-2014 Alastair Houghton +Copyright (c) 2007-2017 Alastair Houghton Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal diff --git a/lib/python3.4/site-packages/netifaces-0.10.6.dist-info/RECORD b/lib/python3.4/site-packages/netifaces-0.10.6.dist-info/RECORD new file mode 100644 index 0000000..c9438a2 --- /dev/null +++ b/lib/python3.4/site-packages/netifaces-0.10.6.dist-info/RECORD @@ -0,0 +1,9 @@ +netifaces.cpython-34m.so,sha256=KiLZHMhvo_x40-9D0bLqZoVzQsGbimZY_33SUPowm9E,72976 +netifaces-0.10.6.dist-info/DESCRIPTION.rst,sha256=WCNR0xdB7g_1r_U6WwIedMlurGlPeDjvJX-NBElPoII,8555 +netifaces-0.10.6.dist-info/METADATA,sha256=InwXovYI_sgETAChE4hBUFbkSwYlZ_gWeKcNvyX8KOA,9322 +netifaces-0.10.6.dist-info/RECORD,, +netifaces-0.10.6.dist-info/WHEEL,sha256=AEztX7vHDtcgysb-4-5-DyIKMLIPg6NMxY9dXTRdoXQ,104 +netifaces-0.10.6.dist-info/metadata.json,sha256=W-IHSrO0Ma846gdBr18QTsvc9GjGN0SgAnZha0vW9tU,885 +netifaces-0.10.6.dist-info/top_level.txt,sha256=PqMTaIuWtSjkdQHX6lH1Lmpv2aqBUYAGqATB8z3A6TQ,10 +netifaces-0.10.6.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1 +netifaces-0.10.6.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 diff --git a/lib/python3.4/site-packages/netifaces-0.10.4.dist-info/WHEEL b/lib/python3.4/site-packages/netifaces-0.10.6.dist-info/WHEEL similarity index 69% rename from lib/python3.4/site-packages/netifaces-0.10.4.dist-info/WHEEL rename to lib/python3.4/site-packages/netifaces-0.10.6.dist-info/WHEEL index db40973..1fdf70f 100644 --- a/lib/python3.4/site-packages/netifaces-0.10.4.dist-info/WHEEL +++ b/lib/python3.4/site-packages/netifaces-0.10.6.dist-info/WHEEL @@ -1,5 +1,5 @@ Wheel-Version: 1.0 -Generator: bdist_wheel (0.29.0) +Generator: bdist_wheel (0.30.0) Root-Is-Purelib: false Tag: cp34-cp34m-linux_x86_64 diff --git a/lib/python3.4/site-packages/netifaces-0.10.6.dist-info/metadata.json b/lib/python3.4/site-packages/netifaces-0.10.6.dist-info/metadata.json new file mode 100644 index 0000000..7de3738 --- /dev/null +++ b/lib/python3.4/site-packages/netifaces-0.10.6.dist-info/metadata.json @@ -0,0 +1 @@ +{"classifiers": ["Development Status :: 4 - Beta", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Topic :: System :: Networking", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.5", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3"], "description_content_type": "UNKNOWN", "extensions": {"python.details": {"contacts": [{"email": "alastair@alastairs-place.net", "name": "Alastair Houghton", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "https://bitbucket.org/al45tair/netifaces"}}}, "generator": "bdist_wheel (0.30.0)", "license": "MIT License", "metadata_version": "2.0", "name": "netifaces", "summary": "Portable network interface information.", "version": "0.10.6"} \ No newline at end of file diff --git a/lib/python3.4/site-packages/netifaces-0.10.4.dist-info/top_level.txt b/lib/python3.4/site-packages/netifaces-0.10.6.dist-info/top_level.txt similarity index 100% rename from lib/python3.4/site-packages/netifaces-0.10.4.dist-info/top_level.txt rename to lib/python3.4/site-packages/netifaces-0.10.6.dist-info/top_level.txt diff --git a/lib/python3.4/site-packages/_markerlib-0.0.0.dist-info/zip-safe 
b/lib/python3.4/site-packages/netifaces-0.10.6.dist-info/zip-safe similarity index 100% rename from lib/python3.4/site-packages/_markerlib-0.0.0.dist-info/zip-safe rename to lib/python3.4/site-packages/netifaces-0.10.6.dist-info/zip-safe diff --git a/lib/python3.4/site-packages/netifaces.cpython-34m.so b/lib/python3.4/site-packages/netifaces.cpython-34m.so index 91c66c6..376c5cd 100755 Binary files a/lib/python3.4/site-packages/netifaces.cpython-34m.so and b/lib/python3.4/site-packages/netifaces.cpython-34m.so differ diff --git a/lib/python3.4/site-packages/netifaces.cpython-35m-x86_64-linux-gnu.so b/lib/python3.4/site-packages/netifaces.cpython-35m-x86_64-linux-gnu.so old mode 100644 new mode 100755 index 7e5851a..6393135 Binary files a/lib/python3.4/site-packages/netifaces.cpython-35m-x86_64-linux-gnu.so and b/lib/python3.4/site-packages/netifaces.cpython-35m-x86_64-linux-gnu.so differ diff --git a/lib/python3.4/site-packages/netifaces.cpython-36m-x86_64-linux-gnu.so b/lib/python3.4/site-packages/netifaces.cpython-36m-x86_64-linux-gnu.so new file mode 100755 index 0000000..f87df74 Binary files /dev/null and b/lib/python3.4/site-packages/netifaces.cpython-36m-x86_64-linux-gnu.so differ diff --git a/lib/python3.4/site-packages/pip-8.1.1.dist-info/DESCRIPTION.rst b/lib/python3.4/site-packages/pip-9.0.1.dist-info/DESCRIPTION.rst similarity index 85% rename from lib/python3.4/site-packages/pip-8.1.1.dist-info/DESCRIPTION.rst rename to lib/python3.4/site-packages/pip-9.0.1.dist-info/DESCRIPTION.rst index 39586d2..8ef94c4 100644 --- a/lib/python3.4/site-packages/pip-8.1.1.dist-info/DESCRIPTION.rst +++ b/lib/python3.4/site-packages/pip-9.0.1.dist-info/DESCRIPTION.rst @@ -19,9 +19,12 @@ tool for installing Python packages. .. image:: https://img.shields.io/pypi/v/pip.svg :target: https://pypi.python.org/pypi/pip -.. image:: https://img.shields.io/travis/pypa/pip/develop.svg +.. image:: https://img.shields.io/travis/pypa/pip/master.svg :target: http://travis-ci.org/pypa/pip +.. image:: https://img.shields.io/appveyor/ci/pypa/pip.svg + :target: https://ci.appveyor.com/project/pypa/pip/history + .. image:: https://readthedocs.org/projects/pip/badge/?version=stable :target: https://pip.pypa.io/en/stable diff --git a/lib/python3.4/site-packages/netifaces-0.10.4.dist-info/INSTALLER b/lib/python3.4/site-packages/pip-9.0.1.dist-info/INSTALLER similarity index 100% rename from lib/python3.4/site-packages/netifaces-0.10.4.dist-info/INSTALLER rename to lib/python3.4/site-packages/pip-9.0.1.dist-info/INSTALLER diff --git a/lib/python3.4/site-packages/pip-8.1.1.dist-info/METADATA b/lib/python3.4/site-packages/pip-9.0.1.dist-info/METADATA similarity index 90% rename from lib/python3.4/site-packages/pip-8.1.1.dist-info/METADATA rename to lib/python3.4/site-packages/pip-9.0.1.dist-info/METADATA index 79657d0..600a905 100644 --- a/lib/python3.4/site-packages/pip-8.1.1.dist-info/METADATA +++ b/lib/python3.4/site-packages/pip-9.0.1.dist-info/METADATA @@ -1,6 +1,6 @@ Metadata-Version: 2.0 Name: pip -Version: 8.1.1 +Version: 9.0.1 Summary: The PyPA recommended tool for installing Python packages. 
Home-page: https://pip.pypa.io/ Author: The pip developers @@ -20,6 +20,7 @@ Classifier: Programming Language :: Python :: 3.3 Classifier: Programming Language :: Python :: 3.4 Classifier: Programming Language :: Python :: 3.5 Classifier: Programming Language :: Python :: Implementation :: PyPy +Requires-Python: >=2.6,!=3.0.*,!=3.1.*,!=3.2.* Provides-Extra: testing Requires-Dist: mock; extra == 'testing' Requires-Dist: pretend; extra == 'testing' @@ -48,9 +49,12 @@ tool for installing Python packages. .. image:: https://img.shields.io/pypi/v/pip.svg :target: https://pypi.python.org/pypi/pip -.. image:: https://img.shields.io/travis/pypa/pip/develop.svg +.. image:: https://img.shields.io/travis/pypa/pip/master.svg :target: http://travis-ci.org/pypa/pip +.. image:: https://img.shields.io/appveyor/ci/pypa/pip.svg + :target: https://ci.appveyor.com/project/pypa/pip/history + .. image:: https://readthedocs.org/projects/pip/badge/?version=stable :target: https://pip.pypa.io/en/stable diff --git a/lib/python3.4/site-packages/pip-8.1.1.dist-info/RECORD b/lib/python3.4/site-packages/pip-9.0.1.dist-info/RECORD similarity index 51% rename from lib/python3.4/site-packages/pip-8.1.1.dist-info/RECORD rename to lib/python3.4/site-packages/pip-9.0.1.dist-info/RECORD index 7e70ad9..86fe98d 100644 --- a/lib/python3.4/site-packages/pip-8.1.1.dist-info/RECORD +++ b/lib/python3.4/site-packages/pip-9.0.1.dist-info/RECORD @@ -1,117 +1,123 @@ -pip/__init__.py,sha256=fFs-ytm2H4V2evGESaozmF7U0BaGIMM0drFJZ5Ifj4s,10427 +pip/__init__.py,sha256=00QWSreEBjb8Y8sPs8HeqgLXSB-3UrONJxo4J5APxEc,11348 pip/__main__.py,sha256=V6Kh-IEDEFpt1cahRE6MajUF_14qJR_Qsvn4MjWZXzE,584 -pip/basecommand.py,sha256=Zlg6SE42TIjRyt1mct0LCkgNxcKKnss3xvASJyDqucE,11429 -pip/baseparser.py,sha256=Nlc7Un9gat27xtB24SnKL_3pZZOoh62gNNRdS6tDRZY,10465 -pip/cmdoptions.py,sha256=pf24iszA39rhcJ5DjFA4oD_z5vTI0NG98qUahHs3qPM,15878 -pip/download.py,sha256=oJ3sZ8I6ct9X3eoXQ9xm_Ne0e6N85G_rWaERmMCVF2k,31722 -pip/exceptions.py,sha256=GdDhHOROBj-kW2rgerLJYXsxN8ENy1BX5RUb_Vs9TXM,7980 -pip/index.py,sha256=kpyj_O5c0VVlvhg5VuVm4oAGGh6RvD7Xr0syPN-eGa0,37191 -pip/locations.py,sha256=MqUzS8YI2wDa7oFzTQw4zM4s0Hci05yubxfU_kTXXlU,5632 -pip/pep425tags.py,sha256=4PNr9hd8OsXnKYR2q2oLzfDDhF5bFBwUZA-ZQxAClSI,11318 +pip/basecommand.py,sha256=TTlmZesQ4Vuxcto2KqwZGmgmN5ioHEl_DeFev9ie_SA,11910 +pip/baseparser.py,sha256=AKMOeF3fTrRroiv0DmTQbdiLW0DQux2KqGC_dJJB9d0,10465 +pip/cmdoptions.py,sha256=pRptFz05iFEfSW4Flg3x1_P92sYlFvq7elhnwujikNY,16473 +pip/download.py,sha256=rA0wbmqC2n9ejX481YJSidmKgQqQDjdaxkHkHlAN68k,32171 +pip/exceptions.py,sha256=BvqH-Jw3tP2b-2IJ2kjrQemOAPMqKrQMLRIZHZQpJXk,8121 +pip/index.py,sha256=L6UhtAEZc2qw7BqfQrkPQcw2gCgEw3GukLRSA95BNyI,39950 +pip/locations.py,sha256=9rJRlgonC6QC2zGDIn_7mXaoZ9_tF_IHM2BQhWVRgbo,5626 +pip/pep425tags.py,sha256=q3kec4f6NHszuGYIhGIbVvs896D06uJAnKFgJ_wce44,10980 pip/status_codes.py,sha256=F6uDG6Gj7RNKQJUDnd87QKqI16Us-t-B0wPF_4QMpWc,156 -pip/wheel.py,sha256=qg1DgjXtiQCnY-IJY5HC5VgpeQm9WCjDKYmefSfOjq0,32088 -pip/_vendor/__init__.py,sha256=9EPZ-JLxtXMt71Fp5_pKTTe1QbJZZVlN81rsRYEvlpA,4781 -pip/commands/__init__.py,sha256=naZ1iIWRutNznOVpLj8qyn1GPE0B5rhCWCrSUOZSt4M,2145 -pip/commands/completion.py,sha256=2BEUY3jowgemiIGgUP3rpk6A9My4Eu8rTPosFxlESOE,1967 -pip/commands/download.py,sha256=dMRtH0JMBhNGlJWr1qC29vOeiBzG2K0OjOAfzdxSVgA,4804 -pip/commands/freeze.py,sha256=KmQoLf-HruqBDzc-F2-ganGVn2lboNQqppfyrMsx3SU,2774 +pip/wheel.py,sha256=QSWmGs2ui-n4UMWm0JUY6aMCcwNKungVzbWsxI9KlJQ,32010 
+pip/_vendor/__init__.py,sha256=L-0x9jj0HSZen1Fm2U0GUbxfjfwQPIXc4XJ4IAxy8D8,4804 +pip/commands/__init__.py,sha256=2Uq3HCdjchJD9FL1LB7rd5v6UySVAVizX0W3EX3hIoE,2244 +pip/commands/check.py,sha256=-A7GI1-WZBh9a4P6UoH_aR-J7I8Lz8ly7m3wnCjmevs,1382 +pip/commands/completion.py,sha256=kkPgVX7SUcJ_8Juw5GkgWaxHN9_45wmAr9mGs1zXEEs,2453 +pip/commands/download.py,sha256=8RuuPmSYgAq3iEDTqZY_1PDXRqREdUULHNjWJeAv7Mo,7810 +pip/commands/freeze.py,sha256=h6-yFMpjCjbNj8-gOm5UuoF6cg14N5rPV4TCi3_CeuI,2835 pip/commands/hash.py,sha256=MCt4jEFyfoce0lVeNEz1x49uaTY-VDkKiBvvxrVcHkw,1597 pip/commands/help.py,sha256=84HWkEdnGP_AEBHnn8gJP2Te0XTXRKFoXqXopbOZTNo,982 -pip/commands/install.py,sha256=DvRVVwfUy6LV-AtNcxl9kLl7XOc7G7087ZhdD4QbP60,15628 -pip/commands/list.py,sha256=u76U5TLODQ2g53sSUA4q6WhYus7usbuWuITQJsCnP3E,7412 -pip/commands/search.py,sha256=9ClAcFzkJ_7AksTkNrUed5qzsplpBtMlJByJLqiZFqw,4777 -pip/commands/show.py,sha256=dytBbI9XV-ChpV51tsuBygZJJO-QaO2Gtz5kbLkBCZE,5815 +pip/commands/install.py,sha256=o-CR1TKf-b1qaFv47nNlawqsIfDjXyIzv_iJUw1Trag,18069 +pip/commands/list.py,sha256=93bCiFyt2Qut_YHkYHJMZHpXladmxsjS-yOtZeb3uqI,11369 +pip/commands/search.py,sha256=oTs9QNdefnrmCV_JeftG0PGiMuYVmiEDF1OUaYsmDao,4502 +pip/commands/show.py,sha256=ZYM57_7U8KP9MQIIyHKQdZxmiEZByy-DRzB697VFoTY,5891 pip/commands/uninstall.py,sha256=tz8cXz4WdpUdnt3RvpdQwH6_SNMB50egBIZWa1dwfcc,2884 -pip/commands/wheel.py,sha256=iT92Uo8qpVILl_Yk8L7AtkFVYGmY0ep5oDeyQSpwkLs,7528 -pip/compat/__init__.py,sha256=7WN0B0XMYIldfminnT679VoEJLxNQPi9MFwCIt1_llU,4669 +pip/commands/wheel.py,sha256=z5SEhws2YRMb0Ml1IEkg6jFZMLRpLl86bHCrQbYt5zo,7729 +pip/compat/__init__.py,sha256=2Xs_IpsmdRgHbQgQO0c8_lPvHJnQXHyGWxPbLbYJL4c,4672 pip/compat/dictconfig.py,sha256=dRrelPDWrceDSzFT51RTEVY2GuM7UDyc5Igh_tn4Fvk,23096 -pip/compat/ordereddict.py,sha256=6RQCd4PyTE4tvLUoAnsygvrreOSTV4BRDbc_4gCSkTs,4110 pip/models/__init__.py,sha256=0Rs7_RA4DxeOkWT5Cq4CQzDrSEhvYcN3TH2cazr72PE,71 pip/models/index.py,sha256=pUfbO__v3mD9j-2n_ClwPS8pVyx4l2wIwyvWt8GMCRA,487 pip/operations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -pip/operations/freeze.py,sha256=H6xpxe1XgoNm5f3UXK47kNy0OQfM5jzo4UUwQu7G-Lo,4048 +pip/operations/check.py,sha256=uwUN9cs1sPo7c0Sj6pRrSv7b22Pk29SXUImTelVchMQ,1590 +pip/operations/freeze.py,sha256=k-7w7LsM-RpPv7ERBzHiPpYkH-GuYfHLyR-Cp_1VPL0,5194 pip/req/__init__.py,sha256=vFwZY8_Vc1WU1zFAespg1My_r_AT3n7cN0W9eX0EFqk,276 -pip/req/req_file.py,sha256=3eaVnPMUAjikLdC5i8hZUAf8aAOby2UxmAVFf94FOXY,11928 -pip/req/req_install.py,sha256=aG0_hj8WqLLUH5tO40OFIncIxU50Vm4rFqYcx5hmoYk,45589 -pip/req/req_set.py,sha256=Xwia1h7o2Z3Qogae3RHIDCGlXS3w2AeQPG8LBz7GmFM,32312 +pip/req/req_file.py,sha256=fG9MDsXUNPhmGwxUiwrIXEynyD8Q7s3L47-hLZPDXq0,11926 +pip/req/req_install.py,sha256=gYrH-lwQMmt55VVbav_EtRIPu94cQbHFHm_Kq6AeHbg,46487 +pip/req/req_set.py,sha256=jHspXqcA2FxcF05dgUIAZ5huYPv6bn0wRUX0Z7PKmaA,34462 pip/req/req_uninstall.py,sha256=fdH2VgCjEC8NRYDS7fRu3ZJaBBUEy-N5muwxDX5MBNM,6897 -pip/utils/__init__.py,sha256=SSixMJeh2SdjNgra_50jaC0jdmXFewLkFh_-a3tw9ks,28256 -pip/utils/appdirs.py,sha256=KTpZANfjYw5K2tZ0_jNNdP_kMxQAns79qZWelwaJo0c,7896 +pip/utils/__init__.py,sha256=zk1vF2EzHZX1ZKPwgeC9I6yKvs8IJ6NZEfXgp2IP8hI,27912 +pip/utils/appdirs.py,sha256=kj2LK-I2fC5QnEh_A_v-ev_IQMcXaWWF5DE39sNvCLQ,8811 pip/utils/build.py,sha256=4smLRrfSCmXmjEnVnMFh2tBEpNcSLRe6J0ejZJ-wWJE,1312 -pip/utils/deprecation.py,sha256=DR3cKqzovYu9Pif7c9bT2KmwekfW95N3BsI45_5u38I,2239 +pip/utils/deprecation.py,sha256=X_FMjtDbMJqfqEkdRrki-mYyIdPB6I6DHUTCA_ChY6M,2232 
pip/utils/encoding.py,sha256=NQxGiFS5GbeAveLZTnx92t5r0PYqvt0iRnP2u9SGG1w,971 pip/utils/filesystem.py,sha256=ZEVBuYM3fqr2_lgOESh4Y7fPFszGD474zVm_M3Mb5Tk,899 +pip/utils/glibc.py,sha256=jcQYjt_oJLPKVZB28Kauy4Sw70zS-wawxoU1HHX36_0,2939 pip/utils/hashes.py,sha256=oMk7cd3PbJgzpSQyXq1MytMud5f6H5Oa2YY5hYuCq6I,2866 pip/utils/logging.py,sha256=7yWu4gZw-Qclj7X80QVdpGWkdTWGKT4LiUVKcE04pro,3327 pip/utils/outdated.py,sha256=fNwOCL5r2EftPGhgCYGMKu032HC8cV-JAr9lp0HmToM,5455 -pip/utils/setuptools_build.py,sha256=8IGop-SZ6lxUl5HMOjLRaDlORPugIH_b_b2Y67x4jQc,240 +pip/utils/packaging.py,sha256=qhmli14odw6DIhWJgQYS2Q0RrSbr8nXNcG48f5yTRms,2080 +pip/utils/setuptools_build.py,sha256=0blfscmNJW_iZ5DcswJeDB_PbtTEjfK9RL1R1WEDW2E,278 pip/utils/ui.py,sha256=pbDkSAeumZ6jdZcOJ2yAbx8iBgeP2zfpqNnLJK1gskQ,11597 -pip/vcs/__init__.py,sha256=lnea41zMq9HqB1Qo7hxy2IjUzk5WtBvnoloCCMR6Vk4,12349 +pip/vcs/__init__.py,sha256=WafFliUTHMmsSISV8PHp1M5EXDNSWyJr78zKaQmPLdY,12374 pip/vcs/bazaar.py,sha256=tYTwc4b4off8mr0O2o8SiGejqBDJxcbDBMSMd9-ISYc,3803 -pip/vcs/git.py,sha256=u16VCiNW_a9AaYqLri2b8-f4lOZlOYwsGpHHV3uv_dQ,10218 +pip/vcs/git.py,sha256=5LfWryi78A-2ULjEZJvCTarJ_3l8venwXASlwm8hiug,11197 pip/vcs/mercurial.py,sha256=xG6rDiwHCRytJEs23SIHBXl_SwQo2jkkdD_6rVVP5h4,3472 -pip/vcs/subversion.py,sha256=mGT7sAzuVc1u-9MPoXJNyShnRzhdJpDdGNuhhzUPv6w,8687 -pip-8.1.1.dist-info/DESCRIPTION.rst,sha256=jSvW1qOjwzndvm_p_DexGCVJfwgg3rWPMJWzf6Rmsfc,1167 -pip-8.1.1.dist-info/METADATA,sha256=p_9D2tGGDX-wd8S14XVVx0K-qOjDrrwu-CmYn9Dndlc,2362 -pip-8.1.1.dist-info/RECORD,, -pip-8.1.1.dist-info/WHEEL,sha256=o2k-Qa-RMNIJmUdIc7KU6VWR_ErNRbWNlxDIpl7lm34,110 -pip-8.1.1.dist-info/entry_points.txt,sha256=GWc-Wb9WUKZ1EuVWNz-G0l3BeIpbNJLx0OJbZ61AAV0,68 -pip-8.1.1.dist-info/metadata.json,sha256=wAnzudgBGV69N0kQOAgeAXIjQSbkBZhZEs98ULrfRUE,1513 -pip-8.1.1.dist-info/top_level.txt,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -../../../bin/pip,sha256=MEQVvFZcu35ZQaa5ungLLm_kHRGvmD4u-AbMJaTm8vU,271 -../../../bin/pip3,sha256=MEQVvFZcu35ZQaa5ungLLm_kHRGvmD4u-AbMJaTm8vU,271 -../../../bin/pip3.4,sha256=MEQVvFZcu35ZQaa5ungLLm_kHRGvmD4u-AbMJaTm8vU,271 -pip-8.1.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pip/utils/__pycache__/deprecation.cpython-34.pyc,, -pip/commands/__pycache__/completion.cpython-34.pyc,, -pip/vcs/__pycache__/mercurial.cpython-34.pyc,, -pip/req/__pycache__/req_file.cpython-34.pyc,, -pip/__pycache__/pep425tags.cpython-34.pyc,, -pip/vcs/__pycache__/bazaar.cpython-34.pyc,, -pip/utils/__pycache__/setuptools_build.cpython-34.pyc,, -pip/commands/__pycache__/list.cpython-34.pyc,, -pip/__pycache__/__init__.cpython-34.pyc,, -pip/models/__pycache__/__init__.cpython-34.pyc,, -pip/commands/__pycache__/wheel.cpython-34.pyc,, -pip/utils/__pycache__/ui.cpython-34.pyc,, -pip/commands/__pycache__/help.cpython-34.pyc,, -pip/compat/__pycache__/ordereddict.cpython-34.pyc,, -pip/compat/__pycache__/__init__.cpython-34.pyc,, -pip/utils/__pycache__/logging.cpython-34.pyc,, -pip/commands/__pycache__/show.cpython-34.pyc,, -pip/req/__pycache__/__init__.cpython-34.pyc,, -pip/utils/__pycache__/filesystem.cpython-34.pyc,, -pip/vcs/__pycache__/git.cpython-34.pyc,, -pip/req/__pycache__/req_set.cpython-34.pyc,, -pip/utils/__pycache__/__init__.cpython-34.pyc,, -pip/operations/__pycache__/freeze.cpython-34.pyc,, -pip/compat/__pycache__/dictconfig.cpython-34.pyc,, -pip/commands/__pycache__/hash.cpython-34.pyc,, -pip/req/__pycache__/req_uninstall.cpython-34.pyc,, -pip/operations/__pycache__/__init__.cpython-34.pyc,, 
-pip/commands/__pycache__/__init__.cpython-34.pyc,, -pip/req/__pycache__/req_install.cpython-34.pyc,, -pip/utils/__pycache__/build.cpython-34.pyc,, -pip/utils/__pycache__/encoding.cpython-34.pyc,, -pip/__pycache__/basecommand.cpython-34.pyc,, -pip/__pycache__/download.cpython-34.pyc,, -pip/__pycache__/cmdoptions.cpython-34.pyc,, -pip/utils/__pycache__/outdated.cpython-34.pyc,, -pip/commands/__pycache__/download.cpython-34.pyc,, -pip/utils/__pycache__/appdirs.cpython-34.pyc,, +pip/vcs/subversion.py,sha256=GAuX2Sk7IZvJyEzENKcVld_wGBrQ3fpXDlXjapZEYdI,9350 +pip-9.0.1.dist-info/DESCRIPTION.rst,sha256=Va8Wj1XBpTbVQ2Z41mZRJdALEeziiS_ZewWn1H2ecY4,1287 +pip-9.0.1.dist-info/METADATA,sha256=mvs_tLoKAbECXY_6QHiVWQsagSL-1UjolQTpScT8JSk,2529 +pip-9.0.1.dist-info/RECORD,, +pip-9.0.1.dist-info/WHEEL,sha256=o2k-Qa-RMNIJmUdIc7KU6VWR_ErNRbWNlxDIpl7lm34,110 +pip-9.0.1.dist-info/entry_points.txt,sha256=GWc-Wb9WUKZ1EuVWNz-G0l3BeIpbNJLx0OJbZ61AAV0,68 +pip-9.0.1.dist-info/metadata.json,sha256=aqvkETDy4mHUBob-2Fn5WWlXORi_M2OSfQ2HQCUU_Fk,1565 +pip-9.0.1.dist-info/top_level.txt,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +../../../bin/pip,sha256=kUtfTrIe4CRluRco6nKs-hUx0Eir2ABPF8Rr_1zK534,272 +../../../bin/pip3,sha256=kUtfTrIe4CRluRco6nKs-hUx0Eir2ABPF8Rr_1zK534,272 +../../../bin/pip3.4,sha256=kUtfTrIe4CRluRco6nKs-hUx0Eir2ABPF8Rr_1zK534,272 +pip-9.0.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 pip/__pycache__/exceptions.cpython-34.pyc,, -pip/__pycache__/__main__.cpython-34.pyc,, +pip/utils/__pycache__/ui.cpython-34.pyc,, +pip/__pycache__/basecommand.cpython-34.pyc,, +pip/commands/__pycache__/check.cpython-34.pyc,, +pip/utils/__pycache__/packaging.cpython-34.pyc,, +pip/utils/__pycache__/build.cpython-34.pyc,, pip/vcs/__pycache__/__init__.cpython-34.pyc,, -pip/vcs/__pycache__/subversion.cpython-34.pyc,, -pip/utils/__pycache__/hashes.cpython-34.pyc,, -pip/commands/__pycache__/uninstall.cpython-34.pyc,, -pip/__pycache__/baseparser.cpython-34.pyc,, -pip/commands/__pycache__/freeze.cpython-34.pyc,, +pip/__pycache__/download.cpython-34.pyc,, +pip/utils/__pycache__/setuptools_build.cpython-34.pyc,, +pip/req/__pycache__/req_uninstall.cpython-34.pyc,, +pip/utils/__pycache__/deprecation.cpython-34.pyc,, +pip/operations/__pycache__/check.cpython-34.pyc,, pip/_vendor/__pycache__/__init__.cpython-34.pyc,, -pip/commands/__pycache__/search.cpython-34.pyc,, -pip/__pycache__/locations.cpython-34.pyc,, +pip/utils/__pycache__/outdated.cpython-34.pyc,, pip/commands/__pycache__/install.cpython-34.pyc,, -pip/models/__pycache__/index.cpython-34.pyc,, -pip/__pycache__/index.cpython-34.pyc,, -pip/__pycache__/wheel.cpython-34.pyc,, +pip/operations/__pycache__/__init__.cpython-34.pyc,, +pip/commands/__pycache__/freeze.cpython-34.pyc,, +pip/req/__pycache__/req_set.cpython-34.pyc,, +pip/operations/__pycache__/freeze.cpython-34.pyc,, +pip/__pycache__/baseparser.cpython-34.pyc,, +pip/commands/__pycache__/hash.cpython-34.pyc,, +pip/commands/__pycache__/download.cpython-34.pyc,, +pip/commands/__pycache__/wheel.cpython-34.pyc,, +pip/commands/__pycache__/help.cpython-34.pyc,, +pip/utils/__pycache__/glibc.cpython-34.pyc,, +pip/__pycache__/locations.cpython-34.pyc,, +pip/commands/__pycache__/list.cpython-34.pyc,, +pip/compat/__pycache__/dictconfig.cpython-34.pyc,, +pip/__pycache__/__init__.cpython-34.pyc,, +pip/utils/__pycache__/hashes.cpython-34.pyc,, +pip/compat/__pycache__/__init__.cpython-34.pyc,, +pip/vcs/__pycache__/git.cpython-34.pyc,, +pip/req/__pycache__/__init__.cpython-34.pyc,, 
+pip/__pycache__/__main__.cpython-34.pyc,, pip/__pycache__/status_codes.cpython-34.pyc,, +pip/models/__pycache__/index.cpython-34.pyc,, +pip/__pycache__/pep425tags.cpython-34.pyc,, +pip/commands/__pycache__/uninstall.cpython-34.pyc,, +pip/vcs/__pycache__/bazaar.cpython-34.pyc,, +pip/req/__pycache__/req_install.cpython-34.pyc,, +pip/vcs/__pycache__/mercurial.cpython-34.pyc,, +pip/commands/__pycache__/__init__.cpython-34.pyc,, +pip/commands/__pycache__/show.cpython-34.pyc,, +pip/__pycache__/index.cpython-34.pyc,, +pip/commands/__pycache__/completion.cpython-34.pyc,, +pip/req/__pycache__/req_file.cpython-34.pyc,, +pip/__pycache__/cmdoptions.cpython-34.pyc,, +pip/utils/__pycache__/filesystem.cpython-34.pyc,, +pip/__pycache__/wheel.cpython-34.pyc,, +pip/utils/__pycache__/appdirs.cpython-34.pyc,, +pip/utils/__pycache__/__init__.cpython-34.pyc,, +pip/vcs/__pycache__/subversion.cpython-34.pyc,, +pip/utils/__pycache__/logging.cpython-34.pyc,, +pip/commands/__pycache__/search.cpython-34.pyc,, +pip/utils/__pycache__/encoding.cpython-34.pyc,, +pip/models/__pycache__/__init__.cpython-34.pyc,, diff --git a/lib/python3.4/site-packages/_markerlib-0.0.0.dist-info/WHEEL b/lib/python3.4/site-packages/pip-9.0.1.dist-info/WHEEL similarity index 100% rename from lib/python3.4/site-packages/_markerlib-0.0.0.dist-info/WHEEL rename to lib/python3.4/site-packages/pip-9.0.1.dist-info/WHEEL diff --git a/lib/python3.4/site-packages/pip-8.1.1.dist-info/entry_points.txt b/lib/python3.4/site-packages/pip-9.0.1.dist-info/entry_points.txt similarity index 100% rename from lib/python3.4/site-packages/pip-8.1.1.dist-info/entry_points.txt rename to lib/python3.4/site-packages/pip-9.0.1.dist-info/entry_points.txt diff --git a/lib/python3.4/site-packages/pip-8.1.1.dist-info/metadata.json b/lib/python3.4/site-packages/pip-9.0.1.dist-info/metadata.json similarity index 73% rename from lib/python3.4/site-packages/pip-8.1.1.dist-info/metadata.json rename to lib/python3.4/site-packages/pip-9.0.1.dist-info/metadata.json index 91434c5..9eae02c 100644 --- a/lib/python3.4/site-packages/pip-8.1.1.dist-info/metadata.json +++ b/lib/python3.4/site-packages/pip-9.0.1.dist-info/metadata.json @@ -1 +1 @@ -{"classifiers": ["Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Topic :: Software Development :: Build Tools", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: Implementation :: PyPy"], "extensions": {"python.commands": {"wrap_console": {"pip": "pip:main", "pip3": "pip:main", "pip3.5": "pip:main"}}, "python.details": {"contacts": [{"email": "python-virtualenv@groups.google.com", "name": "The pip developers", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "https://pip.pypa.io/"}}, "python.exports": {"console_scripts": {"pip": "pip:main", "pip3": "pip:main", "pip3.5": "pip:main"}}}, "extras": ["testing"], "generator": "bdist_wheel (0.29.0)", "keywords": ["easy_install", "distutils", "setuptools", "egg", "virtualenv"], "license": "MIT", "metadata_version": "2.0", "name": "pip", "run_requires": [{"extra": "testing", "requires": ["mock", "pretend", "pytest", "scripttest (>=1.3)", "virtualenv (>=1.10)"]}], "summary": "The PyPA recommended tool for 
installing Python packages.", "test_requires": [{"requires": ["mock", "pretend", "pytest", "scripttest (>=1.3)", "virtualenv (>=1.10)"]}], "version": "8.1.1"} \ No newline at end of file +{"classifiers": ["Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Topic :: Software Development :: Build Tools", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: Implementation :: PyPy"], "extensions": {"python.commands": {"wrap_console": {"pip": "pip:main", "pip3": "pip:main", "pip3.5": "pip:main"}}, "python.details": {"contacts": [{"email": "python-virtualenv@groups.google.com", "name": "The pip developers", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "https://pip.pypa.io/"}}, "python.exports": {"console_scripts": {"pip": "pip:main", "pip3": "pip:main", "pip3.5": "pip:main"}}}, "extras": ["testing"], "generator": "bdist_wheel (0.29.0)", "keywords": ["easy_install", "distutils", "setuptools", "egg", "virtualenv"], "license": "MIT", "metadata_version": "2.0", "name": "pip", "requires_python": ">=2.6,!=3.0.*,!=3.1.*,!=3.2.*", "run_requires": [{"extra": "testing", "requires": ["mock", "pretend", "pytest", "scripttest (>=1.3)", "virtualenv (>=1.10)"]}], "summary": "The PyPA recommended tool for installing Python packages.", "test_requires": [{"requires": ["mock", "pretend", "pytest", "scripttest (>=1.3)", "virtualenv (>=1.10)"]}], "version": "9.0.1"} \ No newline at end of file diff --git a/lib/python3.4/site-packages/pip-8.1.1.dist-info/top_level.txt b/lib/python3.4/site-packages/pip-9.0.1.dist-info/top_level.txt similarity index 100% rename from lib/python3.4/site-packages/pip-8.1.1.dist-info/top_level.txt rename to lib/python3.4/site-packages/pip-9.0.1.dist-info/top_level.txt diff --git a/lib/python3.4/site-packages/pip/__init__.py b/lib/python3.4/site-packages/pip/__init__.py index 51e7eaf..9c1d8f9 100644 --- a/lib/python3.4/site-packages/pip/__init__.py +++ b/lib/python3.4/site-packages/pip/__init__.py @@ -10,6 +10,18 @@ import warnings import sys import re +# 2016-06-17 barry@debian.org: urllib3 1.14 added optional support for socks, +# but if invoked (i.e. imported), it will issue a warning to stderr if socks +# isn't available. requests unconditionally imports urllib3's socks contrib +# module, triggering this warning. The warning breaks DEP-8 tests (because of +# the stderr output) and is just plain annoying in normal usage. I don't want +# to add socks as yet another dependency for pip, nor do I want to allow-stder +# in the DEP-8 tests, so just suppress the warning. pdb tells me this has to +# be done before the import of pip.vcs. 
+from pip._vendor.requests.packages.urllib3.exceptions import DependencyWarning +warnings.filterwarnings("ignore", category=DependencyWarning) # noqa + + from pip.exceptions import InstallationError, CommandError, PipError from pip.utils import get_installed_distributions, get_prog from pip.utils import deprecation, dist_is_editable @@ -31,12 +43,12 @@ import pip.cmdoptions cmdoptions = pip.cmdoptions # The version as used in the setup.py and the docs conf.py -__version__ = "8.1.1" +__version__ = "9.0.1" logger = logging.getLogger(__name__) -# Hide the InsecureRequestWArning from urllib3 +# Hide the InsecureRequestWarning from urllib3 warnings.filterwarnings("ignore", category=InsecureRequestWarning) @@ -44,7 +56,7 @@ def autocomplete(): """Command and option completion for the main option parser (and options) and its subcommands (and options). - Enable by sourcing one of the completion shell scripts (bash or zsh). + Enable by sourcing one of the completion shell scripts (bash, zsh or fish). """ # Don't complete if user hasn't sourced bash_completion file. if 'PIP_AUTO_COMPLETE' not in os.environ: @@ -212,7 +224,11 @@ def main(args=None): # Needed for locale.getpreferredencoding(False) to work # in pip.utils.encoding.auto_decode - locale.setlocale(locale.LC_ALL, '') + try: + locale.setlocale(locale.LC_ALL, '') + except locale.Error as e: + # setlocale can apparently crash if locale are uninitialized + logger.debug("Ignoring error %s when setting locale", e) command = commands_dict[cmd_name](isolated=check_isolated(cmd_args)) return command.main(cmd_args) diff --git a/lib/python3.4/site-packages/pip/_vendor/__init__.py b/lib/python3.4/site-packages/pip/_vendor/__init__.py index a822a5b..8e76ab8 100644 --- a/lib/python3.4/site-packages/pip/_vendor/__init__.py +++ b/lib/python3.4/site-packages/pip/_vendor/__init__.py @@ -64,6 +64,7 @@ if DEBUNDLED: vendored("cachecontrol") vendored("colorama") vendored("distlib") + vendored("distro") vendored("html5lib") vendored("lockfile") vendored("six") diff --git a/lib/python3.4/site-packages/pip/basecommand.py b/lib/python3.4/site-packages/pip/basecommand.py index a07043a..54c6706 100644 --- a/lib/python3.4/site-packages/pip/basecommand.py +++ b/lib/python3.4/site-packages/pip/basecommand.py @@ -117,6 +117,12 @@ class Command(object): else: level = "INFO" + # The root logger should match the "console" level *unless* we + # specified "--log" to send debug logs to a file. + root_level = level + if options.log: + root_level = "DEBUG" + logging_dictConfig({ "version": 1, "disable_existing_loggers": False, @@ -155,7 +161,7 @@ class Command(object): }, }, "root": { - "level": level, + "level": root_level, "handlers": list(filter(None, [ "console", "console_errors", @@ -305,13 +311,15 @@ class RequirementCommand(Command): 'to %(name)s (see "pip help %(name)s")' % opts) logger.warning(msg) - def _build_package_finder(self, options, session): + def _build_package_finder(self, options, session, + platform=None, python_versions=None, + abi=None, implementation=None): """ Create a package finder appropriate to this requirement command. 
""" index_urls = [options.index_url] + options.extra_index_urls if options.no_index: - logger.info('Ignoring indexes: %s', ','.join(index_urls)) + logger.debug('Ignoring indexes: %s', ','.join(index_urls)) index_urls = [] return PackageFinder( @@ -322,4 +330,8 @@ class RequirementCommand(Command): allow_all_prereleases=options.pre, process_dependency_links=options.process_dependency_links, session=session, + platform=platform, + versions=python_versions, + abi=abi, + implementation=implementation, ) diff --git a/lib/python3.4/site-packages/pip/baseparser.py b/lib/python3.4/site-packages/pip/baseparser.py index ccbf36b..2dd4533 100644 --- a/lib/python3.4/site-packages/pip/baseparser.py +++ b/lib/python3.4/site-packages/pip/baseparser.py @@ -113,6 +113,7 @@ class UpdatingDefaultsHelpFormatter(PrettyHelpFormatter): class CustomOptionParser(optparse.OptionParser): + def insert_option_group(self, idx, *args, **kwargs): """Insert an OptionGroup at a given position.""" group = self.add_option_group(*args, **kwargs) @@ -273,7 +274,7 @@ class ConfigOptionParser(CustomOptionParser): yield (_environ_prefix_re.sub("", key).lower(), val) def get_default_values(self): - """Overridding to make updating the defaults after instantiation of + """Overriding to make updating the defaults after instantiation of the option parser possible, _update_defaults() does the dirty work.""" if not self.process_default_values: # Old, pre-Optik 1.5 behaviour. diff --git a/lib/python3.4/site-packages/pip/cmdoptions.py b/lib/python3.4/site-packages/pip/cmdoptions.py index 1fade87..f75c093 100644 --- a/lib/python3.4/site-packages/pip/cmdoptions.py +++ b/lib/python3.4/site-packages/pip/cmdoptions.py @@ -114,7 +114,10 @@ quiet = partial( dest='quiet', action='count', default=0, - help='Give less output.') + help=('Give less output. Option is additive, and can be used up to 3' + ' times (corresponding to WARNING, ERROR, and CRITICAL logging' + ' levels).') +) log = partial( Option, @@ -184,12 +187,12 @@ def exists_action(): '--exists-action', dest='exists_action', type='choice', - choices=['s', 'i', 'w', 'b'], + choices=['s', 'i', 'w', 'b', 'a'], default=[], action='append', metavar='action', help="Default action when a path already exists: " - "(s)witch, (i)gnore, (w)ipe, (b)ackup.") + "(s)witch, (i)gnore, (w)ipe, (b)ackup, (a)bort.") cert = partial( @@ -216,7 +219,10 @@ index_url = partial( dest='index_url', metavar='URL', default=PyPI.simple_url, - help='Base URL of Python Package Index (default %default).') + help="Base URL of Python Package Index (default %default). " + "This should point to a repository compliant with PEP 503 " + "(the simple repository API) or a local directory laid out " + "in the same format.") def extra_index_url(): @@ -226,7 +232,9 @@ def extra_index_url(): metavar='URL', action='append', default=[], - help='Extra URLs of package indexes to use in addition to --index-url.' + help="Extra URLs of package indexes to use in addition to " + "--index-url. Should follow the same rules as " + "--index-url." ) @@ -469,6 +477,13 @@ build_dir = partial( help='Directory to unpack packages into and build in.' 
) +ignore_requires_python = partial( + Option, + '--ignore-requires-python', + dest='ignore_requires_python', + action='store_true', + help='Ignore the Requires-Python information.') + install_options = partial( Option, '--install-option', @@ -510,7 +525,7 @@ disable_pip_version_check = partial( "--disable-pip-version-check", dest="disable_pip_version_check", action="store_true", - default=False, + default=True, help="Don't periodically check PyPI to determine whether a new version " "of pip is available for download. Implied with --no-index.") diff --git a/lib/python3.4/site-packages/pip/commands/__init__.py b/lib/python3.4/site-packages/pip/commands/__init__.py index 92b7ff5..62c64eb 100644 --- a/lib/python3.4/site-packages/pip/commands/__init__.py +++ b/lib/python3.4/site-packages/pip/commands/__init__.py @@ -9,6 +9,7 @@ from pip.commands.freeze import FreezeCommand from pip.commands.hash import HashCommand from pip.commands.help import HelpCommand from pip.commands.list import ListCommand +from pip.commands.check import CheckCommand from pip.commands.search import SearchCommand from pip.commands.show import ShowCommand from pip.commands.install import InstallCommand @@ -27,6 +28,7 @@ commands_dict = { UninstallCommand.name: UninstallCommand, DownloadCommand.name: DownloadCommand, ListCommand.name: ListCommand, + CheckCommand.name: CheckCommand, WheelCommand.name: WheelCommand, } @@ -38,6 +40,7 @@ commands_order = [ FreezeCommand, ListCommand, ShowCommand, + CheckCommand, SearchCommand, WheelCommand, HashCommand, diff --git a/lib/python3.4/site-packages/pip/commands/check.py b/lib/python3.4/site-packages/pip/commands/check.py new file mode 100644 index 0000000..70458ad --- /dev/null +++ b/lib/python3.4/site-packages/pip/commands/check.py @@ -0,0 +1,39 @@ +import logging + +from pip.basecommand import Command +from pip.operations.check import check_requirements +from pip.utils import get_installed_distributions + + +logger = logging.getLogger(__name__) + + +class CheckCommand(Command): + """Verify installed packages have compatible dependencies.""" + name = 'check' + usage = """ + %prog [options]""" + summary = 'Verify installed packages have compatible dependencies.' 
+ + def run(self, options, args): + dists = get_installed_distributions(local_only=False, skip=()) + missing_reqs_dict, incompatible_reqs_dict = check_requirements(dists) + + for dist in dists: + key = '%s==%s' % (dist.project_name, dist.version) + + for requirement in missing_reqs_dict.get(key, []): + logger.info( + "%s %s requires %s, which is not installed.", + dist.project_name, dist.version, requirement.project_name) + + for requirement, actual in incompatible_reqs_dict.get(key, []): + logger.info( + "%s %s has requirement %s, but you have %s %s.", + dist.project_name, dist.version, requirement, + actual.project_name, actual.version) + + if missing_reqs_dict or incompatible_reqs_dict: + return 1 + else: + logger.info("No broken requirements found.") diff --git a/lib/python3.4/site-packages/pip/commands/completion.py b/lib/python3.4/site-packages/pip/commands/completion.py index dc80af3..66e41a6 100644 --- a/lib/python3.4/site-packages/pip/commands/completion.py +++ b/lib/python3.4/site-packages/pip/commands/completion.py @@ -26,13 +26,21 @@ function _pip_completion { PIP_AUTO_COMPLETE=1 $words[1] ) ) } compctl -K _pip_completion pip +""", 'fish': """ +function __fish_complete_pip + set -lx COMP_WORDS (commandline -o) "" + set -lx COMP_CWORD (math (contains -i -- (commandline -t) $COMP_WORDS)-1) + set -lx PIP_AUTO_COMPLETE 1 + string split \ -- (eval $COMP_WORDS[1]) +end +complete -fa "(__fish_complete_pip)" -c pip """} class CompletionCommand(Command): """A helper command to be used for command completion.""" name = 'completion' - summary = 'A helper command used for command completion' + summary = 'A helper command used for command completion.' def __init__(self, *args, **kw): super(CompletionCommand, self).__init__(*args, **kw) @@ -51,6 +59,12 @@ class CompletionCommand(Command): const='zsh', dest='shell', help='Emit completion code for zsh') + cmd_opts.add_option( + '--fish', '-f', + action='store_const', + const='fish', + dest='shell', + help='Emit completion code for fish') self.parser.insert_option_group(0, cmd_opts) diff --git a/lib/python3.4/site-packages/pip/commands/download.py b/lib/python3.4/site-packages/pip/commands/download.py index 4155e05..4bc0640 100644 --- a/lib/python3.4/site-packages/pip/commands/download.py +++ b/lib/python3.4/site-packages/pip/commands/download.py @@ -3,6 +3,8 @@ from __future__ import absolute_import import logging import os +from pip.exceptions import CommandError +from pip.index import FormatControl from pip.req import RequirementSet from pip.basecommand import RequirementCommand from pip import cmdoptions @@ -63,6 +65,53 @@ class DownloadCommand(RequirementCommand): help=("Download packages into <dir>."), ) + cmd_opts.add_option( + '--platform', + dest='platform', + metavar='platform', + default=None, + help=("Only download wheels compatible with <platform>. " + "Defaults to the platform of the running system."), + ) + + cmd_opts.add_option( + '--python-version', + dest='python_version', + metavar='python_version', + default=None, + help=("Only download wheels compatible with Python " + "interpreter version <version>. If not specified, then the " + "current system interpreter minor version is used. A major " + "version (e.g. '2') can be specified to match all " + "minor revs of that major version. A minor version " + "(e.g. '34') can also be specified."), + ) + + cmd_opts.add_option( + '--implementation', + dest='implementation', + metavar='implementation', + default=None, + help=("Only download wheels compatible with Python " + "implementation <implementation>, e.g.
'pp', 'jy', 'cp', " + " or 'ip'. If not specified, then the current " + "interpreter implementation is used. Use 'py' to force " + "implementation-agnostic wheels."), + ) + + cmd_opts.add_option( + '--abi', + dest='abi', + metavar='abi', + default=None, + help=("Only download wheels compatible with Python " + "abi , e.g. 'pypy_41'. If not specified, then the " + "current interpreter abi tag is used. Generally " + "you will need to specify --implementation, " + "--platform, and --python-version when using " + "this option."), + ) + index_opts = cmdoptions.make_option_group( cmdoptions.non_deprecated_index_group, self.parser, @@ -73,14 +122,41 @@ class DownloadCommand(RequirementCommand): def run(self, options, args): options.ignore_installed = True + + if options.python_version: + python_versions = [options.python_version] + else: + python_versions = None + + dist_restriction_set = any([ + options.python_version, + options.platform, + options.abi, + options.implementation, + ]) + binary_only = FormatControl(set(), set([':all:'])) + if dist_restriction_set and options.format_control != binary_only: + raise CommandError( + "--only-binary=:all: must be set and --no-binary must not " + "be set (or must be set to :none:) when restricting platform " + "and interpreter constraints using --python-version, " + "--platform, --abi, or --implementation." + ) + options.src_dir = os.path.abspath(options.src_dir) options.download_dir = normalize_path(options.download_dir) ensure_dir(options.download_dir) with self._build_session(options) as session: - - finder = self._build_package_finder(options, session) + finder = self._build_package_finder( + options=options, + session=session, + platform=options.platform, + python_versions=python_versions, + abi=options.abi, + implementation=options.implementation, + ) build_delete = (not (options.no_clean or options.build_dir)) if options.cache_dir and not check_path_owner(options.cache_dir): logger.warning( diff --git a/lib/python3.4/site-packages/pip/commands/freeze.py b/lib/python3.4/site-packages/pip/commands/freeze.py index 0485d5f..c198796 100644 --- a/lib/python3.4/site-packages/pip/commands/freeze.py +++ b/lib/python3.4/site-packages/pip/commands/freeze.py @@ -29,12 +29,13 @@ class FreezeCommand(Command): self.cmd_opts.add_option( '-r', '--requirement', - dest='requirement', - action='store', - default=None, + dest='requirements', + action='append', + default=[], metavar='file', help="Use the order in the given requirements file and its " - "comments when generating output.") + "comments when generating output. 
This option can be " + "used multiple times.") self.cmd_opts.add_option( '-f', '--find-links', dest='find_links', @@ -73,7 +74,7 @@ class FreezeCommand(Command): skip.update(DEV_PKGS) freeze_kwargs = dict( - requirement=options.requirement, + requirement=options.requirements, find_links=options.find_links, local_only=options.local, user_only=options.user, diff --git a/lib/python3.4/site-packages/pip/commands/install.py b/lib/python3.4/site-packages/pip/commands/install.py index 13b328f..39292b1 100644 --- a/lib/python3.4/site-packages/pip/commands/install.py +++ b/lib/python3.4/site-packages/pip/commands/install.py @@ -18,7 +18,7 @@ from pip.exceptions import ( InstallationError, CommandError, PreviousBuildDirError, ) from pip import cmdoptions -from pip.utils import ensure_dir +from pip.utils import ensure_dir, get_installed_version from pip.utils.build import BuildDirectory from pip.utils.deprecation import RemovedInPip10Warning from pip.utils.filesystem import check_path_owner @@ -95,8 +95,21 @@ class InstallCommand(RequirementCommand): dest='upgrade', action='store_true', help='Upgrade all specified packages to the newest available ' - 'version. This process is recursive regardless of whether ' - 'a dependency is already satisfied.' + 'version. The handling of dependencies depends on the ' + 'upgrade-strategy used.' + ) + + cmd_opts.add_option( + '--upgrade-strategy', + dest='upgrade_strategy', + default='eager', + choices=['only-if-needed', 'eager'], + help='Determines how dependency upgrading should be handled. ' + '"eager" - dependencies are upgraded regardless of ' + 'whether the currently installed version satisfies the ' + 'requirements of the upgraded package(s). ' + '"only-if-needed" - are upgraded only when they do not ' + 'satisfy the requirements of the upgraded package(s).' ) cmd_opts.add_option( @@ -113,6 +126,7 @@ class InstallCommand(RequirementCommand): default=default_user, help='Ignore the installed packages (reinstalling instead).') + cmd_opts.add_option(cmdoptions.ignore_requires_python()) cmd_opts.add_option(cmdoptions.no_deps()) cmd_opts.add_option(cmdoptions.install_options()) @@ -197,6 +211,15 @@ class InstallCommand(RequirementCommand): cmdoptions.resolve_wheel_no_use_binary(options) cmdoptions.check_install_build_global(options) + if options.as_egg: + warnings.warn( + "--egg has been deprecated and will be removed in the future. 
" + "This flag is mutually exclusive with large parts of pip, and " + "actually using it invalidates pip's ability to manage the " + "installation process.", + RemovedInPip10Warning, + ) + if options.allow_external: warnings.warn( "--allow-external has been deprecated and will be removed in " @@ -287,9 +310,11 @@ class InstallCommand(RequirementCommand): src_dir=options.src_dir, download_dir=options.download_dir, upgrade=options.upgrade, + upgrade_strategy=options.upgrade_strategy, as_egg=options.as_egg, ignore_installed=options.ignore_installed, ignore_dependencies=options.ignore_dependencies, + ignore_requires_python=options.ignore_requires_python, force_reinstall=options.force_reinstall, use_user_site=options.use_user_site, target_dir=temp_target_dir, @@ -334,6 +359,14 @@ class InstallCommand(RequirementCommand): root=options.root_path, prefix=options.prefix_path, ) + + possible_lib_locations = get_lib_location_guesses( + user=options.use_user_site, + home=temp_target_dir, + root=options.root_path, + prefix=options.prefix_path, + isolated=options.isolated_mode, + ) reqs = sorted( requirement_set.successfully_installed, key=operator.attrgetter('name')) @@ -341,9 +374,11 @@ class InstallCommand(RequirementCommand): for req in reqs: item = req.name try: - if hasattr(req, 'installed_version'): - if req.installed_version: - item += '-' + req.installed_version + installed_version = get_installed_version( + req.name, possible_lib_locations + ) + if installed_version: + item += '-' + installed_version except Exception: pass items.append(item) @@ -370,35 +405,51 @@ class InstallCommand(RequirementCommand): if options.target_dir: ensure_dir(options.target_dir) - lib_dir = distutils_scheme('', home=temp_target_dir)['purelib'] + # Checking both purelib and platlib directories for installed + # packages to be moved to target directory + lib_dir_list = [] - for item in os.listdir(lib_dir): - target_item_dir = os.path.join(options.target_dir, item) - if os.path.exists(target_item_dir): - if not options.upgrade: - logger.warning( - 'Target directory %s already exists. Specify ' - '--upgrade to force replacement.', - target_item_dir - ) - continue - if os.path.islink(target_item_dir): - logger.warning( - 'Target directory %s already exists and is ' - 'a link. Pip will not automatically replace ' - 'links, please remove if replacement is ' - 'desired.', - target_item_dir - ) - continue - if os.path.isdir(target_item_dir): - shutil.rmtree(target_item_dir) - else: - os.remove(target_item_dir) + purelib_dir = distutils_scheme('', home=temp_target_dir)['purelib'] + platlib_dir = distutils_scheme('', home=temp_target_dir)['platlib'] - shutil.move( - os.path.join(lib_dir, item), - target_item_dir - ) + if os.path.exists(purelib_dir): + lib_dir_list.append(purelib_dir) + if os.path.exists(platlib_dir) and platlib_dir != purelib_dir: + lib_dir_list.append(platlib_dir) + + for lib_dir in lib_dir_list: + for item in os.listdir(lib_dir): + target_item_dir = os.path.join(options.target_dir, item) + if os.path.exists(target_item_dir): + if not options.upgrade: + logger.warning( + 'Target directory %s already exists. Specify ' + '--upgrade to force replacement.', + target_item_dir + ) + continue + if os.path.islink(target_item_dir): + logger.warning( + 'Target directory %s already exists and is ' + 'a link. 
Pip will not automatically replace ' + 'links, please remove if replacement is ' + 'desired.', + target_item_dir + ) + continue + if os.path.isdir(target_item_dir): + shutil.rmtree(target_item_dir) + else: + os.remove(target_item_dir) + + shutil.move( + os.path.join(lib_dir, item), + target_item_dir + ) shutil.rmtree(temp_target_dir) return requirement_set + + +def get_lib_location_guesses(*args, **kwargs): + scheme = distutils_scheme('', *args, **kwargs) + return [scheme['purelib'], scheme['platlib']] diff --git a/lib/python3.4/site-packages/pip/commands/list.py b/lib/python3.4/site-packages/pip/commands/list.py index 5346488..6f6995d 100644 --- a/lib/python3.4/site-packages/pip/commands/list.py +++ b/lib/python3.4/site-packages/pip/commands/list.py @@ -1,7 +1,14 @@ from __future__ import absolute_import +import json import logging import warnings +try: + from itertools import zip_longest +except ImportError: + from itertools import izip_longest as zip_longest + +from pip._vendor import six from pip.basecommand import Command from pip.exceptions import CommandError @@ -11,7 +18,6 @@ from pip.utils import ( from pip.utils.deprecation import RemovedInPip10Warning from pip.cmdoptions import make_option_group, index_group - logger = logging.getLogger(__name__) @@ -68,6 +74,23 @@ class ListCommand(Command): "pip only finds stable versions."), ) + cmd_opts.add_option( + '--format', + action='store', + dest='list_format', + choices=('legacy', 'columns', 'freeze', 'json'), + help="Select the output format among: legacy (default), columns, " + "freeze or json.", + ) + + cmd_opts.add_option( + '--not-required', + action='store_true', + dest='not_required', + help="List packages that are not dependencies of " + "installed packages.", + ) + index_opts = make_option_group(index_group, self.parser) self.parser.insert_option_group(0, index_opts) @@ -110,38 +133,62 @@ class ListCommand(Command): "no longer has any effect.", RemovedInPip10Warning, ) + + if options.list_format is None: + warnings.warn( + "The default format will switch to columns in the future. 
" + "You can use --format=(legacy|columns) (or define a " + "format=(legacy|columns) in your pip.conf under the [list] " + "section) to disable this warning.", + RemovedInPip10Warning, + ) + if options.outdated and options.uptodate: raise CommandError( "Options --outdated and --uptodate cannot be combined.") + packages = get_installed_distributions( + local_only=options.local, + user_only=options.user, + editables_only=options.editable, + ) + if options.outdated: - self.run_outdated(options) + packages = self.get_outdated(packages, options) elif options.uptodate: - self.run_uptodate(options) - else: - self.run_listing(options) + packages = self.get_uptodate(packages, options) - def run_outdated(self, options): - for dist, latest_version, typ in sorted( - self.find_packages_latest_versions(options), - key=lambda p: p[0].project_name.lower()): - if latest_version > dist.parsed_version: - logger.info( - '%s - Latest: %s [%s]', - self.output_package(dist), latest_version, typ, - ) + if options.not_required: + packages = self.get_not_required(packages, options) - def find_packages_latest_versions(self, options): + self.output_package_listing(packages, options) + + def get_outdated(self, packages, options): + return [ + dist for dist in self.iter_packages_latest_infos(packages, options) + if dist.latest_version > dist.parsed_version + ] + + def get_uptodate(self, packages, options): + return [ + dist for dist in self.iter_packages_latest_infos(packages, options) + if dist.latest_version == dist.parsed_version + ] + + def get_not_required(self, packages, options): + dep_keys = set() + for dist in packages: + dep_keys.update(requirement.key for requirement in dist.requires()) + return set(pkg for pkg in packages if pkg.key not in dep_keys) + + def iter_packages_latest_infos(self, packages, options): index_urls = [options.index_url] + options.extra_index_urls if options.no_index: - logger.info('Ignoring indexes: %s', ','.join(index_urls)) + logger.debug('Ignoring indexes: %s', ','.join(index_urls)) index_urls = [] dependency_links = [] - for dist in get_installed_distributions( - local_only=options.local, - user_only=options.user, - editables_only=options.editable): + for dist in packages: if dist.has_metadata('dependency_links.txt'): dependency_links.extend( dist.get_metadata_lines('dependency_links.txt'), @@ -151,12 +198,7 @@ class ListCommand(Command): finder = self._build_package_finder(options, index_urls, session) finder.add_dependency_links(dependency_links) - installed_packages = get_installed_distributions( - local_only=options.local, - user_only=options.user, - editables_only=options.editable, - ) - for dist in installed_packages: + for dist in packages: typ = 'unknown' all_candidates = finder.find_all_candidates(dist.key) if not options.pre: @@ -173,17 +215,12 @@ class ListCommand(Command): typ = 'wheel' else: typ = 'sdist' - yield dist, remote_version, typ + # This is dirty but makes the rest of the code much cleaner + dist.latest_version = remote_version + dist.latest_filetype = typ + yield dist - def run_listing(self, options): - installed_packages = get_installed_distributions( - local_only=options.local, - user_only=options.user, - editables_only=options.editable, - ) - self.output_package_listing(installed_packages) - - def output_package(self, dist): + def output_legacy(self, dist): if dist_is_editable(dist): return '%s (%s, %s)' % ( dist.project_name, @@ -193,17 +230,108 @@ class ListCommand(Command): else: return '%s (%s)' % (dist.project_name, dist.version) - def 
output_package_listing(self, installed_packages): - installed_packages = sorted( - installed_packages, + def output_legacy_latest(self, dist): + return '%s - Latest: %s [%s]' % ( + self.output_legacy(dist), + dist.latest_version, + dist.latest_filetype, + ) + + def output_package_listing(self, packages, options): + packages = sorted( + packages, key=lambda dist: dist.project_name.lower(), ) - for dist in installed_packages: - logger.info(self.output_package(dist)) + if options.list_format == 'columns' and packages: + data, header = format_for_columns(packages, options) + self.output_package_listing_columns(data, header) + elif options.list_format == 'freeze': + for dist in packages: + logger.info("%s==%s", dist.project_name, dist.version) + elif options.list_format == 'json': + logger.info(format_for_json(packages, options)) + else: # legacy + for dist in packages: + if options.outdated: + logger.info(self.output_legacy_latest(dist)) + else: + logger.info(self.output_legacy(dist)) - def run_uptodate(self, options): - uptodate = [] - for dist, version, typ in self.find_packages_latest_versions(options): - if dist.parsed_version == version: - uptodate.append(dist) - self.output_package_listing(uptodate) + def output_package_listing_columns(self, data, header): + # insert the header first: we need to know the size of column names + if len(data) > 0: + data.insert(0, header) + + pkg_strings, sizes = tabulate(data) + + # Create and add a separator. + if len(data) > 0: + pkg_strings.insert(1, " ".join(map(lambda x: '-' * x, sizes))) + + for val in pkg_strings: + logger.info(val) + + +def tabulate(vals): + # From pfmoore on GitHub: + # https://github.com/pypa/pip/issues/3651#issuecomment-216932564 + assert len(vals) > 0 + + sizes = [0] * max(len(x) for x in vals) + for row in vals: + sizes = [max(s, len(str(c))) for s, c in zip_longest(sizes, row)] + + result = [] + for row in vals: + display = " ".join([str(c).ljust(s) if c is not None else '' + for s, c in zip_longest(sizes, row)]) + result.append(display) + + return result, sizes + + +def format_for_columns(pkgs, options): + """ + Convert the package data into something usable + by output_package_listing_columns. + """ + running_outdated = options.outdated + # Adjust the header for the `pip list --outdated` case. 
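The `tabulate()` helper above (taken from pfmoore's suggestion in the linked issue) sizes each column to the widest cell by folding `zip_longest` over the rows. A compact, self-contained demonstration with made-up rows:

```python
from itertools import zip_longest  # izip_longest on Python 2

rows = [
    ["Package", "Version", "Latest", "Type"],   # header row, as built here
    ["requests", "2.10.0", "2.11.1", "wheel"],  # invented data rows
    ["six", "1.10.0"],                          # short row: trailing cells empty
]

# Fold over the rows, keeping the widest cell seen so far in each column.
sizes = [0] * max(len(row) for row in rows)
for row in rows:
    sizes = [max(size, len(str(cell)))
             for size, cell in zip_longest(sizes, row, fillvalue="")]

for row in rows:
    print(" ".join(str(cell).ljust(size)
                   for size, cell in zip_longest(sizes, row, fillvalue="")))
```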
+ if running_outdated: + header = ["Package", "Version", "Latest", "Type"] + else: + header = ["Package", "Version"] + + data = [] + if any(dist_is_editable(x) for x in pkgs): + header.append("Location") + + for proj in pkgs: + # if we're working on the 'outdated' list, separate out the + # latest_version and type + row = [proj.project_name, proj.version] + + if running_outdated: + row.append(proj.latest_version) + row.append(proj.latest_filetype) + + if dist_is_editable(proj): + row.append(proj.location) + + data.append(row) + + return data, header + + +def format_for_json(packages, options): + data = [] + for dist in packages: + info = { + 'name': dist.project_name, + 'version': six.text_type(dist.version), + } + if options.outdated: + info['latest_version'] = six.text_type(dist.latest_version) + info['latest_filetype'] = dist.latest_filetype + data.append(info) + return json.dumps(data) diff --git a/lib/python3.4/site-packages/pip/commands/search.py b/lib/python3.4/site-packages/pip/commands/search.py index 3155e18..bd2ea8a 100644 --- a/lib/python3.4/site-packages/pip/commands/search.py +++ b/lib/python3.4/site-packages/pip/commands/search.py @@ -5,12 +5,14 @@ import sys import textwrap from pip.basecommand import Command, SUCCESS +from pip.compat import OrderedDict from pip.download import PipXmlrpcTransport from pip.models import PyPI from pip.utils import get_terminal_size from pip.utils.logging import indent_log from pip.exceptions import CommandError from pip.status_codes import NO_MATCHES_FOUND +from pip._vendor.packaging.version import parse as parse_version from pip._vendor import pkg_resources from pip._vendor.six.moves import xmlrpc_client @@ -28,7 +30,7 @@ class SearchCommand(Command): def __init__(self, *args, **kw): super(SearchCommand, self).__init__(*args, **kw) self.cmd_opts.add_option( - '--index', + '-i', '--index', dest='index', metavar='URL', default=PyPI.pypi_url, @@ -67,21 +69,17 @@ def transform_hits(hits): packages with the list of versions stored inline. This converts the list from pypi into one we can use. 
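Concretely, the conversion described here turns one hit per (name, version) pair into one entry per name with the versions collected inline; with invented data:

```python
from collections import OrderedDict

# Invented example of what the PyPI XML-RPC search returns: one hit per version.
hits = [
    {"name": "requests", "summary": "HTTP for Humans.", "version": "2.10.0"},
    {"name": "requests", "summary": "HTTP for Humans.", "version": "2.11.1"},
    {"name": "six", "summary": "Py2/Py3 compatibility.", "version": "1.10.0"},
]

packages = OrderedDict()
for hit in hits:
    entry = packages.setdefault(
        hit["name"],
        {"name": hit["name"], "summary": hit["summary"], "versions": []})
    entry["versions"].append(hit["version"])

for package in packages.values():
    print(package["name"], package["versions"])
# requests ['2.10.0', '2.11.1']
# six ['1.10.0']
```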
""" - packages = {} + packages = OrderedDict() for hit in hits: name = hit['name'] summary = hit['summary'] version = hit['version'] - score = hit['_pypi_ordering'] - if score is None: - score = 0 if name not in packages.keys(): packages[name] = { 'name': name, 'summary': summary, 'versions': [version], - 'score': score, } else: packages[name]['versions'].append(version) @@ -89,16 +87,8 @@ def transform_hits(hits): # if this is the highest version, replace summary and score if version == highest_version(packages[name]['versions']): packages[name]['summary'] = summary - packages[name]['score'] = score - # each record has a unique name now, so we will convert the dict into a - # list sorted by score - package_list = sorted( - packages.values(), - key=lambda x: x['score'], - reverse=True, - ) - return package_list + return list(packages.values()) def print_results(hits, name_column_width=None, terminal_width=None): @@ -116,12 +106,11 @@ def print_results(hits, name_column_width=None, terminal_width=None): summary = hit['summary'] or '' version = hit.get('versions', ['-'])[-1] if terminal_width is not None: - # wrap and indent summary to fit terminal - summary = textwrap.wrap( - summary, - terminal_width - name_column_width - 5, - ) - summary = ('\n' + ' ' * (name_column_width + 3)).join(summary) + target_width = terminal_width - name_column_width - 5 + if target_width > 10: + # wrap and indent summary to fit terminal + summary = textwrap.wrap(summary, target_width) + summary = ('\n' + ' ' * (name_column_width + 3)).join(summary) line = '%-*s - %s' % (name_column_width, '%s (%s)' % (name, version), summary) @@ -141,6 +130,4 @@ def print_results(hits, name_column_width=None, terminal_width=None): def highest_version(versions): - return next(iter( - sorted(versions, key=pkg_resources.parse_version, reverse=True) - )) + return max(versions, key=parse_version) diff --git a/lib/python3.4/site-packages/pip/commands/show.py b/lib/python3.4/site-packages/pip/commands/show.py index 52a673a..111c16d 100644 --- a/lib/python3.4/site-packages/pip/commands/show.py +++ b/lib/python3.4/site-packages/pip/commands/show.py @@ -7,6 +7,7 @@ import os from pip.basecommand import Command from pip.status_codes import SUCCESS, ERROR from pip._vendor import pkg_resources +from pip._vendor.packaging.utils import canonicalize_name logger = logging.getLogger(__name__) @@ -37,7 +38,8 @@ class ShowCommand(Command): query = args results = search_packages_info(query) - if not print_results(results, options.files): + if not print_results( + results, list_files=options.files, verbose=options.verbose): return ERROR return SUCCESS @@ -49,9 +51,12 @@ def search_packages_info(query): pip generated 'installed-files.txt' in the distributions '.egg-info' directory. 
""" - installed = dict( - [(p.project_name.lower(), p) for p in pkg_resources.working_set]) - query_names = [name.lower() for name in query] + installed = {} + for p in pkg_resources.working_set: + installed[canonicalize_name(p.project_name)] = p + + query_names = [canonicalize_name(name) for name in query] + for dist in [installed[pkg] for pkg in query_names if pkg in installed]: package = { 'name': dist.project_name, @@ -85,13 +90,11 @@ def search_packages_info(query): entry_points = dist.get_metadata_lines('entry_points.txt') package['entry_points'] = entry_points - installer = None if dist.has_metadata('INSTALLER'): for line in dist.get_metadata_lines('INSTALLER'): if line.strip(): - installer = line.strip() + package['installer'] = line.strip() break - package['installer'] = installer # @todo: Should pkg_resources.Distribution have a # `get_pkg_info` method? @@ -102,12 +105,9 @@ def search_packages_info(query): 'home-page', 'author', 'author-email', 'license'): package[key] = pkg_info_dict.get(key) - # It looks like FeedParser can not deal with repeated headers + # It looks like FeedParser cannot deal with repeated headers classifiers = [] for line in metadata.splitlines(): - if not line: - break - # Classifier: License :: OSI Approved :: MIT License if line.startswith('Classifier: '): classifiers.append(line[len('Classifier: '):]) package['classifiers'] = classifiers @@ -117,38 +117,38 @@ def search_packages_info(query): yield package -def print_results(distributions, list_all_files): +def print_results(distributions, list_files=False, verbose=False): """ Print the informations from installed distributions found. """ results_printed = False - for dist in distributions: + for i, dist in enumerate(distributions): results_printed = True - logger.info("---") - logger.info("Metadata-Version: %s", dist.get('metadata-version')) - logger.info("Name: %s", dist['name']) - logger.info("Version: %s", dist['version']) - logger.info("Summary: %s", dist.get('summary')) - logger.info("Home-page: %s", dist.get('home-page')) - logger.info("Author: %s", dist.get('author')) - logger.info("Author-email: %s", dist.get('author-email')) - if dist['installer'] is not None: - logger.info("Installer: %s", dist['installer']) - logger.info("License: %s", dist.get('license')) - logger.info("Location: %s", dist['location']) - logger.info("Requires: %s", ', '.join(dist['requires'])) - logger.info("Classifiers:") - for classifier in dist['classifiers']: - logger.info(" %s", classifier) - if list_all_files: - logger.info("Files:") - if 'files' in dist: - for line in dist['files']: - logger.info(" %s", line.strip()) - else: - logger.info("Cannot locate installed-files.txt") - if 'entry_points' in dist: + if i > 0: + logger.info("---") + logger.info("Name: %s", dist.get('name', '')) + logger.info("Version: %s", dist.get('version', '')) + logger.info("Summary: %s", dist.get('summary', '')) + logger.info("Home-page: %s", dist.get('home-page', '')) + logger.info("Author: %s", dist.get('author', '')) + logger.info("Author-email: %s", dist.get('author-email', '')) + logger.info("License: %s", dist.get('license', '')) + logger.info("Location: %s", dist.get('location', '')) + logger.info("Requires: %s", ', '.join(dist.get('requires', []))) + if verbose: + logger.info("Metadata-Version: %s", + dist.get('metadata-version', '')) + logger.info("Installer: %s", dist.get('installer', '')) + logger.info("Classifiers:") + for classifier in dist.get('classifiers', []): + logger.info(" %s", classifier) logger.info("Entry-points:") - 
for line in dist['entry_points']: + for entry in dist.get('entry_points', []): + logger.info(" %s", entry.strip()) + if list_files: + logger.info("Files:") + for line in dist.get('files', []): logger.info(" %s", line.strip()) + if "files" not in dist: + logger.info("Cannot locate installed-files.txt") return results_printed diff --git a/lib/python3.4/site-packages/pip/commands/wheel.py b/lib/python3.4/site-packages/pip/commands/wheel.py index 1d77fe6..70e95eb 100644 --- a/lib/python3.4/site-packages/pip/commands/wheel.py +++ b/lib/python3.4/site-packages/pip/commands/wheel.py @@ -24,7 +24,7 @@ class WheelCommand(RequirementCommand): Wheel is a built-package format, and offers the advantage of not recompiling your software during every install. For more details, see the - wheel docs: http://wheel.readthedocs.org/en/latest. + wheel docs: https://wheel.readthedocs.io/en/latest/ Requirements: setuptools>=0.8, and wheel. @@ -70,6 +70,7 @@ class WheelCommand(RequirementCommand): cmd_opts.add_option(cmdoptions.editable()) cmd_opts.add_option(cmdoptions.requirements()) cmd_opts.add_option(cmdoptions.src()) + cmd_opts.add_option(cmdoptions.ignore_requires_python()) cmd_opts.add_option(cmdoptions.no_deps()) cmd_opts.add_option(cmdoptions.build_dir()) @@ -151,12 +152,14 @@ class WheelCommand(RequirementCommand): index_urls = [options.index_url] + options.extra_index_urls if options.no_index: - logger.info('Ignoring indexes: %s', ','.join(index_urls)) + logger.debug('Ignoring indexes: %s', ','.join(index_urls)) index_urls = [] if options.build_dir: options.build_dir = os.path.abspath(options.build_dir) + options.src_dir = os.path.abspath(options.src_dir) + with self._build_session(options) as session: finder = self._build_package_finder(options, session) build_delete = (not (options.no_clean or options.build_dir)) @@ -169,6 +172,7 @@ class WheelCommand(RequirementCommand): download_dir=None, ignore_dependencies=options.ignore_dependencies, ignore_installed=True, + ignore_requires_python=options.ignore_requires_python, isolated=options.isolated_mode, session=session, wheel_cache=wheel_cache, diff --git a/lib/python3.4/site-packages/pip/compat/__init__.py b/lib/python3.4/site-packages/pip/compat/__init__.py index 703852b..099672c 100644 --- a/lib/python3.4/site-packages/pip/compat/__init__.py +++ b/lib/python3.4/site-packages/pip/compat/__init__.py @@ -15,7 +15,7 @@ except ImportError: try: from collections import OrderedDict except ImportError: - from pip.compat.ordereddict import OrderedDict + from pip._vendor.ordereddict import OrderedDict try: import ipaddress @@ -115,7 +115,7 @@ def get_path_uid(path): file_uid = os.fstat(fd).st_uid os.close(fd) else: # AIX and Jython - # WARNING: time of check vulnerabity, but best we can do w/o NOFOLLOW + # WARNING: time of check vulnerability, but best we can do w/o NOFOLLOW if not os.path.islink(path): # older versions of Jython don't have `os.fstat` file_uid = os.stat(path).st_uid diff --git a/lib/python3.4/site-packages/pip/compat/ordereddict.py b/lib/python3.4/site-packages/pip/compat/ordereddict.py deleted file mode 100644 index 6eb3ba4..0000000 --- a/lib/python3.4/site-packages/pip/compat/ordereddict.py +++ /dev/null @@ -1,129 +0,0 @@ -# Copyright (c) 2009 Raymond Hettinger -# -# Permission is hereby granted, free of charge, to any person -# obtaining a copy of this software and associated documentation files -# (the "Software"), to deal in the Software without restriction, -# including without limitation the rights to use, copy, modify, merge, -# publish, 
distribute, sublicense, and/or sell copies of the Software, -# and to permit persons to whom the Software is furnished to do so, -# subject to the following conditions: -# -# The above copyright notice and this permission notice shall be -# included in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES -# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT -# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, -# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR -# OTHER DEALINGS IN THE SOFTWARE. - -# flake8: noqa - -from UserDict import DictMixin - -class OrderedDict(dict, DictMixin): - - def __init__(self, *args, **kwds): - if len(args) > 1: - raise TypeError('expected at most 1 arguments, got %d' % len(args)) - try: - self.__end - except AttributeError: - self.clear() - self.update(*args, **kwds) - - def clear(self): - self.__end = end = [] - end += [None, end, end] # sentinel node for doubly linked list - self.__map = {} # key --> [key, prev, next] - dict.clear(self) - - def __setitem__(self, key, value): - if key not in self: - end = self.__end - curr = end[1] - curr[2] = end[1] = self.__map[key] = [key, curr, end] - dict.__setitem__(self, key, value) - - def __delitem__(self, key): - dict.__delitem__(self, key) - key, prev, next = self.__map.pop(key) - prev[2] = next - next[1] = prev - - def __iter__(self): - end = self.__end - curr = end[2] - while curr is not end: - yield curr[0] - curr = curr[2] - - def __reversed__(self): - end = self.__end - curr = end[1] - while curr is not end: - yield curr[0] - curr = curr[1] - - def popitem(self, last=True): - if not self: - raise KeyError('dictionary is empty') - if last: - key = reversed(self).next() - else: - key = iter(self).next() - value = self.pop(key) - return key, value - - def __reduce__(self): - items = [[k, self[k]] for k in self] - tmp = self.__map, self.__end - del self.__map, self.__end - inst_dict = vars(self).copy() - self.__map, self.__end = tmp - if inst_dict: - return (self.__class__, (items,), inst_dict) - return self.__class__, (items,) - - def keys(self): - return list(self) - - setdefault = DictMixin.setdefault - update = DictMixin.update - pop = DictMixin.pop - values = DictMixin.values - items = DictMixin.items - iterkeys = DictMixin.iterkeys - itervalues = DictMixin.itervalues - iteritems = DictMixin.iteritems - - def __repr__(self): - if not self: - return '%s()' % (self.__class__.__name__,) - return '%s(%r)' % (self.__class__.__name__, self.items()) - - def copy(self): - return self.__class__(self) - - @classmethod - def fromkeys(cls, iterable, value=None): - d = cls() - for key in iterable: - d[key] = value - return d - - def __eq__(self, other): - if isinstance(other, OrderedDict): - if len(self) != len(other): - return False - for p, q in zip(self.items(), other.items()): - if p != q: - return False - return True - return dict.__eq__(self, other) - - def __ne__(self, other): - return not self == other diff --git a/lib/python3.4/site-packages/pip/download.py b/lib/python3.4/site-packages/pip/download.py index bbef9ea..54d3131 100644 --- a/lib/python3.4/site-packages/pip/download.py +++ b/lib/python3.4/site-packages/pip/download.py @@ -33,6 +33,7 @@ from pip.utils.encoding import auto_decode from pip.utils.filesystem import 
check_path_owner from pip.utils.logging import indent_log from pip.utils.setuptools_build import SETUPTOOLS_SHIM +from pip.utils.glibc import libc_ver from pip.utils.ui import DownloadProgressBar, DownloadProgressSpinner from pip.locations import write_delete_marker_file from pip.vcs import vcs @@ -40,6 +41,7 @@ from pip._vendor import requests, six from pip._vendor.requests.adapters import BaseAdapter, HTTPAdapter from pip._vendor.requests.auth import AuthBase, HTTPBasicAuth from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response +from pip._vendor.requests.utils import get_netrc_auth from pip._vendor.requests.structures import CaseInsensitiveDict from pip._vendor.requests.packages import urllib3 from pip._vendor.cachecontrol import CacheControlAdapter @@ -88,21 +90,22 @@ def user_agent(): data["implementation"]["version"] = platform.python_version() if sys.platform.startswith("linux"): - distro = dict(filter( + from pip._vendor import distro + distro_infos = dict(filter( lambda x: x[1], - zip(["name", "version", "id"], platform.linux_distribution()), + zip(["name", "version", "id"], distro.linux_distribution()), )) libc = dict(filter( lambda x: x[1], - zip(["lib", "version"], platform.libc_ver()), + zip(["lib", "version"], libc_ver()), )) if libc: - distro["libc"] = libc - if distro: - data["distro"] = distro + distro_infos["libc"] = libc + if distro_infos: + data["distro"] = distro_infos if sys.platform.startswith("darwin") and platform.mac_ver()[0]: - data["distro"] = {"name": "OS X", "version": platform.mac_ver()[0]} + data["distro"] = {"name": "macOS", "version": platform.mac_ver()[0]} if platform.system(): data.setdefault("system", {})["name"] = platform.system() @@ -145,6 +148,11 @@ class MultiDomainBasicAuth(AuthBase): if username is None: username, password = self.parse_credentials(parsed.netloc) + # Get creds from netrc if we still don't have them + if username is None and password is None: + netrc_auth = get_netrc_auth(req.url) + username, password = netrc_auth if netrc_auth else (None, None) + if username or password: # Store the username and password self.passwords[netloc] = (username, password) @@ -163,7 +171,7 @@ class MultiDomainBasicAuth(AuthBase): if resp.status_code != 401: return resp - # We are not able to prompt the user so simple return the response + # We are not able to prompt the user so simply return the response if not self.prompting: return resp @@ -331,7 +339,7 @@ class PipSession(requests.Session): total=retries, # A 503 error from PyPI typically means that the Fastly -> Origin - # connection got interupted in some way. A 503 error in general + # connection got interrupted in some way. A 503 error in general # is typically considered a transient error so we'll go ahead and # retry it. status_forcelist=[503], @@ -602,8 +610,8 @@ def _copy_file(filename, location, link): download_location = os.path.join(location, link.filename) if os.path.exists(download_location): response = ask_path_exists( - 'The file %s exists. (i)gnore, (w)ipe, (b)ackup ' % - display_path(download_location), ('i', 'w', 'b')) + 'The file %s exists. 
(i)gnore, (w)ipe, (b)ackup, (a)abort' % + display_path(download_location), ('i', 'w', 'b', 'a')) if response == 'i': copy = False elif response == 'w': @@ -617,6 +625,8 @@ def _copy_file(filename, location, link): display_path(dest_file), ) shutil.move(download_location, dest_file) + elif response == 'a': + sys.exit(-1) if copy: shutil.copy(filename, download_location) logger.info('Saved %s', display_path(download_location)) @@ -679,7 +689,7 @@ def unpack_file_url(link, location, download_dir=None, hashes=None): return # If --require-hashes is off, `hashes` is either empty, the - # link's embeddded hash, or MissingHashes; it is required to + # link's embedded hash, or MissingHashes; it is required to # match. If --require-hashes is on, we are satisfied by any # hash in `hashes` matching: a URL-based or an option-based # one; no internet-sourced hash will be in `hashes`. @@ -749,6 +759,7 @@ class PipXmlrpcTransport(xmlrpc_client.Transport): """Provide a `xmlrpclib.Transport` implementation via a `PipSession` object. """ + def __init__(self, index_url, session, use_datetime=False): xmlrpc_client.Transport.__init__(self, use_datetime) index_parts = urllib_parse.urlparse(index_url) diff --git a/lib/python3.4/site-packages/pip/exceptions.py b/lib/python3.4/site-packages/pip/exceptions.py index a529e40..50b527f 100644 --- a/lib/python3.4/site-packages/pip/exceptions.py +++ b/lib/python3.4/site-packages/pip/exceptions.py @@ -237,3 +237,8 @@ class HashMismatch(HashError): self.gots[hash_name].hexdigest()) prefix = ' or' return '\n'.join(lines) + + +class UnsupportedPythonVersion(InstallationError): + """Unsupported python version according to Requires-Python package + metadata.""" diff --git a/lib/python3.4/site-packages/pip/index.py b/lib/python3.4/site-packages/pip/index.py index ba0bd6c..f653f6e 100644 --- a/lib/python3.4/site-packages/pip/index.py +++ b/lib/python3.4/site-packages/pip/index.py @@ -20,19 +20,22 @@ from pip.utils import ( cached_property, splitext, normalize_path, ARCHIVE_EXTENSIONS, SUPPORTED_EXTENSIONS, ) -from pip.utils.deprecation import RemovedInPip9Warning, RemovedInPip10Warning +from pip.utils.deprecation import RemovedInPip10Warning from pip.utils.logging import indent_log +from pip.utils.packaging import check_requires_python from pip.exceptions import ( DistributionNotFound, BestVersionAlreadyInstalled, InvalidWheelFilename, UnsupportedWheel, ) from pip.download import HAS_TLS, is_url, path_to_url, url_to_path from pip.wheel import Wheel, wheel_ext -from pip.pep425tags import supported_tags +from pip.pep425tags import get_supported from pip._vendor import html5lib, requests, six from pip._vendor.packaging.version import parse as parse_version from pip._vendor.packaging.utils import canonicalize_name +from pip._vendor.packaging import specifiers from pip._vendor.requests.exceptions import SSLError +from pip._vendor.distlib.compat import unescape __all__ = ['FormatControl', 'fmt_ctl_handle_mutual_exclude', 'PackageFinder'] @@ -104,12 +107,24 @@ class PackageFinder(object): def __init__(self, find_links, index_urls, allow_all_prereleases=False, trusted_hosts=None, process_dependency_links=False, - session=None, format_control=None): + session=None, format_control=None, platform=None, + versions=None, abi=None, implementation=None): """Create a PackageFinder. :param format_control: A FormatControl object or None. Used to control the selection of source packages / binary packages when consulting the index and links. + :param platform: A string or None. 
If None, searches for packages + that are supported by the current system. Otherwise, will find + packages that can be built on the platform passed in. These + packages will only be downloaded for distribution: they will + not be built locally. + :param versions: A list of strings or None. This is passed directly + to pep425tags.py in the get_supported() method. + :param abi: A string or None. This is passed directly + to pep425tags.py in the get_supported() method. + :param implementation: A string or None. This is passed directly + to pep425tags.py in the get_supported() method. """ if session is None: raise TypeError( @@ -153,6 +168,14 @@ class PackageFinder(object): # The Session we'll use to make requests self.session = session + # The valid tags to check potential found wheel candidates against + self.valid_tags = get_supported( + versions=versions, + platform=platform, + abi=abi, + impl=implementation, + ) + # If we don't have TLS enabled, then WARN if anyplace we're looking # relies on TLS. if not HAS_TLS: @@ -175,7 +198,7 @@ class PackageFinder(object): warnings.warn( "Dependency Links processing has been deprecated and will be " "removed in a future release.", - RemovedInPip9Warning, + RemovedInPip10Warning, ) self.dependency_links.extend(links) @@ -236,22 +259,22 @@ class PackageFinder(object): If not finding wheels, then sorted by version only. If finding wheels, then the sort order is by version, then: 1. existing installs - 2. wheels ordered via Wheel.support_index_min() + 2. wheels ordered via Wheel.support_index_min(self.valid_tags) 3. source archives Note: it was considered to embed this logic into the Link comparison operators, but then different sdist links with the same version, would have to be considered equal """ - support_num = len(supported_tags) + support_num = len(self.valid_tags) if candidate.location.is_wheel: # can raise InvalidWheelFilename wheel = Wheel(candidate.location.filename) - if not wheel.supported(): + if not wheel.supported(self.valid_tags): raise UnsupportedWheel( "%s is not a supported wheel for this platform. It " "can't be sorted." % wheel.filename ) - pri = -(wheel.support_index_min()) + pri = -(wheel.support_index_min(self.valid_tags)) else: # sdist pri = -(support_num) return (candidate.version, pri) @@ -335,7 +358,9 @@ class PackageFinder(object): """ def mkurl_pypi_url(url): - loc = posixpath.join(url, urllib_parse.quote(project_name.lower())) + loc = posixpath.join( + url, + urllib_parse.quote(canonicalize_name(project_name))) # For maximum compatibility with easy_install, ensure the path # ends in a trailing slash. 
Although this isn't in the spec # (and PyPI can handle it without the slash) some other index @@ -579,7 +604,6 @@ class PackageFinder(object): def _link_package_versions(self, link, search): """Return an InstallationCandidate or None""" - version = None if link.egg_fragment: egg_info = link.egg_fragment @@ -610,7 +634,8 @@ class PackageFinder(object): self._log_skipped_link( link, 'wrong project name (not %s)' % search.supplied) return - if not wheel.supported(): + + if not wheel.supported(self.valid_tags): self._log_skipped_link( link, 'it is not compatible with this Python') return @@ -638,6 +663,18 @@ class PackageFinder(object): self._log_skipped_link( link, 'Python version is incorrect') return + try: + support_this_python = check_requires_python(link.requires_python) + except specifiers.InvalidSpecifier: + logger.debug("Package %s has an invalid Requires-Python entry: %s", + link.filename, link.requires_python) + support_this_python = True + + if not support_this_python: + logger.debug("The package %s is incompatible with the python" + "version in use. Acceptable python versions are:%s", + link, link.requires_python) + return logger.debug('Found link %s, version: %s', link, version) return InstallationCandidate(search.supplied, version, link) @@ -690,7 +727,7 @@ class HTMLPage(object): self.content = content self.parsed = html5lib.parse( self.content, - encoding=encoding, + transport_encoding=encoding, namespaceHTMLElements=False, ) self.url = url @@ -758,10 +795,10 @@ class HTMLPage(object): resp.raise_for_status() # The check for archives above only works if the url ends with - # something that looks like an archive. However that is not a - # requirement of an url. Unless we issue a HEAD request on every - # url we cannot know ahead of time for sure if something is HTML - # or not. However we can check after we've downloaded it. + # something that looks like an archive. However that is not a + # requirement of an url. Unless we issue a HEAD request on every + # url we cannot know ahead of time for sure if something is HTML + # or not. However we can check after we've downloaded it. content_type = resp.headers.get('Content-Type', 'unknown') if not content_type.lower().startswith("text/html"): logger.debug( @@ -826,7 +863,9 @@ class HTMLPage(object): url = self.clean_link( urllib_parse.urljoin(self.base_url, href) ) - yield Link(url, self) + pyrequire = anchor.get('data-requires-python') + pyrequire = unescape(pyrequire) if pyrequire else None + yield Link(url, self, requires_python=pyrequire) _clean_re = re.compile(r'[^a-z0-9$&+,/:;=?@.#%_\\|-]', re.I) @@ -840,7 +879,19 @@ class HTMLPage(object): class Link(object): - def __init__(self, url, comes_from=None): + def __init__(self, url, comes_from=None, requires_python=None): + """ + Object representing a parsed link from https://pypi.python.org/simple/* + + url: + url of the resource pointed to (href of the link) + comes_from: + instance of HTMLPage where the link was found, or string. + requires_python: + String containing the `Requires-Python` metadata field, specified + in PEP 345. This may be specified by a data-requires-python + attribute in the HTML link tag, as described in PEP 503. 
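On a PEP 503 simple-index page this arrives as an HTML attribute, e.g. `<a href="pkg-1.0.tar.gz" data-requires-python="&gt;=3.4">pkg-1.0.tar.gz</a>` (a made-up anchor); once unescaped, the value is a PEP 440 specifier set that can be tested against the running interpreter:

```python
import sys
from pip._vendor.packaging.specifiers import SpecifierSet  # or packaging.specifiers

requires_python = ">=3.4"   # what unescape(pyrequire) might yield for such an anchor
current_version = ".".join(str(part) for part in sys.version_info[:3])

print(current_version in SpecifierSet(requires_python))
# True on Python 3.4 or newer, False otherwise
```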
+ """ # url can be a UNC windows share if url.startswith('\\\\'): @@ -848,10 +899,15 @@ class Link(object): self.url = url self.comes_from = comes_from + self.requires_python = requires_python if requires_python else None def __str__(self): + if self.requires_python: + rp = ' (requires-python:%s)' % self.requires_python + else: + rp = '' if self.comes_from: - return '%s (from %s)' % (self.url, self.comes_from) + return '%s (from %s)%s' % (self.url, self.comes_from, rp) else: return str(self.url) diff --git a/lib/python3.4/site-packages/pip/locations.py b/lib/python3.4/site-packages/pip/locations.py index 1bd0fae..e598ef1 100644 --- a/lib/python3.4/site-packages/pip/locations.py +++ b/lib/python3.4/site-packages/pip/locations.py @@ -73,7 +73,7 @@ else: "The folder you are executing pip from can no longer be found." ) -# under Mac OS X + virtualenv sys.prefix is not properly resolved +# under macOS + virtualenv sys.prefix is not properly resolved # it is something like /path/to/python/bin/.. # Note: using realpath due to tmp dirs on OSX being symlinks src_prefix = os.path.abspath(src_prefix) @@ -110,7 +110,7 @@ else: config_basename, ) - # Forcing to use /usr/local/bin for standard Mac OS X framework installs + # Forcing to use /usr/local/bin for standard macOS framework installs # Also log to ~/Library/Logs/ for use with the Console.app log viewer if sys.platform[:6] == 'darwin' and sys.prefix[:16] == '/System/Library/': bin_py = '/usr/local/bin' diff --git a/lib/python3.4/site-packages/pip/operations/check.py b/lib/python3.4/site-packages/pip/operations/check.py new file mode 100644 index 0000000..2cf67aa --- /dev/null +++ b/lib/python3.4/site-packages/pip/operations/check.py @@ -0,0 +1,49 @@ + + +def check_requirements(installed_dists): + missing_reqs_dict = {} + incompatible_reqs_dict = {} + + for dist in installed_dists: + key = '%s==%s' % (dist.project_name, dist.version) + + missing_reqs = list(get_missing_reqs(dist, installed_dists)) + if missing_reqs: + missing_reqs_dict[key] = missing_reqs + + incompatible_reqs = list(get_incompatible_reqs( + dist, installed_dists)) + if incompatible_reqs: + incompatible_reqs_dict[key] = incompatible_reqs + + return (missing_reqs_dict, incompatible_reqs_dict) + + +def get_missing_reqs(dist, installed_dists): + """Return all of the requirements of `dist` that aren't present in + `installed_dists`. + + """ + installed_names = set(d.project_name.lower() for d in installed_dists) + missing_requirements = set() + + for requirement in dist.requires(): + if requirement.project_name.lower() not in installed_names: + missing_requirements.add(requirement) + yield requirement + + +def get_incompatible_reqs(dist, installed_dists): + """Return all of the requirements of `dist` that are present in + `installed_dists`, but have incompatible versions. 
+ + """ + installed_dists_by_name = {} + for installed_dist in installed_dists: + installed_dists_by_name[installed_dist.project_name] = installed_dist + + for requirement in dist.requires(): + present_dist = installed_dists_by_name.get(requirement.project_name) + + if present_dist and present_dist not in requirement: + yield (requirement, present_dist) diff --git a/lib/python3.4/site-packages/pip/operations/freeze.py b/lib/python3.4/site-packages/pip/operations/freeze.py index 086922e..920c2c1 100644 --- a/lib/python3.4/site-packages/pip/operations/freeze.py +++ b/lib/python3.4/site-packages/pip/operations/freeze.py @@ -5,9 +5,11 @@ import re import pip from pip.req import InstallRequirement +from pip.req.req_file import COMMENT_RE from pip.utils import get_installed_distributions from pip._vendor import pkg_resources from pip._vendor.packaging.utils import canonicalize_name +from pip._vendor.pkg_resources import RequirementParseError logger = logging.getLogger(__name__) @@ -42,67 +44,83 @@ def freeze( for dist in get_installed_distributions(local_only=local_only, skip=(), user_only=user_only): - req = pip.FrozenRequirement.from_dist( - dist, - dependency_links - ) + try: + req = pip.FrozenRequirement.from_dist( + dist, + dependency_links + ) + except RequirementParseError: + logger.warning( + "Could not parse requirement: %s", + dist.project_name + ) + continue installations[req.name] = req if requirement: - with open(requirement) as req_file: - for line in req_file: - if (not line.strip() or - line.strip().startswith('#') or - (skip_match and skip_match(line)) or - line.startswith(( - '-r', '--requirement', - '-Z', '--always-unzip', - '-f', '--find-links', - '-i', '--index-url', - '--pre', - '--trusted-host', - '--process-dependency-links', - '--extra-index-url'))): - yield line.rstrip() - continue + # the options that don't get turned into an InstallRequirement + # should only be emitted once, even if the same option is in multiple + # requirements files, so we need to keep track of what has been emitted + # so that we don't emit it again if it's seen again + emitted_options = set() + for req_file_path in requirement: + with open(req_file_path) as req_file: + for line in req_file: + if (not line.strip() or + line.strip().startswith('#') or + (skip_match and skip_match(line)) or + line.startswith(( + '-r', '--requirement', + '-Z', '--always-unzip', + '-f', '--find-links', + '-i', '--index-url', + '--pre', + '--trusted-host', + '--process-dependency-links', + '--extra-index-url'))): + line = line.rstrip() + if line not in emitted_options: + emitted_options.add(line) + yield line + continue - if line.startswith('-e') or line.startswith('--editable'): - if line.startswith('-e'): - line = line[2:].strip() + if line.startswith('-e') or line.startswith('--editable'): + if line.startswith('-e'): + line = line[2:].strip() + else: + line = line[len('--editable'):].strip().lstrip('=') + line_req = InstallRequirement.from_editable( + line, + default_vcs=default_vcs, + isolated=isolated, + wheel_cache=wheel_cache, + ) else: - line = line[len('--editable'):].strip().lstrip('=') - line_req = InstallRequirement.from_editable( - line, - default_vcs=default_vcs, - isolated=isolated, - wheel_cache=wheel_cache, - ) - else: - line_req = InstallRequirement.from_line( - line, - isolated=isolated, - wheel_cache=wheel_cache, - ) + line_req = InstallRequirement.from_line( + COMMENT_RE.sub('', line).strip(), + isolated=isolated, + wheel_cache=wheel_cache, + ) - if not line_req.name: - logger.info( - "Skipping 
line because it's not clear what it " - "would install: %s", - line.strip(), - ) - logger.info( - " (add #egg=PackageName to the URL to avoid" - " this warning)" - ) - elif line_req.name not in installations: - logger.warning( - "Requirement file contains %s, but that package is" - " not installed", - line.strip(), - ) - else: - yield str(installations[line_req.name]).rstrip() - del installations[line_req.name] + if not line_req.name: + logger.info( + "Skipping line in requirement file [%s] because " + "it's not clear what it would install: %s", + req_file_path, line.strip(), + ) + logger.info( + " (add #egg=PackageName to the URL to avoid" + " this warning)" + ) + elif line_req.name not in installations: + logger.warning( + "Requirement file [%s] contains %s, but that " + "package is not installed", + req_file_path, COMMENT_RE.sub('', line).strip(), + ) + else: + yield str(installations[line_req.name]).rstrip() + del installations[line_req.name] yield( '## The following requirements were added by ' diff --git a/lib/python3.4/site-packages/pip/pep425tags.py b/lib/python3.4/site-packages/pip/pep425tags.py index e118457..ad202ef 100644 --- a/lib/python3.4/site-packages/pip/pep425tags.py +++ b/lib/python3.4/site-packages/pip/pep425tags.py @@ -6,7 +6,6 @@ import sys import warnings import platform import logging -import ctypes try: import sysconfig @@ -16,7 +15,7 @@ except ImportError: # pragma nocover import distutils.util from pip.compat import OrderedDict - +import pip.utils.glibc logger = logging.getLogger(__name__) @@ -125,7 +124,7 @@ def get_platform(): if sys.platform == 'darwin': # distutils.util.get_platform() returns the release based on the value # of MACOSX_DEPLOYMENT_TARGET on which Python was built, which may - # be signficantly older than the user's current machine. + # be significantly older than the user's current machine. release, _, machine = platform.mac_ver() split_ver = release.split('.') @@ -160,46 +159,17 @@ def is_manylinux1_compatible(): pass # Check glibc version. CentOS 5 uses glibc 2.5. - return have_compatible_glibc(2, 5) - - -def have_compatible_glibc(major, minimum_minor): - # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen - # manpage says, "If filename is NULL, then the returned handle is for the - # main program". This way we can let the linker do the work to figure out - # which libc our process is actually using. - process_namespace = ctypes.CDLL(None) - try: - gnu_get_libc_version = process_namespace.gnu_get_libc_version - except AttributeError: - # Symbol doesn't exist -> therefore, we are not linked to - # glibc. - return False - - # Call gnu_get_libc_version, which returns a string like "2.5". - gnu_get_libc_version.restype = ctypes.c_char_p - version_str = gnu_get_libc_version() - # py2 / py3 compatibility: - if not isinstance(version_str, str): - version_str = version_str.decode("ascii") - - # Parse string and check against requested version. - version = [int(piece) for piece in version_str.split(".")] - if len(version) < 2: - warnings.warn("Expected glibc version with 2 components major.minor," - " got: %s" % version_str, RuntimeWarning) - return False - return version[0] == major and version[1] >= minimum_minor + return pip.utils.glibc.have_compatible_glibc(2, 5) def get_darwin_arches(major, minor, machine): """Return a list of supported arches (including group arches) for - the given major, minor and machine architecture of an OS X machine. + the given major, minor and machine architecture of an macOS machine. 
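The `have_compatible_glibc()` body removed above now lives in `pip.utils.glibc`, but the probe itself is easy to exercise directly. A trimmed sketch of the same ctypes call, assuming a POSIX system; it returns None when the process is not linked against glibc:

```python
import ctypes

def glibc_version():
    """Return the runtime glibc version string, or None if not glibc."""
    process = ctypes.CDLL(None)          # dlopen(NULL): the running process
    try:
        gnu_get_libc_version = process.gnu_get_libc_version
    except AttributeError:
        return None                      # symbol missing: musl, macOS, ...
    gnu_get_libc_version.restype = ctypes.c_char_p
    version = gnu_get_libc_version()
    if not isinstance(version, str):     # bytes on Python 3
        version = version.decode("ascii")
    return version

print(glibc_version())                   # e.g. '2.23'; manylinux1 needs >= 2.5
```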
""" arches = [] def _supports_arch(major, minor, arch): - # Looking at the application support for OS X versions in the chart + # Looking at the application support for macOS versions in the chart # provided by https://en.wikipedia.org/wiki/OS_X#Versions it appears # our timeline looks roughly like: # @@ -253,12 +223,19 @@ def get_darwin_arches(major, minor, machine): return arches -def get_supported(versions=None, noarch=False): +def get_supported(versions=None, noarch=False, platform=None, + impl=None, abi=None): """Return a list of supported tags for each version specified in `versions`. :param versions: a list of string versions, of the form ["33", "32"], or None. The first version will be assumed to support our ABI. + :param platform: specify the exact platform you want valid + tags for, or None. If None, use the local system platform. + :param impl: specify the exact implementation you want valid + tags for, or None. If None, use the local interpreter impl. + :param abi: specify the exact abi you want valid + tags for, or None. If None, use the local interpreter abi. """ supported = [] @@ -271,11 +248,11 @@ def get_supported(versions=None, noarch=False): for minor in range(version_info[-1], -1, -1): versions.append(''.join(map(str, major + (minor,)))) - impl = get_abbr_impl() + impl = impl or get_abbr_impl() abis = [] - abi = get_abi_tag() + abi = abi or get_abi_tag() if abi: abis[0:0] = [abi] @@ -290,8 +267,8 @@ def get_supported(versions=None, noarch=False): abis.append('none') if not noarch: - arch = get_platform() - if sys.platform == 'darwin': + arch = platform or get_platform() + if arch.startswith('macosx'): # support macosx-10.6-intel on macosx-10.9-x86_64 match = _osx_arch_pat.match(arch) if match: @@ -304,7 +281,7 @@ def get_supported(versions=None, noarch=False): else: # arch pattern didn't match (?!) 
arches = [arch] - elif is_manylinux1_compatible(): + elif platform is None and is_manylinux1_compatible(): arches = [arch.replace('linux', 'manylinux1'), arch] else: arches = [arch] @@ -314,6 +291,15 @@ def get_supported(versions=None, noarch=False): for arch in arches: supported.append(('%s%s' % (impl, versions[0]), abi, arch)) + # abi3 modules compatible with older version of Python + for version in versions[1:]: + # abi3 was introduced in Python 3.2 + if version in ('31', '30'): + break + for abi in abi3s: # empty set if not Python 3 + for arch in arches: + supported.append(("%s%s" % (impl, version), abi, arch)) + # Has binaries, does not use the Python API: for arch in arches: supported.append(('py%s' % (versions[0][0]), 'none', arch)) diff --git a/lib/python3.4/site-packages/pip/req/req_file.py b/lib/python3.4/site-packages/pip/req/req_file.py index 2cfb479..821df22 100644 --- a/lib/python3.4/site-packages/pip/req/req_file.py +++ b/lib/python3.4/site-packages/pip/req/req_file.py @@ -135,7 +135,7 @@ def process_line(line, filename, line_number, finder=None, comes_from=None, defaults.format_control = finder.format_control args_str, options_str = break_args_options(line) if sys.version_info < (2, 7, 3): - # Priori to 2.7.3, shlex can not deal with unicode entries + # Prior to 2.7.3, shlex cannot deal with unicode entries options_str = options_str.encode('utf8') opts, _ = parser.parse_args(shlex.split(options_str), defaults) diff --git a/lib/python3.4/site-packages/pip/req/req_install.py b/lib/python3.4/site-packages/pip/req/req_install.py index caeda76..1a98f37 100644 --- a/lib/python3.4/site-packages/pip/req/req_install.py +++ b/lib/python3.4/site-packages/pip/req/req_install.py @@ -15,9 +15,11 @@ from distutils.util import change_root from email.parser import FeedParser from pip._vendor import pkg_resources, six -from pip._vendor.distlib.markers import interpret as markers_interpret from pip._vendor.packaging import specifiers +from pip._vendor.packaging.markers import Marker +from pip._vendor.packaging.requirements import InvalidRequirement, Requirement from pip._vendor.packaging.utils import canonicalize_name +from pip._vendor.packaging.version import Version, parse as parse_version from pip._vendor.six.moves import configparser import pip.wheel @@ -25,7 +27,7 @@ import pip.wheel from pip.compat import native_str, get_stdlib, WINDOWS from pip.download import is_url, url_to_path, path_to_url, is_archive_file from pip.exceptions import ( - InstallationError, UninstallationError, UnsupportedWheel, + InstallationError, UninstallationError, ) from pip.locations import ( bin_py, running_under_virtualenv, PIP_DELETE_MARKER_FILENAME, bin_user, @@ -38,14 +40,13 @@ from pip.utils import ( ) from pip.utils.hashes import Hashes -from pip.utils.deprecation import RemovedInPip9Warning, RemovedInPip10Warning +from pip.utils.deprecation import RemovedInPip10Warning from pip.utils.logging import indent_log from pip.utils.setuptools_build import SETUPTOOLS_SHIM from pip.utils.ui import open_spinner from pip.req.req_uninstall import UninstallPathSet from pip.vcs import vcs from pip.wheel import move_wheel_files, Wheel -from pip._vendor.packaging.version import Version logger = logging.getLogger(__name__) @@ -65,6 +66,10 @@ def _strip_extras(path): return path_no_extras, extras +def _safe_extras(extras): + return set(pkg_resources.safe_extra(extra) for extra in extras) + + class InstallRequirement(object): def __init__(self, req, comes_from, source_dir=None, editable=False, @@ -74,8 +79,8 @@ class 
InstallRequirement(object): self.extras = () if isinstance(req, six.string_types): try: - req = pkg_resources.Requirement.parse(req) - except pkg_resources.RequirementParseError: + req = Requirement(req) + except InvalidRequirement: if os.path.sep in req: add_msg = "It looks like a path. Does it exist ?" elif '=' in req and not any(op in req for op in operators): @@ -84,7 +89,7 @@ class InstallRequirement(object): add_msg = traceback.format_exc() raise InstallationError( "Invalid requirement: '%s'\n%s" % (req, add_msg)) - self.extras = req.extras + self.extras = _safe_extras(req.extras) self.req = req self.comes_from = comes_from @@ -95,7 +100,10 @@ class InstallRequirement(object): self._wheel_cache = wheel_cache self.link = self.original_link = link self.as_egg = as_egg - self.markers = markers + if markers is not None: + self.markers = markers + else: + self.markers = req and req.marker self._egg_info_path = None # This holds the pkg_resources.Distribution object if this requirement # is already available: @@ -148,7 +156,7 @@ class InstallRequirement(object): wheel_cache=wheel_cache) if extras_override is not None: - res.extras = extras_override + res.extras = _safe_extras(extras_override) return res @@ -170,6 +178,8 @@ class InstallRequirement(object): markers = markers.strip() if not markers: markers = None + else: + markers = Marker(markers) else: markers = None name = name.strip() @@ -209,11 +219,6 @@ class InstallRequirement(object): # wheel file if link.is_wheel: wheel = Wheel(link.filename) # can raise InvalidWheelFilename - if not wheel.supported(): - raise UnsupportedWheel( - "%s is not a supported wheel on this platform." % - wheel.filename - ) req = "%s==%s" % (wheel.name, wheel.version) else: # set the req to the egg fragment. when it's not there, this @@ -230,8 +235,8 @@ class InstallRequirement(object): wheel_cache=wheel_cache, constraint=constraint) if extras: - res.extras = pkg_resources.Requirement.parse('__placeholder__' + - extras).extras + res.extras = _safe_extras( + Requirement('placeholder' + extras).extras) return res @@ -312,7 +317,12 @@ class InstallRequirement(object): # package is not available yet so we create a temp directory # Once run_egg_info will have run, we'll be able # to fix it via _correct_build_location - self._temp_build_dir = tempfile.mkdtemp('-build', 'pip-') + # Some systems have /tmp as a symlink which confuses custom + # builds (such as numpy). Thus, we ensure that the real path + # is returned. 
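The realpath() wrapping described in this comment (applied just below) is easy to demonstrate in isolation: mkdtemp() can return a path that passes through a symlink (for example /tmp on systems where it is a link), and resolving it once keeps later path comparisons stable. A minimal sketch:

    import os
    import tempfile

    build_dir = os.path.realpath(tempfile.mkdtemp('-build', 'pip-'))
    print(build_dir)      # the symlink-free location of the new directory
    os.rmdir(build_dir)   # clean up this throwaway directory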
+ self._temp_build_dir = os.path.realpath( + tempfile.mkdtemp('-build', 'pip-') + ) self._ideal_build_dir = build_dir return self._temp_build_dir if self.editable: @@ -362,7 +372,7 @@ class InstallRequirement(object): def name(self): if self.req is None: return None - return native_str(self.req.project_name) + return native_str(pkg_resources.safe_name(self.req.name)) @property def setup_py_dir(self): @@ -426,33 +436,31 @@ class InstallRequirement(object): egg_info_cmd + egg_base_option, cwd=self.setup_py_dir, show_stdout=False, - command_level=logging.DEBUG, command_desc='python setup.py egg_info') if not self.req: - if isinstance( - pkg_resources.parse_version(self.pkg_info()["Version"]), - Version): + if isinstance(parse_version(self.pkg_info()["Version"]), Version): op = "==" else: op = "===" - self.req = pkg_resources.Requirement.parse( + self.req = Requirement( "".join([ self.pkg_info()["Name"], op, self.pkg_info()["Version"], - ])) + ]) + ) self._correct_build_location() else: metadata_name = canonicalize_name(self.pkg_info()["Name"]) - if canonicalize_name(self.req.project_name) != metadata_name: + if canonicalize_name(self.req.name) != metadata_name: logger.warning( 'Running setup.py (path:%s) egg_info for package %s ' 'produced metadata for project name %s. Fix your ' '#egg=%s fragments.', self.setup_py, self.name, metadata_name, self.name ) - self.req = pkg_resources.Requirement.parse(metadata_name) + self.req = Requirement(metadata_name) def egg_info_data(self, filename): if self.satisfied_by is not None: @@ -486,7 +494,7 @@ class InstallRequirement(object): # Don't search in anything that looks like a virtualenv # environment if ( - os.path.exists( + os.path.lexists( os.path.join(root, dir, 'bin', 'python') ) or os.path.exists( @@ -540,7 +548,7 @@ class InstallRequirement(object): def assert_source_matches_version(self): assert self.source_dir version = self.pkg_info()['version'] - if version not in self.req: + if self.req.specifier and version not in self.req.specifier: logger.warning( 'Requested %s, but installing version %s', self, @@ -682,6 +690,10 @@ class InstallRequirement(object): 'easy-install.pth') paths_to_remove.add_pth(easy_install_pth, './' + easy_install_egg) + elif egg_info_exists and dist.egg_info.endswith('.dist-info'): + for path in pip.wheel.uninstallation_paths(dist): + paths_to_remove.add(path) + elif develop_egg_link: # develop egg with open(develop_egg_link, 'r') as fh: @@ -695,10 +707,6 @@ class InstallRequirement(object): 'easy-install.pth') paths_to_remove.add_pth(easy_install_pth, dist.location) - elif egg_info_exists and dist.egg_info.endswith('.dist-info'): - for path in pip.wheel.uninstallation_paths(dist): - paths_to_remove.add(path) - else: logger.debug( 'Not sure how to uninstall: %s - Check: %s', @@ -769,8 +777,8 @@ class InstallRequirement(object): archive_path = os.path.join(build_dir, archive_name) if os.path.exists(archive_path): response = ask_path_exists( - 'The file %s exists. (i)gnore, (w)ipe, (b)ackup ' % - display_path(archive_path), ('i', 'w', 'b')) + 'The file %s exists. 
(i)gnore, (w)ipe, (b)ackup, (a)bort ' % + display_path(archive_path), ('i', 'w', 'b', 'a')) if response == 'i': create_archive = False elif response == 'w': @@ -784,6 +792,8 @@ class InstallRequirement(object): display_path(dest_file), ) shutil.move(archive_path, dest_file) + elif response == 'a': + sys.exit(-1) if create_archive: zip = zipfile.ZipFile( archive_path, 'w', zipfile.ZIP_DEFLATED, @@ -816,9 +826,15 @@ class InstallRequirement(object): name = name.replace(os.path.sep, '/') return name - def match_markers(self): + def match_markers(self, extras_requested=None): + if not extras_requested: + # Provide an extra to safely evaluate the markers + # without matching any extra + extras_requested = ('',) if self.markers is not None: - return markers_interpret(self.markers) + return any( + self.markers.evaluate({'extra': extra}) + for extra in extras_requested) else: return True @@ -850,30 +866,8 @@ class InstallRequirement(object): temp_location = tempfile.mkdtemp('-record', 'pip-') record_filename = os.path.join(temp_location, 'install-record.txt') try: - install_args = [sys.executable, "-u"] - install_args.append('-c') - install_args.append(SETUPTOOLS_SHIM % self.setup_py) - install_args += list(global_options) + \ - ['install', '--record', record_filename] - - if not self.as_egg: - install_args += ['--single-version-externally-managed'] - - if root is not None: - install_args += ['--root', root] - if prefix is not None: - install_args += ['--prefix', prefix] - - if self.pycompile: - install_args += ["--compile"] - else: - install_args += ["--no-compile"] - - if running_under_virtualenv(): - py_ver_str = 'python' + sysconfig.get_python_version() - install_args += ['--install-headers', - os.path.join(sys.prefix, 'include', 'site', - py_ver_str, self.name)] + install_args = self.get_install_args( + global_options, record_filename, root, prefix) msg = 'Running setup.py install for %s' % (self.name,) with open_spinner(msg) as spinner: with indent_log(): @@ -946,6 +940,34 @@ class InstallRequirement(object): self.source_dir = self.build_location(parent_dir) return self.source_dir + def get_install_args(self, global_options, record_filename, root, prefix): + install_args = [sys.executable, "-u"] + install_args.append('-c') + install_args.append(SETUPTOOLS_SHIM % self.setup_py) + install_args += list(global_options) + \ + ['install', '--record', record_filename] + + if not self.as_egg: + install_args += ['--single-version-externally-managed'] + + if root is not None: + install_args += ['--root', root] + if prefix is not None: + install_args += ['--prefix', prefix] + + if self.pycompile: + install_args += ["--compile"] + else: + install_args += ["--no-compile"] + + if running_under_virtualenv(): + py_ver_str = 'python' + sysconfig.get_python_version() + install_args += ['--install-headers', + os.path.join(sys.prefix, 'include', 'site', + py_ver_str, self.name)] + + return install_args + def remove_temporary_source(self): """Remove the source files from this requirement, if they are marked for deletion""" @@ -994,12 +1016,24 @@ class InstallRequirement(object): if self.req is None: return False try: - self.satisfied_by = pkg_resources.get_distribution(self.req) + # get_distribution() will resolve the entire list of requirements + # anyway, and we've already determined that we need the requirement + # in question, so strip the marker so that we don't try to + # evaluate it. 
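The marker-stripping described in this comment (and performed just below) can be shown with the public packaging project rather than pip's vendored copy; 'example-dist' is a made-up name:

    from packaging.requirements import Requirement

    req = Requirement('example-dist>=1.0; python_version < "3"')
    no_marker = Requirement(str(req))   # copy by round-tripping through a string
    no_marker.marker = None             # drop the environment marker
    print(str(no_marker))               # example-dist>=1.0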
+ no_marker = Requirement(str(self.req)) + no_marker.marker = None + self.satisfied_by = pkg_resources.get_distribution(str(no_marker)) + if self.editable and self.satisfied_by: + self.conflicts_with = self.satisfied_by + # when installing editables, nothing pre-existing should ever + # satisfy + self.satisfied_by = None + return True except pkg_resources.DistributionNotFound: return False except pkg_resources.VersionConflict: existing_dist = pkg_resources.get_distribution( - self.req.project_name + self.req.name ) if self.use_user_site: if dist_in_usersite(existing_dist): @@ -1085,24 +1119,6 @@ def _strip_postfix(req): return req -def _build_req_from_url(url): - - parts = [p for p in url.split('#', 1)[0].split('/') if p] - - req = None - if len(parts) > 2 and parts[-2] in ('tags', 'branches', 'tag', 'branch'): - req = parts[-3] - elif len(parts) > 1 and parts[-1] == 'trunk': - req = parts[-2] - if req: - warnings.warn( - 'Sniffing the requirement name from the url is deprecated and ' - 'will be removed in the future. Please specify an #egg segment ' - 'instead.', RemovedInPip9Warning, - stacklevel=2) - return req - - def parse_editable(editable_req, default_vcs=None): """Parses an editable requirement into: - a requirement name @@ -1142,9 +1158,7 @@ def parse_editable(editable_req, default_vcs=None): return ( package_name, url_no_extras, - pkg_resources.Requirement.parse( - '__placeholder__' + extras - ).extras, + Requirement("placeholder" + extras.lower()).extras, ) else: return package_name, url_no_extras, None @@ -1156,6 +1170,11 @@ def parse_editable(editable_req, default_vcs=None): if '+' not in url: if default_vcs: + warnings.warn( + "--default-vcs has been deprecated and will be removed in " + "the future.", + RemovedInPip10Warning, + ) url = default_vcs + '+' + url else: raise InstallationError( @@ -1174,7 +1193,9 @@ def parse_editable(editable_req, default_vcs=None): package_name = Link(url).egg_fragment if not package_name: - package_name = _build_req_from_url(editable_req) + raise InstallationError( + "Could not detect requirement name, please specify one with #egg=" + ) if not package_name: raise InstallationError( '--editable=%s is not the right format; it must have ' diff --git a/lib/python3.4/site-packages/pip/req/req_set.py b/lib/python3.4/site-packages/pip/req/req_set.py index e7a8d87..76aec06 100644 --- a/lib/python3.4/site-packages/pip/req/req_set.py +++ b/lib/python3.4/site-packages/pip/req/req_set.py @@ -14,14 +14,16 @@ from pip.download import (is_file_url, is_dir_url, is_vcs_url, url_to_path, from pip.exceptions import (InstallationError, BestVersionAlreadyInstalled, DistributionNotFound, PreviousBuildDirError, HashError, HashErrors, HashUnpinned, - DirectoryUrlHashUnsupported, VcsHashUnsupported) + DirectoryUrlHashUnsupported, VcsHashUnsupported, + UnsupportedPythonVersion) from pip.req.req_install import InstallRequirement from pip.utils import ( display_path, dist_in_usersite, ensure_dir, normalize_path) from pip.utils.hashes import MissingHashes from pip.utils.logging import indent_log +from pip.utils.packaging import check_dist_requires_python from pip.vcs import vcs - +from pip.wheel import Wheel logger = logging.getLogger(__name__) @@ -140,11 +142,12 @@ class Installed(DistAbstraction): class RequirementSet(object): def __init__(self, build_dir, src_dir, download_dir, upgrade=False, - ignore_installed=False, as_egg=False, target_dir=None, - ignore_dependencies=False, force_reinstall=False, - use_user_site=False, session=None, pycompile=True, - isolated=False, 
wheel_download_dir=None, - wheel_cache=None, require_hashes=False): + upgrade_strategy=None, ignore_installed=False, as_egg=False, + target_dir=None, ignore_dependencies=False, + force_reinstall=False, use_user_site=False, session=None, + pycompile=True, isolated=False, wheel_download_dir=None, + wheel_cache=None, require_hashes=False, + ignore_requires_python=False): """Create a RequirementSet. :param wheel_download_dir: Where still-packed .whl files should be @@ -170,6 +173,7 @@ class RequirementSet(object): # the wheelhouse output by 'pip wheel'. self.download_dir = download_dir self.upgrade = upgrade + self.upgrade_strategy = upgrade_strategy self.ignore_installed = ignore_installed self.force_reinstall = force_reinstall self.requirements = Requirements() @@ -177,6 +181,7 @@ class RequirementSet(object): self.requirement_aliases = {} self.unnamed_requirements = [] self.ignore_dependencies = ignore_dependencies + self.ignore_requires_python = ignore_requires_python self.successfully_downloaded = [] self.successfully_installed = [] self.reqs_to_cleanup = [] @@ -207,7 +212,8 @@ class RequirementSet(object): return ('<%s object; %d requirement(s): %s>' % (self.__class__.__name__, len(reqs), reqs_str)) - def add_requirement(self, install_req, parent_req_name=None): + def add_requirement(self, install_req, parent_req_name=None, + extras_requested=None): """Add install_req as a requirement to install. :param parent_req_name: The name of the requirement that needed this @@ -216,21 +222,35 @@ class RequirementSet(object): links that point outside the Requirements set. parent_req must already be added. Note that None implies that this is a user supplied requirement, vs an inferred one. + :param extras_requested: an iterable of extras used to evaluate the + environement markers. :return: Additional requirements to scan. That is either [] if the requirement is not applicable, or [install_req] if the requirement is applicable and has just been added. """ name = install_req.name - if not install_req.match_markers(): - logger.warning("Ignoring %s: markers %r don't match your " + if not install_req.match_markers(extras_requested): + logger.warning("Ignoring %s: markers '%s' don't match your " "environment", install_req.name, install_req.markers) return [] + # This check has to come after we filter requirements with the + # environment markers. + if install_req.link and install_req.link.is_wheel: + wheel = Wheel(install_req.link.filename) + if not wheel.supported(): + raise InstallationError( + "%s is not a supported wheel on this platform." 
% + wheel.filename + ) + install_req.as_egg = self.as_egg install_req.use_user_site = self.use_user_site install_req.target_dir = self.target_dir install_req.pycompile = self.pycompile + install_req.is_direct = (parent_req_name is None) + if not name: # url or path requirement w/o an egg fragment self.unnamed_requirements.append(install_req) @@ -243,7 +263,7 @@ class RequirementSet(object): if (parent_req_name is None and existing_req and not existing_req.constraint and existing_req.extras == install_req.extras and not - existing_req.req.specs == install_req.req.specs): + existing_req.req.specifier == install_req.req.specifier): raise InstallationError( 'Double requirement given: %s (already in %s, name=%r)' % (install_req, existing_req, name)) @@ -365,6 +385,13 @@ class RequirementSet(object): if hash_errors: raise hash_errors + def _is_upgrade_allowed(self, req): + return self.upgrade and ( + self.upgrade_strategy == "eager" or ( + self.upgrade_strategy == "only-if-needed" and req.is_direct + ) + ) + def _check_skip_installed(self, req_to_install, finder): """Check if req_to_install should be skipped. @@ -386,17 +413,20 @@ class RequirementSet(object): # Check whether to upgrade/reinstall this req or not. req_to_install.check_if_exists() if req_to_install.satisfied_by: - skip_reason = 'satisfied (use --upgrade to upgrade)' - if self.upgrade: - best_installed = False + upgrade_allowed = self._is_upgrade_allowed(req_to_install) + + # Is the best version is installed. + best_installed = False + + if upgrade_allowed: # For link based requirements we have to pull the # tree down and inspect to assess the version #, so # its handled way down. if not (self.force_reinstall or req_to_install.link): try: - finder.find_requirement(req_to_install, self.upgrade) + finder.find_requirement( + req_to_install, upgrade_allowed) except BestVersionAlreadyInstalled: - skip_reason = 'up-to-date' best_installed = True except DistributionNotFound: # No distribution found, so we squash the @@ -413,6 +443,15 @@ class RequirementSet(object): req_to_install.conflicts_with = \ req_to_install.satisfied_by req_to_install.satisfied_by = None + + # Figure out a nice message to say why we're skipping this. + if best_installed: + skip_reason = 'already up-to-date' + elif self.upgrade_strategy == "only-if-needed": + skip_reason = 'not upgraded as not directly required' + else: + skip_reason = 'already satisfied' + return skip_reason else: return None @@ -453,7 +492,7 @@ class RequirementSet(object): 'req_to_install.satisfied_by is set to %r' % (req_to_install.satisfied_by,)) logger.info( - 'Requirement already %s: %s', skip_reason, + 'Requirement %s: %s', skip_reason, req_to_install) else: if (req_to_install.link and @@ -479,6 +518,7 @@ class RequirementSet(object): abstract_dist.prep_for_dist() if self.is_download: req_to_install.archive(self.download_dir) + req_to_install.check_if_exists() elif req_to_install.satisfied_by: if require_hashes: logger.debug( @@ -509,7 +549,10 @@ class RequirementSet(object): % (req_to_install, req_to_install.source_dir) ) req_to_install.populate_link( - finder, self.upgrade, require_hashes) + finder, + self._is_upgrade_allowed(req_to_install), + require_hashes + ) # We can't hit this spot and have populate_link return None. 
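The upgrade decision introduced in this hunk reduces to a small predicate over the strategy and whether the requirement was named directly; a standalone restatement with an illustrative function name:

    def upgrade_allowed(upgrade, strategy, is_direct):
        # "eager" upgrades everything; "only-if-needed" upgrades only
        # requirements the user asked for directly.
        return upgrade and (
            strategy == 'eager' or
            (strategy == 'only-if-needed' and is_direct)
        )

    assert upgrade_allowed(True, 'eager', False)
    assert upgrade_allowed(True, 'only-if-needed', True)
    assert not upgrade_allowed(True, 'only-if-needed', False)
    assert not upgrade_allowed(False, 'eager', True)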
# req_to_install.satisfied_by is None here (because we're # guarded) and upgrade has no impact except when satisfied_by @@ -619,9 +662,17 @@ class RequirementSet(object): # # parse dependencies # # # ###################### # dist = abstract_dist.dist(finder) + try: + check_dist_requires_python(dist) + except UnsupportedPythonVersion as e: + if self.ignore_requires_python: + logger.warning(e.args[0]) + else: + req_to_install.remove_temporary_source() + raise more_reqs = [] - def add_req(subreq): + def add_req(subreq, extras_requested): sub_install_req = InstallRequirement( str(subreq), req_to_install, @@ -629,7 +680,8 @@ class RequirementSet(object): wheel_cache=self._wheel_cache, ) more_reqs.extend(self.add_requirement( - sub_install_req, req_to_install.name)) + sub_install_req, req_to_install.name, + extras_requested=extras_requested)) # We add req_to_install before its dependencies, so that we # can refer to it when adding dependencies. @@ -656,7 +708,7 @@ class RequirementSet(object): set(dist.extras) & set(req_to_install.extras) ) for subreq in dist.requires(available_requested): - add_req(subreq) + add_req(subreq, extras_requested=available_requested) # cleanup tmp src self.reqs_to_cleanup.append(req_to_install) diff --git a/lib/python3.4/site-packages/pip/utils/__init__.py b/lib/python3.4/site-packages/pip/utils/__init__.py index 8ea2e38..0d25d91 100644 --- a/lib/python3.4/site-packages/pip/utils/__init__.py +++ b/lib/python3.4/site-packages/pip/utils/__init__.py @@ -42,7 +42,7 @@ __all__ = ['rmtree', 'display_path', 'backup_dir', 'normalize_path', 'renames', 'get_terminal_size', 'get_prog', 'unzip_file', 'untar_file', 'unpack_file', 'call_subprocess', - 'captured_stdout', 'remove_tracebacks', 'ensure_dir', + 'captured_stdout', 'ensure_dir', 'ARCHIVE_EXTENSIONS', 'SUPPORTED_EXTENSIONS', 'get_installed_version'] @@ -639,20 +639,9 @@ def unpack_file(filename, location, content_type, link): ) -def remove_tracebacks(output): - pattern = (r'(?:\W+File "(?:.*)", line (?:.*)\W+(?:.*)\W+\^\W+)?' - r'Syntax(?:Error|Warning): (?:.*)') - output = re.sub(pattern, '', output) - if PY2: - return output - # compileall.compile_dir() prints different messages to stdout - # in Python 3 - return re.sub(r"\*\*\* Error compiling (?:.*)", '', output) - - def call_subprocess(cmd, show_stdout=True, cwd=None, on_returncode='raise', - command_level=std_logging.DEBUG, command_desc=None, + command_desc=None, extra_environ=None, spinner=None): # This function's handling of subprocess output is confusing and I # previously broke it terribly, so as penance I will write a long comment @@ -686,7 +675,7 @@ def call_subprocess(cmd, show_stdout=True, cwd=None, part = '"%s"' % part.replace('"', '\\"') cmd_parts.append(part) command_desc = ' '.join(cmd_parts) - logger.log(command_level, "Running command %s", command_desc) + logger.debug("Running command %s", command_desc) env = os.environ.copy() if extra_environ: env.update(extra_environ) @@ -745,7 +734,7 @@ def call_subprocess(cmd, show_stdout=True, cwd=None, raise ValueError('Invalid value: on_returncode=%s' % repr(on_returncode)) if not show_stdout: - return remove_tracebacks(''.join(all_output)) + return ''.join(all_output) def read_text_file(filename): @@ -856,14 +845,17 @@ class cached_property(object): return value -def get_installed_version(dist_name): +def get_installed_version(dist_name, lookup_dirs=None): """Get the installed version of dist_name avoiding pkg_resources cache""" # Create a requirement that we'll look for inside of setuptools. 
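A fresh WorkingSet is what lets this helper bypass the module-level pkg_resources cache, and the new lookup_dirs parameter simply seeds that working set with explicit directories. A rough standalone equivalent (illustrative function name):

    import pkg_resources

    def installed_version(dist_name, lookup_dirs=None):
        req = pkg_resources.Requirement.parse(dist_name)
        # None falls back to sys.path, mirroring the default behaviour.
        working_set = pkg_resources.WorkingSet(lookup_dirs)
        dist = working_set.find(req)
        return dist.version if dist else None

    print(installed_version('pip'))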
req = pkg_resources.Requirement.parse(dist_name) # We want to avoid having this cached, so we need to construct a new # working set each time. - working_set = pkg_resources.WorkingSet() + if lookup_dirs is None: + working_set = pkg_resources.WorkingSet() + else: + working_set = pkg_resources.WorkingSet(lookup_dirs) # Get the installed distribution from our working set dist = working_set.find(req) diff --git a/lib/python3.4/site-packages/pip/utils/appdirs.py b/lib/python3.4/site-packages/pip/utils/appdirs.py index 60ae76e..9b82801 100644 --- a/lib/python3.4/site-packages/pip/utils/appdirs.py +++ b/lib/python3.4/site-packages/pip/utils/appdirs.py @@ -8,6 +8,7 @@ import os import sys from pip.compat import WINDOWS, expanduser +from pip._vendor.six import PY2, text_type def user_cache_dir(appname): @@ -17,9 +18,9 @@ def user_cache_dir(appname): "appname" is the name of application. Typical user cache directories are: - Mac OS X: ~/Library/Caches/ + macOS: ~/Library/Caches/ Unix: ~/.cache/ (XDG default) - Windows: C:\Users\\AppData\Local\\Cache + Windows: C:\Users\\AppData\Local\\Cache On Windows the only suggestion in the MSDN docs is that local settings go in the `CSIDL_LOCAL_APPDATA` directory. This is identical to the @@ -35,6 +36,11 @@ def user_cache_dir(appname): # Get the base path path = os.path.normpath(_get_win_folder("CSIDL_LOCAL_APPDATA")) + # When using Python 2, return paths as bytes on Windows like we do on + # other operating systems. See helper function docs for more details. + if PY2 and isinstance(path, text_type): + path = _win_path_to_bytes(path) + # Add our app name and Cache directory to it path = os.path.join(path, appname, "Cache") elif sys.platform == "darwin": @@ -67,7 +73,7 @@ def user_data_dir(appname, roaming=False): for a discussion of issues. Typical user data directories are: - Mac OS X: ~/Library/Application Support/ + macOS: ~/Library/Application Support/ Unix: ~/.local/share/ # or in $XDG_DATA_HOME, if defined Win XP (not roaming): C:\Documents and Settings\\ ... @@ -110,12 +116,12 @@ def user_config_dir(appname, roaming=True): for a discussion of issues. Typical user data directories are: - Mac OS X: same as user_data_dir + macOS: same as user_data_dir Unix: ~/.config/ Win *: same as user_data_dir For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME. - That means, by deafult "~/.config/". + That means, by default "~/.config/". """ if WINDOWS: path = user_data_dir(appname, roaming=roaming) @@ -136,7 +142,7 @@ def site_config_dirs(appname): "appname" is the name of application. Typical user config directories are: - Mac OS X: /Library/Application Support// + macOS: /Library/Application Support// Unix: /etc or $XDG_CONFIG_DIRS[i]// for each value in $XDG_CONFIG_DIRS Win XP: C:\Documents and Settings\All Users\Application ... @@ -222,3 +228,21 @@ if WINDOWS: _get_win_folder = _get_win_folder_with_ctypes except ImportError: _get_win_folder = _get_win_folder_from_registry + + +def _win_path_to_bytes(path): + """Encode Windows paths to bytes. Only used on Python 2. + + Motivation is to be consistent with other operating systems where paths + are also returned as bytes. This avoids problems mixing bytes and Unicode + elsewhere in the codebase. For more details and discussion see + . + + If encoding using ASCII and MBCS fails, return the original Unicode path. 
+ """ + for encoding in ('ASCII', 'MBCS'): + try: + return path.encode(encoding) + except (UnicodeEncodeError, LookupError): + pass + return path diff --git a/lib/python3.4/site-packages/pip/utils/deprecation.py b/lib/python3.4/site-packages/pip/utils/deprecation.py index 2fb1d1e..c3f799e 100644 --- a/lib/python3.4/site-packages/pip/utils/deprecation.py +++ b/lib/python3.4/site-packages/pip/utils/deprecation.py @@ -1,5 +1,5 @@ """ -A module that implments tooling to enable easy warnings about deprecations. +A module that implements tooling to enable easy warnings about deprecations. """ from __future__ import absolute_import @@ -15,15 +15,15 @@ class Pending(object): pass -class RemovedInPip9Warning(PipDeprecationWarning): +class RemovedInPip10Warning(PipDeprecationWarning): pass -class RemovedInPip10Warning(PipDeprecationWarning, Pending): +class RemovedInPip11Warning(PipDeprecationWarning, Pending): pass -class Python26DeprecationWarning(PipDeprecationWarning, Pending): +class Python26DeprecationWarning(PipDeprecationWarning): pass diff --git a/lib/python3.4/site-packages/pip/utils/glibc.py b/lib/python3.4/site-packages/pip/utils/glibc.py new file mode 100644 index 0000000..7847885 --- /dev/null +++ b/lib/python3.4/site-packages/pip/utils/glibc.py @@ -0,0 +1,81 @@ +from __future__ import absolute_import + +import re +import ctypes +import platform +import warnings + + +def glibc_version_string(): + "Returns glibc version string, or None if not using glibc." + + # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen + # manpage says, "If filename is NULL, then the returned handle is for the + # main program". This way we can let the linker do the work to figure out + # which libc our process is actually using. + process_namespace = ctypes.CDLL(None) + try: + gnu_get_libc_version = process_namespace.gnu_get_libc_version + except AttributeError: + # Symbol doesn't exist -> therefore, we are not linked to + # glibc. + return None + + # Call gnu_get_libc_version, which returns a string like "2.5" + gnu_get_libc_version.restype = ctypes.c_char_p + version_str = gnu_get_libc_version() + # py2 / py3 compatibility: + if not isinstance(version_str, str): + version_str = version_str.decode("ascii") + + return version_str + + +# Separated out from have_compatible_glibc for easier unit testing +def check_glibc_version(version_str, required_major, minimum_minor): + # Parse string and check against requested version. + # + # We use a regexp instead of str.split because we want to discard any + # random junk that might come after the minor version -- this might happen + # in patched/forked versions of glibc (e.g. Linaro's version of glibc + # uses version strings like "2.20-2014.11"). See gh-3588. + m = re.match(r"(?P[0-9]+)\.(?P[0-9]+)", version_str) + if not m: + warnings.warn("Expected glibc version with 2 components major.minor," + " got: %s" % version_str, RuntimeWarning) + return False + return (int(m.group("major")) == required_major and + int(m.group("minor")) >= minimum_minor) + + +def have_compatible_glibc(required_major, minimum_minor): + version_str = glibc_version_string() + if version_str is None: + return False + return check_glibc_version(version_str, required_major, minimum_minor) + + +# platform.libc_ver regularly returns completely nonsensical glibc +# versions. E.g. 
on my computer, platform says: +# +# ~$ python2.7 -c 'import platform; print(platform.libc_ver())' +# ('glibc', '2.7') +# ~$ python3.5 -c 'import platform; print(platform.libc_ver())' +# ('glibc', '2.9') +# +# But the truth is: +# +# ~$ ldd --version +# ldd (Debian GLIBC 2.22-11) 2.22 +# +# This is unfortunate, because it means that the linehaul data on libc +# versions that was generated by pip 8.1.2 and earlier is useless and +# misleading. Solution: instead of using platform, use our code that actually +# works. +def libc_ver(): + glibc_version = glibc_version_string() + if glibc_version is None: + # For non-glibc platforms, fall back on platform.libc_ver + return platform.libc_ver() + else: + return ("glibc", glibc_version) diff --git a/lib/python3.4/site-packages/pip/utils/packaging.py b/lib/python3.4/site-packages/pip/utils/packaging.py new file mode 100644 index 0000000..e93b20d --- /dev/null +++ b/lib/python3.4/site-packages/pip/utils/packaging.py @@ -0,0 +1,63 @@ +from __future__ import absolute_import + +from email.parser import FeedParser + +import logging +import sys + +from pip._vendor.packaging import specifiers +from pip._vendor.packaging import version +from pip._vendor import pkg_resources + +from pip import exceptions + +logger = logging.getLogger(__name__) + + +def check_requires_python(requires_python): + """ + Check if the python version in use match the `requires_python` specifier. + + Returns `True` if the version of python in use matches the requirement. + Returns `False` if the version of python in use does not matches the + requirement. + + Raises an InvalidSpecifier if `requires_python` have an invalid format. + """ + if requires_python is None: + # The package provides no information + return True + requires_python_specifier = specifiers.SpecifierSet(requires_python) + + # We only use major.minor.micro + python_version = version.parse('.'.join(map(str, sys.version_info[:3]))) + return python_version in requires_python_specifier + + +def get_metadata(dist): + if (isinstance(dist, pkg_resources.DistInfoDistribution) and + dist.has_metadata('METADATA')): + return dist.get_metadata('METADATA') + elif dist.has_metadata('PKG-INFO'): + return dist.get_metadata('PKG-INFO') + + +def check_dist_requires_python(dist): + metadata = get_metadata(dist) + feed_parser = FeedParser() + feed_parser.feed(metadata) + pkg_info_dict = feed_parser.close() + requires_python = pkg_info_dict.get('Requires-Python') + try: + if not check_requires_python(requires_python): + raise exceptions.UnsupportedPythonVersion( + "%s requires Python '%s' but the running Python is %s" % ( + dist.project_name, + requires_python, + '.'.join(map(str, sys.version_info[:3])),) + ) + except specifiers.InvalidSpecifier as e: + logger.warning( + "Package %s has an invalid Requires-Python entry %s - %s" % ( + dist.project_name, requires_python, e)) + return diff --git a/lib/python3.4/site-packages/pip/utils/setuptools_build.py b/lib/python3.4/site-packages/pip/utils/setuptools_build.py index 4c9095e..03973e9 100644 --- a/lib/python3.4/site-packages/pip/utils/setuptools_build.py +++ b/lib/python3.4/site-packages/pip/utils/setuptools_build.py @@ -1,6 +1,8 @@ # Shim to wrap setup.py invocation with setuptools SETUPTOOLS_SHIM = ( "import setuptools, tokenize;__file__=%r;" - "exec(compile(getattr(tokenize, 'open', open)(__file__).read()" - ".replace('\\r\\n', '\\n'), __file__, 'exec'))" + "f=getattr(tokenize, 'open', open)(__file__);" + "code=f.read().replace('\\r\\n', '\\n');" + "f.close();" + "exec(compile(code, 
__file__, 'exec'))" ) diff --git a/lib/python3.4/site-packages/pip/vcs/__init__.py b/lib/python3.4/site-packages/pip/vcs/__init__.py index 9dc1c60..8d3dbb2 100644 --- a/lib/python3.4/site-packages/pip/vcs/__init__.py +++ b/lib/python3.4/site-packages/pip/vcs/__init__.py @@ -5,6 +5,7 @@ import errno import logging import os import shutil +import sys from pip._vendor.six.moves.urllib import parse as urllib_parse @@ -106,7 +107,7 @@ class VersionControl(object): def _is_local_repository(self, repo): """ posix absolute paths start with os.path.sep, - win32 ones ones start with drive (like c:\\folder) + win32 ones start with drive (like c:\\folder) """ drive, tail = os.path.splitdrive(repo) return repo.startswith(os.path.sep) or drive @@ -271,6 +272,8 @@ class VersionControl(object): ) shutil.move(dest, dest_dir) checkout = True + elif response == 'a': + sys.exit(-1) return checkout def unpack(self, location): @@ -307,7 +310,7 @@ class VersionControl(object): def run_command(self, cmd, show_stdout=True, cwd=None, on_returncode='raise', - command_level=logging.DEBUG, command_desc=None, + command_desc=None, extra_environ=None, spinner=None): """ Run a VCS subcommand @@ -317,7 +320,7 @@ class VersionControl(object): cmd = [self.name] + cmd try: return call_subprocess(cmd, show_stdout, cwd, - on_returncode, command_level, + on_returncode, command_desc, extra_environ, spinner) except OSError as e: diff --git a/lib/python3.4/site-packages/pip/vcs/git.py b/lib/python3.4/site-packages/pip/vcs/git.py index 24528de..2187dd8 100644 --- a/lib/python3.4/site-packages/pip/vcs/git.py +++ b/lib/python3.4/site-packages/pip/vcs/git.py @@ -8,6 +8,7 @@ from pip.compat import samefile from pip.exceptions import BadCommand from pip._vendor.six.moves.urllib import parse as urllib_parse from pip._vendor.six.moves.urllib import request as urllib_request +from pip._vendor.packaging.version import parse as parse_version from pip.utils import display_path, rmtree from pip.vcs import vcs, VersionControl @@ -49,6 +50,19 @@ class Git(VersionControl): super(Git, self).__init__(url, *args, **kwargs) + def get_git_version(self): + VERSION_PFX = 'git version ' + version = self.run_command(['version'], show_stdout=False) + if version.startswith(VERSION_PFX): + version = version[len(VERSION_PFX):] + else: + version = '' + # get first 3 positions of the git version becasue + # on windows it is x.y.z.windows.t, and this parses as + # LegacyVersion which always smaller than a Version. 
+ version = '.'.join(version.split('.')[:3]) + return parse_version(version) + def export(self, location): """Export the Git repository at the url to the destination location""" temp_dir = tempfile.mkdtemp('-export', 'pip-') @@ -99,7 +113,11 @@ class Git(VersionControl): def update(self, dest, rev_options): # First fetch changes from the default remote - self.run_command(['fetch', '-q'], cwd=dest) + if self.get_git_version() >= parse_version('1.9.0'): + # fetch tags in addition to everything else + self.run_command(['fetch', '-q', '--tags'], cwd=dest) + else: + self.run_command(['fetch', '-q'], cwd=dest) # Then reset to wanted revision (maybe even origin/master) if rev_options: rev_options = self.check_rev_options( @@ -139,8 +157,13 @@ class Git(VersionControl): remotes = self.run_command( ['config', '--get-regexp', 'remote\..*\.url'], show_stdout=False, cwd=location) - first_remote = remotes.splitlines()[0] - url = first_remote.split(' ')[1] + remotes = remotes.splitlines() + found_remote = remotes[0] + for remote in remotes: + if remote.startswith('remote.origin.url '): + found_remote = remote + break + url = found_remote.split(' ')[1] return url.strip() def get_revision(self, location): diff --git a/lib/python3.4/site-packages/pip/vcs/subversion.py b/lib/python3.4/site-packages/pip/vcs/subversion.py index aa78fa6..4b23156 100644 --- a/lib/python3.4/site-packages/pip/vcs/subversion.py +++ b/lib/python3.4/site-packages/pip/vcs/subversion.py @@ -60,6 +60,7 @@ class Subversion(VersionControl): """Export the svn repository at the url to the destination location""" url, rev = self.get_url_rev() rev_options = get_rev_options(url, rev) + url = self.remove_auth_from_url(url) logger.info('Exporting svn repository %s to %s', url, location) with indent_log(): if os.path.exists(location): @@ -79,6 +80,7 @@ class Subversion(VersionControl): def obtain(self, dest): url, rev = self.get_url_rev() rev_options = get_rev_options(url, rev) + url = self.remove_auth_from_url(url) if rev: rev_display = ' (to revision %s)' % rev else: @@ -217,6 +219,24 @@ class Subversion(VersionControl): """Always assume the versions don't match""" return False + @staticmethod + def remove_auth_from_url(url): + # Return a copy of url with 'username:password@' removed. + # username/pass params are passed to subversion through flags + # and are not recognized in the url. + + # parsed url + purl = urllib_parse.urlsplit(url) + stripped_netloc = \ + purl.netloc.split('@')[-1] + + # stripped url + url_pieces = ( + purl.scheme, stripped_netloc, purl.path, purl.query, purl.fragment + ) + surl = urllib_parse.urlunsplit(url_pieces) + return surl + def get_rev_options(url, rev): if rev: diff --git a/lib/python3.4/site-packages/pip/wheel.py b/lib/python3.4/site-packages/pip/wheel.py index 3e12402..9ac9dff 100644 --- a/lib/python3.4/site-packages/pip/wheel.py +++ b/lib/python3.4/site-packages/pip/wheel.py @@ -298,10 +298,11 @@ def move_wheel_files(name, req, wheeldir, user=False, home=None, root=None, continue elif (is_base and s.endswith('.dist-info') and - # is self.req.project_name case preserving? 
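The replacement comparison shown just below switches to canonicalize_name on both sides, so underscore/hyphen and case differences no longer matter. A worked example with a hypothetical directory name, using the public packaging project:

    from packaging.utils import canonicalize_name

    dist_info_dir = 'My_Project-1.0.dist-info'
    req_name = 'my-project'
    print(canonicalize_name(dist_info_dir))   # my-project-1-0-dist-info
    print(canonicalize_name(dist_info_dir)
          .startswith(canonicalize_name(req_name)))   # True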
- s.lower().startswith( - req.project_name.replace('-', '_').lower())): - assert not info_dir, 'Multiple .dist-info directories' + canonicalize_name(s).startswith( + canonicalize_name(req.name))): + assert not info_dir, ('Multiple .dist-info directories: ' + + destsubdir + ', ' + + ', '.join(info_dir)) info_dir.append(destsubdir) for f in files: # Skip unwanted files @@ -417,7 +418,7 @@ import sys from %(module)s import %(import_name)s if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) sys.exit(%(func)s()) """ @@ -438,7 +439,7 @@ if __name__ == '__main__': # Because setuptools and pip are bundled with _ensurepip and virtualenv, # we need to use universal wheels. So, as a stopgap until Metadata 2.0, we # override the versioned entry points in the wheel and generate the - # correct ones. This code is purely a short-term measure until Metadat 2.0 + # correct ones. This code is purely a short-term measure until Metadata 2.0 # is available. # # To add the level of hack in this section of code, in order to support @@ -757,11 +758,8 @@ class WheelBuilder(object): if not autobuilding: logger.info( 'Skipping %s, due to already being wheel.', req.name) - elif req.editable: - if not autobuilding: - logger.info( - 'Skipping bdist_wheel for %s, due to being editable', - req.name) + elif autobuilding and req.editable: + pass elif autobuilding and req.link and not req.link.is_artifact: pass elif autobuilding and not req.source_dir: @@ -801,8 +799,8 @@ class WheelBuilder(object): try: ensure_dir(output_dir) except OSError as e: - logger.warn("Building wheel for %s failed: %s", - req.name, e) + logger.warning("Building wheel for %s failed: %s", + req.name, e) build_failure.append(req) continue else: diff --git a/lib/python3.4/site-packages/pkg_resources-0.0.0.dist-info/RECORD b/lib/python3.4/site-packages/pkg_resources-0.0.0.dist-info/RECORD index d43543b..6e71c0c 100644 --- a/lib/python3.4/site-packages/pkg_resources-0.0.0.dist-info/RECORD +++ b/lib/python3.4/site-packages/pkg_resources-0.0.0.dist-info/RECORD @@ -1,26 +1,36 @@ -pkg_resources/__init__.py,sha256=bucu_98c11mzrGldEJeqxArn14F7ZmURsb-8CaNSbVo,108616 +pkg_resources/__init__.py,sha256=qasrGUKwGQ8dGJP5SOEhLJoWRizj5HinbD2bXfrOH28,103308 pkg_resources/_vendor/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pkg_resources/_vendor/appdirs.py,sha256=tgGaL0m4Jo2VeuGfoOOifLv7a7oUEJu2n1vRkqoPw-0,22374 +pkg_resources/_vendor/pyparsing.py,sha256=PifeLY3-WhIcBVzLtv0U4T_pwDtPruBhBCkg5vLqa28,229867 pkg_resources/_vendor/six.py,sha256=A6hdJZVjI3t_geebZ9BzUvwRrIXo0lfwzQlM2LcKyas,30098 -pkg_resources/_vendor/packaging/__about__.py,sha256=YzdrW-1lWmyCBDyrcNkZbJo4tiDWXpoiqPjfyCYMzIE,1073 -pkg_resources/_vendor/packaging/__init__.py,sha256=2V8n-eEpSgBuXlV8hlMmhU7ZklpsrrusWMZNp2gC4Hs,906 -pkg_resources/_vendor/packaging/_compat.py,sha256=wofog8iYo_zudt_10i6JiXKHDs5GhCuXC09hCuSJiv4,1253 -pkg_resources/_vendor/packaging/_structures.py,sha256=93YvgrEE2HgFp8AdXy0pwCRVnZeutRHO_-puJ7T0cPw,1809 -pkg_resources/_vendor/packaging/specifiers.py,sha256=UV9T01_kKloA8PSeMI3HTYBSJ_4KLs00yLvrlciZ3yU,28079 -pkg_resources/_vendor/packaging/version.py,sha256=dEGrWZJZ6sef1xMxSfDCego2hS3Q86by0hUIFVk-AGc,11949 -pkg_resources/extern/__init__.py,sha256=azKvXDutMVFe3c641wdiwndjtku92Bl3_iGVAIMKnsM,2461 +pkg_resources/_vendor/packaging/__about__.py,sha256=zkcCPTN_6TcLW0Nrlg0176-R1QQ_WVPTm8sz1R4-HjM,720 
+pkg_resources/_vendor/packaging/__init__.py,sha256=_vNac5TrzwsrzbOFIbF-5cHqc_Y2aPT2D7zrIR06BOo,513 +pkg_resources/_vendor/packaging/_compat.py,sha256=Vi_A0rAQeHbU-a9X0tt1yQm9RqkgQbDSxzRw8WlU9kA,860 +pkg_resources/_vendor/packaging/_structures.py,sha256=RImECJ4c_wTlaTYYwZYLHEiebDMaAJmK1oPARhw1T5o,1416 +pkg_resources/_vendor/packaging/markers.py,sha256=uEcBBtGvzqltgnArqb9c4RrcInXezDLos14zbBHhWJo,8248 +pkg_resources/_vendor/packaging/requirements.py,sha256=SikL2UynbsT0qtY9ltqngndha_sfo0w6XGFhAhoSoaQ,4355 +pkg_resources/_vendor/packaging/specifiers.py,sha256=SAMRerzO3fK2IkFZCaZkuwZaL_EGqHNOz4pni4vhnN0,28025 +pkg_resources/_vendor/packaging/utils.py,sha256=3m6WvPm6NNxE8rkTGmn0r75B_GZSGg7ikafxHsBN1WA,421 +pkg_resources/_vendor/packaging/version.py,sha256=OwGnxYfr2ghNzYx59qWIBkrK3SnB6n-Zfd1XaLpnnM0,11556 +pkg_resources/extern/__init__.py,sha256=JUtlHHvlxHSNuB4pWqNjcx7n6kG-fwXg7qmJ2zNJlIY,2487 pkg_resources-0.0.0.dist-info/DESCRIPTION.rst,sha256=OCTuuN6LcWulhHS3d5rfjdsQtW22n7HENFRh6jC6ego,10 pkg_resources-0.0.0.dist-info/METADATA,sha256=FOYDX6cmnDUkWo-yhqWQYtjKIMZR2IW2G1GFZhA6gUQ,177 pkg_resources-0.0.0.dist-info/RECORD,, pkg_resources-0.0.0.dist-info/WHEEL,sha256=o2k-Qa-RMNIJmUdIc7KU6VWR_ErNRbWNlxDIpl7lm34,110 pkg_resources-0.0.0.dist-info/metadata.json,sha256=8ZVRFU96pY_wnWouockCkvXw981Y0iDB5nQFFGq8ZiY,221 pkg_resources-0.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pkg_resources/extern/__pycache__/__init__.cpython-34.pyc,, -pkg_resources/_vendor/__pycache__/__init__.cpython-34.pyc,, +pkg_resources/_vendor/packaging/__pycache__/version.cpython-34.pyc,, +pkg_resources/__pycache__/__init__.cpython-34.pyc,, +pkg_resources/_vendor/__pycache__/appdirs.cpython-34.pyc,, +pkg_resources/_vendor/packaging/__pycache__/_structures.cpython-34.pyc,, pkg_resources/_vendor/packaging/__pycache__/__init__.cpython-34.pyc,, +pkg_resources/_vendor/packaging/__pycache__/specifiers.cpython-34.pyc,, +pkg_resources/_vendor/__pycache__/__init__.cpython-34.pyc,, +pkg_resources/_vendor/packaging/__pycache__/utils.cpython-34.pyc,, +pkg_resources/_vendor/__pycache__/six.cpython-34.pyc,, +pkg_resources/_vendor/__pycache__/pyparsing.cpython-34.pyc,, +pkg_resources/_vendor/packaging/__pycache__/markers.cpython-34.pyc,, pkg_resources/_vendor/packaging/__pycache__/__about__.cpython-34.pyc,, pkg_resources/_vendor/packaging/__pycache__/_compat.cpython-34.pyc,, -pkg_resources/_vendor/packaging/__pycache__/specifiers.cpython-34.pyc,, -pkg_resources/__pycache__/__init__.cpython-34.pyc,, -pkg_resources/_vendor/packaging/__pycache__/_structures.cpython-34.pyc,, -pkg_resources/_vendor/packaging/__pycache__/version.cpython-34.pyc,, -pkg_resources/_vendor/__pycache__/six.cpython-34.pyc,, +pkg_resources/_vendor/packaging/__pycache__/requirements.cpython-34.pyc,, +pkg_resources/extern/__pycache__/__init__.cpython-34.pyc,, diff --git a/lib/python3.4/site-packages/pkg_resources/__init__.py b/lib/python3.4/site-packages/pkg_resources/__init__.py index d04cd34..4c9868c 100644 --- a/lib/python3.4/site-packages/pkg_resources/__init__.py +++ b/lib/python3.4/site-packages/pkg_resources/__init__.py @@ -1,3 +1,4 @@ +# coding: utf-8 """ Package resource API -------------------- @@ -28,8 +29,6 @@ import warnings import stat import functools import pkgutil -import token -import symbol import operator import platform import collections @@ -37,6 +36,7 @@ import plistlib import email.parser import tempfile import textwrap +import itertools from pkgutil import get_importer try: @@ -67,22 +67,15 @@ try: except ImportError: 
importlib_machinery = None -try: - import parser -except ImportError: - pass - +from pkg_resources.extern import appdirs from pkg_resources.extern import packaging __import__('pkg_resources.extern.packaging.version') __import__('pkg_resources.extern.packaging.specifiers') - +__import__('pkg_resources.extern.packaging.requirements') +__import__('pkg_resources.extern.packaging.markers') if (3, 0) < sys.version_info < (3, 3): - msg = ( - "Support for Python 3.0-3.2 has been dropped. Future versions " - "will fail here." - ) - warnings.warn(msg) + raise RuntimeError("Python 3.3 or later is required") # declare some globals that will be defined later to # satisfy the linters. @@ -98,7 +91,6 @@ class PEP440Warning(RuntimeWarning): class _SetuptoolsVersionMixin(object): - def __hash__(self): return super(_SetuptoolsVersionMixin, self).__hash__() @@ -160,7 +152,7 @@ class _SetuptoolsVersionMixin(object): # pad for numeric comparison yield part.zfill(8) else: - yield '*'+part + yield '*' + part # ensure that alpha/beta/candidate are before final yield '*final' @@ -214,36 +206,44 @@ def parse_version(v): _state_vars = {} + def _declare_state(vartype, **kw): globals().update(kw) _state_vars.update(dict.fromkeys(kw, vartype)) + def __getstate__(): state = {} g = globals() for k, v in _state_vars.items(): - state[k] = g['_sget_'+v](g[k]) + state[k] = g['_sget_' + v](g[k]) return state + def __setstate__(state): g = globals() for k, v in state.items(): - g['_sset_'+_state_vars[k]](k, g[k], v) + g['_sset_' + _state_vars[k]](k, g[k], v) return state + def _sget_dict(val): return val.copy() + def _sset_dict(key, ob, state): ob.clear() ob.update(state) + def _sget_object(val): return val.__getstate__() + def _sset_object(key, ob, state): ob.__setstate__(state) + _sget_none = _sset_none = lambda *args: None @@ -270,9 +270,10 @@ def get_supported_platform(): pass return plat + __all__ = [ # Basic resource access and distribution/entry point discovery - 'require', 'run_script', 'get_provider', 'get_distribution', + 'require', 'run_script', 'get_provider', 'get_distribution', 'load_entry_point', 'get_entry_map', 'get_entry_info', 'iter_entry_points', 'resource_string', 'resource_stream', 'resource_filename', @@ -316,10 +317,12 @@ __all__ = [ 'run_main', 'AvailableDistributions', ] + class ResolutionError(Exception): """Abstract base for dependency resolution errors""" + def __repr__(self): - return self.__class__.__name__+repr(self.args) + return self.__class__.__name__ + repr(self.args) class VersionConflict(ResolutionError): @@ -396,6 +399,8 @@ class DistributionNotFound(ResolutionError): class UnknownExtra(ResolutionError): """Distribution doesn't have an "extra feature" of the given name""" + + _provider_factories = {} PY_MAJOR = sys.version[:3] @@ -405,6 +410,7 @@ SOURCE_DIST = 1 CHECKOUT_DIST = 0 DEVELOP_DIST = -1 + def register_loader_type(loader_type, provider_factory): """Register `provider_factory` to make providers for `loader_type` @@ -414,6 +420,7 @@ def register_loader_type(loader_type, provider_factory): """ _provider_factories[loader_type] = provider_factory + def get_provider(moduleOrReq): """Return an IResourceProvider for the named module or requirement""" if isinstance(moduleOrReq, Requirement): @@ -426,6 +433,7 @@ def get_provider(moduleOrReq): loader = getattr(module, '__loader__', None) return _find_adapter(_provider_factories, loader)(module) + def _macosx_vers(_cache=[]): if not _cache: version = platform.mac_ver()[0] @@ -441,9 +449,11 @@ def _macosx_vers(_cache=[]): 
_cache.append(version.split('.')) return _cache[0] + def _macosx_arch(machine): return {'PowerPC': 'ppc', 'Power_Macintosh': 'ppc'}.get(machine, machine) + def get_build_platform(): """Return this platform's string for platform-specific distributions @@ -469,6 +479,7 @@ def get_build_platform(): pass return plat + macosVersionString = re.compile(r"macosx-(\d+)\.(\d+)-(.*)") darwinVersionString = re.compile(r"darwin-(\d+)\.(\d+)\.(\d+)-(.*)") # XXX backward compat @@ -482,7 +493,7 @@ def compatible_platforms(provided, required): XXX Needs compatibility checks for Linux and other unixy OSes. """ - if provided is None or required is None or provided==required: + if provided is None or required is None or provided == required: # easy case return True @@ -529,9 +540,11 @@ def run_script(dist_spec, script_name): ns['__name__'] = name require(dist_spec)[0].run_script(script_name, ns) + # backward compatibility run_main = run_script + def get_distribution(dist): """Return a current distribution object for a Requirement or string""" if isinstance(dist, six.string_types): @@ -542,21 +555,23 @@ def get_distribution(dist): raise TypeError("Expected string, Requirement, or Distribution", dist) return dist + def load_entry_point(dist, group, name): """Return `name` entry point of `group` for `dist` or raise ImportError""" return get_distribution(dist).load_entry_point(group, name) + def get_entry_map(dist, group=None): """Return the entry point map for `group`, or the full entry map""" return get_distribution(dist).get_entry_map(group) + def get_entry_info(dist, group, name): """Return the EntryPoint object for `group`+`name`, or ``None``""" return get_distribution(dist).get_entry_info(group, name) class IMetadataProvider: - def has_metadata(name): """Does the package's distribution contain the named metadata?""" @@ -737,7 +752,7 @@ class WorkingSet(object): for key in self.entry_keys[item]: if key not in seen: - seen[key]=1 + seen[key] = 1 yield self.by_key[key] def add(self, dist, entry=None, insert=True, replace=False): @@ -757,8 +772,8 @@ class WorkingSet(object): if entry is None: entry = dist.location - keys = self.entry_keys.setdefault(entry,[]) - keys2 = self.entry_keys.setdefault(dist.location,[]) + keys = self.entry_keys.setdefault(entry, []) + keys2 = self.entry_keys.setdefault(dist.location, []) if not replace and dist.key in self.by_key: # ignore hidden distros return @@ -797,6 +812,8 @@ class WorkingSet(object): best = {} to_activate = [] + req_extras = _ReqExtras() + # Mapping of requirement to set of distributions that required it; # useful for reporting info about conflicts. required_by = collections.defaultdict(set) @@ -807,6 +824,10 @@ class WorkingSet(object): if req in processed: # Ignore cyclic or redundant dependencies continue + + if not req_extras.markers_pass(req): + continue + dist = best.get(req.key) if dist is None: # Find the best distribution and add it to the map @@ -839,6 +860,7 @@ class WorkingSet(object): # Register the new requirements needed by req for new_requirement in new_requirements: required_by[new_requirement].add(req.project_name) + req_extras[new_requirement] = req.extras processed[req] = True @@ -945,11 +967,17 @@ class WorkingSet(object): return needed - def subscribe(self, callback): - """Invoke `callback` for all distributions (including existing ones)""" + def subscribe(self, callback, existing=True): + """Invoke `callback` for all distributions + + If `existing=True` (default), + call on all existing ones, as well. 
+ """ if callback in self.callbacks: return self.callbacks.append(callback) + if not existing: + return for dist in self: callback(dist) @@ -971,6 +999,26 @@ class WorkingSet(object): self.callbacks = callbacks[:] +class _ReqExtras(dict): + """ + Map each requirement to the extras that demanded it. + """ + + def markers_pass(self, req): + """ + Evaluate markers for req against each extra that + demanded it. + + Return False if the req has a marker and fails + evaluation. Otherwise, return True. + """ + extra_evals = ( + req.marker.evaluate({'extra': extra}) + for extra in self.get(req, ()) + (None,) + ) + return not req.marker or any(extra_evals) + + class Environment(object): """Searchable snapshot of distributions on a search path""" @@ -1005,7 +1053,7 @@ class Environment(object): is returned. """ return (self.python is None or dist.py_version is None - or dist.py_version==self.python) \ + or dist.py_version == self.python) \ and compatible_platforms(dist.platform, self.platform) def remove(self, dist): @@ -1210,7 +1258,7 @@ class ResourceManager: extract, as it tracks the generated names for possible cleanup later. """ extract_path = self.extraction_path or get_default_cache() - target_path = os.path.join(extract_path, archive_name+'-tmp', *names) + target_path = os.path.join(extract_path, archive_name + '-tmp', *names) try: _bypass_ensure_directory(target_path) except: @@ -1304,49 +1352,18 @@ class ResourceManager: """ # XXX + def get_default_cache(): - """Determine the default cache location - - This returns the ``PYTHON_EGG_CACHE`` environment variable, if set. - Otherwise, on Windows, it returns a "Python-Eggs" subdirectory of the - "Application Data" directory. On all other systems, it's "~/.python-eggs". """ - try: - return os.environ['PYTHON_EGG_CACHE'] - except KeyError: - pass + Return the ``PYTHON_EGG_CACHE`` environment variable + or a platform-relevant user cache dir for an app + named "Python-Eggs". + """ + return ( + os.environ.get('PYTHON_EGG_CACHE') + or appdirs.user_cache_dir(appname='Python-Eggs') + ) - if os.name!='nt': - return os.path.expanduser('~/.python-eggs') - - # XXX this may be locale-specific! - app_data = 'Application Data' - app_homes = [ - # best option, should be locale-safe - (('APPDATA',), None), - (('USERPROFILE',), app_data), - (('HOMEDRIVE','HOMEPATH'), app_data), - (('HOMEPATH',), app_data), - (('HOME',), None), - # 95/98/ME - (('WINDIR',), app_data), - ] - - for keys, subdir in app_homes: - dirname = '' - for key in keys: - if key in os.environ: - dirname = os.path.join(dirname, os.environ[key]) - else: - break - else: - if subdir: - dirname = os.path.join(dirname, subdir) - return os.path.join(dirname, 'Python-Eggs') - else: - raise RuntimeError( - "Please set the PYTHON_EGG_CACHE enviroment variable" - ) def safe_name(name): """Convert an arbitrary string to a standard distribution name @@ -1364,7 +1381,7 @@ def safe_version(version): # normalize the version return str(packaging.version.Version(version)) except packaging.version.InvalidVersion: - version = version.replace(' ','.') + version = version.replace(' ', '.') return re.sub('[^A-Za-z0-9.]+', '-', version) @@ -1374,7 +1391,7 @@ def safe_extra(extra): Any runs of non-alphanumeric characters are replaced with a single '_', and the result is always lowercased. 
""" - return re.sub('[^A-Za-z0-9.]+', '_', extra).lower() + return re.sub('[^A-Za-z0-9.-]+', '_', extra).lower() def to_filename(name): @@ -1382,205 +1399,37 @@ def to_filename(name): Any '-' characters are currently replaced with '_'. """ - return name.replace('-','_') + return name.replace('-', '_') -class MarkerEvaluation(object): - values = { - 'os_name': lambda: os.name, - 'sys_platform': lambda: sys.platform, - 'python_full_version': platform.python_version, - 'python_version': lambda: platform.python_version()[:3], - 'platform_version': platform.version, - 'platform_machine': platform.machine, - 'platform_python_implementation': platform.python_implementation, - 'python_implementation': platform.python_implementation, - } +def invalid_marker(text): + """ + Validate text as a PEP 508 environment marker; return an exception + if invalid or False otherwise. + """ + try: + evaluate_marker(text) + except SyntaxError as e: + e.filename = None + e.lineno = None + return e + return False - @classmethod - def is_invalid_marker(cls, text): - """ - Validate text as a PEP 426 environment marker; return an exception - if invalid or False otherwise. - """ - try: - cls.evaluate_marker(text) - except SyntaxError as e: - return cls.normalize_exception(e) - return False - @staticmethod - def normalize_exception(exc): - """ - Given a SyntaxError from a marker evaluation, normalize the error - message: - - Remove indications of filename and line number. - - Replace platform-specific error messages with standard error - messages. - """ - subs = { - 'unexpected EOF while parsing': 'invalid syntax', - 'parenthesis is never closed': 'invalid syntax', - } - exc.filename = None - exc.lineno = None - exc.msg = subs.get(exc.msg, exc.msg) - return exc +def evaluate_marker(text, extra=None): + """ + Evaluate a PEP 508 environment marker. + Return a boolean indicating the marker result in this environment. + Raise SyntaxError if marker is invalid. - @classmethod - def and_test(cls, nodelist): - # MUST NOT short-circuit evaluation, or invalid syntax can be skipped! - items = [ - cls.interpret(nodelist[i]) - for i in range(1, len(nodelist), 2) - ] - return functools.reduce(operator.and_, items) + This implementation uses the 'pyparsing' module. + """ + try: + marker = packaging.markers.Marker(text) + return marker.evaluate() + except packaging.markers.InvalidMarker as e: + raise SyntaxError(e) - @classmethod - def test(cls, nodelist): - # MUST NOT short-circuit evaluation, or invalid syntax can be skipped! 
- items = [ - cls.interpret(nodelist[i]) - for i in range(1, len(nodelist), 2) - ] - return functools.reduce(operator.or_, items) - - @classmethod - def atom(cls, nodelist): - t = nodelist[1][0] - if t == token.LPAR: - if nodelist[2][0] == token.RPAR: - raise SyntaxError("Empty parentheses") - return cls.interpret(nodelist[2]) - msg = "Language feature not supported in environment markers" - raise SyntaxError(msg) - - @classmethod - def comparison(cls, nodelist): - if len(nodelist) > 4: - msg = "Chained comparison not allowed in environment markers" - raise SyntaxError(msg) - comp = nodelist[2][1] - cop = comp[1] - if comp[0] == token.NAME: - if len(nodelist[2]) == 3: - if cop == 'not': - cop = 'not in' - else: - cop = 'is not' - try: - cop = cls.get_op(cop) - except KeyError: - msg = repr(cop) + " operator not allowed in environment markers" - raise SyntaxError(msg) - return cop(cls.evaluate(nodelist[1]), cls.evaluate(nodelist[3])) - - @classmethod - def get_op(cls, op): - ops = { - symbol.test: cls.test, - symbol.and_test: cls.and_test, - symbol.atom: cls.atom, - symbol.comparison: cls.comparison, - 'not in': lambda x, y: x not in y, - 'in': lambda x, y: x in y, - '==': operator.eq, - '!=': operator.ne, - '<': operator.lt, - '>': operator.gt, - '<=': operator.le, - '>=': operator.ge, - } - if hasattr(symbol, 'or_test'): - ops[symbol.or_test] = cls.test - return ops[op] - - @classmethod - def evaluate_marker(cls, text, extra=None): - """ - Evaluate a PEP 426 environment marker on CPython 2.4+. - Return a boolean indicating the marker result in this environment. - Raise SyntaxError if marker is invalid. - - This implementation uses the 'parser' module, which is not implemented - on - Jython and has been superseded by the 'ast' module in Python 2.6 and - later. - """ - return cls.interpret(parser.expr(text).totuple(1)[1]) - - @staticmethod - def _translate_metadata2(env): - """ - Markerlib implements Metadata 1.2 (PEP 345) environment markers. - Translate the variables to Metadata 2.0 (PEP 426). - """ - return dict( - (key.replace('.', '_'), value) - for key, value in env.items() - ) - - @classmethod - def _markerlib_evaluate(cls, text): - """ - Evaluate a PEP 426 environment marker using markerlib. - Return a boolean indicating the marker result in this environment. - Raise SyntaxError if marker is invalid. - """ - import _markerlib - - env = cls._translate_metadata2(_markerlib.default_environment()) - try: - result = _markerlib.interpret(text, env) - except NameError as e: - raise SyntaxError(e.args[0]) - return result - - if 'parser' not in globals(): - # Fall back to less-complete _markerlib implementation if 'parser' module - # is not available. 
- evaluate_marker = _markerlib_evaluate - - @classmethod - def interpret(cls, nodelist): - while len(nodelist)==2: nodelist = nodelist[1] - try: - op = cls.get_op(nodelist[0]) - except KeyError: - raise SyntaxError("Comparison or logical expression expected") - return op(nodelist) - - @classmethod - def evaluate(cls, nodelist): - while len(nodelist)==2: nodelist = nodelist[1] - kind = nodelist[0] - name = nodelist[1] - if kind==token.NAME: - try: - op = cls.values[name] - except KeyError: - raise SyntaxError("Unknown name %r" % name) - return op() - if kind==token.STRING: - s = nodelist[1] - if not cls._safe_string(s): - raise SyntaxError( - "Only plain strings allowed in environment markers") - return s[1:-1] - msg = "Language feature not supported in environment markers" - raise SyntaxError(msg) - - @staticmethod - def _safe_string(cand): - return ( - cand[:1] in "'\"" and - not cand.startswith('"""') and - not cand.startswith("'''") and - '\\' not in cand - ) - -invalid_marker = MarkerEvaluation.is_invalid_marker -evaluate_marker = MarkerEvaluation.evaluate_marker class NullProvider: """Try to implement resources and metadata for arbitrary PEP 302 loaders""" @@ -1608,16 +1457,11 @@ class NullProvider: def has_metadata(self, name): return self.egg_info and self._has(self._fn(self.egg_info, name)) - if sys.version_info <= (3,): - def get_metadata(self, name): - if not self.egg_info: - return "" - return self._get(self._fn(self.egg_info, name)) - else: - def get_metadata(self, name): - if not self.egg_info: - return "" - return self._get(self._fn(self.egg_info, name)).decode("utf-8") + def get_metadata(self, name): + if not self.egg_info: + return "" + value = self._get(self._fn(self.egg_info, name)) + return value.decode('utf-8') if six.PY3 else value def get_metadata_lines(self, name): return yield_lines(self.get_metadata(name)) @@ -1637,7 +1481,7 @@ class NullProvider: return [] def run_script(self, script_name, namespace): - script = 'scripts/'+script_name + script = 'scripts/' + script_name if not self.has_metadata(script): raise ResolutionError("No script named %r" % script_name) script_text = self.get_metadata(script).replace('\r\n', '\n') @@ -1653,7 +1497,7 @@ class NullProvider: cache[script_filename] = ( len(script_text), 0, script_text.split('\n'), script_filename ) - script_code = compile(script_text, script_filename,'exec') + script_code = compile(script_text, script_filename, 'exec') exec(script_code, namespace, namespace) def _has(self, path): @@ -1683,6 +1527,7 @@ class NullProvider: "Can't perform this operation for loaders without 'get_data()'" ) + register_loader_type(object, NullProvider) @@ -1698,7 +1543,7 @@ class EggProvider(NullProvider): # of multiple eggs; that's why we use module_path instead of .archive path = self.module_path old = None - while path!=old: + while path != old: if _is_unpacked_egg(path): self.egg_name = os.path.basename(path) self.egg_info = os.path.join(path, 'EGG-INFO') @@ -1707,6 +1552,7 @@ class EggProvider(NullProvider): old = path path, base = os.path.split(path) + class DefaultProvider(EggProvider): """Provides access to package resources in the filesystem""" @@ -1732,6 +1578,7 @@ class DefaultProvider(EggProvider): type(None)) register_loader_type(loader_cls, cls) + DefaultProvider._register() @@ -1746,6 +1593,7 @@ class EmptyProvider(NullProvider): def __init__(self): pass + empty_provider = EmptyProvider() @@ -1824,7 +1672,7 @@ class ZipProvider(EggProvider): def __init__(self, module): EggProvider.__init__(self, module) - self.zip_pre 
= self.loader.archive+os.sep + self.zip_pre = self.loader.archive + os.sep def _zipinfo_name(self, fspath): # Convert a virtual filename (full path to file) into a zipfile subpath @@ -1838,9 +1686,9 @@ class ZipProvider(EggProvider): def _parts(self, zip_path): # Convert a zipfile subpath into an egg-relative path part list. # pseudo-fs path - fspath = self.zip_pre+zip_path - if fspath.startswith(self.egg_root+os.sep): - return fspath[len(self.egg_root)+1:].split(os.sep) + fspath = self.zip_pre + zip_path + if fspath.startswith(self.egg_root + os.sep): + return fspath[len(self.egg_root) + 1:].split(os.sep) raise AssertionError( "%s is not a subpath of %s" % (fspath, self.egg_root) ) @@ -1911,7 +1759,7 @@ class ZipProvider(EggProvider): # so proceed. return real_path # Windows, del old file and retry - elif os.name=='nt': + elif os.name == 'nt': unlink(real_path) rename(tmpnam, real_path) return real_path @@ -1931,7 +1779,7 @@ class ZipProvider(EggProvider): if not os.path.isfile(file_path): return False stat = os.stat(file_path) - if stat.st_size!=size or stat.st_mtime!=timestamp: + if stat.st_size != size or stat.st_mtime != timestamp: return False # check that the contents match zip_contents = self.loader.get_data(zip_path) @@ -1981,6 +1829,7 @@ class ZipProvider(EggProvider): def _resource_to_zip(self, resource_name): return self._zipinfo_name(self._fn(self.module_path, resource_name)) + register_loader_type(zipimport.zipimporter, ZipProvider) @@ -2000,14 +1849,24 @@ class FileMetadata(EmptyProvider): self.path = path def has_metadata(self, name): - return name=='PKG-INFO' and os.path.isfile(self.path) + return name == 'PKG-INFO' and os.path.isfile(self.path) def get_metadata(self, name): - if name=='PKG-INFO': - with io.open(self.path, encoding='utf-8') as f: - metadata = f.read() - return metadata - raise KeyError("No metadata except PKG-INFO is available") + if name != 'PKG-INFO': + raise KeyError("No metadata except PKG-INFO is available") + + with io.open(self.path, encoding='utf-8', errors="replace") as f: + metadata = f.read() + self._warn_on_replacement(metadata) + return metadata + + def _warn_on_replacement(self, metadata): + # Python 2.6 and 3.2 compat for: replacement_char = '�' + replacement_char = b'\xef\xbf\xbd'.decode('utf-8') + if replacement_char in metadata: + tmpl = "{self.path} could not be properly decoded in UTF-8" + msg = tmpl.format(**locals()) + warnings.warn(msg) def get_metadata_lines(self, name): return yield_lines(self.get_metadata(name)) @@ -2044,7 +1903,7 @@ class EggMetadata(ZipProvider): def __init__(self, importer): """Create a metadata provider from a zipimporter""" - self.zip_pre = importer.archive+os.sep + self.zip_pre = importer.archive + os.sep self.loader = importer if importer.prefix: self.module_path = os.path.join(importer.archive, importer.prefix) @@ -2052,7 +1911,9 @@ class EggMetadata(ZipProvider): self.module_path = importer.archive self._setup_prefix() -_declare_state('dict', _distribution_finders = {}) + +_declare_state('dict', _distribution_finders={}) + def register_finder(importer_type, distribution_finder): """Register `distribution_finder` to find distributions in sys.path items @@ -2070,6 +1931,7 @@ def find_distributions(path_item, only=False): finder = _find_adapter(_distribution_finders, importer) return finder(importer, path_item, only) + def find_eggs_in_zip(importer, path_item, only=False): """ Find eggs in zip files; possibly multiple nested eggs. 
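
A minimal usage sketch of the find_distributions()/register_finder() machinery shown in this hunk, assuming only that pkg_resources is importable; sys.path[0] is used purely as an illustrative path entry, and any directory or egg/zip path works the same way::

    import sys
    import pkg_resources

    # Yield Distribution objects discoverable on one sys.path entry;
    # find_distributions() looks up the finder registered for the entry's
    # importer (zipped eggs, unpacked eggs, *.egg-info / *.dist-info dirs).
    for dist in pkg_resources.find_distributions(sys.path[0]):
        print(dist.project_name, dist.version, dist.location)
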
@@ -2090,12 +1952,43 @@ def find_eggs_in_zip(importer, path_item, only=False): for dist in find_eggs_in_zip(zipimport.zipimporter(subpath), subpath): yield dist + register_finder(zipimport.zipimporter, find_eggs_in_zip) + def find_nothing(importer, path_item, only=False): return () + + register_finder(object, find_nothing) + +def _by_version_descending(names): + """ + Given a list of filenames, return them in descending order + by version number. + + >>> names = 'bar', 'foo', 'Python-2.7.10.egg', 'Python-2.7.2.egg' + >>> _by_version_descending(names) + ['Python-2.7.10.egg', 'Python-2.7.2.egg', 'foo', 'bar'] + >>> names = 'Setuptools-1.2.3b1.egg', 'Setuptools-1.2.3.egg' + >>> _by_version_descending(names) + ['Setuptools-1.2.3.egg', 'Setuptools-1.2.3b1.egg'] + >>> names = 'Setuptools-1.2.3b1.egg', 'Setuptools-1.2.3.post1.egg' + >>> _by_version_descending(names) + ['Setuptools-1.2.3.post1.egg', 'Setuptools-1.2.3b1.egg'] + """ + def _by_version(name): + """ + Parse each component of the filename + """ + name, ext = os.path.splitext(name) + parts = itertools.chain(name.split('-'), [ext]) + return [packaging.version.parse(part) for part in parts] + + return sorted(names, key=_by_version, reverse=True) + + def find_on_path(importer, path_item, only=False): """Yield distributions accessible on a sys.path directory""" path_item = _normalize_cached(path_item) @@ -2104,17 +1997,21 @@ def find_on_path(importer, path_item, only=False): if _is_unpacked_egg(path_item): yield Distribution.from_filename( path_item, metadata=PathMetadata( - path_item, os.path.join(path_item,'EGG-INFO') + path_item, os.path.join(path_item, 'EGG-INFO') ) ) else: # scan for .egg and .egg-info in directory - for entry in os.listdir(path_item): + path_item_entries = _by_version_descending(os.listdir(path_item)) + for entry in path_item_entries: lower = entry.lower() if lower.endswith('.egg-info') or lower.endswith('.dist-info'): fullpath = os.path.join(path_item, entry) if os.path.isdir(fullpath): # egg-info directory, allow getting metadata + if len(os.listdir(fullpath)) == 0: + # Empty egg directory, skip. + continue metadata = PathMetadata(path_item, fullpath) else: metadata = FileMetadata(fullpath) @@ -2136,6 +2033,8 @@ def find_on_path(importer, path_item, only=False): for item in dists: yield item break + + register_finder(pkgutil.ImpImporter, find_on_path) if hasattr(importlib_machinery, 'FileFinder'): @@ -2162,6 +2061,7 @@ def register_namespace_handler(importer_type, namespace_handler): """ _namespace_handlers[importer_type] = namespace_handler + def _handle_ns(packageName, path_item): """Ensure that named package includes a subpath of path_item (if needed)""" @@ -2176,7 +2076,7 @@ def _handle_ns(packageName, path_item): module = sys.modules[packageName] = types.ModuleType(packageName) module.__path__ = [] _set_parent_ns(packageName) - elif not hasattr(module,'__path__'): + elif not hasattr(module, '__path__'): raise TypeError("Not a package:", packageName) handler = _find_adapter(_namespace_handlers, importer) subpath = handler(importer, path_item, packageName, module) @@ -2194,13 +2094,28 @@ def _rebuild_mod_path(orig_path, package_name, module): corresponding to their sys.path order """ sys_path = [_normalize_cached(p) for p in sys.path] - def position_in_sys_path(p): + + def safe_sys_path_index(entry): + """ + Workaround for #520 and #513. 
+ """ + try: + return sys_path.index(entry) + except ValueError: + return float('inf') + + def position_in_sys_path(path): """ Return the ordinal of the path based on its position in sys.path """ - parts = p.split(os.sep) - parts = parts[:-(package_name.count('.') + 1)] - return sys_path.index(_normalize_cached(os.sep.join(parts))) + path_parts = path.split(os.sep) + module_parts = package_name.count('.') + 1 + parts = path_parts[:-module_parts] + return safe_sys_path_index(_normalize_cached(os.sep.join(parts))) + + if not isinstance(orig_path, list): + # Is this behavior useful when module.__path__ is not a list? + return orig_path.sort(key=position_in_sys_path) module.__path__[:] = [_normalize_cached(p) for p in orig_path] @@ -2227,8 +2142,8 @@ def declare_namespace(packageName): # Track what packages are namespaces, so when new path items are added, # they can be updated - _namespace_packages.setdefault(parent,[]).append(packageName) - _namespace_packages.setdefault(packageName,[]) + _namespace_packages.setdefault(parent, []).append(packageName) + _namespace_packages.setdefault(packageName, []) for path_item in path: # Ensure all the parent's path items are reflected in the child, @@ -2238,29 +2153,32 @@ def declare_namespace(packageName): finally: _imp.release_lock() + def fixup_namespace_packages(path_item, parent=None): """Ensure that previously-declared namespace packages include path_item""" _imp.acquire_lock() try: - for package in _namespace_packages.get(parent,()): + for package in _namespace_packages.get(parent, ()): subpath = _handle_ns(package, path_item) if subpath: fixup_namespace_packages(subpath, package) finally: _imp.release_lock() + def file_ns_handler(importer, path_item, packageName, module): """Compute an ns-package subpath for a filesystem or zipfile importer""" subpath = os.path.join(path_item, packageName.split('.')[-1]) normalized = _normalize_cached(subpath) for item in module.__path__: - if _normalize_cached(item)==normalized: + if _normalize_cached(item) == normalized: break else: # Only return the path if it's not already there return subpath + register_namespace_handler(pkgutil.ImpImporter, file_ns_handler) register_namespace_handler(zipimport.zipimporter, file_ns_handler) @@ -2271,6 +2189,7 @@ if hasattr(importlib_machinery, 'FileFinder'): def null_ns_handler(importer, path_item, packageName, module): return None + register_namespace_handler(object, null_ns_handler) @@ -2278,6 +2197,7 @@ def normalize_path(filename): """Normalize a file/dir name for comparison purposes""" return os.path.normcase(os.path.realpath(filename)) + def _normalize_cached(filename, _cache={}): try: return _cache[filename] @@ -2285,6 +2205,7 @@ def _normalize_cached(filename, _cache={}): _cache[filename] = result = normalize_path(filename) return result + def _is_unpacked_egg(path): """ Determine if given path appears to be an unpacked egg. @@ -2293,6 +2214,7 @@ def _is_unpacked_egg(path): path.lower().endswith('.egg') ) + def _set_parent_ns(packageName): parts = packageName.split('.') name = parts.pop() @@ -2314,18 +2236,7 @@ def yield_lines(strs): for s in yield_lines(ss): yield s -# whitespace and comment -LINE_END = re.compile(r"\s*(#.*)?$").match -# line continuation -CONTINUE = re.compile(r"\s*\\\s*(#.*)?$").match -# Distribution or extra -DISTRO = re.compile(r"\s*((\w|[-.])+)").match -# ver. 
info -VERSION = re.compile(r"\s*(<=?|>=?|===?|!=|~=)\s*((\w|[-.*_!+])+)").match -# comma between items -COMMA = re.compile(r"\s*,").match -OBRACKET = re.compile(r"\s*\[").match -CBRACKET = re.compile(r"\s*\]").match + MODULE = re.compile(r"\w+(\.\w+)*$").match EGG_NAME = re.compile( r""" @@ -2444,7 +2355,7 @@ class EntryPoint(object): ep = cls.parse(line, dist) if ep.name in this: raise ValueError("Duplicate entry point", group, ep.name) - this[ep.name]=ep + this[ep.name] = ep return this @classmethod @@ -2506,7 +2417,7 @@ class Distribution(object): @classmethod def from_location(cls, location, basename, metadata=None, **kw): - project_name, version, py_version, platform = [None]*4 + project_name, version, py_version, platform = [None] * 4 basename, ext = os.path.splitext(basename) if ext.lower() in _distributionImpl: cls = _distributionImpl[ext.lower()] @@ -2628,11 +2539,11 @@ class Distribution(object): extra, marker = extra.split(':', 1) if invalid_marker(marker): # XXX warn - reqs=[] + reqs = [] elif not evaluate_marker(marker): - reqs=[] + reqs = [] extra = safe_extra(extra) or None - dm.setdefault(extra,[]).extend(parse_requirements(reqs)) + dm.setdefault(extra, []).extend(parse_requirements(reqs)) return dm def requires(self, extras=()): @@ -2654,11 +2565,11 @@ class Distribution(object): for line in self.get_metadata_lines(name): yield line - def activate(self, path=None): + def activate(self, path=None, replace=False): """Ensure distribution is importable on `path` (default=sys.path)""" if path is None: path = sys.path - self.insert_on(path, replace=True) + self.insert_on(path, replace=replace) if path is sys.path: fixup_namespace_packages(self.location) for pkg in self._get_metadata('namespace_packages.txt'): @@ -2728,7 +2639,7 @@ class Distribution(object): self._get_metadata('entry_points.txt'), self ) if group is not None: - return ep_map.get(group,{}) + return ep_map.get(group, {}) return ep_map def get_entry_info(self, group, name): @@ -2736,7 +2647,24 @@ class Distribution(object): return self.get_entry_map(group).get(name) def insert_on(self, path, loc=None, replace=False): - """Insert self.location in path before its nearest parent directory""" + """Ensure self.location is on path + + If replace=False (default): + - If location is already in path anywhere, do nothing. + - Else: + - If it's an egg and its parent directory is on path, + insert just ahead of the parent. + - Else: add to the end of path. + If replace=True: + - If location is already on path anywhere (not eggs) + or higher priority than its parent (eggs) + do nothing. + - Else: + - If it's an egg and its parent directory is on path, + insert just ahead of the parent, + removing any lower-priority entries. + - Else: add it to the front of path. 
+ """ loc = loc or self.location if not loc: @@ -2744,13 +2672,20 @@ class Distribution(object): nloc = _normalize_cached(loc) bdir = os.path.dirname(nloc) - npath= [(p and _normalize_cached(p) or p) for p in path] + npath = [(p and _normalize_cached(p) or p) for p in path] for p, item in enumerate(npath): if item == nloc: - break + if replace: + break + else: + # don't modify path (even removing duplicates) if found and not replace + return elif item == bdir and self.precedence == EGG_DIST: # if it's an .egg, give it precedence over its directory + # UNLESS it's already been added to sys.path and replace=False + if (not replace) and nloc in npath[p:]: + return if path is sys.path: self.check_version_conflict() path.insert(p, loc) @@ -2768,7 +2703,7 @@ class Distribution(object): # p is the spot where we found or inserted loc; now remove duplicates while True: try: - np = npath.index(nloc, p+1) + np = npath.index(nloc, p + 1) except ValueError: break else: @@ -2808,7 +2743,7 @@ class Distribution(object): return False return True - def clone(self,**kw): + def clone(self, **kw): """Copy this distribution, substituting in any changed keyword args""" names = 'project_name version py_version platform location precedence' for attr in names.split(): @@ -2822,7 +2757,6 @@ class Distribution(object): class EggInfoDistribution(Distribution): - def _reload_version(self): """ Packages installed by distutils (e.g. numpy or scipy), @@ -2864,42 +2798,26 @@ class DistInfoDistribution(Distribution): self.__dep_map = self._compute_dependencies() return self.__dep_map - def _preparse_requirement(self, requires_dist): - """Convert 'Foobar (1); baz' to ('Foobar ==1', 'baz') - Split environment marker, add == prefix to version specifiers as - necessary, and remove parenthesis. 
- """ - parts = requires_dist.split(';', 1) + [''] - distvers = parts[0].strip() - mark = parts[1].strip() - distvers = re.sub(self.EQEQ, r"\1==\2\3", distvers) - distvers = distvers.replace('(', '').replace(')', '') - return (distvers, mark) - def _compute_dependencies(self): """Recompute this distribution's dependencies.""" - from _markerlib import compile as compile_marker dm = self.__dep_map = {None: []} reqs = [] # Including any condition expressions for req in self._parsed_pkg_info.get_all('Requires-Dist') or []: - distvers, mark = self._preparse_requirement(req) - parsed = next(parse_requirements(distvers)) - parsed.marker_fn = compile_marker(mark) - reqs.append(parsed) + reqs.extend(parse_requirements(req)) def reqs_for_extra(extra): for req in reqs: - if req.marker_fn(override={'extra':extra}): + if not req.marker or req.marker.evaluate({'extra': extra}): yield req common = frozenset(reqs_for_extra(None)) dm[None].extend(common) for extra in self._parsed_pkg_info.get_all('Provides-Extra') or []: - extra = safe_extra(extra.strip()) - dm[extra] = list(frozenset(reqs_for_extra(extra)) - common) + s_extra = safe_extra(extra.strip()) + dm[s_extra] = list(frozenset(reqs_for_extra(extra)) - common) return dm @@ -2911,7 +2829,7 @@ _distributionImpl = { } -def issue_warning(*args,**kw): +def issue_warning(*args, **kw): level = 1 g = globals() try: @@ -2937,85 +2855,38 @@ def parse_requirements(strs): # create a steppable iterator, so we can handle \-continuations lines = iter(yield_lines(strs)) - def scan_list(ITEM, TERMINATOR, line, p, groups, item_name): - - items = [] - - while not TERMINATOR(line, p): - if CONTINUE(line, p): - try: - line = next(lines) - p = 0 - except StopIteration: - msg = "\\ must not appear on the last nonblank line" - raise RequirementParseError(msg) - - match = ITEM(line, p) - if not match: - msg = "Expected " + item_name + " in" - raise RequirementParseError(msg, line, "at", line[p:]) - - items.append(match.group(*groups)) - p = match.end() - - match = COMMA(line, p) - if match: - # skip the comma - p = match.end() - elif not TERMINATOR(line, p): - msg = "Expected ',' or end-of-list in" - raise RequirementParseError(msg, line, "at", line[p:]) - - match = TERMINATOR(line, p) - # skip the terminator, if any - if match: - p = match.end() - return line, p, items - for line in lines: - match = DISTRO(line) - if not match: - raise RequirementParseError("Missing distribution spec", line) - project_name = match.group(1) - p = match.end() - extras = [] - - match = OBRACKET(line, p) - if match: - p = match.end() - line, p, extras = scan_list( - DISTRO, CBRACKET, line, p, (1,), "'extra' name" - ) - - line, p, specs = scan_list(VERSION, LINE_END, line, p, (1, 2), - "version spec") - specs = [(op, val) for op, val in specs] - yield Requirement(project_name, specs, extras) + # Drop comments -- a hash without a space may be in a URL. + if ' #' in line: + line = line[:line.find(' #')] + # If there is a line continuation, drop it, and append the next line. 
+ if line.endswith('\\'): + line = line[:-2].strip() + line += next(lines) + yield Requirement(line) -class Requirement: - def __init__(self, project_name, specs, extras): +class Requirement(packaging.requirements.Requirement): + def __init__(self, requirement_string): """DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!""" - self.unsafe_name, project_name = project_name, safe_name(project_name) + try: + super(Requirement, self).__init__(requirement_string) + except packaging.requirements.InvalidRequirement as e: + raise RequirementParseError(str(e)) + self.unsafe_name = self.name + project_name = safe_name(self.name) self.project_name, self.key = project_name, project_name.lower() - self.specifier = packaging.specifiers.SpecifierSet( - ",".join(["".join([x, y]) for x, y in specs]) - ) - self.specs = specs - self.extras = tuple(map(safe_extra, extras)) + self.specs = [ + (spec.operator, spec.version) for spec in self.specifier] + self.extras = tuple(map(safe_extra, self.extras)) self.hashCmp = ( self.key, self.specifier, frozenset(self.extras), + str(self.marker) if self.marker else None, ) self.__hash = hash(self.hashCmp) - def __str__(self): - extras = ','.join(self.extras) - if extras: - extras = '[%s]' % extras - return '%s%s%s' % (self.project_name, extras, self.specifier) - def __eq__(self, other): return ( isinstance(other, Requirement) and @@ -3051,10 +2922,14 @@ class Requirement: def _get_mro(cls): """Get an mro for a type or classic class""" if not isinstance(cls, type): - class cls(cls, object): pass + + class cls(cls, object): + pass + return cls.__mro__[1:] return cls.__mro__ + def _find_adapter(registry, ob): """Return an adapter factory for `ob` from `registry`""" for t in _get_mro(getattr(ob, '__class__', type(ob))): @@ -3104,12 +2979,13 @@ def split_sections(s): # wrap up last segment yield section, content -def _mkstemp(*args,**kw): + +def _mkstemp(*args, **kw): old_open = os.open try: # temporarily bypass sandboxing os.open = os_open - return tempfile.mkstemp(*args,**kw) + return tempfile.mkstemp(*args, **kw) finally: # and then put it back os.open = old_open @@ -3133,9 +3009,11 @@ def _initialize(g=globals()): "Set up global resource manager (deliberately not state-saved)" manager = ResourceManager() g['_manager'] = manager - for name in dir(manager): - if not name.startswith('_'): - g[name] = getattr(manager, name) + g.update( + (name, getattr(manager, name)) + for name in dir(manager) + if not name.startswith('_') + ) @_call_aside @@ -3160,11 +3038,16 @@ def _initialize_master_working_set(): run_script = working_set.run_script # backward compatibility run_main = run_script - # Activate all distributions already on sys.path, and ensure that - # all distributions added to the working set in the future (e.g. by - # calling ``require()``) will get activated as well. - add_activation_listener(lambda dist: dist.activate()) - working_set.entries=[] + # Activate all distributions already on sys.path with replace=False and + # ensure that all distributions added to the working set in the future + # (e.g. by calling ``require()``) will get activated as well, + # with higher priority (replace=True). 
+ tuple( + dist.activate(replace=False) + for dist in working_set + ) + add_activation_listener(lambda dist: dist.activate(replace=True), existing=False) + working_set.entries = [] # match order list(map(working_set.add_entry, sys.path)) globals().update(locals()) diff --git a/lib/python3.4/site-packages/pkg_resources/_vendor/appdirs.py b/lib/python3.4/site-packages/pkg_resources/_vendor/appdirs.py new file mode 100644 index 0000000..f4dba09 --- /dev/null +++ b/lib/python3.4/site-packages/pkg_resources/_vendor/appdirs.py @@ -0,0 +1,552 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2005-2010 ActiveState Software Inc. +# Copyright (c) 2013 Eddy Petrișor + +"""Utilities for determining application-specific dirs. + +See for details and usage. +""" +# Dev Notes: +# - MSDN on where to store app data files: +# http://support.microsoft.com/default.aspx?scid=kb;en-us;310294#XSLTH3194121123120121120120 +# - Mac OS X: http://developer.apple.com/documentation/MacOSX/Conceptual/BPFileSystem/index.html +# - XDG spec for Un*x: http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html + +__version_info__ = (1, 4, 0) +__version__ = '.'.join(map(str, __version_info__)) + + +import sys +import os + +PY3 = sys.version_info[0] == 3 + +if PY3: + unicode = str + +if sys.platform.startswith('java'): + import platform + os_name = platform.java_ver()[3][0] + if os_name.startswith('Windows'): # "Windows XP", "Windows 7", etc. + system = 'win32' + elif os_name.startswith('Mac'): # "Mac OS X", etc. + system = 'darwin' + else: # "Linux", "SunOS", "FreeBSD", etc. + # Setting this to "linux2" is not ideal, but only Windows or Mac + # are actually checked for and the rest of the module expects + # *sys.platform* style strings. + system = 'linux2' +else: + system = sys.platform + + + +def user_data_dir(appname=None, appauthor=None, version=None, roaming=False): + r"""Return full path to the user-specific data dir for this application. + + "appname" is the name of application. + If None, just the system directory is returned. + "appauthor" (only used on Windows) is the name of the + appauthor or distributing body for this application. Typically + it is the owning company name. This falls back to appname. You may + pass False to disable it. + "version" is an optional version path element to append to the + path. You might want to use this if you want multiple versions + of your app to be able to run independently. If used, this + would typically be ".". + Only applied when appname is present. + "roaming" (boolean, default False) can be set True to use the Windows + roaming appdata directory. That means that for users on a Windows + network setup for roaming profiles, this user data will be + sync'd on login. See + + for a discussion of issues. + + Typical user data directories are: + Mac OS X: ~/Library/Application Support/ + Unix: ~/.local/share/ # or in $XDG_DATA_HOME, if defined + Win XP (not roaming): C:\Documents and Settings\\Application Data\\ + Win XP (roaming): C:\Documents and Settings\\Local Settings\Application Data\\ + Win 7 (not roaming): C:\Users\\AppData\Local\\ + Win 7 (roaming): C:\Users\\AppData\Roaming\\ + + For Unix, we follow the XDG spec and support $XDG_DATA_HOME. + That means, by default "~/.local/share/". 
+ """ + if system == "win32": + if appauthor is None: + appauthor = appname + const = roaming and "CSIDL_APPDATA" or "CSIDL_LOCAL_APPDATA" + path = os.path.normpath(_get_win_folder(const)) + if appname: + if appauthor is not False: + path = os.path.join(path, appauthor, appname) + else: + path = os.path.join(path, appname) + elif system == 'darwin': + path = os.path.expanduser('~/Library/Application Support/') + if appname: + path = os.path.join(path, appname) + else: + path = os.getenv('XDG_DATA_HOME', os.path.expanduser("~/.local/share")) + if appname: + path = os.path.join(path, appname) + if appname and version: + path = os.path.join(path, version) + return path + + +def site_data_dir(appname=None, appauthor=None, version=None, multipath=False): + """Return full path to the user-shared data dir for this application. + + "appname" is the name of application. + If None, just the system directory is returned. + "appauthor" (only used on Windows) is the name of the + appauthor or distributing body for this application. Typically + it is the owning company name. This falls back to appname. You may + pass False to disable it. + "version" is an optional version path element to append to the + path. You might want to use this if you want multiple versions + of your app to be able to run independently. If used, this + would typically be ".". + Only applied when appname is present. + "multipath" is an optional parameter only applicable to *nix + which indicates that the entire list of data dirs should be + returned. By default, the first item from XDG_DATA_DIRS is + returned, or '/usr/local/share/', + if XDG_DATA_DIRS is not set + + Typical user data directories are: + Mac OS X: /Library/Application Support/ + Unix: /usr/local/share/ or /usr/share/ + Win XP: C:\Documents and Settings\All Users\Application Data\\ + Vista: (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.) + Win 7: C:\ProgramData\\ # Hidden, but writeable on Win 7. + + For Unix, this is using the $XDG_DATA_DIRS[0] default. + + WARNING: Do not use this on Windows. See the Vista-Fail note above for why. + """ + if system == "win32": + if appauthor is None: + appauthor = appname + path = os.path.normpath(_get_win_folder("CSIDL_COMMON_APPDATA")) + if appname: + if appauthor is not False: + path = os.path.join(path, appauthor, appname) + else: + path = os.path.join(path, appname) + elif system == 'darwin': + path = os.path.expanduser('/Library/Application Support') + if appname: + path = os.path.join(path, appname) + else: + # XDG default for $XDG_DATA_DIRS + # only first, if multipath is False + path = os.getenv('XDG_DATA_DIRS', + os.pathsep.join(['/usr/local/share', '/usr/share'])) + pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)] + if appname: + if version: + appname = os.path.join(appname, version) + pathlist = [os.sep.join([x, appname]) for x in pathlist] + + if multipath: + path = os.pathsep.join(pathlist) + else: + path = pathlist[0] + return path + + if appname and version: + path = os.path.join(path, version) + return path + + +def user_config_dir(appname=None, appauthor=None, version=None, roaming=False): + r"""Return full path to the user-specific config dir for this application. + + "appname" is the name of application. + If None, just the system directory is returned. + "appauthor" (only used on Windows) is the name of the + appauthor or distributing body for this application. Typically + it is the owning company name. This falls back to appname. 
You may + pass False to disable it. + "version" is an optional version path element to append to the + path. You might want to use this if you want multiple versions + of your app to be able to run independently. If used, this + would typically be ".". + Only applied when appname is present. + "roaming" (boolean, default False) can be set True to use the Windows + roaming appdata directory. That means that for users on a Windows + network setup for roaming profiles, this user data will be + sync'd on login. See + + for a discussion of issues. + + Typical user data directories are: + Mac OS X: same as user_data_dir + Unix: ~/.config/ # or in $XDG_CONFIG_HOME, if defined + Win *: same as user_data_dir + + For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME. + That means, by deafult "~/.config/". + """ + if system in ["win32", "darwin"]: + path = user_data_dir(appname, appauthor, None, roaming) + else: + path = os.getenv('XDG_CONFIG_HOME', os.path.expanduser("~/.config")) + if appname: + path = os.path.join(path, appname) + if appname and version: + path = os.path.join(path, version) + return path + + +def site_config_dir(appname=None, appauthor=None, version=None, multipath=False): + """Return full path to the user-shared data dir for this application. + + "appname" is the name of application. + If None, just the system directory is returned. + "appauthor" (only used on Windows) is the name of the + appauthor or distributing body for this application. Typically + it is the owning company name. This falls back to appname. You may + pass False to disable it. + "version" is an optional version path element to append to the + path. You might want to use this if you want multiple versions + of your app to be able to run independently. If used, this + would typically be ".". + Only applied when appname is present. + "multipath" is an optional parameter only applicable to *nix + which indicates that the entire list of config dirs should be + returned. By default, the first item from XDG_CONFIG_DIRS is + returned, or '/etc/xdg/', if XDG_CONFIG_DIRS is not set + + Typical user data directories are: + Mac OS X: same as site_data_dir + Unix: /etc/xdg/ or $XDG_CONFIG_DIRS[i]/ for each value in + $XDG_CONFIG_DIRS + Win *: same as site_data_dir + Vista: (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.) + + For Unix, this is using the $XDG_CONFIG_DIRS[0] default, if multipath=False + + WARNING: Do not use this on Windows. See the Vista-Fail note above for why. + """ + if system in ["win32", "darwin"]: + path = site_data_dir(appname, appauthor) + if appname and version: + path = os.path.join(path, version) + else: + # XDG default for $XDG_CONFIG_DIRS + # only first, if multipath is False + path = os.getenv('XDG_CONFIG_DIRS', '/etc/xdg') + pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)] + if appname: + if version: + appname = os.path.join(appname, version) + pathlist = [os.sep.join([x, appname]) for x in pathlist] + + if multipath: + path = os.pathsep.join(pathlist) + else: + path = pathlist[0] + return path + + +def user_cache_dir(appname=None, appauthor=None, version=None, opinion=True): + r"""Return full path to the user-specific cache dir for this application. + + "appname" is the name of application. + If None, just the system directory is returned. + "appauthor" (only used on Windows) is the name of the + appauthor or distributing body for this application. Typically + it is the owning company name. This falls back to appname. 
You may + pass False to disable it. + "version" is an optional version path element to append to the + path. You might want to use this if you want multiple versions + of your app to be able to run independently. If used, this + would typically be ".". + Only applied when appname is present. + "opinion" (boolean) can be False to disable the appending of + "Cache" to the base app data dir for Windows. See + discussion below. + + Typical user cache directories are: + Mac OS X: ~/Library/Caches/ + Unix: ~/.cache/ (XDG default) + Win XP: C:\Documents and Settings\\Local Settings\Application Data\\\Cache + Vista: C:\Users\\AppData\Local\\\Cache + + On Windows the only suggestion in the MSDN docs is that local settings go in + the `CSIDL_LOCAL_APPDATA` directory. This is identical to the non-roaming + app data dir (the default returned by `user_data_dir` above). Apps typically + put cache data somewhere *under* the given dir here. Some examples: + ...\Mozilla\Firefox\Profiles\\Cache + ...\Acme\SuperApp\Cache\1.0 + OPINION: This function appends "Cache" to the `CSIDL_LOCAL_APPDATA` value. + This can be disabled with the `opinion=False` option. + """ + if system == "win32": + if appauthor is None: + appauthor = appname + path = os.path.normpath(_get_win_folder("CSIDL_LOCAL_APPDATA")) + if appname: + if appauthor is not False: + path = os.path.join(path, appauthor, appname) + else: + path = os.path.join(path, appname) + if opinion: + path = os.path.join(path, "Cache") + elif system == 'darwin': + path = os.path.expanduser('~/Library/Caches') + if appname: + path = os.path.join(path, appname) + else: + path = os.getenv('XDG_CACHE_HOME', os.path.expanduser('~/.cache')) + if appname: + path = os.path.join(path, appname) + if appname and version: + path = os.path.join(path, version) + return path + + +def user_log_dir(appname=None, appauthor=None, version=None, opinion=True): + r"""Return full path to the user-specific log dir for this application. + + "appname" is the name of application. + If None, just the system directory is returned. + "appauthor" (only used on Windows) is the name of the + appauthor or distributing body for this application. Typically + it is the owning company name. This falls back to appname. You may + pass False to disable it. + "version" is an optional version path element to append to the + path. You might want to use this if you want multiple versions + of your app to be able to run independently. If used, this + would typically be ".". + Only applied when appname is present. + "opinion" (boolean) can be False to disable the appending of + "Logs" to the base app data dir for Windows, and "log" to the + base cache dir for Unix. See discussion below. + + Typical user cache directories are: + Mac OS X: ~/Library/Logs/ + Unix: ~/.cache//log # or under $XDG_CACHE_HOME if defined + Win XP: C:\Documents and Settings\\Local Settings\Application Data\\\Logs + Vista: C:\Users\\AppData\Local\\\Logs + + On Windows the only suggestion in the MSDN docs is that local settings + go in the `CSIDL_LOCAL_APPDATA` directory. (Note: I'm interested in + examples of what some windows apps use for a logs dir.) + + OPINION: This function appends "Logs" to the `CSIDL_LOCAL_APPDATA` + value for Windows and appends "log" to the user cache dir for Unix. + This can be disabled with the `opinion=False` option. 
+ """ + if system == "darwin": + path = os.path.join( + os.path.expanduser('~/Library/Logs'), + appname) + elif system == "win32": + path = user_data_dir(appname, appauthor, version) + version = False + if opinion: + path = os.path.join(path, "Logs") + else: + path = user_cache_dir(appname, appauthor, version) + version = False + if opinion: + path = os.path.join(path, "log") + if appname and version: + path = os.path.join(path, version) + return path + + +class AppDirs(object): + """Convenience wrapper for getting application dirs.""" + def __init__(self, appname, appauthor=None, version=None, roaming=False, + multipath=False): + self.appname = appname + self.appauthor = appauthor + self.version = version + self.roaming = roaming + self.multipath = multipath + + @property + def user_data_dir(self): + return user_data_dir(self.appname, self.appauthor, + version=self.version, roaming=self.roaming) + + @property + def site_data_dir(self): + return site_data_dir(self.appname, self.appauthor, + version=self.version, multipath=self.multipath) + + @property + def user_config_dir(self): + return user_config_dir(self.appname, self.appauthor, + version=self.version, roaming=self.roaming) + + @property + def site_config_dir(self): + return site_config_dir(self.appname, self.appauthor, + version=self.version, multipath=self.multipath) + + @property + def user_cache_dir(self): + return user_cache_dir(self.appname, self.appauthor, + version=self.version) + + @property + def user_log_dir(self): + return user_log_dir(self.appname, self.appauthor, + version=self.version) + + +#---- internal support stuff + +def _get_win_folder_from_registry(csidl_name): + """This is a fallback technique at best. I'm not sure if using the + registry for this guarantees us the correct answer for all CSIDL_* + names. + """ + import _winreg + + shell_folder_name = { + "CSIDL_APPDATA": "AppData", + "CSIDL_COMMON_APPDATA": "Common AppData", + "CSIDL_LOCAL_APPDATA": "Local AppData", + }[csidl_name] + + key = _winreg.OpenKey( + _winreg.HKEY_CURRENT_USER, + r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders" + ) + dir, type = _winreg.QueryValueEx(key, shell_folder_name) + return dir + + +def _get_win_folder_with_pywin32(csidl_name): + from win32com.shell import shellcon, shell + dir = shell.SHGetFolderPath(0, getattr(shellcon, csidl_name), 0, 0) + # Try to make this a unicode path because SHGetFolderPath does + # not return unicode strings when there is unicode data in the + # path. + try: + dir = unicode(dir) + + # Downgrade to short path name if have highbit chars. See + # . + has_high_char = False + for c in dir: + if ord(c) > 255: + has_high_char = True + break + if has_high_char: + try: + import win32api + dir = win32api.GetShortPathName(dir) + except ImportError: + pass + except UnicodeError: + pass + return dir + + +def _get_win_folder_with_ctypes(csidl_name): + import ctypes + + csidl_const = { + "CSIDL_APPDATA": 26, + "CSIDL_COMMON_APPDATA": 35, + "CSIDL_LOCAL_APPDATA": 28, + }[csidl_name] + + buf = ctypes.create_unicode_buffer(1024) + ctypes.windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf) + + # Downgrade to short path name if have highbit chars. See + # . 
+ has_high_char = False + for c in buf: + if ord(c) > 255: + has_high_char = True + break + if has_high_char: + buf2 = ctypes.create_unicode_buffer(1024) + if ctypes.windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024): + buf = buf2 + + return buf.value + +def _get_win_folder_with_jna(csidl_name): + import array + from com.sun import jna + from com.sun.jna.platform import win32 + + buf_size = win32.WinDef.MAX_PATH * 2 + buf = array.zeros('c', buf_size) + shell = win32.Shell32.INSTANCE + shell.SHGetFolderPath(None, getattr(win32.ShlObj, csidl_name), None, win32.ShlObj.SHGFP_TYPE_CURRENT, buf) + dir = jna.Native.toString(buf.tostring()).rstrip("\0") + + # Downgrade to short path name if have highbit chars. See + # . + has_high_char = False + for c in dir: + if ord(c) > 255: + has_high_char = True + break + if has_high_char: + buf = array.zeros('c', buf_size) + kernel = win32.Kernel32.INSTANCE + if kernal.GetShortPathName(dir, buf, buf_size): + dir = jna.Native.toString(buf.tostring()).rstrip("\0") + + return dir + +if system == "win32": + try: + import win32com.shell + _get_win_folder = _get_win_folder_with_pywin32 + except ImportError: + try: + from ctypes import windll + _get_win_folder = _get_win_folder_with_ctypes + except ImportError: + try: + import com.sun.jna + _get_win_folder = _get_win_folder_with_jna + except ImportError: + _get_win_folder = _get_win_folder_from_registry + + +#---- self test code + +if __name__ == "__main__": + appname = "MyApp" + appauthor = "MyCompany" + + props = ("user_data_dir", "site_data_dir", + "user_config_dir", "site_config_dir", + "user_cache_dir", "user_log_dir") + + print("-- app dirs (with optional 'version')") + dirs = AppDirs(appname, appauthor, version="1.0") + for prop in props: + print("%s: %s" % (prop, getattr(dirs, prop))) + + print("\n-- app dirs (without optional 'version')") + dirs = AppDirs(appname, appauthor) + for prop in props: + print("%s: %s" % (prop, getattr(dirs, prop))) + + print("\n-- app dirs (without optional 'appauthor')") + dirs = AppDirs(appname) + for prop in props: + print("%s: %s" % (prop, getattr(dirs, prop))) + + print("\n-- app dirs (with disabled 'appauthor')") + dirs = AppDirs(appname, appauthor=False) + for prop in props: + print("%s: %s" % (prop, getattr(dirs, prop))) diff --git a/lib/python3.4/site-packages/pkg_resources/_vendor/packaging/__about__.py b/lib/python3.4/site-packages/pkg_resources/_vendor/packaging/__about__.py index eadb794..95d330e 100644 --- a/lib/python3.4/site-packages/pkg_resources/_vendor/packaging/__about__.py +++ b/lib/python3.4/site-packages/pkg_resources/_vendor/packaging/__about__.py @@ -1,16 +1,6 @@ -# Copyright 2014 Donald Stufft -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
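
A minimal sketch of the appdirs call that the reworked get_default_cache() now delegates to, assuming the vendored module just above behaves like the standalone appdirs package; the appname mirrors the value used in pkg_resources, and the resulting locations follow the per-platform rules in user_cache_dir's docstring (for example ~/.cache/Python-Eggs on Unix, ~/Library/Caches/Python-Eggs on Mac OS X)::

    import appdirs

    # Per-user, platform-appropriate cache directory for an app named
    # "Python-Eggs"; this is the fallback get_default_cache() uses when
    # the PYTHON_EGG_CACHE environment variable is not set.
    print(appdirs.user_cache_dir(appname='Python-Eggs'))
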
from __future__ import absolute_import, division, print_function __all__ = [ @@ -22,10 +12,10 @@ __title__ = "packaging" __summary__ = "Core utilities for Python packages" __uri__ = "https://github.com/pypa/packaging" -__version__ = "15.3" +__version__ = "16.8" -__author__ = "Donald Stufft" +__author__ = "Donald Stufft and individual contributors" __email__ = "donald@stufft.io" -__license__ = "Apache License, Version 2.0" -__copyright__ = "Copyright 2014 %s" % __author__ +__license__ = "BSD or Apache License, Version 2.0" +__copyright__ = "Copyright 2014-2016 %s" % __author__ diff --git a/lib/python3.4/site-packages/pkg_resources/_vendor/packaging/__init__.py b/lib/python3.4/site-packages/pkg_resources/_vendor/packaging/__init__.py index c39a8ea..5ee6220 100644 --- a/lib/python3.4/site-packages/pkg_resources/_vendor/packaging/__init__.py +++ b/lib/python3.4/site-packages/pkg_resources/_vendor/packaging/__init__.py @@ -1,16 +1,6 @@ -# Copyright 2014 Donald Stufft -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. from __future__ import absolute_import, division, print_function from .__about__ import ( diff --git a/lib/python3.4/site-packages/pkg_resources/_vendor/packaging/_compat.py b/lib/python3.4/site-packages/pkg_resources/_vendor/packaging/_compat.py index 5c396ce..210bb80 100644 --- a/lib/python3.4/site-packages/pkg_resources/_vendor/packaging/_compat.py +++ b/lib/python3.4/site-packages/pkg_resources/_vendor/packaging/_compat.py @@ -1,16 +1,6 @@ -# Copyright 2014 Donald Stufft -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. from __future__ import absolute_import, division, print_function import sys diff --git a/lib/python3.4/site-packages/pkg_resources/_vendor/packaging/_structures.py b/lib/python3.4/site-packages/pkg_resources/_vendor/packaging/_structures.py index 0ae9bb5..ccc2786 100644 --- a/lib/python3.4/site-packages/pkg_resources/_vendor/packaging/_structures.py +++ b/lib/python3.4/site-packages/pkg_resources/_vendor/packaging/_structures.py @@ -1,16 +1,6 @@ -# Copyright 2014 Donald Stufft -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. from __future__ import absolute_import, division, print_function diff --git a/lib/python3.4/site-packages/pkg_resources/_vendor/packaging/markers.py b/lib/python3.4/site-packages/pkg_resources/_vendor/packaging/markers.py index 9e90601..892e578 100644 --- a/lib/python3.4/site-packages/pkg_resources/_vendor/packaging/markers.py +++ b/lib/python3.4/site-packages/pkg_resources/_vendor/packaging/markers.py @@ -52,13 +52,26 @@ class Node(object): def __repr__(self): return "<{0}({1!r})>".format(self.__class__.__name__, str(self)) + def serialize(self): + raise NotImplementedError + class Variable(Node): - pass + + def serialize(self): + return str(self) class Value(Node): - pass + + def serialize(self): + return '"{0}"'.format(self) + + +class Op(Node): + + def serialize(self): + return str(self) VARIABLE = ( @@ -73,9 +86,23 @@ VARIABLE = ( L("python_version") | L("sys_platform") | L("os_name") | + L("os.name") | # PEP-345 + L("sys.platform") | # PEP-345 + L("platform.version") | # PEP-345 + L("platform.machine") | # PEP-345 + L("platform.python_implementation") | # PEP-345 + L("python_implementation") | # undocumented setuptools legacy L("extra") ) -VARIABLE.setParseAction(lambda s, l, t: Variable(t[0])) +ALIASES = { + 'os.name': 'os_name', + 'sys.platform': 'sys_platform', + 'platform.version': 'platform_version', + 'platform.machine': 'platform_machine', + 'platform.python_implementation': 'platform_python_implementation', + 'python_implementation': 'platform_python_implementation' +} +VARIABLE.setParseAction(lambda s, l, t: Variable(ALIASES.get(t[0], t[0]))) VERSION_CMP = ( L("===") | @@ -89,6 +116,7 @@ VERSION_CMP = ( ) MARKER_OP = VERSION_CMP | L("not in") | L("in") +MARKER_OP.setParseAction(lambda s, l, t: Op(t[0])) MARKER_VALUE = QuotedString("'") | QuotedString('"') MARKER_VALUE.setParseAction(lambda s, l, t: Value(t[0])) @@ -135,7 +163,7 @@ def _format_marker(marker, first=True): else: return "(" + " ".join(inner) + ")" elif isinstance(marker, tuple): - return '{0} {1} "{2}"'.format(*marker) + return " ".join([m.serialize() for m in marker]) else: return marker @@ -154,13 +182,13 @@ _operators = { def _eval_op(lhs, op, rhs): try: - spec = Specifier("".join([op, rhs])) + spec = Specifier("".join([op.serialize(), rhs])) except InvalidSpecifier: pass else: return spec.contains(lhs) - oper = _operators.get(op) + oper = _operators.get(op.serialize()) if oper is None: raise UndefinedComparison( "Undefined {0!r} on {1!r} and {2!r}.".format(op, lhs, rhs) diff --git a/lib/python3.4/site-packages/pkg_resources/_vendor/packaging/specifiers.py b/lib/python3.4/site-packages/pkg_resources/_vendor/packaging/specifiers.py index 891664f..7f5a76c 100644 --- a/lib/python3.4/site-packages/pkg_resources/_vendor/packaging/specifiers.py +++ b/lib/python3.4/site-packages/pkg_resources/_vendor/packaging/specifiers.py @@ -1,16 +1,6 @@ -# Copyright 2014 Donald Stufft -# -# Licensed under the Apache License, Version 2.0 (the 
"License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. from __future__ import absolute_import, division, print_function import abc @@ -204,8 +194,8 @@ class _IndividualSpecifier(BaseSpecifier): # If our version is a prerelease, and we were not set to allow # prereleases, then we'll store it for later incase nothing # else matches this specifier. - if (parsed_version.is_prerelease - and not (prereleases or self.prereleases)): + if (parsed_version.is_prerelease and not + (prereleases or self.prereleases)): found_prereleases.append(version) # Either this is not a prerelease, or we should have been # accepting prereleases from the begining. @@ -223,23 +213,23 @@ class _IndividualSpecifier(BaseSpecifier): class LegacySpecifier(_IndividualSpecifier): - _regex = re.compile( + _regex_str = ( r""" - ^ - \s* (?P(==|!=|<=|>=|<|>)) \s* (?P - [^\s]* # We just match everything, except for whitespace since this - # is a "legacy" specifier and the version string can be just - # about anything. + [^,;\s)]* # Since this is a "legacy" specifier, and the version + # string can be just about anything, we match everything + # except for whitespace, a semi-colon for marker support, + # a closing paren since versions can be enclosed in + # them, and a comma since it's a version separator. ) - \s* - $ - """, - re.VERBOSE | re.IGNORECASE, + """ ) + _regex = re.compile( + r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE) + _operators = { "==": "equal", "!=": "not_equal", @@ -284,10 +274,8 @@ def _require_version_compare(fn): class Specifier(_IndividualSpecifier): - _regex = re.compile( + _regex_str = ( r""" - ^ - \s* (?P(~=|==|!=|<=|>=|<|>|===)) (?P (?: @@ -378,12 +366,12 @@ class Specifier(_IndividualSpecifier): (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release ) ) - \s* - $ - """, - re.VERBOSE | re.IGNORECASE, + """ ) + _regex = re.compile( + r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE) + _operators = { "~=": "compatible", "==": "equal", @@ -409,8 +397,8 @@ class Specifier(_IndividualSpecifier): prefix = ".".join( list( itertools.takewhile( - lambda x: (not x.startswith("post") - and not x.startswith("dev")), + lambda x: (not x.startswith("post") and not + x.startswith("dev")), _version_split(spec), ) )[:-1] @@ -419,13 +407,15 @@ class Specifier(_IndividualSpecifier): # Add the prefix notation to the end of our string prefix += ".*" - return (self._get_operator(">=")(prospective, spec) - and self._get_operator("==")(prospective, prefix)) + return (self._get_operator(">=")(prospective, spec) and + self._get_operator("==")(prospective, prefix)) @_require_version_compare def _compare_equal(self, prospective, spec): # We need special logic to handle prefix matching if spec.endswith(".*"): + # In the case of prefix matching we want to ignore local segment. 
+ prospective = Version(prospective.public) # Split the spec out by dots, and pretend that there is an implicit # dot in between a release segment and a pre-release segment. spec = _version_split(spec[:-2]) # Remove the trailing .* @@ -577,8 +567,8 @@ def _pad_version(left, right): right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right))) # Get the rest of our versions - left_split.append(left[len(left_split):]) - right_split.append(left[len(right_split):]) + left_split.append(left[len(left_split[0]):]) + right_split.append(right[len(right_split[0]):]) # Insert our padding left_split.insert( diff --git a/lib/python3.4/site-packages/pkg_resources/_vendor/packaging/version.py b/lib/python3.4/site-packages/pkg_resources/_vendor/packaging/version.py index 4ba574b..83b5ee8 100644 --- a/lib/python3.4/site-packages/pkg_resources/_vendor/packaging/version.py +++ b/lib/python3.4/site-packages/pkg_resources/_vendor/packaging/version.py @@ -1,16 +1,6 @@ -# Copyright 2014 Donald Stufft -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. from __future__ import absolute_import, division, print_function import collections diff --git a/lib/python3.4/site-packages/pkg_resources/_vendor/pyparsing.py b/lib/python3.4/site-packages/pkg_resources/_vendor/pyparsing.py index 3e02dbe..a212243 100644 --- a/lib/python3.4/site-packages/pkg_resources/_vendor/pyparsing.py +++ b/lib/python3.4/site-packages/pkg_resources/_vendor/pyparsing.py @@ -1,6 +1,6 @@ # module pyparsing.py # -# Copyright (c) 2003-2015 Paul T. McGuire +# Copyright (c) 2003-2016 Paul T. McGuire # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the @@ -31,15 +31,18 @@ vs. the traditional lex/yacc approach, or the use of regular expressions. With don't need to learn a new syntax for defining grammars or matching expressions - the parsing module provides a library of classes that you use to construct the grammar directly in Python. -Here is a program to parse "Hello, World!" (or any greeting of the form C{", !"}):: +Here is a program to parse "Hello, World!" (or any greeting of the form +C{", !"}), built up using L{Word}, L{Literal}, and L{And} elements +(L{'+'} operator gives L{And} expressions, strings are auto-converted to +L{Literal} expressions):: from pyparsing import Word, alphas # define grammar of a greeting - greet = Word( alphas ) + "," + Word( alphas ) + "!" + greet = Word(alphas) + "," + Word(alphas) + "!" hello = "Hello, World!" - print (hello, "->", greet.parseString( hello )) + print (hello, "->", greet.parseString(hello)) The program outputs the following:: @@ -48,7 +51,7 @@ The program outputs the following:: The Python representation of the grammar is quite readable, owing to the self-explanatory class names, and the use of '+', '|' and '^' operators. 
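The packaging changes above add an Op marker node with PEP-345 variable aliases and make ==-prefix matching ignore a candidate's local version segment. A minimal sketch of both behaviors, assuming the vendored copy is importable from the _vendor path shown in this patch and follows the upstream packaging 16.x Marker/Specifier API:

    from pkg_resources._vendor.packaging.markers import Marker
    from pkg_resources._vendor.packaging.specifiers import Specifier

    # PEP-345 style names such as "os.name" are now aliased to their
    # PEP-508 equivalents ("os_name") before evaluation.
    print(Marker("os.name == 'posix'").evaluate())         # True on a POSIX system

    # Prefix matching drops the candidate's local segment before comparing.
    print(Specifier("==1.4.*").contains("1.4.5+local.1"))  # True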
-The parsed results returned from C{parseString()} can be accessed as a nested list, a dictionary, or an +The L{ParseResults} object returned from L{ParserElement.parseString} can be accessed as a nested list, a dictionary, or an object with named attributes. The pyparsing module handles some of the problems that are typically vexing when writing text parsers: @@ -57,8 +60,8 @@ The pyparsing module handles some of the problems that are typically vexing when - embedded comments """ -__version__ = "2.0.6" -__versionTime__ = "9 Nov 2015 19:03" +__version__ = "2.1.10" +__versionTime__ = "07 Oct 2016 01:31 UTC" __author__ = "Paul McGuire " import string @@ -70,8 +73,22 @@ import re import sre_constants import collections import pprint -import functools -import itertools +import traceback +import types +from datetime import datetime + +try: + from _thread import RLock +except ImportError: + from threading import RLock + +try: + from collections import OrderedDict as _OrderedDict +except ImportError: + try: + from ordereddict import OrderedDict as _OrderedDict + except ImportError: + _OrderedDict = None #~ sys.stderr.write( "testing pyparsing module, version %s, %s\n" % (__version__,__versionTime__ ) ) @@ -81,21 +98,23 @@ __all__ = [ 'MatchFirst', 'NoMatch', 'NotAny', 'OneOrMore', 'OnlyOnce', 'Optional', 'Or', 'ParseBaseException', 'ParseElementEnhance', 'ParseException', 'ParseExpression', 'ParseFatalException', 'ParseResults', 'ParseSyntaxException', 'ParserElement', 'QuotedString', 'RecursiveGrammarException', -'Regex', 'SkipTo', 'StringEnd', 'StringStart', 'Suppress', 'Token', 'TokenConverter', 'Upcase', +'Regex', 'SkipTo', 'StringEnd', 'StringStart', 'Suppress', 'Token', 'TokenConverter', 'White', 'Word', 'WordEnd', 'WordStart', 'ZeroOrMore', 'alphanums', 'alphas', 'alphas8bit', 'anyCloseTag', 'anyOpenTag', 'cStyleComment', 'col', 'commaSeparatedList', 'commonHTMLEntity', 'countedArray', 'cppStyleComment', 'dblQuotedString', 'dblSlashComment', 'delimitedList', 'dictOf', 'downcaseTokens', 'empty', 'hexnums', -'htmlComment', 'javaStyleComment', 'keepOriginalText', 'line', 'lineEnd', 'lineStart', 'lineno', +'htmlComment', 'javaStyleComment', 'line', 'lineEnd', 'lineStart', 'lineno', 'makeHTMLTags', 'makeXMLTags', 'matchOnlyAtCol', 'matchPreviousExpr', 'matchPreviousLiteral', 'nestedExpr', 'nullDebugAction', 'nums', 'oneOf', 'opAssoc', 'operatorPrecedence', 'printables', 'punc8bit', 'pythonStyleComment', 'quotedString', 'removeQuotes', 'replaceHTMLEntity', 'replaceWith', 'restOfLine', 'sglQuotedString', 'srange', 'stringEnd', 'stringStart', 'traceParseAction', 'unicodeString', 'upcaseTokens', 'withAttribute', 'indentedBlock', 'originalTextFor', 'ungroup', 'infixNotation','locatedExpr', 'withClass', +'CloseMatch', 'tokenMap', 'pyparsing_common', ] -PY_3 = sys.version.startswith('3') +system_version = tuple(sys.version_info)[:3] +PY_3 = system_version[0] == 3 if PY_3: _MAX_INT = sys.maxsize basestring = str @@ -123,18 +142,11 @@ else: return str(obj) except UnicodeEncodeError: - # The Python docs (http://docs.python.org/ref/customization.html#l2h-182) - # state that "The return value must be a string object". However, does a - # unicode object (being a subclass of basestring) count as a "string - # object"? - # If so, then return a unicode object: - return unicode(obj) - # Else encode it... but how? There are many choices... :) - # Replace unprintables with escape codes? - #return unicode(obj).encode(sys.getdefaultencoding(), 'backslashreplace_errors') - # Replace unprintables with question marks? 
- #return unicode(obj).encode(sys.getdefaultencoding(), 'replace') - # ... + # Else encode it + ret = unicode(obj).encode(sys.getdefaultencoding(), 'xmlcharrefreplace') + xmlcharref = Regex('&#\d+;') + xmlcharref.setParseAction(lambda t: '\\u' + hex(int(t[0][2:-1]))[2:]) + return xmlcharref.transformString(ret) # build list of single arg builtins, tolerant of Python version, that can be used as parse actions singleArgBuiltins = [] @@ -160,7 +172,7 @@ def _xml_escape(data): class _Constants(object): pass -alphas = string.ascii_lowercase + string.ascii_uppercase +alphas = string.ascii_uppercase + string.ascii_lowercase nums = "0123456789" hexnums = nums + "ABCDEFabcdef" alphanums = alphas + nums @@ -180,6 +192,15 @@ class ParseBaseException(Exception): self.msg = msg self.pstr = pstr self.parserElement = elem + self.args = (pstr, loc, msg) + + @classmethod + def _from_exception(cls, pe): + """ + internal factory method to simplify creating one type of ParseException + from another - avoids having __init__ signature conflicts among subclasses + """ + return cls(pe.pstr, pe.loc, pe.msg, pe.parserElement) def __getattr__( self, aname ): """supported attributes by name are: @@ -212,15 +233,26 @@ class ParseBaseException(Exception): markerString, line_str[line_column:])) return line_str.strip() def __dir__(self): - return "loc msg pstr parserElement lineno col line " \ - "markInputline __str__ __repr__".split() + return "lineno col line".split() + dir(type(self)) class ParseException(ParseBaseException): - """exception thrown when parse expressions don't match class; - supported attributes by name are: - - lineno - returns the line number of the exception text - - col - returns the column number of the exception text - - line - returns the line containing the exception text + """ + Exception thrown when parse expressions don't match class; + supported attributes by name are: + - lineno - returns the line number of the exception text + - col - returns the column number of the exception text + - line - returns the line containing the exception text + + Example:: + try: + Word(nums).setName("integer").parseString("ABC") + except ParseException as pe: + print(pe) + print("column: {}".format(pe.col)) + + prints:: + Expected integer (at char 0), (line:1, col:1) + column: 1 """ pass @@ -230,12 +262,10 @@ class ParseFatalException(ParseBaseException): pass class ParseSyntaxException(ParseFatalException): - """just like C{L{ParseFatalException}}, but thrown internally when an - C{L{ErrorStop}} ('-' operator) indicates that parsing is to stop immediately because - an unbacktrackable syntax error has been found""" - def __init__(self, pe): - super(ParseSyntaxException, self).__init__( - pe.pstr, pe.loc, pe.msg, pe.parserElement) + """just like L{ParseFatalException}, but thrown internally when an + L{ErrorStop} ('-' operator) indicates that parsing is to stop + immediately because an unbacktrackable syntax error has been found""" + pass #~ class ReparseException(ParseBaseException): #~ """Experimental class - parse actions can raise this exception to cause @@ -251,7 +281,7 @@ class ParseSyntaxException(ParseFatalException): #~ self.reparseLoc = restartLoc class RecursiveGrammarException(Exception): - """exception thrown by C{validate()} if the grammar could be improperly recursive""" + """exception thrown by L{ParserElement.validate} if the grammar could be improperly recursive""" def __init__( self, parseElementList ): self.parseElementTrace = parseElementList @@ -264,17 +294,50 @@ class 
_ParseResultsWithOffset(object): def __getitem__(self,i): return self.tup[i] def __repr__(self): - return repr(self.tup) + return repr(self.tup[0]) def setOffset(self,i): self.tup = (self.tup[0],i) class ParseResults(object): - """Structured parse results, to provide multiple means of access to the parsed data: + """ + Structured parse results, to provide multiple means of access to the parsed data: - as a list (C{len(results)}) - by list index (C{results[0], results[1]}, etc.) - - by attribute (C{results.}) - """ - def __new__(cls, toklist, name=None, asList=True, modal=True ): + - by attribute (C{results.} - see L{ParserElement.setResultsName}) + + Example:: + integer = Word(nums) + date_str = (integer.setResultsName("year") + '/' + + integer.setResultsName("month") + '/' + + integer.setResultsName("day")) + # equivalent form: + # date_str = integer("year") + '/' + integer("month") + '/' + integer("day") + + # parseString returns a ParseResults object + result = date_str.parseString("1999/12/31") + + def test(s, fn=repr): + print("%s -> %s" % (s, fn(eval(s)))) + test("list(result)") + test("result[0]") + test("result['month']") + test("result.day") + test("'month' in result") + test("'minutes' in result") + test("result.dump()", str) + prints:: + list(result) -> ['1999', '/', '12', '/', '31'] + result[0] -> '1999' + result['month'] -> '12' + result.day -> '31' + 'month' in result -> True + 'minutes' in result -> False + result.dump() -> ['1999', '/', '12', '/', '31'] + - day: 31 + - month: 12 + - year: 1999 + """ + def __new__(cls, toklist=None, name=None, asList=True, modal=True ): if isinstance(toklist, cls): return toklist retobj = object.__new__(cls) @@ -283,12 +346,16 @@ class ParseResults(object): # Performance tuning: we construct a *lot* of these, so keep this # constructor as small and fast as possible - def __init__( self, toklist, name=None, asList=True, modal=True, isinstance=isinstance ): + def __init__( self, toklist=None, name=None, asList=True, modal=True, isinstance=isinstance ): if self.__doinit: self.__doinit = False self.__name = None self.__parent = None self.__accumNames = {} + self.__asList = asList + self.__modal = modal + if toklist is None: + toklist = [] if isinstance(toklist, list): self.__toklist = toklist[:] elif isinstance(toklist, _generatorType): @@ -331,7 +398,7 @@ class ParseResults(object): if isinstance(v,_ParseResultsWithOffset): self.__tokdict[k] = self.__tokdict.get(k,list()) + [v] sub = v[0] - elif isinstance(k,int): + elif isinstance(k,(int,slice)): self.__toklist[k] = v sub = v else: @@ -354,11 +421,6 @@ class ParseResults(object): removed = list(range(*i.indices(mylen))) removed.reverse() # fixup indices in token dictionary - #~ for name in self.__tokdict: - #~ occurrences = self.__tokdict[name] - #~ for j in removed: - #~ for k, (value, position) in enumerate(occurrences): - #~ occurrences[k] = _ParseResultsWithOffset(value, position - (position > j)) for name,occurrences in self.__tokdict.items(): for j in removed: for k, (value, position) in enumerate(occurrences): @@ -370,39 +432,52 @@ class ParseResults(object): return k in self.__tokdict def __len__( self ): return len( self.__toklist ) - def __bool__(self): return len( self.__toklist ) > 0 + def __bool__(self): return ( not not self.__toklist ) __nonzero__ = __bool__ def __iter__( self ): return iter( self.__toklist ) def __reversed__( self ): return iter( self.__toklist[::-1] ) - def iterkeys( self ): - """Returns all named result keys.""" + def _iterkeys( self ): if 
hasattr(self.__tokdict, "iterkeys"): return self.__tokdict.iterkeys() else: return iter(self.__tokdict) - def itervalues( self ): - """Returns all named result values.""" - return (self[k] for k in self.iterkeys()) + def _itervalues( self ): + return (self[k] for k in self._iterkeys()) - def iteritems( self ): - return ((k, self[k]) for k in self.iterkeys()) + def _iteritems( self ): + return ((k, self[k]) for k in self._iterkeys()) if PY_3: - keys = iterkeys - values = itervalues - items = iteritems + keys = _iterkeys + """Returns an iterator of all named result keys (Python 3.x only).""" + + values = _itervalues + """Returns an iterator of all named result values (Python 3.x only).""" + + items = _iteritems + """Returns an iterator of all named result key-value tuples (Python 3.x only).""" + else: + iterkeys = _iterkeys + """Returns an iterator of all named result keys (Python 2.x only).""" + + itervalues = _itervalues + """Returns an iterator of all named result values (Python 2.x only).""" + + iteritems = _iteritems + """Returns an iterator of all named result key-value tuples (Python 2.x only).""" + def keys( self ): - """Returns all named result keys.""" + """Returns all named result keys (as a list in Python 2.x, as an iterator in Python 3.x).""" return list(self.iterkeys()) def values( self ): - """Returns all named result values.""" + """Returns all named result values (as a list in Python 2.x, as an iterator in Python 3.x).""" return list(self.itervalues()) def items( self ): - """Returns all named result keys and values as a list of tuples.""" + """Returns all named result key-values (as a list of tuples in Python 2.x, as an iterator in Python 3.x).""" return list(self.iteritems()) def haskeys( self ): @@ -411,14 +486,39 @@ class ParseResults(object): return bool(self.__tokdict) def pop( self, *args, **kwargs): - """Removes and returns item at specified index (default=last). - Supports both list and dict semantics for pop(). If passed no - argument or an integer argument, it will use list semantics - and pop tokens from the list of parsed tokens. If passed a - non-integer argument (most likely a string), it will use dict - semantics and pop the corresponding value from any defined - results names. A second default return value argument is - supported, just as in dict.pop().""" + """ + Removes and returns item at specified index (default=C{last}). + Supports both C{list} and C{dict} semantics for C{pop()}. If passed no + argument or an integer argument, it will use C{list} semantics + and pop tokens from the list of parsed tokens. If passed a + non-integer argument (most likely a string), it will use C{dict} + semantics and pop the corresponding value from any defined + results names. A second default return value argument is + supported, just as in C{dict.pop()}. 
+ + Example:: + def remove_first(tokens): + tokens.pop(0) + print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321'] + print(OneOrMore(Word(nums)).addParseAction(remove_first).parseString("0 123 321")) # -> ['123', '321'] + + label = Word(alphas) + patt = label("LABEL") + OneOrMore(Word(nums)) + print(patt.parseString("AAB 123 321").dump()) + + # Use pop() in a parse action to remove named result (note that corresponding value is not + # removed from list form of results) + def remove_LABEL(tokens): + tokens.pop("LABEL") + return tokens + patt.addParseAction(remove_LABEL) + print(patt.parseString("AAB 123 321").dump()) + prints:: + ['AAB', '123', '321'] + - LABEL: AAB + + ['AAB', '123', '321'] + """ if not args: args = [-1] for k,v in kwargs.items(): @@ -438,39 +538,83 @@ class ParseResults(object): return defaultvalue def get(self, key, defaultValue=None): - """Returns named result matching the given key, or if there is no - such name, then returns the given C{defaultValue} or C{None} if no - C{defaultValue} is specified.""" + """ + Returns named result matching the given key, or if there is no + such name, then returns the given C{defaultValue} or C{None} if no + C{defaultValue} is specified. + + Similar to C{dict.get()}. + + Example:: + integer = Word(nums) + date_str = integer("year") + '/' + integer("month") + '/' + integer("day") + + result = date_str.parseString("1999/12/31") + print(result.get("year")) # -> '1999' + print(result.get("hour", "not specified")) # -> 'not specified' + print(result.get("hour")) # -> None + """ if key in self: return self[key] else: return defaultValue def insert( self, index, insStr ): - """Inserts new element at location index in the list of parsed tokens.""" + """ + Inserts new element at location index in the list of parsed tokens. + + Similar to C{list.insert()}. + + Example:: + print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321'] + + # use a parse action to insert the parse location in the front of the parsed results + def insert_locn(locn, tokens): + tokens.insert(0, locn) + print(OneOrMore(Word(nums)).addParseAction(insert_locn).parseString("0 123 321")) # -> [0, '0', '123', '321'] + """ self.__toklist.insert(index, insStr) # fixup indices in token dictionary - #~ for name in self.__tokdict: - #~ occurrences = self.__tokdict[name] - #~ for k, (value, position) in enumerate(occurrences): - #~ occurrences[k] = _ParseResultsWithOffset(value, position + (position > index)) for name,occurrences in self.__tokdict.items(): for k, (value, position) in enumerate(occurrences): occurrences[k] = _ParseResultsWithOffset(value, position + (position > index)) def append( self, item ): - """Add single element to end of ParseResults list of elements.""" + """ + Add single element to end of ParseResults list of elements. + + Example:: + print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321'] + + # use a parse action to compute the sum of the parsed integers, and add it to the end + def append_sum(tokens): + tokens.append(sum(map(int, tokens))) + print(OneOrMore(Word(nums)).addParseAction(append_sum).parseString("0 123 321")) # -> ['0', '123', '321', 444] + """ self.__toklist.append(item) def extend( self, itemseq ): - """Add sequence of elements to end of ParseResults list of elements.""" + """ + Add sequence of elements to end of ParseResults list of elements. 
+ + Example:: + patt = OneOrMore(Word(alphas)) + + # use a parse action to append the reverse of the matched strings, to make a palindrome + def make_palindrome(tokens): + tokens.extend(reversed([t[::-1] for t in tokens])) + return ''.join(tokens) + print(patt.addParseAction(make_palindrome).parseString("lskdj sdlkjf lksd")) # -> 'lskdjsdlkjflksddsklfjkldsjdksl' + """ if isinstance(itemseq, ParseResults): self += itemseq else: self.__toklist.extend(itemseq) def clear( self ): - """Clear all elements and results names.""" + """ + Clear all elements and results names. + """ del self.__toklist[:] self.__tokdict.clear() @@ -511,7 +655,11 @@ class ParseResults(object): def __radd__(self, other): if isinstance(other,int) and other == 0: + # useful for merging many ParseResults using sum() builtin return self.copy() + else: + # this may raise a TypeError - so be it + return other + self def __repr__( self ): return "(%s, %s)" % ( repr( self.__toklist ), repr( self.__tokdict ) ) @@ -531,18 +679,60 @@ class ParseResults(object): return out def asList( self ): - """Returns the parse results as a nested list of matching tokens, all converted to strings.""" + """ + Returns the parse results as a nested list of matching tokens, all converted to strings. + + Example:: + patt = OneOrMore(Word(alphas)) + result = patt.parseString("sldkj lsdkj sldkj") + # even though the result prints in string-like form, it is actually a pyparsing ParseResults + print(type(result), result) # -> ['sldkj', 'lsdkj', 'sldkj'] + + # Use asList() to create an actual list + result_list = result.asList() + print(type(result_list), result_list) # -> ['sldkj', 'lsdkj', 'sldkj'] + """ return [res.asList() if isinstance(res,ParseResults) else res for res in self.__toklist] def asDict( self ): - """Returns the named parse results as dictionary.""" + """ + Returns the named parse results as a nested dictionary. + + Example:: + integer = Word(nums) + date_str = integer("year") + '/' + integer("month") + '/' + integer("day") + + result = date_str.parseString('12/31/1999') + print(type(result), repr(result)) # -> (['12', '/', '31', '/', '1999'], {'day': [('1999', 4)], 'year': [('12', 0)], 'month': [('31', 2)]}) + + result_dict = result.asDict() + print(type(result_dict), repr(result_dict)) # -> {'day': '1999', 'year': '12', 'month': '31'} + + # even though a ParseResults supports dict-like access, sometime you just need to have a dict + import json + print(json.dumps(result)) # -> Exception: TypeError: ... is not JSON serializable + print(json.dumps(result.asDict())) # -> {"month": "31", "day": "1999", "year": "12"} + """ if PY_3: - return dict( self.items() ) + item_fn = self.items else: - return dict( self.iteritems() ) + item_fn = self.iteritems + + def toItem(obj): + if isinstance(obj, ParseResults): + if obj.haskeys(): + return obj.asDict() + else: + return [toItem(v) for v in obj] + else: + return obj + + return dict((k,toItem(v)) for k,v in item_fn()) def copy( self ): - """Returns a new copy of a C{ParseResults} object.""" + """ + Returns a new copy of a C{ParseResults} object. + """ ret = ParseResults( self.__toklist ) ret.__tokdict = self.__tokdict.copy() ret.__parent = self.__parent @@ -551,7 +741,9 @@ class ParseResults(object): return ret def asXML( self, doctag=None, namedItemsOnly=False, indent="", formatted=True ): - """Returns the parse results as XML. Tags are created for tokens and lists that have defined results names.""" + """ + (Deprecated) Returns the parse results as XML. 
Tags are created for tokens and lists that have defined results names. + """ nl = "\n" out = [] namedItems = dict((v[1],k) for (k,vlist) in self.__tokdict.items() @@ -617,7 +809,27 @@ class ParseResults(object): return None def getName(self): - """Returns the results name for this token expression.""" + """ + Returns the results name for this token expression. Useful when several + different expressions might match at a particular location. + + Example:: + integer = Word(nums) + ssn_expr = Regex(r"\d\d\d-\d\d-\d\d\d\d") + house_number_expr = Suppress('#') + Word(nums, alphanums) + user_data = (Group(house_number_expr)("house_number") + | Group(ssn_expr)("ssn") + | Group(integer)("age")) + user_info = OneOrMore(user_data) + + result = user_info.parseString("22 111-22-3333 #221B") + for item in result: + print(item.getName(), ':', item[0]) + prints:: + age : 22 + ssn : 111-22-3333 + house_number : 221B + """ if self.__name: return self.__name elif self.__parent: @@ -628,45 +840,77 @@ class ParseResults(object): return None elif (len(self) == 1 and len(self.__tokdict) == 1 and - self.__tokdict.values()[0][0][1] in (0,-1)): - return self.__tokdict.keys()[0] + next(iter(self.__tokdict.values()))[0][1] in (0,-1)): + return next(iter(self.__tokdict.keys())) else: return None - def dump(self,indent='',depth=0): - """Diagnostic method for listing out the contents of a C{ParseResults}. - Accepts an optional C{indent} argument so that this string can be embedded - in a nested display of other data.""" + def dump(self, indent='', depth=0, full=True): + """ + Diagnostic method for listing out the contents of a C{ParseResults}. + Accepts an optional C{indent} argument so that this string can be embedded + in a nested display of other data. + + Example:: + integer = Word(nums) + date_str = integer("year") + '/' + integer("month") + '/' + integer("day") + + result = date_str.parseString('12/31/1999') + print(result.dump()) + prints:: + ['12', '/', '31', '/', '1999'] + - day: 1999 + - month: 31 + - year: 12 + """ out = [] NL = '\n' out.append( indent+_ustr(self.asList()) ) - if self.haskeys(): - items = sorted(self.items()) - for k,v in items: - if out: - out.append(NL) - out.append( "%s%s- %s: " % (indent,(' '*depth), k) ) - if isinstance(v,ParseResults): - if v: - out.append( v.dump(indent,depth+1) ) + if full: + if self.haskeys(): + items = sorted((str(k), v) for k,v in self.items()) + for k,v in items: + if out: + out.append(NL) + out.append( "%s%s- %s: " % (indent,(' '*depth), k) ) + if isinstance(v,ParseResults): + if v: + out.append( v.dump(indent,depth+1) ) + else: + out.append(_ustr(v)) else: - out.append(_ustr(v)) - else: - out.append(_ustr(v)) - elif any(isinstance(vv,ParseResults) for vv in self): - v = self - for i,vv in enumerate(v): - if isinstance(vv,ParseResults): - out.append("\n%s%s[%d]:\n%s%s%s" % (indent,(' '*(depth)),i,indent,(' '*(depth+1)),vv.dump(indent,depth+1) )) - else: - out.append("\n%s%s[%d]:\n%s%s%s" % (indent,(' '*(depth)),i,indent,(' '*(depth+1)),_ustr(vv))) + out.append(repr(v)) + elif any(isinstance(vv,ParseResults) for vv in self): + v = self + for i,vv in enumerate(v): + if isinstance(vv,ParseResults): + out.append("\n%s%s[%d]:\n%s%s%s" % (indent,(' '*(depth)),i,indent,(' '*(depth+1)),vv.dump(indent,depth+1) )) + else: + out.append("\n%s%s[%d]:\n%s%s%s" % (indent,(' '*(depth)),i,indent,(' '*(depth+1)),_ustr(vv))) return "".join(out) def pprint(self, *args, **kwargs): - """Pretty-printer for parsed results as a list, using the C{pprint} module. 
- Accepts additional positional or keyword args as defined for the - C{pprint.pprint} method. (U{http://docs.python.org/3/library/pprint.html#pprint.pprint})""" + """ + Pretty-printer for parsed results as a list, using the C{pprint} module. + Accepts additional positional or keyword args as defined for the + C{pprint.pprint} method. (U{http://docs.python.org/3/library/pprint.html#pprint.pprint}) + + Example:: + ident = Word(alphas, alphanums) + num = Word(nums) + func = Forward() + term = ident | num | Group('(' + func + ')') + func <<= ident + Group(Optional(delimitedList(term))) + result = func.parseString("fna a,b,(fnb c,d,200),100") + result.pprint(width=40) + prints:: + ['fna', + ['a', + 'b', + ['(', 'fnb', ['c', 'd', '200'], ')'], + '100']] + """ pprint.pprint(self.asList(), *args, **kwargs) # add support for pickle protocol @@ -690,8 +934,11 @@ class ParseResults(object): else: self.__parent = None + def __getnewargs__(self): + return self.__toklist, self.__name, self.__asList, self.__modal + def __dir__(self): - return dir(super(ParseResults,self)) + list(self.keys()) + return (dir(type(self)) + list(self.keys())) collections.MutableMapping.register(ParseResults) @@ -706,7 +953,7 @@ def col (loc,strg): positions within the parsed string. """ s = strg - return 1 if loc= (3,5): + def extract_stack(limit=0): + # special handling for Python 3.5.0 - extra deep call stack by 1 + offset = -3 if system_version == (3,5,0) else -2 + frame_summary = traceback.extract_stack(limit=-offset+limit-1)[offset] + return [(frame_summary.filename, frame_summary.lineno)] + def extract_tb(tb, limit=0): + frames = traceback.extract_tb(tb, limit=limit) + frame_summary = frames[-1] + return [(frame_summary.filename, frame_summary.lineno)] + else: + extract_stack = traceback.extract_stack + extract_tb = traceback.extract_tb + + # synthesize what would be returned by traceback.extract_stack at the call to + # user's parse action 'func', so that we don't incur call penalty at parse time + + LINE_DIFF = 6 + # IF ANY CODE CHANGES, EVEN JUST COMMENTS OR BLANK LINES, BETWEEN THE NEXT LINE AND + # THE CALL TO FUNC INSIDE WRAPPER, LINE_DIFF MUST BE MODIFIED!!!! 
+ this_line = extract_stack(limit=2)[-1] + pa_call_line_synth = (this_line[0], this_line[1]+LINE_DIFF) + def wrapper(*args): while 1: try: @@ -778,12 +1050,33 @@ def _trim_arity(func, maxargs=2): foundArity[0] = True return ret except TypeError: - if limit[0] <= maxargs and not foundArity[0]: + # re-raise TypeErrors if they did not come from our arity testing + if foundArity[0]: + raise + else: + try: + tb = sys.exc_info()[-1] + if not extract_tb(tb, limit=2)[-1][:2] == pa_call_line_synth: + raise + finally: + del tb + + if limit[0] <= maxargs: limit[0] += 1 continue raise + + # copy func name to wrapper for sensible debug output + func_name = "" + try: + func_name = getattr(func, '__name__', + getattr(func, '__class__').__name__) + except Exception: + func_name = str(func) + wrapper.__name__ = func_name + return wrapper - + class ParserElement(object): """Abstract base level parser element class.""" DEFAULT_WHITE_CHARS = " \n\t\r" @@ -791,7 +1084,16 @@ class ParserElement(object): @staticmethod def setDefaultWhitespaceChars( chars ): - """Overrides the default whitespace chars + r""" + Overrides the default whitespace chars + + Example:: + # default whitespace chars are space, and newline + OneOrMore(Word(alphas)).parseString("abc def\nghi jkl") # -> ['abc', 'def', 'ghi', 'jkl'] + + # change to just treat newline as significant + ParserElement.setDefaultWhitespaceChars(" \t") + OneOrMore(Word(alphas)).parseString("abc def\nghi jkl") # -> ['abc', 'def'] """ ParserElement.DEFAULT_WHITE_CHARS = chars @@ -799,8 +1101,22 @@ class ParserElement(object): def inlineLiteralsUsing(cls): """ Set class to be used for inclusion of string literals into a parser. + + Example:: + # default literal class used is Literal + integer = Word(nums) + date_str = integer("year") + '/' + integer("month") + '/' + integer("day") + + date_str.parseString("1999/12/31") # -> ['1999', '/', '12', '/', '31'] + + + # change to Suppress + ParserElement.inlineLiteralsUsing(Suppress) + date_str = integer("year") + '/' + integer("month") + '/' + integer("day") + + date_str.parseString("1999/12/31") # -> ['1999', '12', '31'] """ - ParserElement.literalStringClass = cls + ParserElement._literalStringClass = cls def __init__( self, savelist=False ): self.parseAction = list() @@ -826,8 +1142,21 @@ class ParserElement(object): self.callDuringTry = False def copy( self ): - """Make a copy of this C{ParserElement}. Useful for defining different parse actions - for the same parsing pattern, using copies of the original parse element.""" + """ + Make a copy of this C{ParserElement}. Useful for defining different parse actions + for the same parsing pattern, using copies of the original parse element. 
+ + Example:: + integer = Word(nums).setParseAction(lambda toks: int(toks[0])) + integerK = integer.copy().addParseAction(lambda toks: toks[0]*1024) + Suppress("K") + integerM = integer.copy().addParseAction(lambda toks: toks[0]*1024*1024) + Suppress("M") + + print(OneOrMore(integerK | integerM | integer).parseString("5K 100 640K 256M")) + prints:: + [5120, 100, 655360, 268435456] + Equivalent form of C{expr.copy()} is just C{expr()}:: + integerM = integer().addParseAction(lambda toks: toks[0]*1024*1024) + Suppress("M") + """ cpy = copy.copy( self ) cpy.parseAction = self.parseAction[:] cpy.ignoreExprs = self.ignoreExprs[:] @@ -836,7 +1165,13 @@ class ParserElement(object): return cpy def setName( self, name ): - """Define name for this expression, for use in debugging.""" + """ + Define name for this expression, makes debugging and exception messages clearer. + + Example:: + Word(nums).parseString("ABC") # -> Exception: Expected W:(0123...) (at char 0), (line:1, col:1) + Word(nums).setName("integer").parseString("ABC") # -> Exception: Expected integer (at char 0), (line:1, col:1) + """ self.name = name self.errmsg = "Expected " + self.name if hasattr(self,"exception"): @@ -844,15 +1179,24 @@ class ParserElement(object): return self def setResultsName( self, name, listAllMatches=False ): - """Define name for referencing matching tokens as a nested attribute - of the returned parse results. - NOTE: this returns a *copy* of the original C{ParserElement} object; - this is so that the client can define a basic element, such as an - integer, and reference it in multiple places with different names. - - You can also set results names using the abbreviated syntax, - C{expr("name")} in place of C{expr.setResultsName("name")} - - see L{I{__call__}<__call__>}. + """ + Define name for referencing matching tokens as a nested attribute + of the returned parse results. + NOTE: this returns a *copy* of the original C{ParserElement} object; + this is so that the client can define a basic element, such as an + integer, and reference it in multiple places with different names. + + You can also set results names using the abbreviated syntax, + C{expr("name")} in place of C{expr.setResultsName("name")} - + see L{I{__call__}<__call__>}. + + Example:: + date_str = (integer.setResultsName("year") + '/' + + integer.setResultsName("month") + '/' + + integer.setResultsName("day")) + + # equivalent form: + date_str = integer("year") + '/' + integer("month") + '/' + integer("day") """ newself = self.copy() if name.endswith("*"): @@ -881,42 +1225,76 @@ class ParserElement(object): return self def setParseAction( self, *fns, **kwargs ): - """Define action to perform when successfully matching parse element definition. - Parse action fn is a callable method with 0-3 arguments, called as C{fn(s,loc,toks)}, - C{fn(loc,toks)}, C{fn(toks)}, or just C{fn()}, where: - - s = the original string being parsed (see note below) - - loc = the location of the matching substring - - toks = a list of the matched tokens, packaged as a C{L{ParseResults}} object - If the functions in fns modify the tokens, they can return them as the return - value from fn, and the modified list of tokens will replace the original. - Otherwise, fn does not need to return any value. + """ + Define action to perform when successfully matching parse element definition. 
+ Parse action fn is a callable method with 0-3 arguments, called as C{fn(s,loc,toks)}, + C{fn(loc,toks)}, C{fn(toks)}, or just C{fn()}, where: + - s = the original string being parsed (see note below) + - loc = the location of the matching substring + - toks = a list of the matched tokens, packaged as a C{L{ParseResults}} object + If the functions in fns modify the tokens, they can return them as the return + value from fn, and the modified list of tokens will replace the original. + Otherwise, fn does not need to return any value. - Note: the default parsing behavior is to expand tabs in the input string - before starting the parsing process. See L{I{parseString}} for more information - on parsing strings containing C{}s, and suggested methods to maintain a - consistent view of the parsed string, the parse location, and line and column - positions within the parsed string. - """ + Optional keyword arguments: + - callDuringTry = (default=C{False}) indicate if parse action should be run during lookaheads and alternate testing + + Note: the default parsing behavior is to expand tabs in the input string + before starting the parsing process. See L{I{parseString}} for more information + on parsing strings containing C{}s, and suggested methods to maintain a + consistent view of the parsed string, the parse location, and line and column + positions within the parsed string. + + Example:: + integer = Word(nums) + date_str = integer + '/' + integer + '/' + integer + + date_str.parseString("1999/12/31") # -> ['1999', '/', '12', '/', '31'] + + # use parse action to convert to ints at parse time + integer = Word(nums).setParseAction(lambda toks: int(toks[0])) + date_str = integer + '/' + integer + '/' + integer + + # note that integer fields are now ints, not strings + date_str.parseString("1999/12/31") # -> [1999, '/', 12, '/', 31] + """ self.parseAction = list(map(_trim_arity, list(fns))) self.callDuringTry = kwargs.get("callDuringTry", False) return self def addParseAction( self, *fns, **kwargs ): - """Add parse action to expression's list of parse actions. See L{I{setParseAction}}.""" + """ + Add parse action to expression's list of parse actions. See L{I{setParseAction}}. + + See examples in L{I{copy}}. + """ self.parseAction += list(map(_trim_arity, list(fns))) self.callDuringTry = self.callDuringTry or kwargs.get("callDuringTry", False) return self def addCondition(self, *fns, **kwargs): """Add a boolean predicate function to expression's list of parse actions. See - L{I{setParseAction}}. Optional keyword argument C{message} can - be used to define a custom message to be used in the raised exception.""" - msg = kwargs.get("message") or "failed user-defined condition" + L{I{setParseAction}} for function call signatures. Unlike C{setParseAction}, + functions passed to C{addCondition} need to return boolean success/fail of the condition. 
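The hunk below also adds a fatal keyword to addCondition, which raises ParseFatalException instead of ParseException when the predicate fails. A short sketch of that option, assuming pyparsing 2.1.x as patched here:

    import pyparsing as pp

    integer = pp.Word(pp.nums).setParseAction(lambda t: int(t[0]))
    year = integer.copy().addCondition(
        lambda t: t[0] >= 2000,
        message="Only years 2000 and later are supported",
        fatal=True,  # new keyword: stop parsing immediately on failure
    )
    date_str = year + "/" + integer + "/" + integer
    try:
        date_str.parseString("1999/12/31")
    except pp.ParseFatalException as pe:
        print(pe)  # Only years 2000 and later are supported (at char 0), (line:1, col:1)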
+ + Optional keyword arguments: + - message = define a custom message to be used in the raised exception + - fatal = if True, will raise ParseFatalException to stop parsing immediately; otherwise will raise ParseException + + Example:: + integer = Word(nums).setParseAction(lambda toks: int(toks[0])) + year_int = integer.copy() + year_int.addCondition(lambda toks: toks[0] >= 2000, message="Only support years 2000 and later") + date_str = year_int + '/' + integer + '/' + integer + + result = date_str.parseString("1999/12/31") # -> Exception: Only support years 2000 and later (at char 0), (line:1, col:1) + """ + msg = kwargs.get("message", "failed user-defined condition") + exc_type = ParseFatalException if kwargs.get("fatal", False) else ParseException for fn in fns: def pa(s,l,t): if not bool(_trim_arity(fn)(s,l,t)): - raise ParseException(s,l,msg) - return t + raise exc_type(s,l,msg) self.parseAction.append(pa) self.callDuringTry = self.callDuringTry or kwargs.get("callDuringTry", False) return self @@ -1043,43 +1421,132 @@ class ParserElement(object): return self._parse( instring, loc, doActions=False )[0] except ParseFatalException: raise ParseException( instring, loc, self.errmsg, self) + + def canParseNext(self, instring, loc): + try: + self.tryParse(instring, loc) + except (ParseException, IndexError): + return False + else: + return True + + class _UnboundedCache(object): + def __init__(self): + cache = {} + self.not_in_cache = not_in_cache = object() + + def get(self, key): + return cache.get(key, not_in_cache) + + def set(self, key, value): + cache[key] = value + + def clear(self): + cache.clear() + + self.get = types.MethodType(get, self) + self.set = types.MethodType(set, self) + self.clear = types.MethodType(clear, self) + + if _OrderedDict is not None: + class _FifoCache(object): + def __init__(self, size): + self.not_in_cache = not_in_cache = object() + + cache = _OrderedDict() + + def get(self, key): + return cache.get(key, not_in_cache) + + def set(self, key, value): + cache[key] = value + if len(cache) > size: + cache.popitem(False) + + def clear(self): + cache.clear() + + self.get = types.MethodType(get, self) + self.set = types.MethodType(set, self) + self.clear = types.MethodType(clear, self) + + else: + class _FifoCache(object): + def __init__(self, size): + self.not_in_cache = not_in_cache = object() + + cache = {} + key_fifo = collections.deque([], size) + + def get(self, key): + return cache.get(key, not_in_cache) + + def set(self, key, value): + cache[key] = value + if len(cache) > size: + cache.pop(key_fifo.popleft(), None) + key_fifo.append(key) + + def clear(self): + cache.clear() + key_fifo.clear() + + self.get = types.MethodType(get, self) + self.set = types.MethodType(set, self) + self.clear = types.MethodType(clear, self) + + # argument cache for optimizing repeated calls when backtracking through recursive expressions + packrat_cache = {} # this is set later by enabledPackrat(); this is here so that resetCache() doesn't fail + packrat_cache_lock = RLock() + packrat_cache_stats = [0, 0] # this method gets repeatedly called during backtracking with the same arguments - # we can cache these arguments and save ourselves the trouble of re-parsing the contained expression def _parseCache( self, instring, loc, doActions=True, callPreParse=True ): - lookup = (self,instring,loc,callPreParse,doActions) - if lookup in ParserElement._exprArgCache: - value = ParserElement._exprArgCache[ lookup ] - if isinstance(value, Exception): - raise value - return 
(value[0],value[1].copy()) - else: - try: - value = self._parseNoCache( instring, loc, doActions, callPreParse ) - ParserElement._exprArgCache[ lookup ] = (value[0],value[1].copy()) - return value - except ParseBaseException as pe: - pe.__traceback__ = None - ParserElement._exprArgCache[ lookup ] = pe - raise + HIT, MISS = 0, 1 + lookup = (self, instring, loc, callPreParse, doActions) + with ParserElement.packrat_cache_lock: + cache = ParserElement.packrat_cache + value = cache.get(lookup) + if value is cache.not_in_cache: + ParserElement.packrat_cache_stats[MISS] += 1 + try: + value = self._parseNoCache(instring, loc, doActions, callPreParse) + except ParseBaseException as pe: + # cache a copy of the exception, without the traceback + cache.set(lookup, pe.__class__(*pe.args)) + raise + else: + cache.set(lookup, (value[0], value[1].copy())) + return value + else: + ParserElement.packrat_cache_stats[HIT] += 1 + if isinstance(value, Exception): + raise value + return (value[0], value[1].copy()) _parse = _parseNoCache - # argument cache for optimizing repeated calls when backtracking through recursive expressions - _exprArgCache = {} @staticmethod def resetCache(): - ParserElement._exprArgCache.clear() + ParserElement.packrat_cache.clear() + ParserElement.packrat_cache_stats[:] = [0] * len(ParserElement.packrat_cache_stats) _packratEnabled = False @staticmethod - def enablePackrat(): + def enablePackrat(cache_size_limit=128): """Enables "packrat" parsing, which adds memoizing to the parsing logic. Repeated parse attempts at the same string location (which happens often in many complex grammars) can immediately return a cached value, instead of re-executing parsing/validating code. Memoizing is done of both valid results and parsing exceptions. - + + Parameters: + - cache_size_limit - (default=C{128}) - if an integer value is provided + will limit the size of the packrat cache; if None is passed, then + the cache size will be unbounded; if 0 is passed, the cache will + be effectively disabled. + This speedup may break existing programs that use parse actions that have side-effects. For this reason, packrat parsing is disabled when you first import pyparsing. To activate the packrat feature, your @@ -1088,32 +1555,45 @@ class ParserElement(object): C{enablePackrat} before calling C{psyco.full()}. If you do not do this, Python will crash. For best results, call C{enablePackrat()} immediately after importing pyparsing. + + Example:: + import pyparsing + pyparsing.ParserElement.enablePackrat() """ if not ParserElement._packratEnabled: ParserElement._packratEnabled = True + if cache_size_limit is None: + ParserElement.packrat_cache = ParserElement._UnboundedCache() + else: + ParserElement.packrat_cache = ParserElement._FifoCache(cache_size_limit) ParserElement._parse = ParserElement._parseCache def parseString( self, instring, parseAll=False ): - """Execute the parse expression with the given string. - This is the main interface to the client code, once the complete - expression has been built. + """ + Execute the parse expression with the given string. + This is the main interface to the client code, once the complete + expression has been built. - If you want the grammar to require that the entire input string be - successfully parsed, then set C{parseAll} to True (equivalent to ending - the grammar with C{L{StringEnd()}}). 
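The packrat rework above replaces the unbounded _exprArgCache with a lock-protected FIFO cache whose size is set via enablePackrat(cache_size_limit=128); per the new docstring, None keeps the cache unbounded and 0 effectively disables it. A sketch of enabling it, assuming a standalone pyparsing >= 2.1 install (the copy patched here lives under pkg_resources/_vendor):

    import pyparsing as pp

    # Bounded memoization: keep at most 128 cached parse attempts (FIFO eviction).
    pp.ParserElement.enablePackrat(cache_size_limit=128)

    integer = pp.Word(pp.nums)
    date_str = integer("year") + "/" + integer("month") + "/" + integer("day")
    print(date_str.parseString("1999/12/31").asDict())
    # -> {'year': '1999', 'month': '12', 'day': '31'}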
+ If you want the grammar to require that the entire input string be + successfully parsed, then set C{parseAll} to True (equivalent to ending + the grammar with C{L{StringEnd()}}). - Note: C{parseString} implicitly calls C{expandtabs()} on the input string, - in order to report proper column numbers in parse actions. - If the input string contains tabs and - the grammar uses parse actions that use the C{loc} argument to index into the - string being parsed, you can ensure you have a consistent view of the input - string by: - - calling C{parseWithTabs} on your grammar before calling C{parseString} - (see L{I{parseWithTabs}}) - - define your parse action using the full C{(s,loc,toks)} signature, and - reference the input string using the parse action's C{s} argument - - explictly expand the tabs in your input string before calling - C{parseString} + Note: C{parseString} implicitly calls C{expandtabs()} on the input string, + in order to report proper column numbers in parse actions. + If the input string contains tabs and + the grammar uses parse actions that use the C{loc} argument to index into the + string being parsed, you can ensure you have a consistent view of the input + string by: + - calling C{parseWithTabs} on your grammar before calling C{parseString} + (see L{I{parseWithTabs}}) + - define your parse action using the full C{(s,loc,toks)} signature, and + reference the input string using the parse action's C{s} argument + - explictly expand the tabs in your input string before calling + C{parseString} + + Example:: + Word('a').parseString('aaaaabaaa') # -> ['aaaaa'] + Word('a').parseString('aaaaabaaa', parseAll=True) # -> Exception: Expected end of text """ ParserElement.resetCache() if not self.streamlined: @@ -1139,14 +1619,35 @@ class ParserElement(object): return tokens def scanString( self, instring, maxMatches=_MAX_INT, overlap=False ): - """Scan the input string for expression matches. Each match will return the - matching tokens, start location, and end location. May be called with optional - C{maxMatches} argument, to clip scanning after 'n' matches are found. If - C{overlap} is specified, then overlapping matches will be reported. + """ + Scan the input string for expression matches. Each match will return the + matching tokens, start location, and end location. May be called with optional + C{maxMatches} argument, to clip scanning after 'n' matches are found. If + C{overlap} is specified, then overlapping matches will be reported. - Note that the start and end locations are reported relative to the string - being parsed. See L{I{parseString}} for more information on parsing - strings with embedded tabs.""" + Note that the start and end locations are reported relative to the string + being parsed. See L{I{parseString}} for more information on parsing + strings with embedded tabs. + + Example:: + source = "sldjf123lsdjjkf345sldkjf879lkjsfd987" + print(source) + for tokens,start,end in Word(alphas).scanString(source): + print(' '*start + '^'*(end-start)) + print(' '*start + tokens[0]) + + prints:: + + sldjf123lsdjjkf345sldkjf879lkjsfd987 + ^^^^^ + sldjf + ^^^^^^^ + lsdjjkf + ^^^^^^ + sldkjf + ^^^^^^ + lkjsfd + """ if not self.streamlined: self.streamline() for e in self.ignoreExprs: @@ -1189,12 +1690,22 @@ class ParserElement(object): raise exc def transformString( self, instring ): - """Extension to C{L{scanString}}, to modify matching text with modified tokens that may - be returned from a parse action. 
To use C{transformString}, define a grammar and - attach a parse action to it that modifies the returned token list. - Invoking C{transformString()} on a target string will then scan for matches, - and replace the matched text patterns according to the logic in the parse - action. C{transformString()} returns the resulting transformed string.""" + """ + Extension to C{L{scanString}}, to modify matching text with modified tokens that may + be returned from a parse action. To use C{transformString}, define a grammar and + attach a parse action to it that modifies the returned token list. + Invoking C{transformString()} on a target string will then scan for matches, + and replace the matched text patterns according to the logic in the parse + action. C{transformString()} returns the resulting transformed string. + + Example:: + wd = Word(alphas) + wd.setParseAction(lambda toks: toks[0].title()) + + print(wd.transformString("now is the winter of our discontent made glorious summer by this sun of york.")) + Prints:: + Now Is The Winter Of Our Discontent Made Glorious Summer By This Sun Of York. + """ out = [] lastE = 0 # force preservation of s, to minimize unwanted transformation of string, and to @@ -1222,9 +1733,18 @@ class ParserElement(object): raise exc def searchString( self, instring, maxMatches=_MAX_INT ): - """Another extension to C{L{scanString}}, simplifying the access to the tokens found - to match the given parse expression. May be called with optional - C{maxMatches} argument, to clip searching after 'n' matches are found. + """ + Another extension to C{L{scanString}}, simplifying the access to the tokens found + to match the given parse expression. May be called with optional + C{maxMatches} argument, to clip searching after 'n' matches are found. + + Example:: + # a capitalized word starts with an uppercase letter, followed by zero or more lowercase letters + cap_word = Word(alphas.upper(), alphas.lower()) + + print(cap_word.searchString("More than Iron, more than Lead, more than Gold I need Electricity")) + prints:: + ['More', 'Iron', 'Lead', 'Gold', 'I'] """ try: return ParseResults([ t for t,s,e in self.scanString( instring, maxMatches ) ]) @@ -1235,10 +1755,42 @@ class ParserElement(object): # catch and re-raise exception from here, clears out pyparsing internal stack trace raise exc + def split(self, instring, maxsplit=_MAX_INT, includeSeparators=False): + """ + Generator method to split a string using the given expression as a separator. + May be called with optional C{maxsplit} argument, to limit the number of splits; + and the optional C{includeSeparators} argument (default=C{False}), if the separating + matching text should be included in the split results. + + Example:: + punc = oneOf(list(".,;:/-!?")) + print(list(punc.split("This, this?, this sentence, is badly punctuated!"))) + prints:: + ['This', ' this', '', ' this sentence', ' is badly punctuated', ''] + """ + splits = 0 + last = 0 + for t,s,e in self.scanString(instring, maxMatches=maxsplit): + yield instring[last:s] + if includeSeparators: + yield t[0] + last = e + yield instring[last:] + def __add__(self, other ): - """Implementation of + operator - returns C{L{And}}""" + """ + Implementation of + operator - returns C{L{And}}. Adding strings to a ParserElement + converts them to L{Literal}s by default. + + Example:: + greet = Word(alphas) + "," + Word(alphas) + "!" + hello = "Hello, World!" + print (hello, "->", greet.parseString(hello)) + Prints:: + Hello, World! 
-> ['Hello', ',', 'World', '!'] + """ if isinstance( other, basestring ): - other = ParserElement.literalStringClass( other ) + other = ParserElement._literalStringClass( other ) if not isinstance( other, ParserElement ): warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), SyntaxWarning, stacklevel=2) @@ -1246,9 +1798,11 @@ class ParserElement(object): return And( [ self, other ] ) def __radd__(self, other ): - """Implementation of + operator when left operand is not a C{L{ParserElement}}""" + """ + Implementation of + operator when left operand is not a C{L{ParserElement}} + """ if isinstance( other, basestring ): - other = ParserElement.literalStringClass( other ) + other = ParserElement._literalStringClass( other ) if not isinstance( other, ParserElement ): warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), SyntaxWarning, stacklevel=2) @@ -1256,9 +1810,11 @@ class ParserElement(object): return other + self def __sub__(self, other): - """Implementation of - operator, returns C{L{And}} with error stop""" + """ + Implementation of - operator, returns C{L{And}} with error stop + """ if isinstance( other, basestring ): - other = ParserElement.literalStringClass( other ) + other = ParserElement._literalStringClass( other ) if not isinstance( other, ParserElement ): warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), SyntaxWarning, stacklevel=2) @@ -1266,9 +1822,11 @@ class ParserElement(object): return And( [ self, And._ErrorStop(), other ] ) def __rsub__(self, other ): - """Implementation of - operator when left operand is not a C{L{ParserElement}}""" + """ + Implementation of - operator when left operand is not a C{L{ParserElement}} + """ if isinstance( other, basestring ): - other = ParserElement.literalStringClass( other ) + other = ParserElement._literalStringClass( other ) if not isinstance( other, ParserElement ): warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), SyntaxWarning, stacklevel=2) @@ -1276,24 +1834,24 @@ class ParserElement(object): return other - self def __mul__(self,other): - """Implementation of * operator, allows use of C{expr * 3} in place of - C{expr + expr + expr}. Expressions may also me multiplied by a 2-integer - tuple, similar to C{{min,max}} multipliers in regular expressions. Tuples - may also include C{None} as in: - - C{expr*(n,None)} or C{expr*(n,)} is equivalent + """ + Implementation of * operator, allows use of C{expr * 3} in place of + C{expr + expr + expr}. Expressions may also me multiplied by a 2-integer + tuple, similar to C{{min,max}} multipliers in regular expressions. Tuples + may also include C{None} as in: + - C{expr*(n,None)} or C{expr*(n,)} is equivalent to C{expr*n + L{ZeroOrMore}(expr)} (read as "at least n instances of C{expr}") - - C{expr*(None,n)} is equivalent to C{expr*(0,n)} + - C{expr*(None,n)} is equivalent to C{expr*(0,n)} (read as "0 to n instances of C{expr}") - - C{expr*(None,None)} is equivalent to C{L{ZeroOrMore}(expr)} - - C{expr*(1,None)} is equivalent to C{L{OneOrMore}(expr)} - - Note that C{expr*(None,n)} does not raise an exception if - more than n exprs exist in the input stream; that is, - C{expr*(None,n)} does not enforce a maximum number of expr - occurrences. 
If this behavior is desired, then write - C{expr*(None,n) + ~expr} + - C{expr*(None,None)} is equivalent to C{L{ZeroOrMore}(expr)} + - C{expr*(1,None)} is equivalent to C{L{OneOrMore}(expr)} + Note that C{expr*(None,n)} does not raise an exception if + more than n exprs exist in the input stream; that is, + C{expr*(None,n)} does not enforce a maximum number of expr + occurrences. If this behavior is desired, then write + C{expr*(None,n) + ~expr} """ if isinstance(other,int): minElements, optElements = other,0 @@ -1347,9 +1905,11 @@ class ParserElement(object): return self.__mul__(other) def __or__(self, other ): - """Implementation of | operator - returns C{L{MatchFirst}}""" + """ + Implementation of | operator - returns C{L{MatchFirst}} + """ if isinstance( other, basestring ): - other = ParserElement.literalStringClass( other ) + other = ParserElement._literalStringClass( other ) if not isinstance( other, ParserElement ): warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), SyntaxWarning, stacklevel=2) @@ -1357,9 +1917,11 @@ class ParserElement(object): return MatchFirst( [ self, other ] ) def __ror__(self, other ): - """Implementation of | operator when left operand is not a C{L{ParserElement}}""" + """ + Implementation of | operator when left operand is not a C{L{ParserElement}} + """ if isinstance( other, basestring ): - other = ParserElement.literalStringClass( other ) + other = ParserElement._literalStringClass( other ) if not isinstance( other, ParserElement ): warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), SyntaxWarning, stacklevel=2) @@ -1367,9 +1929,11 @@ class ParserElement(object): return other | self def __xor__(self, other ): - """Implementation of ^ operator - returns C{L{Or}}""" + """ + Implementation of ^ operator - returns C{L{Or}} + """ if isinstance( other, basestring ): - other = ParserElement.literalStringClass( other ) + other = ParserElement._literalStringClass( other ) if not isinstance( other, ParserElement ): warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), SyntaxWarning, stacklevel=2) @@ -1377,9 +1941,11 @@ class ParserElement(object): return Or( [ self, other ] ) def __rxor__(self, other ): - """Implementation of ^ operator when left operand is not a C{L{ParserElement}}""" + """ + Implementation of ^ operator when left operand is not a C{L{ParserElement}} + """ if isinstance( other, basestring ): - other = ParserElement.literalStringClass( other ) + other = ParserElement._literalStringClass( other ) if not isinstance( other, ParserElement ): warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), SyntaxWarning, stacklevel=2) @@ -1387,9 +1953,11 @@ class ParserElement(object): return other ^ self def __and__(self, other ): - """Implementation of & operator - returns C{L{Each}}""" + """ + Implementation of & operator - returns C{L{Each}} + """ if isinstance( other, basestring ): - other = ParserElement.literalStringClass( other ) + other = ParserElement._literalStringClass( other ) if not isinstance( other, ParserElement ): warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), SyntaxWarning, stacklevel=2) @@ -1397,9 +1965,11 @@ class ParserElement(object): return Each( [ self, other ] ) def __rand__(self, other ): - """Implementation of & operator when left operand is not a C{L{ParserElement}}""" + """ + Implementation of & operator when left operand is not a C{L{ParserElement}} + """ if isinstance( 
other, basestring ): - other = ParserElement.literalStringClass( other ) + other = ParserElement._literalStringClass( other ) if not isinstance( other, ParserElement ): warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), SyntaxWarning, stacklevel=2) @@ -1407,41 +1977,49 @@ class ParserElement(object): return other & self def __invert__( self ): - """Implementation of ~ operator - returns C{L{NotAny}}""" + """ + Implementation of ~ operator - returns C{L{NotAny}} + """ return NotAny( self ) def __call__(self, name=None): - """Shortcut for C{L{setResultsName}}, with C{listAllMatches=default}:: - userdata = Word(alphas).setResultsName("name") + Word(nums+"-").setResultsName("socsecno") - could be written as:: - userdata = Word(alphas)("name") + Word(nums+"-")("socsecno") - - If C{name} is given with a trailing C{'*'} character, then C{listAllMatches} will be - passed as C{True}. + """ + Shortcut for C{L{setResultsName}}, with C{listAllMatches=False}. + + If C{name} is given with a trailing C{'*'} character, then C{listAllMatches} will be + passed as C{True}. - If C{name} is omitted, same as calling C{L{copy}}. - """ + If C{name} is omitted, same as calling C{L{copy}}. + + Example:: + # these are equivalent + userdata = Word(alphas).setResultsName("name") + Word(nums+"-").setResultsName("socsecno") + userdata = Word(alphas)("name") + Word(nums+"-")("socsecno") + """ if name is not None: return self.setResultsName(name) else: return self.copy() def suppress( self ): - """Suppresses the output of this C{ParserElement}; useful to keep punctuation from - cluttering up returned output. + """ + Suppresses the output of this C{ParserElement}; useful to keep punctuation from + cluttering up returned output. """ return Suppress( self ) def leaveWhitespace( self ): - """Disables the skipping of whitespace before matching the characters in the - C{ParserElement}'s defined pattern. This is normally only used internally by - the pyparsing module, but may be needed in some whitespace-sensitive grammars. + """ + Disables the skipping of whitespace before matching the characters in the + C{ParserElement}'s defined pattern. This is normally only used internally by + the pyparsing module, but may be needed in some whitespace-sensitive grammars. """ self.skipWhitespace = False return self def setWhitespaceChars( self, chars ): - """Overrides the default whitespace chars + """ + Overrides the default whitespace chars """ self.skipWhitespace = True self.whiteChars = chars @@ -1449,26 +2027,41 @@ class ParserElement(object): return self def parseWithTabs( self ): - """Overrides default behavior to expand C{}s to spaces before parsing the input string. - Must be called before C{parseString} when the input grammar contains elements that - match C{} characters.""" + """ + Overrides default behavior to expand C{}s to spaces before parsing the input string. + Must be called before C{parseString} when the input grammar contains elements that + match C{} characters. + """ self.keepTabs = True return self def ignore( self, other ): - """Define expression to be ignored (e.g., comments) while doing pattern - matching; may be called repeatedly, to define multiple comment or other - ignorable patterns. """ + Define expression to be ignored (e.g., comments) while doing pattern + matching; may be called repeatedly, to define multiple comment or other + ignorable patterns. 
+ + Example:: + patt = OneOrMore(Word(alphas)) + patt.parseString('ablaj /* comment */ lskjd') # -> ['ablaj'] + + patt.ignore(cStyleComment) + patt.parseString('ablaj /* comment */ lskjd') # -> ['ablaj', 'lskjd'] + """ + if isinstance(other, basestring): + other = Suppress(other) + if isinstance( other, Suppress ): if other not in self.ignoreExprs: - self.ignoreExprs.append( other.copy() ) + self.ignoreExprs.append(other) else: self.ignoreExprs.append( Suppress( other.copy() ) ) return self def setDebugActions( self, startAction, successAction, exceptionAction ): - """Enable display of debugging messages while doing pattern matching.""" + """ + Enable display of debugging messages while doing pattern matching. + """ self.debugActions = (startAction or _defaultStartDebugAction, successAction or _defaultSuccessDebugAction, exceptionAction or _defaultExceptionDebugAction) @@ -1476,8 +2069,40 @@ class ParserElement(object): return self def setDebug( self, flag=True ): - """Enable display of debugging messages while doing pattern matching. - Set C{flag} to True to enable, False to disable.""" + """ + Enable display of debugging messages while doing pattern matching. + Set C{flag} to True to enable, False to disable. + + Example:: + wd = Word(alphas).setName("alphaword") + integer = Word(nums).setName("numword") + term = wd | integer + + # turn on debugging for wd + wd.setDebug() + + OneOrMore(term).parseString("abc 123 xyz 890") + + prints:: + Match alphaword at loc 0(1,1) + Matched alphaword -> ['abc'] + Match alphaword at loc 3(1,4) + Exception raised:Expected alphaword (at char 4), (line:1, col:5) + Match alphaword at loc 7(1,8) + Matched alphaword -> ['xyz'] + Match alphaword at loc 11(1,12) + Exception raised:Expected alphaword (at char 12), (line:1, col:13) + Match alphaword at loc 15(1,16) + Exception raised:Expected alphaword (at char 15), (line:1, col:16) + + The output shown is that produced by the default debug actions - custom debug actions can be + specified using L{setDebugActions}. Prior to attempting + to match the C{wd} expression, the debugging message C{"Match at loc (,)"} + is shown. Then if the parse succeeds, a C{"Matched"} message is shown, or an C{"Exception raised"} + message is shown. Also note the use of L{setName} to assign a human-readable name to the expression, + which makes debugging and exception messages easier to understand - for instance, the default + name created for the C{Word} expression without calling C{setName} is C{"W:(ABCD...)"}. + """ if flag: self.setDebugActions( _defaultStartDebugAction, _defaultSuccessDebugAction, _defaultExceptionDebugAction ) else: @@ -1499,20 +2124,22 @@ class ParserElement(object): pass def validate( self, validateTrace=[] ): - """Check defined expressions for valid structure, check for infinite recursive definitions.""" + """ + Check defined expressions for valid structure, check for infinite recursive definitions. + """ self.checkRecursion( [] ) def parseFile( self, file_or_filename, parseAll=False ): - """Execute the parse expression on the given file or filename. - If a filename is specified (instead of a file object), - the entire file is opened, read, and closed before parsing. + """ + Execute the parse expression on the given file or filename. + If a filename is specified (instead of a file object), + the entire file is opened, read, and closed before parsing. 
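# --- editor's example (not part of the upstream diff) ----------------------
# A minimal sketch of ParserElement.ignore(), following the docstring example
# added above; assumes the updated pyparsing from this patch is importable
# and uses the bundled cStyleComment helper.
from pyparsing import OneOrMore, Word, alphas, cStyleComment

patt = OneOrMore(Word(alphas))
patt.ignore(cStyleComment)                                   # skip C-style comments
print(patt.parseString("alpha /* skip me */ beta gamma"))    # -> ['alpha', 'beta', 'gamma']
# ----------------------------------------------------------------------------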
""" try: file_contents = file_or_filename.read() except AttributeError: - f = open(file_or_filename, "r") - file_contents = f.read() - f.close() + with open(file_or_filename, "r") as f: + file_contents = f.read() try: return self.parseString(file_contents, parseAll) except ParseBaseException as exc: @@ -1524,13 +2151,9 @@ class ParserElement(object): def __eq__(self,other): if isinstance(other, ParserElement): - return self is other or self.__dict__ == other.__dict__ + return self is other or vars(self) == vars(other) elif isinstance(other, basestring): - try: - self.parseString(_ustr(other), parseAll=True) - return True - except ParseBaseException: - return False + return self.matches(other) else: return super(ParserElement,self)==other @@ -1546,40 +2169,169 @@ class ParserElement(object): def __rne__(self,other): return not (self == other) - def runTests(self, tests, parseAll=False): - """Execute the parse expression on a series of test strings, showing each - test, the parsed results or where the parse failed. Quick and easy way to - run a parse expression against a list of sample strings. + def matches(self, testString, parseAll=True): + """ + Method for quick testing of a parser against a test string. Good for simple + inline microtests of sub expressions while building up larger parser. - Parameters: - - tests - a list of separate test strings, or a multiline string of test strings - - parseAll - (default=False) - flag to pass to C{L{parseString}} when running tests + Parameters: + - testString - to test against this expression for a match + - parseAll - (default=C{True}) - flag to pass to C{L{parseString}} when running tests + + Example:: + expr = Word(nums) + assert expr.matches("100") + """ + try: + self.parseString(_ustr(testString), parseAll=parseAll) + return True + except ParseBaseException: + return False + + def runTests(self, tests, parseAll=True, comment='#', fullDump=True, printResults=True, failureTests=False): + """ + Execute the parse expression on a series of test strings, showing each + test, the parsed results or where the parse failed. Quick and easy way to + run a parse expression against a list of sample strings. + + Parameters: + - tests - a list of separate test strings, or a multiline string of test strings + - parseAll - (default=C{True}) - flag to pass to C{L{parseString}} when running tests + - comment - (default=C{'#'}) - expression for indicating embedded comments in the test + string; pass None to disable comment filtering + - fullDump - (default=C{True}) - dump results as list followed by results names in nested outline; + if False, only dump nested list + - printResults - (default=C{True}) prints test output to stdout + - failureTests - (default=C{False}) indicates if these tests are expected to fail parsing + + Returns: a (success, results) tuple, where success indicates that all tests succeeded + (or failed if C{failureTests} is True), and the results contain a list of lines of each + test's output + + Example:: + number_expr = pyparsing_common.number.copy() + + result = number_expr.runTests(''' + # unsigned integer + 100 + # negative integer + -100 + # float with scientific notation + 6.02e23 + # integer with scientific notation + 1e-12 + ''') + print("Success" if result[0] else "Failed!") + + result = number_expr.runTests(''' + # stray character + 100Z + # missing leading digit before '.' + -.100 + # too many '.' 
+ 3.14.159 + ''', failureTests=True) + print("Success" if result[0] else "Failed!") + prints:: + # unsigned integer + 100 + [100] + + # negative integer + -100 + [-100] + + # float with scientific notation + 6.02e23 + [6.02e+23] + + # integer with scientific notation + 1e-12 + [1e-12] + + Success + + # stray character + 100Z + ^ + FAIL: Expected end of text (at char 3), (line:1, col:4) + + # missing leading digit before '.' + -.100 + ^ + FAIL: Expected {real number with scientific notation | real number | signed integer} (at char 0), (line:1, col:1) + + # too many '.' + 3.14.159 + ^ + FAIL: Expected end of text (at char 4), (line:1, col:5) + + Success + + Each test string must be on a single line. If you want to test a string that spans multiple + lines, create a test like this:: + + expr.runTest(r"this is a test\\n of strings that spans \\n 3 lines") + + (Note that this is a raw string literal, you must include the leading 'r'.) """ if isinstance(tests, basestring): - tests = map(str.strip, tests.splitlines()) + tests = list(map(str.strip, tests.rstrip().splitlines())) + if isinstance(comment, basestring): + comment = Literal(comment) + allResults = [] + comments = [] + success = True for t in tests: - out = [t] + if comment is not None and comment.matches(t, False) or comments and not t: + comments.append(t) + continue + if not t: + continue + out = ['\n'.join(comments), t] + comments = [] try: - out.append(self.parseString(t, parseAll=parseAll).dump()) - except ParseException as pe: + t = t.replace(r'\n','\n') + result = self.parseString(t, parseAll=parseAll) + out.append(result.dump(full=fullDump)) + success = success and not failureTests + except ParseBaseException as pe: + fatal = "(FATAL)" if isinstance(pe, ParseFatalException) else "" if '\n' in t: out.append(line(pe.loc, t)) - out.append(' '*(col(pe.loc,t)-1) + '^') + out.append(' '*(col(pe.loc,t)-1) + '^' + fatal) else: - out.append(' '*pe.loc + '^') - out.append(str(pe)) - out.append('') - print('\n'.join(out)) + out.append(' '*pe.loc + '^' + fatal) + out.append("FAIL: " + str(pe)) + success = success and failureTests + result = pe + except Exception as exc: + out.append("FAIL-EXCEPTION: " + str(exc)) + success = success and failureTests + result = exc + + if printResults: + if fullDump: + out.append('') + print('\n'.join(out)) + + allResults.append((t, result)) + + return success, allResults class Token(ParserElement): - """Abstract C{ParserElement} subclass, for defining atomic matching patterns.""" + """ + Abstract C{ParserElement} subclass, for defining atomic matching patterns. + """ def __init__( self ): super(Token,self).__init__( savelist=False ) class Empty(Token): - """An empty token, will always match.""" + """ + An empty token, will always match. + """ def __init__( self ): super(Empty,self).__init__() self.name = "Empty" @@ -1588,7 +2340,9 @@ class Empty(Token): class NoMatch(Token): - """A token that will never match.""" + """ + A token that will never match. + """ def __init__( self ): super(NoMatch,self).__init__() self.name = "NoMatch" @@ -1601,7 +2355,19 @@ class NoMatch(Token): class Literal(Token): - """Token to exactly match a specified string.""" + """ + Token to exactly match a specified string. + + Example:: + Literal('blah').parseString('blah') # -> ['blah'] + Literal('blah').parseString('blahfooblah') # -> ['blah'] + Literal('blah').parseString('bla') # -> Exception: Expected "blah" + + For case-insensitive matching, use L{CaselessLiteral}. 
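# --- editor's example (not part of the upstream diff) ----------------------
# Sketch of the new matches()/runTests() helpers added by this patch; the
# test strings are illustrative. runTests() returns a (success, results)
# tuple and echoes each test to stdout by default.
from pyparsing import Word, nums

integer = Word(nums)
assert integer.matches("8675309")      # quick inline check, parseAll=True by default

success, _ = integer.runTests('''
    # a couple of plain integers
    100
    8675309
    ''')
print("all tests passed" if success else "some tests failed")
# ----------------------------------------------------------------------------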
+ + For keyword matching (force word break before and after the matched string), + use L{Keyword} or L{CaselessKeyword}. + """ def __init__( self, matchString ): super(Literal,self).__init__() self.match = matchString @@ -1627,22 +2393,31 @@ class Literal(Token): return loc+self.matchLen, self.match raise ParseException(instring, loc, self.errmsg, self) _L = Literal -ParserElement.literalStringClass = Literal +ParserElement._literalStringClass = Literal class Keyword(Token): - """Token to exactly match a specified string as a keyword, that is, it must be - immediately followed by a non-keyword character. Compare with C{L{Literal}}:: - Literal("if") will match the leading C{'if'} in C{'ifAndOnlyIf'}. - Keyword("if") will not; it will only match the leading C{'if'} in C{'if x=1'}, or C{'if(y==2)'} - Accepts two optional constructor arguments in addition to the keyword string: - C{identChars} is a string of characters that would be valid identifier characters, - defaulting to all alphanumerics + "_" and "$"; C{caseless} allows case-insensitive - matching, default is C{False}. + """ + Token to exactly match a specified string as a keyword, that is, it must be + immediately followed by a non-keyword character. Compare with C{L{Literal}}: + - C{Literal("if")} will match the leading C{'if'} in C{'ifAndOnlyIf'}. + - C{Keyword("if")} will not; it will only match the leading C{'if'} in C{'if x=1'}, or C{'if(y==2)'} + Accepts two optional constructor arguments in addition to the keyword string: + - C{identChars} is a string of characters that would be valid identifier characters, + defaulting to all alphanumerics + "_" and "$" + - C{caseless} allows case-insensitive matching, default is C{False}. + + Example:: + Keyword("start").parseString("start") # -> ['start'] + Keyword("start").parseString("starting") # -> Exception + + For case-insensitive matching, use L{CaselessKeyword}. """ DEFAULT_KEYWORD_CHARS = alphanums+"_$" - def __init__( self, matchString, identChars=DEFAULT_KEYWORD_CHARS, caseless=False ): + def __init__( self, matchString, identChars=None, caseless=False ): super(Keyword,self).__init__() + if identChars is None: + identChars = Keyword.DEFAULT_KEYWORD_CHARS self.match = matchString self.matchLen = len(matchString) try: @@ -1686,9 +2461,15 @@ class Keyword(Token): Keyword.DEFAULT_KEYWORD_CHARS = chars class CaselessLiteral(Literal): - """Token to match a specified string, ignoring case of letters. - Note: the matched results will always be in the case of the given - match string, NOT the case of the input text. + """ + Token to match a specified string, ignoring case of letters. + Note: the matched results will always be in the case of the given + match string, NOT the case of the input text. + + Example:: + OneOrMore(CaselessLiteral("CMD")).parseString("cmd CMD Cmd10") # -> ['CMD', 'CMD', 'CMD'] + + (Contrast with example for L{CaselessKeyword}.) """ def __init__( self, matchString ): super(CaselessLiteral,self).__init__( matchString.upper() ) @@ -1703,7 +2484,15 @@ class CaselessLiteral(Literal): raise ParseException(instring, loc, self.errmsg, self) class CaselessKeyword(Keyword): - def __init__( self, matchString, identChars=Keyword.DEFAULT_KEYWORD_CHARS ): + """ + Caseless version of L{Keyword}. + + Example:: + OneOrMore(CaselessKeyword("CMD")).parseString("cmd CMD Cmd10") # -> ['CMD', 'CMD'] + + (Contrast with example for L{CaselessLiteral}.) 
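# --- editor's example (not part of the upstream diff) ----------------------
# Sketch contrasting Literal and Keyword as described above; assumes the
# updated pyparsing is installed and catches the expected parse failure.
from pyparsing import Literal, Keyword, CaselessKeyword, ParseException

print(Literal("if").parseString("ifAndOnlyIf"))      # -> ['if']  (prefix match is fine)
try:
    Keyword("if").parseString("ifAndOnlyIf")         # keyword requires a word break after 'if'
except ParseException as pe:
    print("Keyword rejected it:", pe)
print(CaselessKeyword("if").parseString("IF x"))     # -> ['if']
# ----------------------------------------------------------------------------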
+ """ + def __init__( self, matchString, identChars=None ): super(CaselessKeyword,self).__init__( matchString, identChars, caseless=True ) def parseImpl( self, instring, loc, doActions=True ): @@ -1712,17 +2501,113 @@ class CaselessKeyword(Keyword): return loc+self.matchLen, self.match raise ParseException(instring, loc, self.errmsg, self) +class CloseMatch(Token): + """ + A variation on L{Literal} which matches "close" matches, that is, + strings with at most 'n' mismatching characters. C{CloseMatch} takes parameters: + - C{match_string} - string to be matched + - C{maxMismatches} - (C{default=1}) maximum number of mismatches allowed to count as a match + + The results from a successful parse will contain the matched text from the input string and the following named results: + - C{mismatches} - a list of the positions within the match_string where mismatches were found + - C{original} - the original match_string used to compare against the input string + + If C{mismatches} is an empty list, then the match was an exact match. + + Example:: + patt = CloseMatch("ATCATCGAATGGA") + patt.parseString("ATCATCGAAXGGA") # -> (['ATCATCGAAXGGA'], {'mismatches': [[9]], 'original': ['ATCATCGAATGGA']}) + patt.parseString("ATCAXCGAAXGGA") # -> Exception: Expected 'ATCATCGAATGGA' (with up to 1 mismatches) (at char 0), (line:1, col:1) + + # exact match + patt.parseString("ATCATCGAATGGA") # -> (['ATCATCGAATGGA'], {'mismatches': [[]], 'original': ['ATCATCGAATGGA']}) + + # close match allowing up to 2 mismatches + patt = CloseMatch("ATCATCGAATGGA", maxMismatches=2) + patt.parseString("ATCAXCGAAXGGA") # -> (['ATCAXCGAAXGGA'], {'mismatches': [[4, 9]], 'original': ['ATCATCGAATGGA']}) + """ + def __init__(self, match_string, maxMismatches=1): + super(CloseMatch,self).__init__() + self.name = match_string + self.match_string = match_string + self.maxMismatches = maxMismatches + self.errmsg = "Expected %r (with up to %d mismatches)" % (self.match_string, self.maxMismatches) + self.mayIndexError = False + self.mayReturnEmpty = False + + def parseImpl( self, instring, loc, doActions=True ): + start = loc + instrlen = len(instring) + maxloc = start + len(self.match_string) + + if maxloc <= instrlen: + match_string = self.match_string + match_stringloc = 0 + mismatches = [] + maxMismatches = self.maxMismatches + + for match_stringloc,s_m in enumerate(zip(instring[loc:maxloc], self.match_string)): + src,mat = s_m + if src != mat: + mismatches.append(match_stringloc) + if len(mismatches) > maxMismatches: + break + else: + loc = match_stringloc + 1 + results = ParseResults([instring[start:loc]]) + results['original'] = self.match_string + results['mismatches'] = mismatches + return loc, results + + raise ParseException(instring, loc, self.errmsg, self) + + class Word(Token): - """Token for matching words composed of allowed character sets. - Defined with string containing all allowed initial characters, - an optional string containing allowed body characters (if omitted, - defaults to the initial character set), and an optional minimum, - maximum, and/or exact length. The default value for C{min} is 1 (a - minimum value < 1 is not valid); the default values for C{max} and C{exact} - are 0, meaning no maximum or exact length restriction. An optional - C{exclude} parameter can list characters that might be found in - the input C{bodyChars} string; useful to define a word of all printables - except for one or two characters, for instance. + """ + Token for matching words composed of allowed character sets. 
+ Defined with string containing all allowed initial characters, + an optional string containing allowed body characters (if omitted, + defaults to the initial character set), and an optional minimum, + maximum, and/or exact length. The default value for C{min} is 1 (a + minimum value < 1 is not valid); the default values for C{max} and C{exact} + are 0, meaning no maximum or exact length restriction. An optional + C{excludeChars} parameter can list characters that might be found in + the input C{bodyChars} string; useful to define a word of all printables + except for one or two characters, for instance. + + L{srange} is useful for defining custom character set strings for defining + C{Word} expressions, using range notation from regular expression character sets. + + A common mistake is to use C{Word} to match a specific literal string, as in + C{Word("Address")}. Remember that C{Word} uses the string argument to define + I{sets} of matchable characters. This expression would match "Add", "AAA", + "dAred", or any other word made up of the characters 'A', 'd', 'r', 'e', and 's'. + To match an exact literal string, use L{Literal} or L{Keyword}. + + pyparsing includes helper strings for building Words: + - L{alphas} + - L{nums} + - L{alphanums} + - L{hexnums} + - L{alphas8bit} (alphabetic characters in ASCII range 128-255 - accented, tilded, umlauted, etc.) + - L{punc8bit} (non-alphabetic characters in ASCII range 128-255 - currency, symbols, superscripts, diacriticals, etc.) + - L{printables} (any non-whitespace character) + + Example:: + # a word composed of digits + integer = Word(nums) # equivalent to Word("0123456789") or Word(srange("0-9")) + + # a word with a leading capital, and zero or more lowercase + capital_word = Word(alphas.upper(), alphas.lower()) + + # hostnames are alphanumeric, with leading alpha, and '-' + hostname = Word(alphas, alphanums+'-') + + # roman numeral (not a strict parser, accepts invalid mix of characters) + roman = Word("IVXLCDM") + + # any string of non-whitespace characters, except for ',' + csv_value = Word(printables, excludeChars=",") """ def __init__( self, initChars, bodyChars=None, min=1, max=0, exact=0, asKeyword=False, excludeChars=None ): super(Word,self).__init__() @@ -1775,7 +2660,7 @@ class Word(Token): self.reString = r"\b"+self.reString+r"\b" try: self.re = re.compile( self.reString ) - except: + except Exception: self.re = None def parseImpl( self, instring, loc, doActions=True ): @@ -1816,7 +2701,7 @@ class Word(Token): def __str__( self ): try: return super(Word,self).__str__() - except: + except Exception: pass @@ -1837,8 +2722,17 @@ class Word(Token): class Regex(Token): - """Token for matching strings that match a given regular expression. - Defined with string specifying the regular expression in a form recognized by the inbuilt Python re module. + """ + Token for matching strings that match a given regular expression. + Defined with string specifying the regular expression in a form recognized by the inbuilt Python re module. + If the given regex contains named groups (defined using C{(?P...)}), these will be preserved as + named parse results. 
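# --- editor's example (not part of the upstream diff) ----------------------
# Sketch of Word() defining character *sets* (not literal strings), per the
# docstring above; `hostname` and `csv_value` are illustrative names.
from pyparsing import Word, alphas, alphanums, printables, delimitedList

hostname = Word(alphas, alphanums + '-')          # leading alpha, then alphanumerics or '-'
print(hostname.parseString("web-01"))             # -> ['web-01']

csv_value = Word(printables, excludeChars=",")    # any non-whitespace chars except ','
print(delimitedList(csv_value).parseString("red,green,blue"))   # -> ['red', 'green', 'blue']
# ----------------------------------------------------------------------------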
+ + Example:: + realnum = Regex(r"[+-]?\d+\.\d*") + date = Regex(r'(?P\d{4})-(?P\d\d?)-(?P\d\d?)') + # ref: http://stackoverflow.com/questions/267399/how-do-you-match-only-valid-roman-numerals-with-a-regular-expression + roman = Regex(r"M{0,4}(CM|CD|D?C{0,3})(XC|XL|L?X{0,3})(IX|IV|V?I{0,3})") """ compiledREtype = type(re.compile("[A-Z]")) def __init__( self, pattern, flags=0): @@ -1846,7 +2740,7 @@ class Regex(Token): super(Regex,self).__init__() if isinstance(pattern, basestring): - if len(pattern) == 0: + if not pattern: warnings.warn("null string passed to Regex; use Empty() instead", SyntaxWarning, stacklevel=2) @@ -1891,7 +2785,7 @@ class Regex(Token): def __str__( self ): try: return super(Regex,self).__str__() - except: + except Exception: pass if self.strRepr is None: @@ -1901,23 +2795,36 @@ class Regex(Token): class QuotedString(Token): - """Token for matching strings that are delimited by quoting characters. + r""" + Token for matching strings that are delimited by quoting characters. + + Defined with the following parameters: + - quoteChar - string of one or more characters defining the quote delimiting string + - escChar - character to escape quotes, typically backslash (default=C{None}) + - escQuote - special quote sequence to escape an embedded quote string (such as SQL's "" to escape an embedded ") (default=C{None}) + - multiline - boolean indicating whether quotes can span multiple lines (default=C{False}) + - unquoteResults - boolean indicating whether the matched text should be unquoted (default=C{True}) + - endQuoteChar - string of one or more characters defining the end of the quote delimited string (default=C{None} => same as quoteChar) + - convertWhitespaceEscapes - convert escaped whitespace (C{'\t'}, C{'\n'}, etc.) to actual whitespace (default=C{True}) + + Example:: + qs = QuotedString('"') + print(qs.searchString('lsjdf "This is the quote" sldjf')) + complex_qs = QuotedString('{{', endQuoteChar='}}') + print(complex_qs.searchString('lsjdf {{This is the "quote"}} sldjf')) + sql_qs = QuotedString('"', escQuote='""') + print(sql_qs.searchString('lsjdf "This is the quote with ""embedded"" quotes" sldjf')) + prints:: + [['This is the quote']] + [['This is the "quote"']] + [['This is the quote with "embedded" quotes']] """ - def __init__( self, quoteChar, escChar=None, escQuote=None, multiline=False, unquoteResults=True, endQuoteChar=None): - """ - Defined with the following parameters: - - quoteChar - string of one or more characters defining the quote delimiting string - - escChar - character to escape quotes, typically backslash (default=None) - - escQuote - special quote sequence to escape an embedded quote string (such as SQL's "" to escape an embedded ") (default=None) - - multiline - boolean indicating whether quotes can span multiple lines (default=C{False}) - - unquoteResults - boolean indicating whether the matched text should be unquoted (default=C{True}) - - endQuoteChar - string of one or more characters defining the end of the quote delimited string (default=C{None} => same as quoteChar) - """ + def __init__( self, quoteChar, escChar=None, escQuote=None, multiline=False, unquoteResults=True, endQuoteChar=None, convertWhitespaceEscapes=True): super(QuotedString,self).__init__() # remove white space from quote chars - wont work anyway quoteChar = quoteChar.strip() - if len(quoteChar) == 0: + if not quoteChar: warnings.warn("quoteChar cannot be the empty string",SyntaxWarning,stacklevel=2) raise SyntaxError() @@ -1925,7 +2832,7 @@ class QuotedString(Token): 
endQuoteChar = quoteChar else: endQuoteChar = endQuoteChar.strip() - if len(endQuoteChar) == 0: + if not endQuoteChar: warnings.warn("endQuoteChar cannot be the empty string",SyntaxWarning,stacklevel=2) raise SyntaxError() @@ -1937,6 +2844,7 @@ class QuotedString(Token): self.escChar = escChar self.escQuote = escQuote self.unquoteResults = unquoteResults + self.convertWhitespaceEscapes = convertWhitespaceEscapes if multiline: self.flags = re.MULTILINE | re.DOTALL @@ -1990,6 +2898,17 @@ class QuotedString(Token): ret = ret[self.quoteCharLen:-self.endQuoteCharLen] if isinstance(ret,basestring): + # replace escaped whitespace + if '\\' in ret and self.convertWhitespaceEscapes: + ws_map = { + r'\t' : '\t', + r'\n' : '\n', + r'\f' : '\f', + r'\r' : '\r', + } + for wslit,wschar in ws_map.items(): + ret = ret.replace(wslit, wschar) + # replace escaped characters if self.escChar: ret = re.sub(self.escCharReplacePattern,"\g<1>",ret) @@ -2003,7 +2922,7 @@ class QuotedString(Token): def __str__( self ): try: return super(QuotedString,self).__str__() - except: + except Exception: pass if self.strRepr is None: @@ -2013,11 +2932,20 @@ class QuotedString(Token): class CharsNotIn(Token): - """Token for matching words composed of characters *not* in a given set. - Defined with string containing all disallowed characters, and an optional - minimum, maximum, and/or exact length. The default value for C{min} is 1 (a - minimum value < 1 is not valid); the default values for C{max} and C{exact} - are 0, meaning no maximum or exact length restriction. + """ + Token for matching words composed of characters I{not} in a given set (will + include whitespace in matched characters if not listed in the provided exclusion set - see example). + Defined with string containing all disallowed characters, and an optional + minimum, maximum, and/or exact length. The default value for C{min} is 1 (a + minimum value < 1 is not valid); the default values for C{max} and C{exact} + are 0, meaning no maximum or exact length restriction. + + Example:: + # define a comma-separated-value as anything that is not a ',' + csv_value = CharsNotIn(',') + print(delimitedList(csv_value).parseString("dkls,lsdkjf,s12 34,@!#,213")) + prints:: + ['dkls', 'lsdkjf', 's12 34', '@!#', '213'] """ def __init__( self, notChars, min=1, max=0, exact=0 ): super(CharsNotIn,self).__init__() @@ -2063,7 +2991,7 @@ class CharsNotIn(Token): def __str__( self ): try: return super(CharsNotIn, self).__str__() - except: + except Exception: pass if self.strRepr is None: @@ -2075,11 +3003,13 @@ class CharsNotIn(Token): return self.strRepr class White(Token): - """Special matching class for matching whitespace. Normally, whitespace is ignored - by pyparsing grammars. This class is included when some whitespace structures - are significant. Define with a string containing the whitespace characters to be - matched; default is C{" \\t\\r\\n"}. Also takes optional C{min}, C{max}, and C{exact} arguments, - as defined for the C{L{Word}} class.""" + """ + Special matching class for matching whitespace. Normally, whitespace is ignored + by pyparsing grammars. This class is included when some whitespace structures + are significant. Define with a string containing the whitespace characters to be + matched; default is C{" \\t\\r\\n"}. Also takes optional C{min}, C{max}, and C{exact} arguments, + as defined for the C{L{Word}} class. 
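# --- editor's example (not part of the upstream diff) ----------------------
# Sketch of QuotedString with an embedded-quote escape and a custom end
# delimiter, following the constructor parameters documented above.
from pyparsing import QuotedString

sql_qs = QuotedString('"', escQuote='""')
print(sql_qs.parseString('"He said ""hi"" twice"'))   # -> ['He said "hi" twice']

braced = QuotedString('{{', endQuoteChar='}}')
print(braced.parseString('{{a "quoted" block}}'))     # -> ['a "quoted" block']
# ----------------------------------------------------------------------------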
+ """ whiteStrs = { " " : "", "\t": "", @@ -2131,7 +3061,9 @@ class _PositionToken(Token): self.mayIndexError = False class GoToColumn(_PositionToken): - """Token to advance to a specific column of input text; useful for tabular report scraping.""" + """ + Token to advance to a specific column of input text; useful for tabular report scraping. + """ def __init__( self, colno ): super(GoToColumn,self).__init__() self.col = colno @@ -2153,28 +3085,41 @@ class GoToColumn(_PositionToken): ret = instring[ loc: newloc ] return newloc, ret + class LineStart(_PositionToken): - """Matches if current position is at the beginning of a line within the parse string""" + """ + Matches if current position is at the beginning of a line within the parse string + + Example:: + + test = '''\ + AAA this line + AAA and this line + AAA but not this one + B AAA and definitely not this one + ''' + + for t in (LineStart() + 'AAA' + restOfLine).searchString(test): + print(t) + + Prints:: + ['AAA', ' this line'] + ['AAA', ' and this line'] + + """ def __init__( self ): super(LineStart,self).__init__() - self.setWhitespaceChars( ParserElement.DEFAULT_WHITE_CHARS.replace("\n","") ) self.errmsg = "Expected start of line" - def preParse( self, instring, loc ): - preloc = super(LineStart,self).preParse(instring,loc) - if instring[preloc] == "\n": - loc += 1 - return loc - def parseImpl( self, instring, loc, doActions=True ): - if not( loc==0 or - (loc == self.preParse( instring, 0 )) or - (instring[loc-1] == "\n") ): #col(loc, instring) != 1: - raise ParseException(instring, loc, self.errmsg, self) - return loc, [] + if col(loc, instring) == 1: + return loc, [] + raise ParseException(instring, loc, self.errmsg, self) class LineEnd(_PositionToken): - """Matches if current position is at the end of a line within the parse string""" + """ + Matches if current position is at the end of a line within the parse string + """ def __init__( self ): super(LineEnd,self).__init__() self.setWhitespaceChars( ParserElement.DEFAULT_WHITE_CHARS.replace("\n","") ) @@ -2192,7 +3137,9 @@ class LineEnd(_PositionToken): raise ParseException(instring, loc, self.errmsg, self) class StringStart(_PositionToken): - """Matches if current position is at the beginning of the parse string""" + """ + Matches if current position is at the beginning of the parse string + """ def __init__( self ): super(StringStart,self).__init__() self.errmsg = "Expected start of text" @@ -2205,7 +3152,9 @@ class StringStart(_PositionToken): return loc, [] class StringEnd(_PositionToken): - """Matches if current position is at the end of the parse string""" + """ + Matches if current position is at the end of the parse string + """ def __init__( self ): super(StringEnd,self).__init__() self.errmsg = "Expected end of text" @@ -2221,11 +3170,12 @@ class StringEnd(_PositionToken): raise ParseException(instring, loc, self.errmsg, self) class WordStart(_PositionToken): - """Matches if the current position is at the beginning of a Word, and - is not preceded by any character in a given set of C{wordChars} - (default=C{printables}). To emulate the C{\b} behavior of regular expressions, - use C{WordStart(alphanums)}. C{WordStart} will also match at the beginning of - the string being parsed, or at the beginning of a line. + """ + Matches if the current position is at the beginning of a Word, and + is not preceded by any character in a given set of C{wordChars} + (default=C{printables}). To emulate the C{\b} behavior of regular expressions, + use C{WordStart(alphanums)}. 
C{WordStart} will also match at the beginning of + the string being parsed, or at the beginning of a line. """ def __init__(self, wordChars = printables): super(WordStart,self).__init__() @@ -2240,11 +3190,12 @@ class WordStart(_PositionToken): return loc, [] class WordEnd(_PositionToken): - """Matches if the current position is at the end of a Word, and - is not followed by any character in a given set of C{wordChars} - (default=C{printables}). To emulate the C{\b} behavior of regular expressions, - use C{WordEnd(alphanums)}. C{WordEnd} will also match at the end of - the string being parsed, or at the end of a line. + """ + Matches if the current position is at the end of a Word, and + is not followed by any character in a given set of C{wordChars} + (default=C{printables}). To emulate the C{\b} behavior of regular expressions, + use C{WordEnd(alphanums)}. C{WordEnd} will also match at the end of + the string being parsed, or at the end of a line. """ def __init__(self, wordChars = printables): super(WordEnd,self).__init__() @@ -2262,18 +3213,21 @@ class WordEnd(_PositionToken): class ParseExpression(ParserElement): - """Abstract subclass of ParserElement, for combining and post-processing parsed tokens.""" + """ + Abstract subclass of ParserElement, for combining and post-processing parsed tokens. + """ def __init__( self, exprs, savelist = False ): super(ParseExpression,self).__init__(savelist) if isinstance( exprs, _generatorType ): exprs = list(exprs) if isinstance( exprs, basestring ): - self.exprs = [ Literal( exprs ) ] - elif isinstance( exprs, collections.Sequence ): + self.exprs = [ ParserElement._literalStringClass( exprs ) ] + elif isinstance( exprs, collections.Iterable ): + exprs = list(exprs) # if sequence of strings provided, wrap with Literal if all(isinstance(expr, basestring) for expr in exprs): - exprs = map(Literal, exprs) + exprs = map(ParserElement._literalStringClass, exprs) self.exprs = list(exprs) else: try: @@ -2314,7 +3268,7 @@ class ParseExpression(ParserElement): def __str__( self ): try: return super(ParseExpression,self).__str__() - except: + except Exception: pass if self.strRepr is None: @@ -2351,7 +3305,7 @@ class ParseExpression(ParserElement): self.mayReturnEmpty |= other.mayReturnEmpty self.mayIndexError |= other.mayIndexError - self.errmsg = "Expected " + str(self) + self.errmsg = "Expected " + _ustr(self) return self @@ -2371,9 +3325,19 @@ class ParseExpression(ParserElement): return ret class And(ParseExpression): - """Requires all given C{ParseExpression}s to be found in the given order. - Expressions may be separated by whitespace. - May be constructed using the C{'+'} operator. + """ + Requires all given C{ParseExpression}s to be found in the given order. + Expressions may be separated by whitespace. + May be constructed using the C{'+'} operator. + May also be constructed using the C{'-'} operator, which will suppress backtracking. 
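# --- editor's example (not part of the upstream diff) ----------------------
# Sketch of the '-' operator mentioned above: past the '-' point, And stops
# backtracking and raises ParseSyntaxException, so the error reports the real
# problem. `func_call` is an illustrative grammar, not taken from the patch.
from pyparsing import Word, alphas, nums, Suppress, ParseSyntaxException

func_call = Word(alphas) + Suppress('(') - Word(nums) + Suppress(')')
print(func_call.parseString("f(42)"))        # -> ['f', '42']
try:
    func_call.parseString("f(abc)")          # digits are required once '(' is seen
except ParseSyntaxException as pse:
    print("no backtracking past '(':", pse)
# ----------------------------------------------------------------------------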
+ + Example:: + integer = Word(nums) + name_expr = OneOrMore(Word(alphas)) + + expr = And([integer("id"),name_expr("name"),integer("age")]) + # more easily written as: + expr = integer("id") + name_expr("name") + integer("age") """ class _ErrorStop(Empty): @@ -2405,9 +3369,9 @@ class And(ParseExpression): raise except ParseBaseException as pe: pe.__traceback__ = None - raise ParseSyntaxException(pe) + raise ParseSyntaxException._from_exception(pe) except IndexError: - raise ParseSyntaxException( ParseException(instring, len(instring), self.errmsg, self) ) + raise ParseSyntaxException(instring, len(instring), self.errmsg, self) else: loc, exprtokens = e._parse( instring, loc, doActions ) if exprtokens or exprtokens.haskeys(): @@ -2416,7 +3380,7 @@ class And(ParseExpression): def __iadd__(self, other ): if isinstance( other, basestring ): - other = Literal( other ) + other = ParserElement._literalStringClass( other ) return self.append( other ) #And( [ self, other ] ) def checkRecursion( self, parseElementList ): @@ -2437,9 +3401,18 @@ class And(ParseExpression): class Or(ParseExpression): - """Requires that at least one C{ParseExpression} is found. - If two expressions match, the expression that matches the longest string will be used. - May be constructed using the C{'^'} operator. + """ + Requires that at least one C{ParseExpression} is found. + If two expressions match, the expression that matches the longest string will be used. + May be constructed using the C{'^'} operator. + + Example:: + # construct Or using '^' operator + + number = Word(nums) ^ Combine(Word(nums) + '.' + Word(nums)) + print(number.searchString("123 3.1416 789")) + prints:: + [['123'], ['3.1416'], ['789']] """ def __init__( self, exprs, savelist = False ): super(Or,self).__init__(exprs, savelist) @@ -2488,7 +3461,7 @@ class Or(ParseExpression): def __ixor__(self, other ): if isinstance( other, basestring ): - other = ParserElement.literalStringClass( other ) + other = ParserElement._literalStringClass( other ) return self.append( other ) #Or( [ self, other ] ) def __str__( self ): @@ -2507,9 +3480,21 @@ class Or(ParseExpression): class MatchFirst(ParseExpression): - """Requires that at least one C{ParseExpression} is found. - If two expressions match, the first one listed is the one that will match. - May be constructed using the C{'|'} operator. + """ + Requires that at least one C{ParseExpression} is found. + If two expressions match, the first one listed is the one that will match. + May be constructed using the C{'|'} operator. + + Example:: + # construct MatchFirst using '|' operator + + # watch the order of expressions to match + number = Word(nums) | Combine(Word(nums) + '.' + Word(nums)) + print(number.searchString("123 3.1416 789")) # Fail! -> [['123'], ['3'], ['1416'], ['789']] + + # put more selective expression first + number = Combine(Word(nums) + '.' 
+ Word(nums)) | Word(nums) + print(number.searchString("123 3.1416 789")) # Better -> [['123'], ['3.1416'], ['789']] """ def __init__( self, exprs, savelist = False ): super(MatchFirst,self).__init__(exprs, savelist) @@ -2544,7 +3529,7 @@ class MatchFirst(ParseExpression): def __ior__(self, other ): if isinstance( other, basestring ): - other = ParserElement.literalStringClass( other ) + other = ParserElement._literalStringClass( other ) return self.append( other ) #MatchFirst( [ self, other ] ) def __str__( self ): @@ -2563,9 +3548,58 @@ class MatchFirst(ParseExpression): class Each(ParseExpression): - """Requires all given C{ParseExpression}s to be found, but in any order. - Expressions may be separated by whitespace. - May be constructed using the C{'&'} operator. + """ + Requires all given C{ParseExpression}s to be found, but in any order. + Expressions may be separated by whitespace. + May be constructed using the C{'&'} operator. + + Example:: + color = oneOf("RED ORANGE YELLOW GREEN BLUE PURPLE BLACK WHITE BROWN") + shape_type = oneOf("SQUARE CIRCLE TRIANGLE STAR HEXAGON OCTAGON") + integer = Word(nums) + shape_attr = "shape:" + shape_type("shape") + posn_attr = "posn:" + Group(integer("x") + ',' + integer("y"))("posn") + color_attr = "color:" + color("color") + size_attr = "size:" + integer("size") + + # use Each (using operator '&') to accept attributes in any order + # (shape and posn are required, color and size are optional) + shape_spec = shape_attr & posn_attr & Optional(color_attr) & Optional(size_attr) + + shape_spec.runTests(''' + shape: SQUARE color: BLACK posn: 100, 120 + shape: CIRCLE size: 50 color: BLUE posn: 50,80 + color:GREEN size:20 shape:TRIANGLE posn:20,40 + ''' + ) + prints:: + shape: SQUARE color: BLACK posn: 100, 120 + ['shape:', 'SQUARE', 'color:', 'BLACK', 'posn:', ['100', ',', '120']] + - color: BLACK + - posn: ['100', ',', '120'] + - x: 100 + - y: 120 + - shape: SQUARE + + + shape: CIRCLE size: 50 color: BLUE posn: 50,80 + ['shape:', 'CIRCLE', 'size:', '50', 'color:', 'BLUE', 'posn:', ['50', ',', '80']] + - color: BLUE + - posn: ['50', ',', '80'] + - x: 50 + - y: 80 + - shape: CIRCLE + - size: 50 + + + color: GREEN size: 20 shape: TRIANGLE posn: 20,40 + ['color:', 'GREEN', 'size:', '20', 'shape:', 'TRIANGLE', 'posn:', ['20', ',', '40']] + - color: GREEN + - posn: ['20', ',', '40'] + - x: 20 + - y: 40 + - shape: TRIANGLE + - size: 20 """ def __init__( self, exprs, savelist = True ): super(Each,self).__init__(exprs, savelist) @@ -2619,17 +3653,7 @@ class Each(ParseExpression): loc,results = e._parse(instring,loc,doActions) resultlist.append(results) - finalResults = ParseResults([]) - for r in resultlist: - dups = {} - for k in r.keys(): - if k in finalResults: - tmp = ParseResults(finalResults[k]) - tmp += ParseResults(r[k]) - dups[k] = tmp - finalResults += ParseResults(r) - for k,v in dups.items(): - finalResults[k] = v + finalResults = sum(resultlist, ParseResults([])) return loc, finalResults def __str__( self ): @@ -2648,11 +3672,16 @@ class Each(ParseExpression): class ParseElementEnhance(ParserElement): - """Abstract subclass of C{ParserElement}, for combining and post-processing parsed tokens.""" + """ + Abstract subclass of C{ParserElement}, for combining and post-processing parsed tokens. 
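# --- editor's example (not part of the upstream diff) ----------------------
# Sketch of the ordering caveat in the MatchFirst docstring above: with '|'
# the first matching alternative wins, so list the more specific expression
# first ('^' / Or would pick the longest match instead).
from pyparsing import Word, nums, Combine

number = Combine(Word(nums) + '.' + Word(nums)) | Word(nums)
print(number.searchString("123 3.1416 789"))    # -> [['123'], ['3.1416'], ['789']]
# ----------------------------------------------------------------------------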
+ """ def __init__( self, expr, savelist=False ): super(ParseElementEnhance,self).__init__(savelist) if isinstance( expr, basestring ): - expr = Literal(expr) + if issubclass(ParserElement._literalStringClass, Token): + expr = ParserElement._literalStringClass(expr) + else: + expr = ParserElement._literalStringClass(Literal(expr)) self.expr = expr self.strRepr = None if expr is not None: @@ -2711,7 +3740,7 @@ class ParseElementEnhance(ParserElement): def __str__( self ): try: return super(ParseElementEnhance,self).__str__() - except: + except Exception: pass if self.strRepr is None and self.expr is not None: @@ -2720,10 +3749,22 @@ class ParseElementEnhance(ParserElement): class FollowedBy(ParseElementEnhance): - """Lookahead matching of the given parse expression. C{FollowedBy} - does *not* advance the parsing position within the input string, it only + """ + Lookahead matching of the given parse expression. C{FollowedBy} + does I{not} advance the parsing position within the input string, it only verifies that the specified parse expression matches at the current - position. C{FollowedBy} always returns a null token list.""" + position. C{FollowedBy} always returns a null token list. + + Example:: + # use FollowedBy to match a label only if it is followed by a ':' + data_word = Word(alphas) + label = data_word + FollowedBy(':') + attr_expr = Group(label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)) + + OneOrMore(attr_expr).parseString("shape: SQUARE color: BLACK posn: upper left").pprint() + prints:: + [['shape', 'SQUARE'], ['color', 'BLACK'], ['posn', 'upper left']] + """ def __init__( self, expr ): super(FollowedBy,self).__init__(expr) self.mayReturnEmpty = True @@ -2734,11 +3775,16 @@ class FollowedBy(ParseElementEnhance): class NotAny(ParseElementEnhance): - """Lookahead to disallow matching with the given parse expression. C{NotAny} - does *not* advance the parsing position within the input string, it only - verifies that the specified parse expression does *not* match at the current - position. Also, C{NotAny} does *not* skip over leading whitespace. C{NotAny} - always returns a null token list. May be constructed using the '~' operator.""" + """ + Lookahead to disallow matching with the given parse expression. C{NotAny} + does I{not} advance the parsing position within the input string, it only + verifies that the specified parse expression does I{not} match at the current + position. Also, C{NotAny} does I{not} skip over leading whitespace. C{NotAny} + always returns a null token list. May be constructed using the '~' operator. 
+ + Example:: + + """ def __init__( self, expr ): super(NotAny,self).__init__(expr) #~ self.leaveWhitespace() @@ -2747,11 +3793,7 @@ class NotAny(ParseElementEnhance): self.errmsg = "Found unwanted token, "+_ustr(self.expr) def parseImpl( self, instring, loc, doActions=True ): - try: - self.expr.tryParse( instring, loc ) - except (ParseException,IndexError): - pass - else: + if self.expr.canParseNext(instring, loc): raise ParseException(instring, loc, self.errmsg, self) return loc, [] @@ -2764,65 +3806,69 @@ class NotAny(ParseElementEnhance): return self.strRepr - -class ZeroOrMore(ParseElementEnhance): - """Optional repetition of zero or more of the given expression.""" - def __init__( self, expr ): - super(ZeroOrMore,self).__init__(expr) - self.mayReturnEmpty = True +class _MultipleMatch(ParseElementEnhance): + def __init__( self, expr, stopOn=None): + super(_MultipleMatch, self).__init__(expr) + self.saveAsList = True + ender = stopOn + if isinstance(ender, basestring): + ender = ParserElement._literalStringClass(ender) + self.not_ender = ~ender if ender is not None else None def parseImpl( self, instring, loc, doActions=True ): - tokens = [] + self_expr_parse = self.expr._parse + self_skip_ignorables = self._skipIgnorables + check_ender = self.not_ender is not None + if check_ender: + try_not_ender = self.not_ender.tryParse + + # must be at least one (but first see if we are the stopOn sentinel; + # if so, fail) + if check_ender: + try_not_ender(instring, loc) + loc, tokens = self_expr_parse( instring, loc, doActions, callPreParse=False ) try: - loc, tokens = self.expr._parse( instring, loc, doActions, callPreParse=False ) - hasIgnoreExprs = ( len(self.ignoreExprs) > 0 ) + hasIgnoreExprs = (not not self.ignoreExprs) while 1: + if check_ender: + try_not_ender(instring, loc) if hasIgnoreExprs: - preloc = self._skipIgnorables( instring, loc ) + preloc = self_skip_ignorables( instring, loc ) else: preloc = loc - loc, tmptokens = self.expr._parse( instring, preloc, doActions ) + loc, tmptokens = self_expr_parse( instring, preloc, doActions ) if tmptokens or tmptokens.haskeys(): tokens += tmptokens except (ParseException,IndexError): pass return loc, tokens + +class OneOrMore(_MultipleMatch): + """ + Repetition of one or more of the given expression. + + Parameters: + - expr - expression that must match one or more times + - stopOn - (default=C{None}) - expression for a terminating sentinel + (only required if the sentinel would ordinarily match the repetition + expression) - def __str__( self ): - if hasattr(self,"name"): - return self.name + Example:: + data_word = Word(alphas) + label = data_word + FollowedBy(':') + attr_expr = Group(label + Suppress(':') + OneOrMore(data_word).setParseAction(' '.join)) - if self.strRepr is None: - self.strRepr = "[" + _ustr(self.expr) + "]..." + text = "shape: SQUARE posn: upper left color: BLACK" + OneOrMore(attr_expr).parseString(text).pprint() # Fail! 
read 'color' as data instead of next label -> [['shape', 'SQUARE color']] - return self.strRepr - - def setResultsName( self, name, listAllMatches=False ): - ret = super(ZeroOrMore,self).setResultsName(name,listAllMatches) - ret.saveAsList = True - return ret - - -class OneOrMore(ParseElementEnhance): - """Repetition of one or more of the given expression.""" - def parseImpl( self, instring, loc, doActions=True ): - # must be at least one - loc, tokens = self.expr._parse( instring, loc, doActions, callPreParse=False ) - try: - hasIgnoreExprs = ( len(self.ignoreExprs) > 0 ) - while 1: - if hasIgnoreExprs: - preloc = self._skipIgnorables( instring, loc ) - else: - preloc = loc - loc, tmptokens = self.expr._parse( instring, preloc, doActions ) - if tmptokens or tmptokens.haskeys(): - tokens += tmptokens - except (ParseException,IndexError): - pass - - return loc, tokens + # use stopOn attribute for OneOrMore to avoid reading label string as part of the data + attr_expr = Group(label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)) + OneOrMore(attr_expr).parseString(text).pprint() # Better -> [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'BLACK']] + + # could also be written as + (attr_expr * (1,)).parseString(text).pprint() + """ def __str__( self ): if hasattr(self,"name"): @@ -2833,10 +3879,36 @@ class OneOrMore(ParseElementEnhance): return self.strRepr - def setResultsName( self, name, listAllMatches=False ): - ret = super(OneOrMore,self).setResultsName(name,listAllMatches) - ret.saveAsList = True - return ret +class ZeroOrMore(_MultipleMatch): + """ + Optional repetition of zero or more of the given expression. + + Parameters: + - expr - expression that must match zero or more times + - stopOn - (default=C{None}) - expression for a terminating sentinel + (only required if the sentinel would ordinarily match the repetition + expression) + + Example: similar to L{OneOrMore} + """ + def __init__( self, expr, stopOn=None): + super(ZeroOrMore,self).__init__(expr, stopOn=stopOn) + self.mayReturnEmpty = True + + def parseImpl( self, instring, loc, doActions=True ): + try: + return super(ZeroOrMore, self).parseImpl(instring, loc, doActions) + except (ParseException,IndexError): + return loc, [] + + def __str__( self ): + if hasattr(self,"name"): + return self.name + + if self.strRepr is None: + self.strRepr = "[" + _ustr(self.expr) + "]..." + + return self.strRepr class _NullToken(object): def __bool__(self): @@ -2847,12 +3919,43 @@ class _NullToken(object): _optionalNotMatched = _NullToken() class Optional(ParseElementEnhance): - """Optional matching of the given expression. - A default return string can also be specified, if the optional expression - is not found. + """ + Optional matching of the given expression. + + Parameters: + - expr - expression that must match zero or more times + - default (optional) - value to be returned if the optional expression is not found. 
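# --- editor's example (not part of the upstream diff) ----------------------
# Sketch of Optional with a default value, per the parameter list above;
# `release` is an illustrative grammar.
from pyparsing import Word, alphas, nums, Optional

release = Word(alphas) + Optional(Word(nums), default="0")
print(release.parseString("build 42"))    # -> ['build', '42']
print(release.parseString("build"))       # -> ['build', '0']   (default supplied)
# ----------------------------------------------------------------------------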
+ + Example:: + # US postal code can be a 5-digit zip, plus optional 4-digit qualifier + zip = Combine(Word(nums, exact=5) + Optional('-' + Word(nums, exact=4))) + zip.runTests(''' + # traditional ZIP code + 12345 + + # ZIP+4 form + 12101-0001 + + # invalid ZIP + 98765- + ''') + prints:: + # traditional ZIP code + 12345 + ['12345'] + + # ZIP+4 form + 12101-0001 + ['12101-0001'] + + # invalid ZIP + 98765- + ^ + FAIL: Expected end of text (at char 5), (line:1, col:6) """ def __init__( self, expr, default=_optionalNotMatched ): super(Optional,self).__init__( expr, savelist=False ) + self.saveAsList = self.expr.saveAsList self.defaultValue = default self.mayReturnEmpty = True @@ -2879,13 +3982,60 @@ class Optional(ParseElementEnhance): return self.strRepr - class SkipTo(ParseElementEnhance): - """Token for skipping over all undefined text until the matched expression is found. - If C{include} is set to true, the matched expression is also parsed (the skipped text - and matched expression are returned as a 2-element list). The C{ignore} - argument is used to define grammars (typically quoted strings and comments) that - might contain false matches. + """ + Token for skipping over all undefined text until the matched expression is found. + + Parameters: + - expr - target expression marking the end of the data to be skipped + - include - (default=C{False}) if True, the target expression is also parsed + (the skipped text and target expression are returned as a 2-element list). + - ignore - (default=C{None}) used to define grammars (typically quoted strings and + comments) that might contain false matches to the target expression + - failOn - (default=C{None}) define expressions that are not allowed to be + included in the skipped test; if found before the target expression is found, + the SkipTo is not a match + + Example:: + report = ''' + Outstanding Issues Report - 1 Jan 2000 + + # | Severity | Description | Days Open + -----+----------+-------------------------------------------+----------- + 101 | Critical | Intermittent system crash | 6 + 94 | Cosmetic | Spelling error on Login ('log|n') | 14 + 79 | Minor | System slow when running too many reports | 47 + ''' + integer = Word(nums) + SEP = Suppress('|') + # use SkipTo to simply match everything up until the next SEP + # - ignore quoted strings, so that a '|' character inside a quoted string does not match + # - parse action will call token.strip() for each matched token, i.e., the description body + string_data = SkipTo(SEP, ignore=quotedString) + string_data.setParseAction(tokenMap(str.strip)) + ticket_expr = (integer("issue_num") + SEP + + string_data("sev") + SEP + + string_data("desc") + SEP + + integer("days_open")) + + for tkt in ticket_expr.searchString(report): + print tkt.dump() + prints:: + ['101', 'Critical', 'Intermittent system crash', '6'] + - days_open: 6 + - desc: Intermittent system crash + - issue_num: 101 + - sev: Critical + ['94', 'Cosmetic', "Spelling error on Login ('log|n')", '14'] + - days_open: 14 + - desc: Spelling error on Login ('log|n') + - issue_num: 94 + - sev: Cosmetic + ['79', 'Minor', 'System slow when running too many reports', '47'] + - days_open: 47 + - desc: System slow when running too many reports + - issue_num: 79 + - sev: Minor """ def __init__( self, other, include=False, ignore=None, failOn=None ): super( SkipTo, self ).__init__( other ) @@ -2894,77 +4044,85 @@ class SkipTo(ParseElementEnhance): self.mayIndexError = False self.includeMatch = include self.asList = False - if failOn is not None and 
isinstance(failOn, basestring): - self.failOn = Literal(failOn) + if isinstance(failOn, basestring): + self.failOn = ParserElement._literalStringClass(failOn) else: self.failOn = failOn self.errmsg = "No match found for "+_ustr(self.expr) def parseImpl( self, instring, loc, doActions=True ): - startLoc = loc + startloc = loc instrlen = len(instring) expr = self.expr - failParse = False - while loc <= instrlen: - try: - if self.failOn: + expr_parse = self.expr._parse + self_failOn_canParseNext = self.failOn.canParseNext if self.failOn is not None else None + self_ignoreExpr_tryParse = self.ignoreExpr.tryParse if self.ignoreExpr is not None else None + + tmploc = loc + while tmploc <= instrlen: + if self_failOn_canParseNext is not None: + # break if failOn expression matches + if self_failOn_canParseNext(instring, tmploc): + break + + if self_ignoreExpr_tryParse is not None: + # advance past ignore expressions + while 1: try: - self.failOn.tryParse(instring, loc) + tmploc = self_ignoreExpr_tryParse(instring, tmploc) except ParseBaseException: - pass - else: - failParse = True - raise ParseException(instring, loc, "Found expression " + str(self.failOn)) - failParse = False - if self.ignoreExpr is not None: - while 1: - try: - loc = self.ignoreExpr.tryParse(instring,loc) - # print("found ignoreExpr, advance to", loc) - except ParseBaseException: - break - expr._parse( instring, loc, doActions=False, callPreParse=False ) - skipText = instring[startLoc:loc] - if self.includeMatch: - loc,mat = expr._parse(instring,loc,doActions,callPreParse=False) - if mat: - skipRes = ParseResults( skipText ) - skipRes += mat - return loc, [ skipRes ] - else: - return loc, [ skipText ] - else: - return loc, [ skipText ] - except (ParseException,IndexError): - if failParse: - raise - else: - loc += 1 - raise ParseException(instring, loc, self.errmsg, self) + break + + try: + expr_parse(instring, tmploc, doActions=False, callPreParse=False) + except (ParseException, IndexError): + # no match, advance loc in string + tmploc += 1 + else: + # matched skipto expr, done + break + + else: + # ran off the end of the input string without matching skipto expr, fail + raise ParseException(instring, loc, self.errmsg, self) + + # build up return values + loc = tmploc + skiptext = instring[startloc:loc] + skipresult = ParseResults(skiptext) + + if self.includeMatch: + loc, mat = expr_parse(instring,loc,doActions,callPreParse=False) + skipresult += mat + + return loc, skipresult class Forward(ParseElementEnhance): - """Forward declaration of an expression to be defined later - - used for recursive grammars, such as algebraic infix notation. - When the expression is known, it is assigned to the C{Forward} variable using the '<<' operator. + """ + Forward declaration of an expression to be defined later - + used for recursive grammars, such as algebraic infix notation. + When the expression is known, it is assigned to the C{Forward} variable using the '<<' operator. - Note: take care when assigning to C{Forward} not to overlook precedence of operators. - Specifically, '|' has a lower precedence than '<<', so that:: - fwdExpr << a | b | c - will actually be evaluated as:: - (fwdExpr << a) | b | c - thereby leaving b and c out as parseable alternatives. It is recommended that you - explicitly group the values inserted into the C{Forward}:: - fwdExpr << (a | b | c) - Converting to use the '<<=' operator instead will avoid this problem. + Note: take care when assigning to C{Forward} not to overlook precedence of operators. 
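# --- editor's example (not part of the upstream diff) ----------------------
# Sketch of a recursive grammar with Forward, using '<<=' as the docstring
# above recommends to avoid the '<<' / '|' precedence trap; the names are
# illustrative only.
from pyparsing import Forward, Word, alphas, Group, Suppress, Optional, delimitedList

expr = Forward()
atom = Word(alphas)
nested = Group(Suppress('(') + Optional(delimitedList(expr)) + Suppress(')'))
expr <<= atom | nested

print(expr.parseString("(a,(b,c),d)"))    # -> [['a', ['b', 'c'], 'd']]
# ----------------------------------------------------------------------------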
+ Specifically, '|' has a lower precedence than '<<', so that:: + fwdExpr << a | b | c + will actually be evaluated as:: + (fwdExpr << a) | b | c + thereby leaving b and c out as parseable alternatives. It is recommended that you + explicitly group the values inserted into the C{Forward}:: + fwdExpr << (a | b | c) + Converting to use the '<<=' operator instead will avoid this problem. + + See L{ParseResults.pprint} for an example of a recursive parser created using + C{Forward}. """ def __init__( self, other=None ): super(Forward,self).__init__( other, savelist=False ) def __lshift__( self, other ): if isinstance( other, basestring ): - other = ParserElement.literalStringClass(other) + other = ParserElement._literalStringClass(other) self.expr = other - self.mayReturnEmpty = other.mayReturnEmpty self.strRepr = None self.mayIndexError = self.expr.mayIndexError self.mayReturnEmpty = self.expr.mayReturnEmpty @@ -2998,7 +4156,9 @@ class Forward(ParseElementEnhance): def __str__( self ): if hasattr(self,"name"): return self.name + return self.__class__.__name__ + ": ..." + # stubbed out for now - creates awful memory and perf issues self._revertClass = self.__class__ self.__class__ = _ForwardNoRecurse try: @@ -3023,26 +4183,29 @@ class _ForwardNoRecurse(Forward): return "..." class TokenConverter(ParseElementEnhance): - """Abstract subclass of C{ParseExpression}, for converting parsed results.""" + """ + Abstract subclass of C{ParseExpression}, for converting parsed results. + """ def __init__( self, expr, savelist=False ): super(TokenConverter,self).__init__( expr )#, savelist ) self.saveAsList = False -class Upcase(TokenConverter): - """Converter to upper case all matching tokens.""" - def __init__(self, *args): - super(Upcase,self).__init__(*args) - warnings.warn("Upcase class is deprecated, use upcaseTokens parse action instead", - DeprecationWarning,stacklevel=2) - - def postParse( self, instring, loc, tokenlist ): - return list(map( str.upper, tokenlist )) - - class Combine(TokenConverter): - """Converter to concatenate all matching tokens to a single string. - By default, the matching patterns must also be contiguous in the input string; - this can be disabled by specifying C{'adjacent=False'} in the constructor. + """ + Converter to concatenate all matching tokens to a single string. + By default, the matching patterns must also be contiguous in the input string; + this can be disabled by specifying C{'adjacent=False'} in the constructor. + + Example:: + real = Word(nums) + '.' + Word(nums) + print(real.parseString('3.1416')) # -> ['3', '.', '1416'] + # will also erroneously match the following + print(real.parseString('3. 1416')) # -> ['3', '.', '1416'] + + real = Combine(Word(nums) + '.' + Word(nums)) + print(real.parseString('3.1416')) # -> ['3.1416'] + # no match when there are internal spaces + print(real.parseString('3. 1416')) # -> Exception: Expected W:(0123...) """ def __init__( self, expr, joinString="", adjacent=True ): super(Combine,self).__init__( expr ) @@ -3072,7 +4235,19 @@ class Combine(TokenConverter): return retToks class Group(TokenConverter): - """Converter to return the matched tokens as a list - useful for returning tokens of C{L{ZeroOrMore}} and C{L{OneOrMore}} expressions.""" + """ + Converter to return the matched tokens as a list - useful for returning tokens of C{L{ZeroOrMore}} and C{L{OneOrMore}} expressions. 
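[Editor's note: illustrative sketch, not part of the patch.] To make the C{Forward} precedence note above concrete, here is a minimal recursive-grammar sketch for nested integer lists; the names (`value`, `nested_list`) are invented for the example:

    from pyparsing import (Forward, Group, Optional, Suppress, Word,
                           delimitedList, nums)

    # recursive grammar for nested integer lists such as "(1, (2, 3), 4)"
    LPAR, RPAR = map(Suppress, "()")
    value = Forward()
    nested_list = Group(LPAR + Optional(delimitedList(value)) + RPAR)
    # '<<=' (or parenthesized alternatives) avoids the '|' precedence trap noted above
    value <<= Word(nums) | nested_list

    print(value.parseString("(1, (2, 3), 4)"))  # -> [['1', ['2', '3'], '4']]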
+ + Example:: + ident = Word(alphas) + num = Word(nums) + term = ident | num + func = ident + Optional(delimitedList(term)) + print(func.parseString("fn a,b,100")) # -> ['fn', 'a', 'b', '100'] + + func = ident + Group(Optional(delimitedList(term))) + print(func.parseString("fn a,b,100")) # -> ['fn', ['a', 'b', '100']] + """ def __init__( self, expr ): super(Group,self).__init__( expr ) self.saveAsList = True @@ -3081,9 +4256,40 @@ class Group(TokenConverter): return [ tokenlist ] class Dict(TokenConverter): - """Converter to return a repetitive expression as a list, but also as a dictionary. - Each element can also be referenced using the first token in the expression as its key. - Useful for tabular report scraping when the first column can be used as a item key. + """ + Converter to return a repetitive expression as a list, but also as a dictionary. + Each element can also be referenced using the first token in the expression as its key. + Useful for tabular report scraping when the first column can be used as a item key. + + Example:: + data_word = Word(alphas) + label = data_word + FollowedBy(':') + attr_expr = Group(label + Suppress(':') + OneOrMore(data_word).setParseAction(' '.join)) + + text = "shape: SQUARE posn: upper left color: light blue texture: burlap" + attr_expr = (label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)) + + # print attributes as plain groups + print(OneOrMore(attr_expr).parseString(text).dump()) + + # instead of OneOrMore(expr), parse using Dict(OneOrMore(Group(expr))) - Dict will auto-assign names + result = Dict(OneOrMore(Group(attr_expr))).parseString(text) + print(result.dump()) + + # access named fields as dict entries, or output as dict + print(result['shape']) + print(result.asDict()) + prints:: + ['shape', 'SQUARE', 'posn', 'upper left', 'color', 'light blue', 'texture', 'burlap'] + + [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'light blue'], ['texture', 'burlap']] + - color: light blue + - posn: upper left + - shape: SQUARE + - texture: burlap + SQUARE + {'color': 'light blue', 'posn': 'upper left', 'texture': 'burlap', 'shape': 'SQUARE'} + See more examples at L{ParseResults} of accessing fields by results name. """ def __init__( self, expr ): super(Dict,self).__init__( expr ) @@ -3115,7 +4321,24 @@ class Dict(TokenConverter): class Suppress(TokenConverter): - """Converter for ignoring the results of a parsed expression.""" + """ + Converter for ignoring the results of a parsed expression. + + Example:: + source = "a, b, c,d" + wd = Word(alphas) + wd_list1 = wd + ZeroOrMore(',' + wd) + print(wd_list1.parseString(source)) + + # often, delimiters that are useful during parsing are just in the + # way afterward - use Suppress to keep them out of the parsed output + wd_list2 = wd + ZeroOrMore(Suppress(',') + wd) + print(wd_list2.parseString(source)) + prints:: + ['a', ',', 'b', ',', 'c', ',', 'd'] + ['a', 'b', 'c', 'd'] + (See also L{delimitedList}.) + """ def postParse( self, instring, loc, tokenlist ): return [] @@ -3124,7 +4347,9 @@ class Suppress(TokenConverter): class OnlyOnce(object): - """Wrapper for parse actions, to ensure they are only called once.""" + """ + Wrapper for parse actions, to ensure they are only called once. 
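[Editor's note: illustrative sketch, not part of the patch.] The C{OnlyOnce} wrapper documented above gets no example in the docstring; the following minimal sketch shows a parse action allowed to fire only once (the `report` callback is a made-up name):

    from pyparsing import OnlyOnce, ParseException, Word, alphas

    def report(tokens):
        print("parse action fired:", tokens.asList())

    wd = Word(alphas).setParseAction(OnlyOnce(report))
    wd.parseString("hello")      # prints: parse action fired: ['hello']
    try:
        wd.parseString("again")  # second use raises ParseException
    except ParseException:
        pass                     # call OnlyOnce.reset() to re-arm the action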
+ """ def __init__(self, methodCall): self.callable = _trim_arity(methodCall) self.called = False @@ -3138,20 +4363,39 @@ class OnlyOnce(object): self.called = False def traceParseAction(f): - """Decorator for debugging parse actions.""" + """ + Decorator for debugging parse actions. + + When the parse action is called, this decorator will print C{">> entering I{method-name}(line:I{current_source_line}, I{parse_location}, I{matched_tokens})".} + When the parse action completes, the decorator will print C{"<<"} followed by the returned value, or any exception that the parse action raised. + + Example:: + wd = Word(alphas) + + @traceParseAction + def remove_duplicate_chars(tokens): + return ''.join(sorted(set(''.join(tokens))) + + wds = OneOrMore(wd).setParseAction(remove_duplicate_chars) + print(wds.parseString("slkdjs sld sldd sdlf sdljf")) + prints:: + >>entering remove_duplicate_chars(line: 'slkdjs sld sldd sdlf sdljf', 0, (['slkdjs', 'sld', 'sldd', 'sdlf', 'sdljf'], {})) + <3: thisFunc = paArgs[0].__class__.__name__ + '.' + thisFunc - sys.stderr.write( ">>entering %s(line: '%s', %d, %s)\n" % (thisFunc,line(l,s),l,t) ) + sys.stderr.write( ">>entering %s(line: '%s', %d, %r)\n" % (thisFunc,line(l,s),l,t) ) try: ret = f(*paArgs) except Exception as exc: sys.stderr.write( "< ['aa', 'bb', 'cc'] + delimitedList(Word(hexnums), delim=':', combine=True).parseString("AA:BB:CC:DD:EE") # -> ['AA:BB:CC:DD:EE'] """ dlName = _ustr(expr)+" ["+_ustr(delim)+" "+_ustr(expr)+"]..." if combine: @@ -3177,11 +4426,22 @@ def delimitedList( expr, delim=",", combine=False ): return ( expr + ZeroOrMore( Suppress( delim ) + expr ) ).setName(dlName) def countedArray( expr, intExpr=None ): - """Helper to define a counted list of expressions. - This helper defines a pattern of the form:: - integer expr expr expr... - where the leading integer tells how many expr expressions follow. - The matched tokens returns the array of expr tokens as a list - the leading count token is suppressed. + """ + Helper to define a counted list of expressions. + This helper defines a pattern of the form:: + integer expr expr expr... + where the leading integer tells how many expr expressions follow. + The matched tokens returns the array of expr tokens as a list - the leading count token is suppressed. + + If C{intExpr} is specified, it should be a pyparsing expression that produces an integer value. + + Example:: + countedArray(Word(alphas)).parseString('2 ab cd ef') # -> ['ab', 'cd'] + + # in this parser, the leading integer value is given in binary, + # '10' indicating that 2 values are in the array + binaryConstant = Word('01').setParseAction(lambda t: int(t[0], 2)) + countedArray(Word(alphas), intExpr=binaryConstant).parseString('10 ab cd ef') # -> ['ab', 'cd'] """ arrayExpr = Forward() def countFieldParseAction(s,l,t): @@ -3194,7 +4454,7 @@ def countedArray( expr, intExpr=None ): intExpr = intExpr.copy() intExpr.setName("arrayLen") intExpr.addParseAction(countFieldParseAction, callDuringTry=True) - return ( intExpr + arrayExpr ) + return ( intExpr + arrayExpr ).setName('(len) ' + _ustr(expr) + '...') def _flatten(L): ret = [] @@ -3206,16 +4466,17 @@ def _flatten(L): return ret def matchPreviousLiteral(expr): - """Helper to define an expression that is indirectly defined from - the tokens matched in a previous expression, that is, it looks - for a 'repeat' of a previous expression. For example:: - first = Word(nums) - second = matchPreviousLiteral(first) - matchExpr = first + ":" + second - will match C{"1:1"}, but not C{"1:2"}. 
Because this matches a - previous literal, will also match the leading C{"1:1"} in C{"1:10"}. - If this is not desired, use C{matchPreviousExpr}. - Do *not* use with packrat parsing enabled. + """ + Helper to define an expression that is indirectly defined from + the tokens matched in a previous expression, that is, it looks + for a 'repeat' of a previous expression. For example:: + first = Word(nums) + second = matchPreviousLiteral(first) + matchExpr = first + ":" + second + will match C{"1:1"}, but not C{"1:2"}. Because this matches a + previous literal, will also match the leading C{"1:1"} in C{"1:10"}. + If this is not desired, use C{matchPreviousExpr}. + Do I{not} use with packrat parsing enabled. """ rep = Forward() def copyTokenToRepeater(s,l,t): @@ -3225,24 +4486,26 @@ def matchPreviousLiteral(expr): else: # flatten t tokens tflat = _flatten(t.asList()) - rep << And( [ Literal(tt) for tt in tflat ] ) + rep << And(Literal(tt) for tt in tflat) else: rep << Empty() expr.addParseAction(copyTokenToRepeater, callDuringTry=True) + rep.setName('(prev) ' + _ustr(expr)) return rep def matchPreviousExpr(expr): - """Helper to define an expression that is indirectly defined from - the tokens matched in a previous expression, that is, it looks - for a 'repeat' of a previous expression. For example:: - first = Word(nums) - second = matchPreviousExpr(first) - matchExpr = first + ":" + second - will match C{"1:1"}, but not C{"1:2"}. Because this matches by - expressions, will *not* match the leading C{"1:1"} in C{"1:10"}; - the expressions are evaluated first, and then compared, so - C{"1"} is compared with C{"10"}. - Do *not* use with packrat parsing enabled. + """ + Helper to define an expression that is indirectly defined from + the tokens matched in a previous expression, that is, it looks + for a 'repeat' of a previous expression. For example:: + first = Word(nums) + second = matchPreviousExpr(first) + matchExpr = first + ":" + second + will match C{"1:1"}, but not C{"1:2"}. Because this matches by + expressions, will I{not} match the leading C{"1:1"} in C{"1:10"}; + the expressions are evaluated first, and then compared, so + C{"1"} is compared with C{"10"}. + Do I{not} use with packrat parsing enabled. """ rep = Forward() e2 = expr.copy() @@ -3255,6 +4518,7 @@ def matchPreviousExpr(expr): raise ParseException("",0,"") rep.setParseAction( mustMatchTheseTokens, callDuringTry=True ) expr.addParseAction(copyTokenToRepeater, callDuringTry=True) + rep.setName('(prev) ' + _ustr(expr)) return rep def _escapeRegexRangeChars(s): @@ -3266,16 +4530,27 @@ def _escapeRegexRangeChars(s): return _ustr(s) def oneOf( strs, caseless=False, useRegex=True ): - """Helper to quickly define a set of alternative Literals, and makes sure to do - longest-first testing when there is a conflict, regardless of the input order, - but returns a C{L{MatchFirst}} for best performance. + """ + Helper to quickly define a set of alternative Literals, and makes sure to do + longest-first testing when there is a conflict, regardless of the input order, + but returns a C{L{MatchFirst}} for best performance. 
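[Editor's note: illustrative sketch, not part of the patch.] For readers comparing C{matchPreviousLiteral} and C{matchPreviousExpr} as described in the hunks above, a small sketch of the '1:1' / '1:10' behavior (variable names are invented):

    from pyparsing import (ParseException, Word, nums,
                           matchPreviousExpr, matchPreviousLiteral)

    first = Word(nums)
    by_literal = first + ":" + matchPreviousLiteral(first)
    print(by_literal.parseString("1:1"))   # -> ['1', ':', '1']
    print(by_literal.parseString("1:10"))  # also -> ['1', ':', '1'] (matches leading "1" of "10")

    second = Word(nums)
    by_expr = second + ":" + matchPreviousExpr(second)
    try:
        by_expr.parseString("1:10")        # whole-token comparison: "10" != "1"
    except ParseException:
        print("1:10 rejected by matchPreviousExpr")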
- Parameters: - - strs - a string of space-delimited literals, or a list of string literals - - caseless - (default=False) - treat all literals as caseless - - useRegex - (default=True) - as an optimization, will generate a Regex + Parameters: + - strs - a string of space-delimited literals, or a collection of string literals + - caseless - (default=C{False}) - treat all literals as caseless + - useRegex - (default=C{True}) - as an optimization, will generate a Regex object; otherwise, will generate a C{MatchFirst} object (if C{caseless=True}, or if creating a C{Regex} raises an exception) + + Example:: + comp_oper = oneOf("< = > <= >= !=") + var = Word(alphas) + number = Word(nums) + term = var | number + comparison_expr = term + comp_oper + term + print(comparison_expr.searchString("B = 12 AA=23 B<=AA AA>12")) + prints:: + [['B', '=', '12'], ['AA', '=', '23'], ['B', '<=', 'AA'], ['AA', '>', '12']] """ if caseless: isequal = ( lambda a,b: a.upper() == b.upper() ) @@ -3289,12 +4564,10 @@ def oneOf( strs, caseless=False, useRegex=True ): symbols = [] if isinstance(strs,basestring): symbols = strs.split() - elif isinstance(strs, collections.Sequence): - symbols = list(strs[:]) - elif isinstance(strs, _generatorType): + elif isinstance(strs, collections.Iterable): symbols = list(strs) else: - warnings.warn("Invalid argument to oneOf, expected string or list", + warnings.warn("Invalid argument to oneOf, expected string or iterable", SyntaxWarning, stacklevel=2) if not symbols: return NoMatch() @@ -3318,41 +4591,76 @@ def oneOf( strs, caseless=False, useRegex=True ): #~ print (strs,"->", "|".join( [ _escapeRegexChars(sym) for sym in symbols] )) try: if len(symbols)==len("".join(symbols)): - return Regex( "[%s]" % "".join(_escapeRegexRangeChars(sym) for sym in symbols) ) + return Regex( "[%s]" % "".join(_escapeRegexRangeChars(sym) for sym in symbols) ).setName(' | '.join(symbols)) else: - return Regex( "|".join(re.escape(sym) for sym in symbols) ) - except: + return Regex( "|".join(re.escape(sym) for sym in symbols) ).setName(' | '.join(symbols)) + except Exception: warnings.warn("Exception creating Regex for oneOf, building MatchFirst", SyntaxWarning, stacklevel=2) # last resort, just use MatchFirst - return MatchFirst( [ parseElementClass(sym) for sym in symbols ] ) + return MatchFirst(parseElementClass(sym) for sym in symbols).setName(' | '.join(symbols)) def dictOf( key, value ): - """Helper to easily and clearly define a dictionary by specifying the respective patterns - for the key and value. Takes care of defining the C{L{Dict}}, C{L{ZeroOrMore}}, and C{L{Group}} tokens - in the proper order. The key pattern can include delimiting markers or punctuation, - as long as they are suppressed, thereby leaving the significant key text. The value - pattern can include named results, so that the C{Dict} results can include named token - fields. + """ + Helper to easily and clearly define a dictionary by specifying the respective patterns + for the key and value. Takes care of defining the C{L{Dict}}, C{L{ZeroOrMore}}, and C{L{Group}} tokens + in the proper order. The key pattern can include delimiting markers or punctuation, + as long as they are suppressed, thereby leaving the significant key text. The value + pattern can include named results, so that the C{Dict} results can include named token + fields. 
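[Editor's note: illustrative sketch, not part of the patch.] The hunk above relaxes C{oneOf} to accept any iterable of strings rather than only a string or sequence; a brief sketch of the intended usage (the example symbols are invented):

    from pyparsing import oneOf

    # a space-delimited string still works...
    bool_literal = oneOf("true false")
    # ...and any iterable of strings is now accepted as well
    keyword = oneOf(["select", "from", "where"])
    unit = oneOf(s.lower() for s in ("KB", "MB", "GB"))

    print(keyword.parseString("from"))   # -> ['from']
    print(unit.parseString("mb"))        # -> ['mb']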
+ + Example:: + text = "shape: SQUARE posn: upper left color: light blue texture: burlap" + attr_expr = (label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)) + print(OneOrMore(attr_expr).parseString(text).dump()) + + attr_label = label + attr_value = Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join) + + # similar to Dict, but simpler call format + result = dictOf(attr_label, attr_value).parseString(text) + print(result.dump()) + print(result['shape']) + print(result.shape) # object attribute access works too + print(result.asDict()) + prints:: + [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'light blue'], ['texture', 'burlap']] + - color: light blue + - posn: upper left + - shape: SQUARE + - texture: burlap + SQUARE + SQUARE + {'color': 'light blue', 'shape': 'SQUARE', 'posn': 'upper left', 'texture': 'burlap'} """ return Dict( ZeroOrMore( Group ( key + value ) ) ) def originalTextFor(expr, asString=True): - """Helper to return the original, untokenized text for a given expression. Useful to - restore the parsed fields of an HTML start tag into the raw tag text itself, or to - revert separate tokens with intervening whitespace back to the original matching - input text. Simpler to use than the parse action C{L{keepOriginalText}}, and does not - require the inspect module to chase up the call stack. By default, returns a - string containing the original parsed text. + """ + Helper to return the original, untokenized text for a given expression. Useful to + restore the parsed fields of an HTML start tag into the raw tag text itself, or to + revert separate tokens with intervening whitespace back to the original matching + input text. By default, returns astring containing the original parsed text. - If the optional C{asString} argument is passed as C{False}, then the return value is a - C{L{ParseResults}} containing any results names that were originally matched, and a - single token containing the original matched text from the input string. So if - the expression passed to C{L{originalTextFor}} contains expressions with defined - results names, you must set C{asString} to C{False} if you want to preserve those - results name values.""" + If the optional C{asString} argument is passed as C{False}, then the return value is a + C{L{ParseResults}} containing any results names that were originally matched, and a + single token containing the original matched text from the input string. So if + the expression passed to C{L{originalTextFor}} contains expressions with defined + results names, you must set C{asString} to C{False} if you want to preserve those + results name values. 
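[Editor's note: illustrative sketch, not part of the patch.] As the C{originalTextFor} docstring above notes, passing C{asString=False} keeps the results names while returning the original input slice; a minimal sketch (names such as `key_value` and `assignment` are invented):

    from pyparsing import Word, alphas, nums, originalTextFor

    key_value = Word(alphas)("key") + "=" + Word(nums)("value")
    assignment = originalTextFor(key_value, asString=False)

    result = assignment.parseString("answer = 42")
    print(result[0])      # -> 'answer = 42'  (the original input slice)
    print(result.key)     # -> 'answer'       (results names are preserved)
    print(result.value)   # -> '42'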
+ + Example:: + src = "this is test bold text normal text " + for tag in ("b","i"): + opener,closer = makeHTMLTags(tag) + patt = originalTextFor(opener + SkipTo(closer) + closer) + print(patt.searchString(src)[0]) + prints:: + [' bold text '] + ['text'] + """ locMarker = Empty().setParseAction(lambda s,loc,t: loc) endlocMarker = locMarker.copy() endlocMarker.callPreparse = False @@ -3361,27 +4669,37 @@ def originalTextFor(expr, asString=True): extractText = lambda s,l,t: s[t._original_start:t._original_end] else: def extractText(s,l,t): - del t[:] - t.insert(0, s[t._original_start:t._original_end]) - del t["_original_start"] - del t["_original_end"] + t[:] = [s[t.pop('_original_start'):t.pop('_original_end')]] matchExpr.setParseAction(extractText) + matchExpr.ignoreExprs = expr.ignoreExprs return matchExpr def ungroup(expr): - """Helper to undo pyparsing's default grouping of And expressions, even - if all but one are non-empty.""" + """ + Helper to undo pyparsing's default grouping of And expressions, even + if all but one are non-empty. + """ return TokenConverter(expr).setParseAction(lambda t:t[0]) def locatedExpr(expr): - """Helper to decorate a returned token with its starting and ending locations in the input string. - This helper adds the following results names: - - locn_start = location where matched expression begins - - locn_end = location where matched expression ends - - value = the actual parsed results + """ + Helper to decorate a returned token with its starting and ending locations in the input string. + This helper adds the following results names: + - locn_start = location where matched expression begins + - locn_end = location where matched expression ends + - value = the actual parsed results - Be careful if the input text contains C{} characters, you may want to call - C{L{ParserElement.parseWithTabs}} + Be careful if the input text contains C{} characters, you may want to call + C{L{ParserElement.parseWithTabs}} + + Example:: + wd = Word(alphas) + for match in locatedExpr(wd).searchString("ljsdf123lksdjjf123lkkjj1222"): + print(match) + prints:: + [[0, 'ljsdf', 5]] + [[8, 'lksdjjf', 15]] + [[18, 'lkkjj', 23]] """ locator = Empty().setParseAction(lambda s,l,t: l) return Group(locator("locn_start") + expr("value") + locator.copy().leaveWhitespace()("locn_end")) @@ -3402,31 +4720,33 @@ _charRange = Group(_singleChar + Suppress("-") + _singleChar) _reBracketExpr = Literal("[") + Optional("^").setResultsName("negate") + Group( OneOrMore( _charRange | _singleChar ) ).setResultsName("body") + "]" def srange(s): - r"""Helper to easily define string ranges for use in Word construction. Borrows - syntax from regexp '[]' string range definitions:: - srange("[0-9]") -> "0123456789" - srange("[a-z]") -> "abcdefghijklmnopqrstuvwxyz" - srange("[a-z$_]") -> "abcdefghijklmnopqrstuvwxyz$_" - The input string must be enclosed in []'s, and the returned string is the expanded - character set joined into a single string. - The values enclosed in the []'s may be:: - a single character - an escaped character with a leading backslash (such as \- or \]) - an escaped hex character with a leading '\x' (\x21, which is a '!' character) - (\0x## is also supported for backwards compatibility) - an escaped octal character with a leading '\0' (\041, which is a '!' character) - a range of any of the above, separated by a dash ('a-z', etc.) - any combination of the above ('aeiouy', 'a-zA-Z0-9_$', etc.) + r""" + Helper to easily define string ranges for use in Word construction. 
Borrows + syntax from regexp '[]' string range definitions:: + srange("[0-9]") -> "0123456789" + srange("[a-z]") -> "abcdefghijklmnopqrstuvwxyz" + srange("[a-z$_]") -> "abcdefghijklmnopqrstuvwxyz$_" + The input string must be enclosed in []'s, and the returned string is the expanded + character set joined into a single string. + The values enclosed in the []'s may be: + - a single character + - an escaped character with a leading backslash (such as C{\-} or C{\]}) + - an escaped hex character with a leading C{'\x'} (C{\x21}, which is a C{'!'} character) + (C{\0x##} is also supported for backwards compatibility) + - an escaped octal character with a leading C{'\0'} (C{\041}, which is a C{'!'} character) + - a range of any of the above, separated by a dash (C{'a-z'}, etc.) + - any combination of the above (C{'aeiouy'}, C{'a-zA-Z0-9_$'}, etc.) """ _expanded = lambda p: p if not isinstance(p,ParseResults) else ''.join(unichr(c) for c in range(ord(p[0]),ord(p[1])+1)) try: return "".join(_expanded(part) for part in _reBracketExpr.parseString(s).body) - except: + except Exception: return "" def matchOnlyAtCol(n): - """Helper method for defining parse actions that require matching at a specific - column in the input text. + """ + Helper method for defining parse actions that require matching at a specific + column in the input text. """ def verifyCol(strg,locn,toks): if col(locn,strg) != n: @@ -3434,57 +4754,83 @@ def matchOnlyAtCol(n): return verifyCol def replaceWith(replStr): - """Helper method for common parse actions that simply return a literal value. Especially - useful when used with C{L{transformString}()}. """ - #def _replFunc(*args): - # return [replStr] - #return _replFunc - return functools.partial(next, itertools.repeat([replStr])) + Helper method for common parse actions that simply return a literal value. Especially + useful when used with C{L{transformString}()}. + + Example:: + num = Word(nums).setParseAction(lambda toks: int(toks[0])) + na = oneOf("N/A NA").setParseAction(replaceWith(math.nan)) + term = na | num + + OneOrMore(term).parseString("324 234 N/A 234") # -> [324, 234, nan, 234] + """ + return lambda s,l,t: [replStr] def removeQuotes(s,l,t): - """Helper parse action for removing quotation marks from parsed quoted strings. - To use, add this parse action to quoted string using:: - quotedString.setParseAction( removeQuotes ) + """ + Helper parse action for removing quotation marks from parsed quoted strings. + + Example:: + # by default, quotation marks are included in parsed results + quotedString.parseString("'Now is the Winter of our Discontent'") # -> ["'Now is the Winter of our Discontent'"] + + # use removeQuotes to strip quotation marks from parsed results + quotedString.setParseAction(removeQuotes) + quotedString.parseString("'Now is the Winter of our Discontent'") # -> ["Now is the Winter of our Discontent"] """ return t[0][1:-1] -def upcaseTokens(s,l,t): - """Helper parse action to convert tokens to upper case.""" - return [ tt.upper() for tt in map(_ustr,t) ] +def tokenMap(func, *args): + """ + Helper to define a parse action by mapping a function to all elements of a ParseResults list.If any additional + args are passed, they are forwarded to the given function as additional arguments after + the token, as in C{hex_integer = Word(hexnums).setParseAction(tokenMap(int, 16))}, which will convert the + parsed data to an integer using base 16. 
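[Editor's note: illustrative sketch, not part of the patch.] The new C{tokenMap} helper described above forwards extra arguments to the mapped function; a minimal sketch of the base-16 case mentioned in that sentence:

    from pyparsing import OneOrMore, Word, hexnums, tokenMap

    # extra arguments are forwarded to the mapped function, here int(tok, 16)
    hex_int = Word(hexnums).setParseAction(tokenMap(int, 16))
    print(OneOrMore(hex_int).parseString("1f 0a ff"))   # -> [31, 10, 255]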
-def downcaseTokens(s,l,t): - """Helper parse action to convert tokens to lower case.""" - return [ tt.lower() for tt in map(_ustr,t) ] + Example (compare the last to example in L{ParserElement.transformString}:: + hex_ints = OneOrMore(Word(hexnums)).setParseAction(tokenMap(int, 16)) + hex_ints.runTests(''' + 00 11 22 aa FF 0a 0d 1a + ''') + + upperword = Word(alphas).setParseAction(tokenMap(str.upper)) + OneOrMore(upperword).runTests(''' + my kingdom for a horse + ''') + + wd = Word(alphas).setParseAction(tokenMap(str.title)) + OneOrMore(wd).setParseAction(' '.join).runTests(''' + now is the winter of our discontent made glorious summer by this sun of york + ''') + prints:: + 00 11 22 aa FF 0a 0d 1a + [0, 17, 34, 170, 255, 10, 13, 26] + + my kingdom for a horse + ['MY', 'KINGDOM', 'FOR', 'A', 'HORSE'] + + now is the winter of our discontent made glorious summer by this sun of york + ['Now Is The Winter Of Our Discontent Made Glorious Summer By This Sun Of York'] + """ + def pa(s,l,t): + return [func(tokn, *args) for tokn in t] -def keepOriginalText(s,startLoc,t): - """DEPRECATED - use new helper method C{L{originalTextFor}}. - Helper parse action to preserve original parsed text, - overriding any nested parse actions.""" try: - endloc = getTokensEndLoc() - except ParseException: - raise ParseFatalException("incorrect usage of keepOriginalText - may only be called as a parse action") - del t[:] - t += ParseResults(s[startLoc:endloc]) - return t + func_name = getattr(func, '__name__', + getattr(func, '__class__').__name__) + except Exception: + func_name = str(func) + pa.__name__ = func_name -def getTokensEndLoc(): - """Method to be called from within a parse action to determine the end - location of the parsed tokens.""" - import inspect - fstack = inspect.stack() - try: - # search up the stack (through intervening argument normalizers) for correct calling routine - for f in fstack[2:]: - if f[3] == "_parseNoCache": - endloc = f[0].f_locals["loc"] - return endloc - else: - raise ParseFatalException("incorrect usage of getTokensEndLoc - may only be called from within a parse action") - finally: - del fstack + return pa +upcaseTokens = tokenMap(lambda t: _ustr(t).upper()) +"""(Deprecated) Helper parse action to convert tokens to upper case. Deprecated in favor of L{pyparsing_common.upcaseTokens}""" + +downcaseTokens = tokenMap(lambda t: _ustr(t).lower()) +"""(Deprecated) Helper parse action to convert tokens to lower case. 
Deprecated in favor of L{pyparsing_common.downcaseTokens}""" + def _makeTags(tagStr, xml): """Internal helper to construct opening and closing tag expressions, given a tag name""" if isinstance(tagStr,basestring): @@ -3508,40 +4854,90 @@ def _makeTags(tagStr, xml): Optional("/",default=[False]).setResultsName("empty").setParseAction(lambda s,l,t:t[0]=='/') + Suppress(">") closeTag = Combine(_L("") - openTag = openTag.setResultsName("start"+"".join(resname.replace(":"," ").title().split())).setName("<%s>" % tagStr) - closeTag = closeTag.setResultsName("end"+"".join(resname.replace(":"," ").title().split())).setName("" % tagStr) + openTag = openTag.setResultsName("start"+"".join(resname.replace(":"," ").title().split())).setName("<%s>" % resname) + closeTag = closeTag.setResultsName("end"+"".join(resname.replace(":"," ").title().split())).setName("" % resname) openTag.tag = resname closeTag.tag = resname return openTag, closeTag def makeHTMLTags(tagStr): - """Helper to construct opening and closing tag expressions for HTML, given a tag name""" + """ + Helper to construct opening and closing tag expressions for HTML, given a tag name. Matches + tags in either upper or lower case, attributes with namespaces and with quoted or unquoted values. + + Example:: + text = 'More info at the pyparsing wiki page' + # makeHTMLTags returns pyparsing expressions for the opening and closing tags as a 2-tuple + a,a_end = makeHTMLTags("A") + link_expr = a + SkipTo(a_end)("link_text") + a_end + + for link in link_expr.searchString(text): + # attributes in the tag (like "href" shown here) are also accessible as named results + print(link.link_text, '->', link.href) + prints:: + pyparsing -> http://pyparsing.wikispaces.com + """ return _makeTags( tagStr, False ) def makeXMLTags(tagStr): - """Helper to construct opening and closing tag expressions for XML, given a tag name""" + """ + Helper to construct opening and closing tag expressions for XML, given a tag name. Matches + tags only in the given upper/lower case. + + Example: similar to L{makeHTMLTags} + """ return _makeTags( tagStr, True ) def withAttribute(*args,**attrDict): - """Helper to create a validating parse action to be used with start tags created - with C{L{makeXMLTags}} or C{L{makeHTMLTags}}. Use C{withAttribute} to qualify a starting tag - with a required attribute value, to avoid false matches on common tags such as - C{} or C{
}. + """ + Helper to create a validating parse action to be used with start tags created + with C{L{makeXMLTags}} or C{L{makeHTMLTags}}. Use C{withAttribute} to qualify a starting tag + with a required attribute value, to avoid false matches on common tags such as + C{} or C{
}. - Call C{withAttribute} with a series of attribute names and values. Specify the list - of filter attributes names and values as: - - keyword arguments, as in C{(align="right")}, or - - as an explicit dict with C{**} operator, when an attribute name is also a Python + Call C{withAttribute} with a series of attribute names and values. Specify the list + of filter attributes names and values as: + - keyword arguments, as in C{(align="right")}, or + - as an explicit dict with C{**} operator, when an attribute name is also a Python reserved word, as in C{**{"class":"Customer", "align":"right"}} - - a list of name-value tuples, as in ( ("ns1:class", "Customer"), ("ns2:align","right") ) - For attribute names with a namespace prefix, you must use the second form. Attribute - names are matched insensitive to upper/lower case. + - a list of name-value tuples, as in ( ("ns1:class", "Customer"), ("ns2:align","right") ) + For attribute names with a namespace prefix, you must use the second form. Attribute + names are matched insensitive to upper/lower case. - If just testing for C{class} (with or without a namespace), use C{L{withClass}}. + If just testing for C{class} (with or without a namespace), use C{L{withClass}}. - To verify that the attribute exists, but without specifying a value, pass - C{withAttribute.ANY_VALUE} as the value. - """ + To verify that the attribute exists, but without specifying a value, pass + C{withAttribute.ANY_VALUE} as the value. + + Example:: + html = ''' +
+                <div>
+                Some text
+                <div type="grid">1 4 0 1 0</div>
+                <div type="graph">1,3 2,3 1,1</div>
+                <div>this has no type</div>
+                </div>
+ + ''' + div,div_end = makeHTMLTags("div") + + # only match div tag having a type attribute with value "grid" + div_grid = div().setParseAction(withAttribute(type="grid")) + grid_expr = div_grid + SkipTo(div | div_end)("body") + for grid_header in grid_expr.searchString(html): + print(grid_header.body) + + # construct a match with any div tag having a type attribute, regardless of the value + div_any_type = div().setParseAction(withAttribute(type=withAttribute.ANY_VALUE)) + div_expr = div_any_type + SkipTo(div | div_end)("body") + for div_header in div_expr.searchString(html): + print(div_header.body) + prints:: + 1 4 0 1 0 + + 1 4 0 1 0 + 1,3 2,3 1,1 + """ if args: attrs = args[:] else: @@ -3558,9 +4954,37 @@ def withAttribute(*args,**attrDict): withAttribute.ANY_VALUE = object() def withClass(classname, namespace=''): - """Simplified version of C{L{withAttribute}} when matching on a div class - made - difficult because C{class} is a reserved word in Python. - """ + """ + Simplified version of C{L{withAttribute}} when matching on a div class - made + difficult because C{class} is a reserved word in Python. + + Example:: + html = ''' +
+                <div>
+                Some text
+                <div class="grid">1 4 0 1 0</div>
+                <div class="graph">1,3 2,3 1,1</div>
+                <div>this &lt;div&gt; has no class</div>
+                </div>
+ + ''' + div,div_end = makeHTMLTags("div") + div_grid = div().setParseAction(withClass("grid")) + + grid_expr = div_grid + SkipTo(div | div_end)("body") + for grid_header in grid_expr.searchString(html): + print(grid_header.body) + + div_any_type = div().setParseAction(withClass(withAttribute.ANY_VALUE)) + div_expr = div_any_type + SkipTo(div | div_end)("body") + for div_header in div_expr.searchString(html): + print(div_header.body) + prints:: + 1 4 0 1 0 + + 1 4 0 1 0 + 1,3 2,3 1,1 + """ classattr = "%s:class" % namespace if namespace else "class" return withAttribute(**{classattr : classname}) @@ -3569,40 +4993,74 @@ opAssoc.LEFT = object() opAssoc.RIGHT = object() def infixNotation( baseExpr, opList, lpar=Suppress('('), rpar=Suppress(')') ): - """Helper method for constructing grammars of expressions made up of - operators working in a precedence hierarchy. Operators may be unary or - binary, left- or right-associative. Parse actions can also be attached - to operator expressions. + """ + Helper method for constructing grammars of expressions made up of + operators working in a precedence hierarchy. Operators may be unary or + binary, left- or right-associative. Parse actions can also be attached + to operator expressions. The generated parser will also recognize the use + of parentheses to override operator precedences (see example below). + + Note: if you define a deep operator list, you may see performance issues + when using infixNotation. See L{ParserElement.enablePackrat} for a + mechanism to potentially improve your parser performance. - Parameters: - - baseExpr - expression representing the most basic element for the nested - - opList - list of tuples, one for each operator precedence level in the - expression grammar; each tuple is of the form - (opExpr, numTerms, rightLeftAssoc, parseAction), where: - - opExpr is the pyparsing expression for the operator; - may also be a string, which will be converted to a Literal; - if numTerms is 3, opExpr is a tuple of two expressions, for the - two operators separating the 3 terms - - numTerms is the number of terms for this operator (must - be 1, 2, or 3) - - rightLeftAssoc is the indicator whether the operator is - right or left associative, using the pyparsing-defined - constants C{opAssoc.RIGHT} and C{opAssoc.LEFT}. - - parseAction is the parse action to be associated with - expressions matching this operator expression (the - parse action tuple member may be omitted) - - lpar - expression for matching left-parentheses (default=Suppress('(')) - - rpar - expression for matching right-parentheses (default=Suppress(')')) + Parameters: + - baseExpr - expression representing the most basic element for the nested + - opList - list of tuples, one for each operator precedence level in the + expression grammar; each tuple is of the form + (opExpr, numTerms, rightLeftAssoc, parseAction), where: + - opExpr is the pyparsing expression for the operator; + may also be a string, which will be converted to a Literal; + if numTerms is 3, opExpr is a tuple of two expressions, for the + two operators separating the 3 terms + - numTerms is the number of terms for this operator (must + be 1, 2, or 3) + - rightLeftAssoc is the indicator whether the operator is + right or left associative, using the pyparsing-defined + constants C{opAssoc.RIGHT} and C{opAssoc.LEFT}. 
+ - parseAction is the parse action to be associated with + expressions matching this operator expression (the + parse action tuple member may be omitted) + - lpar - expression for matching left-parentheses (default=C{Suppress('(')}) + - rpar - expression for matching right-parentheses (default=C{Suppress(')')}) + + Example:: + # simple example of four-function arithmetic with ints and variable names + integer = pyparsing_common.signed_integer + varname = pyparsing_common.identifier + + arith_expr = infixNotation(integer | varname, + [ + ('-', 1, opAssoc.RIGHT), + (oneOf('* /'), 2, opAssoc.LEFT), + (oneOf('+ -'), 2, opAssoc.LEFT), + ]) + + arith_expr.runTests(''' + 5+3*6 + (5+3)*6 + -2--11 + ''', fullDump=False) + prints:: + 5+3*6 + [[5, '+', [3, '*', 6]]] + + (5+3)*6 + [[[5, '+', 3], '*', 6]] + + -2--11 + [[['-', 2], '-', ['-', 11]]] """ ret = Forward() lastExpr = baseExpr | ( lpar + ret + rpar ) for i,operDef in enumerate(opList): opExpr,arity,rightLeftAssoc,pa = (operDef + (None,))[:4] + termName = "%s term" % opExpr if arity < 3 else "%s%s term" % opExpr if arity == 3: if opExpr is None or len(opExpr) != 2: raise ValueError("if numterms=3, opExpr must be a tuple or list of two expressions") opExpr1, opExpr2 = opExpr - thisExpr = Forward()#.setName("expr%d" % i) + thisExpr = Forward().setName(termName) if rightLeftAssoc == opAssoc.LEFT: if arity == 1: matchExpr = FollowedBy(lastExpr + opExpr) + Group( lastExpr + OneOrMore( opExpr ) ) @@ -3636,37 +5094,77 @@ def infixNotation( baseExpr, opList, lpar=Suppress('('), rpar=Suppress(')') ): raise ValueError("operator must indicate right or left associativity") if pa: matchExpr.setParseAction( pa ) - thisExpr <<= ( matchExpr | lastExpr ) + thisExpr <<= ( matchExpr.setName(termName) | lastExpr ) lastExpr = thisExpr ret <<= lastExpr return ret -operatorPrecedence = infixNotation -dblQuotedString = Regex(r'"(?:[^"\n\r\\]|(?:"")|(?:\\x[0-9a-fA-F]+)|(?:\\.))*"').setName("string enclosed in double quotes") -sglQuotedString = Regex(r"'(?:[^'\n\r\\]|(?:'')|(?:\\x[0-9a-fA-F]+)|(?:\\.))*'").setName("string enclosed in single quotes") -quotedString = Regex(r'''(?:"(?:[^"\n\r\\]|(?:"")|(?:\\x[0-9a-fA-F]+)|(?:\\.))*")|(?:'(?:[^'\n\r\\]|(?:'')|(?:\\x[0-9a-fA-F]+)|(?:\\.))*')''').setName("quotedString using single or double quotes") -unicodeString = Combine(_L('u') + quotedString.copy()) +operatorPrecedence = infixNotation +"""(Deprecated) Former name of C{L{infixNotation}}, will be dropped in a future release.""" + +dblQuotedString = Combine(Regex(r'"(?:[^"\n\r\\]|(?:"")|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*')+'"').setName("string enclosed in double quotes") +sglQuotedString = Combine(Regex(r"'(?:[^'\n\r\\]|(?:'')|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*")+"'").setName("string enclosed in single quotes") +quotedString = Combine(Regex(r'"(?:[^"\n\r\\]|(?:"")|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*')+'"'| + Regex(r"'(?:[^'\n\r\\]|(?:'')|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*")+"'").setName("quotedString using single or double quotes") +unicodeString = Combine(_L('u') + quotedString.copy()).setName("unicode string literal") def nestedExpr(opener="(", closer=")", content=None, ignoreExpr=quotedString.copy()): - """Helper method for defining nested lists enclosed in opening and closing - delimiters ("(" and ")" are the default). + """ + Helper method for defining nested lists enclosed in opening and closing + delimiters ("(" and ")" are the default). 
- Parameters: - - opener - opening character for a nested list (default="("); can also be a pyparsing expression - - closer - closing character for a nested list (default=")"); can also be a pyparsing expression - - content - expression for items within the nested lists (default=None) - - ignoreExpr - expression for ignoring opening and closing delimiters (default=quotedString) + Parameters: + - opener - opening character for a nested list (default=C{"("}); can also be a pyparsing expression + - closer - closing character for a nested list (default=C{")"}); can also be a pyparsing expression + - content - expression for items within the nested lists (default=C{None}) + - ignoreExpr - expression for ignoring opening and closing delimiters (default=C{quotedString}) - If an expression is not provided for the content argument, the nested - expression will capture all whitespace-delimited content between delimiters - as a list of separate values. + If an expression is not provided for the content argument, the nested + expression will capture all whitespace-delimited content between delimiters + as a list of separate values. - Use the C{ignoreExpr} argument to define expressions that may contain - opening or closing characters that should not be treated as opening - or closing characters for nesting, such as quotedString or a comment - expression. Specify multiple expressions using an C{L{Or}} or C{L{MatchFirst}}. - The default is L{quotedString}, but if no expressions are to be ignored, - then pass C{None} for this argument. + Use the C{ignoreExpr} argument to define expressions that may contain + opening or closing characters that should not be treated as opening + or closing characters for nesting, such as quotedString or a comment + expression. Specify multiple expressions using an C{L{Or}} or C{L{MatchFirst}}. + The default is L{quotedString}, but if no expressions are to be ignored, + then pass C{None} for this argument. 
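[Editor's note: illustrative sketch, not part of the patch.] Before the larger C-function example that follows in the docstring, a smaller sketch of C{nestedExpr} with its default delimiters (the `sexp` name is invented):

    from pyparsing import nestedExpr

    # with no content expression, whitespace-delimited words between the
    # delimiters are returned as nested lists
    sexp = nestedExpr("(", ")")
    print(sexp.parseString("(define (square x) (* x x))").asList())
    # -> [['define', ['square', 'x'], ['*', 'x', 'x']]]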
+ + Example:: + data_type = oneOf("void int short long char float double") + decl_data_type = Combine(data_type + Optional(Word('*'))) + ident = Word(alphas+'_', alphanums+'_') + number = pyparsing_common.number + arg = Group(decl_data_type + ident) + LPAR,RPAR = map(Suppress, "()") + + code_body = nestedExpr('{', '}', ignoreExpr=(quotedString | cStyleComment)) + + c_function = (decl_data_type("type") + + ident("name") + + LPAR + Optional(delimitedList(arg), [])("args") + RPAR + + code_body("body")) + c_function.ignore(cStyleComment) + + source_code = ''' + int is_odd(int x) { + return (x%2); + } + + int dec_to_hex(char hchar) { + if (hchar >= '0' && hchar <= '9') { + return (ord(hchar)-ord('0')); + } else { + return (10+ord(hchar)-ord('A')); + } + } + ''' + for func in c_function.searchString(source_code): + print("%(name)s (%(type)s) args: %(args)s" % func) + + prints:: + is_odd (int) args: [['int', 'x']] + dec_to_hex (int) args: [['char', 'hchar']] """ if opener == closer: raise ValueError("opening and closing strings cannot be the same") @@ -3697,23 +5195,86 @@ def nestedExpr(opener="(", closer=")", content=None, ignoreExpr=quotedString.cop ret <<= Group( Suppress(opener) + ZeroOrMore( ignoreExpr | ret | content ) + Suppress(closer) ) else: ret <<= Group( Suppress(opener) + ZeroOrMore( ret | content ) + Suppress(closer) ) + ret.setName('nested %s%s expression' % (opener,closer)) return ret def indentedBlock(blockStatementExpr, indentStack, indent=True): - """Helper method for defining space-delimited indentation blocks, such as - those used to define block statements in Python source code. + """ + Helper method for defining space-delimited indentation blocks, such as + those used to define block statements in Python source code. - Parameters: - - blockStatementExpr - expression defining syntax of statement that + Parameters: + - blockStatementExpr - expression defining syntax of statement that is repeated within the indented block - - indentStack - list created by caller to manage indentation stack + - indentStack - list created by caller to manage indentation stack (multiple statementWithIndentedBlock expressions within a single grammar should share a common indentStack) - - indent - boolean indicating whether block must be indented beyond the + - indent - boolean indicating whether block must be indented beyond the the current level; set to False for block of left-most statements - (default=True) + (default=C{True}) - A valid block must contain at least one C{blockStatement}. + A valid block must contain at least one C{blockStatement}. 
+ + Example:: + data = ''' + def A(z): + A1 + B = 100 + G = A2 + A2 + A3 + B + def BB(a,b,c): + BB1 + def BBA(): + bba1 + bba2 + bba3 + C + D + def spam(x,y): + def eggs(z): + pass + ''' + + + indentStack = [1] + stmt = Forward() + + identifier = Word(alphas, alphanums) + funcDecl = ("def" + identifier + Group( "(" + Optional( delimitedList(identifier) ) + ")" ) + ":") + func_body = indentedBlock(stmt, indentStack) + funcDef = Group( funcDecl + func_body ) + + rvalue = Forward() + funcCall = Group(identifier + "(" + Optional(delimitedList(rvalue)) + ")") + rvalue << (funcCall | identifier | Word(nums)) + assignment = Group(identifier + "=" + rvalue) + stmt << ( funcDef | assignment | identifier ) + + module_body = OneOrMore(stmt) + + parseTree = module_body.parseString(data) + parseTree.pprint() + prints:: + [['def', + 'A', + ['(', 'z', ')'], + ':', + [['A1'], [['B', '=', '100']], [['G', '=', 'A2']], ['A2'], ['A3']]], + 'B', + ['def', + 'BB', + ['(', 'a', 'b', 'c', ')'], + ':', + [['BB1'], [['def', 'BBA', ['(', ')'], ':', [['bba1'], ['bba2'], ['bba3']]]]]], + 'C', + 'D', + ['def', + 'spam', + ['(', 'x', 'y', ')'], + ':', + [[['def', 'eggs', ['(', 'z', ')'], ':', [['pass']]]]]]] """ def checkPeerIndent(s,l,t): if l >= len(s): return @@ -3738,9 +5299,9 @@ def indentedBlock(blockStatementExpr, indentStack, indent=True): indentStack.pop() NL = OneOrMore(LineEnd().setWhitespaceChars("\t ").suppress()) - INDENT = Empty() + Empty().setParseAction(checkSubIndent) - PEER = Empty().setParseAction(checkPeerIndent) - UNDENT = Empty().setParseAction(checkUnindent) + INDENT = (Empty() + Empty().setParseAction(checkSubIndent)).setName('INDENT') + PEER = Empty().setParseAction(checkPeerIndent).setName('') + UNDENT = Empty().setParseAction(checkUnindent).setName('UNINDENT') if indent: smExpr = Group( Optional(NL) + #~ FollowedBy(blockStatementExpr) + @@ -3749,57 +5310,387 @@ def indentedBlock(blockStatementExpr, indentStack, indent=True): smExpr = Group( Optional(NL) + (OneOrMore( PEER + Group(blockStatementExpr) + Optional(NL) )) ) blockStatementExpr.ignore(_bslash + LineEnd()) - return smExpr + return smExpr.setName('indented block') alphas8bit = srange(r"[\0xc0-\0xd6\0xd8-\0xf6\0xf8-\0xff]") punc8bit = srange(r"[\0xa1-\0xbf\0xd7\0xf7]") -anyOpenTag,anyCloseTag = makeHTMLTags(Word(alphas,alphanums+"_:")) -commonHTMLEntity = Combine(_L("&") + oneOf("gt lt amp nbsp quot").setResultsName("entity") +";").streamline() -_htmlEntityMap = dict(zip("gt lt amp nbsp quot".split(),'><& "')) -replaceHTMLEntity = lambda t : t.entity in _htmlEntityMap and _htmlEntityMap[t.entity] or None +anyOpenTag,anyCloseTag = makeHTMLTags(Word(alphas,alphanums+"_:").setName('any tag')) +_htmlEntityMap = dict(zip("gt lt amp nbsp quot apos".split(),'><& "\'')) +commonHTMLEntity = Regex('&(?P' + '|'.join(_htmlEntityMap.keys()) +");").setName("common HTML entity") +def replaceHTMLEntity(t): + """Helper parser action to replace common HTML entities with their special characters""" + return _htmlEntityMap.get(t.entity) # it's easy to get these comment structures wrong - they're very common, so may as well make them available -cStyleComment = Regex(r"/\*(?:[^*]*\*+)+?/").setName("C style comment") +cStyleComment = Combine(Regex(r"/\*(?:[^*]|\*(?!/))*") + '*/').setName("C style comment") +"Comment of the form C{/* ... 
*/}" -htmlComment = Regex(r"") -restOfLine = Regex(r".*").leaveWhitespace() -dblSlashComment = Regex(r"\/\/(\\\n|.)*").setName("// comment") -cppStyleComment = Regex(r"/(?:\*(?:[^*]*\*+)+?/|/[^\n]*(?:\n[^\n]*)*?(?:(?").setName("HTML comment") +"Comment of the form C{}" + +restOfLine = Regex(r".*").leaveWhitespace().setName("rest of line") +dblSlashComment = Regex(r"//(?:\\\n|[^\n])*").setName("// comment") +"Comment of the form C{// ... (to end of line)}" + +cppStyleComment = Combine(Regex(r"/\*(?:[^*]|\*(?!/))*") + '*/'| dblSlashComment).setName("C++ style comment") +"Comment of either form C{L{cStyleComment}} or C{L{dblSlashComment}}" javaStyleComment = cppStyleComment +"Same as C{L{cppStyleComment}}" + pythonStyleComment = Regex(r"#.*").setName("Python style comment") +"Comment of the form C{# ... (to end of line)}" + _commasepitem = Combine(OneOrMore(Word(printables, excludeChars=',') + Optional( Word(" \t") + ~Literal(",") + ~LineEnd() ) ) ).streamline().setName("commaItem") commaSeparatedList = delimitedList( Optional( quotedString.copy() | _commasepitem, default="") ).setName("commaSeparatedList") +"""(Deprecated) Predefined expression of 1 or more printable words or quoted strings, separated by commas. + This expression is deprecated in favor of L{pyparsing_common.comma_separated_list}.""" + +# some other useful expressions - using lower-case class name since we are really using this as a namespace +class pyparsing_common: + """ + Here are some common low-level expressions that may be useful in jump-starting parser development: + - numeric forms (L{integers}, L{reals}, L{scientific notation}) + - common L{programming identifiers} + - network addresses (L{MAC}, L{IPv4}, L{IPv6}) + - ISO8601 L{dates} and L{datetime} + - L{UUID} + - L{comma-separated list} + Parse actions: + - C{L{convertToInteger}} + - C{L{convertToFloat}} + - C{L{convertToDate}} + - C{L{convertToDatetime}} + - C{L{stripHTMLTags}} + - C{L{upcaseTokens}} + - C{L{downcaseTokens}} + + Example:: + pyparsing_common.number.runTests(''' + # any int or real number, returned as the appropriate type + 100 + -100 + +100 + 3.14159 + 6.02e23 + 1e-12 + ''') + + pyparsing_common.fnumber.runTests(''' + # any int or real number, returned as float + 100 + -100 + +100 + 3.14159 + 6.02e23 + 1e-12 + ''') + + pyparsing_common.hex_integer.runTests(''' + # hex numbers + 100 + FF + ''') + + pyparsing_common.fraction.runTests(''' + # fractions + 1/2 + -3/4 + ''') + + pyparsing_common.mixed_integer.runTests(''' + # mixed fractions + 1 + 1/2 + -3/4 + 1-3/4 + ''') + + import uuid + pyparsing_common.uuid.setParseAction(tokenMap(uuid.UUID)) + pyparsing_common.uuid.runTests(''' + # uuid + 12345678-1234-5678-1234-567812345678 + ''') + prints:: + # any int or real number, returned as the appropriate type + 100 + [100] + + -100 + [-100] + + +100 + [100] + + 3.14159 + [3.14159] + + 6.02e23 + [6.02e+23] + + 1e-12 + [1e-12] + + # any int or real number, returned as float + 100 + [100.0] + + -100 + [-100.0] + + +100 + [100.0] + + 3.14159 + [3.14159] + + 6.02e23 + [6.02e+23] + + 1e-12 + [1e-12] + + # hex numbers + 100 + [256] + + FF + [255] + + # fractions + 1/2 + [0.5] + + -3/4 + [-0.75] + + # mixed fractions + 1 + [1] + + 1/2 + [0.5] + + -3/4 + [-0.75] + + 1-3/4 + [1.75] + + # uuid + 12345678-1234-5678-1234-567812345678 + [UUID('12345678-1234-5678-1234-567812345678')] + """ + + convertToInteger = tokenMap(int) + """ + Parse action for converting parsed integers to Python int + """ + + convertToFloat = tokenMap(float) + """ + Parse action for converting 
parsed numbers to Python float + """ + + integer = Word(nums).setName("integer").setParseAction(convertToInteger) + """expression that parses an unsigned integer, returns an int""" + + hex_integer = Word(hexnums).setName("hex integer").setParseAction(tokenMap(int,16)) + """expression that parses a hexadecimal integer, returns an int""" + + signed_integer = Regex(r'[+-]?\d+').setName("signed integer").setParseAction(convertToInteger) + """expression that parses an integer with optional leading sign, returns an int""" + + fraction = (signed_integer().setParseAction(convertToFloat) + '/' + signed_integer().setParseAction(convertToFloat)).setName("fraction") + """fractional expression of an integer divided by an integer, returns a float""" + fraction.addParseAction(lambda t: t[0]/t[-1]) + + mixed_integer = (fraction | signed_integer + Optional(Optional('-').suppress() + fraction)).setName("fraction or mixed integer-fraction") + """mixed integer of the form 'integer - fraction', with optional leading integer, returns float""" + mixed_integer.addParseAction(sum) + + real = Regex(r'[+-]?\d+\.\d*').setName("real number").setParseAction(convertToFloat) + """expression that parses a floating point number and returns a float""" + + sci_real = Regex(r'[+-]?\d+([eE][+-]?\d+|\.\d*([eE][+-]?\d+)?)').setName("real number with scientific notation").setParseAction(convertToFloat) + """expression that parses a floating point number with optional scientific notation and returns a float""" + + # streamlining this expression makes the docs nicer-looking + number = (sci_real | real | signed_integer).streamline() + """any numeric expression, returns the corresponding Python type""" + + fnumber = Regex(r'[+-]?\d+\.?\d*([eE][+-]?\d+)?').setName("fnumber").setParseAction(convertToFloat) + """any int or real number, returned as float""" + + identifier = Word(alphas+'_', alphanums+'_').setName("identifier") + """typical code identifier (leading alpha or '_', followed by 0 or more alphas, nums, or '_')""" + + ipv4_address = Regex(r'(25[0-5]|2[0-4][0-9]|1?[0-9]{1,2})(\.(25[0-5]|2[0-4][0-9]|1?[0-9]{1,2})){3}').setName("IPv4 address") + "IPv4 address (C{0.0.0.0 - 255.255.255.255})" + + _ipv6_part = Regex(r'[0-9a-fA-F]{1,4}').setName("hex_integer") + _full_ipv6_address = (_ipv6_part + (':' + _ipv6_part)*7).setName("full IPv6 address") + _short_ipv6_address = (Optional(_ipv6_part + (':' + _ipv6_part)*(0,6)) + "::" + Optional(_ipv6_part + (':' + _ipv6_part)*(0,6))).setName("short IPv6 address") + _short_ipv6_address.addCondition(lambda t: sum(1 for tt in t if pyparsing_common._ipv6_part.matches(tt)) < 8) + _mixed_ipv6_address = ("::ffff:" + ipv4_address).setName("mixed IPv6 address") + ipv6_address = Combine((_full_ipv6_address | _mixed_ipv6_address | _short_ipv6_address).setName("IPv6 address")).setName("IPv6 address") + "IPv6 address (long, short, or mixed form)" + + mac_address = Regex(r'[0-9a-fA-F]{2}([:.-])[0-9a-fA-F]{2}(?:\1[0-9a-fA-F]{2}){4}').setName("MAC address") + "MAC address xx:xx:xx:xx:xx (may also have '-' or '.' 
delimiters)" + + @staticmethod + def convertToDate(fmt="%Y-%m-%d"): + """ + Helper to create a parse action for converting parsed date string to Python datetime.date + + Params - + - fmt - format to be passed to datetime.strptime (default=C{"%Y-%m-%d"}) + + Example:: + date_expr = pyparsing_common.iso8601_date.copy() + date_expr.setParseAction(pyparsing_common.convertToDate()) + print(date_expr.parseString("1999-12-31")) + prints:: + [datetime.date(1999, 12, 31)] + """ + def cvt_fn(s,l,t): + try: + return datetime.strptime(t[0], fmt).date() + except ValueError as ve: + raise ParseException(s, l, str(ve)) + return cvt_fn + + @staticmethod + def convertToDatetime(fmt="%Y-%m-%dT%H:%M:%S.%f"): + """ + Helper to create a parse action for converting parsed datetime string to Python datetime.datetime + + Params - + - fmt - format to be passed to datetime.strptime (default=C{"%Y-%m-%dT%H:%M:%S.%f"}) + + Example:: + dt_expr = pyparsing_common.iso8601_datetime.copy() + dt_expr.setParseAction(pyparsing_common.convertToDatetime()) + print(dt_expr.parseString("1999-12-31T23:59:59.999")) + prints:: + [datetime.datetime(1999, 12, 31, 23, 59, 59, 999000)] + """ + def cvt_fn(s,l,t): + try: + return datetime.strptime(t[0], fmt) + except ValueError as ve: + raise ParseException(s, l, str(ve)) + return cvt_fn + + iso8601_date = Regex(r'(?P\d{4})(?:-(?P\d\d)(?:-(?P\d\d))?)?').setName("ISO8601 date") + "ISO8601 date (C{yyyy-mm-dd})" + + iso8601_datetime = Regex(r'(?P\d{4})-(?P\d\d)-(?P\d\d)[T ](?P\d\d):(?P\d\d)(:(?P\d\d(\.\d*)?)?)?(?PZ|[+-]\d\d:?\d\d)?').setName("ISO8601 datetime") + "ISO8601 datetime (C{yyyy-mm-ddThh:mm:ss.s(Z|+-00:00)}) - trailing seconds, milliseconds, and timezone optional; accepts separating C{'T'} or C{' '}" + + uuid = Regex(r'[0-9a-fA-F]{8}(-[0-9a-fA-F]{4}){3}-[0-9a-fA-F]{12}').setName("UUID") + "UUID (C{xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx})" + + _html_stripper = anyOpenTag.suppress() | anyCloseTag.suppress() + @staticmethod + def stripHTMLTags(s, l, tokens): + """ + Parse action to remove HTML tags from web page HTML source + + Example:: + # strip HTML links from normal text + text = 'More info at the
pyparsing wiki page' + td,td_end = makeHTMLTags("TD") + table_text = td + SkipTo(td_end).setParseAction(pyparsing_common.stripHTMLTags)("body") + td_end + + print(table_text.parseString(text).body) # -> 'More info at the pyparsing wiki page' + """ + return pyparsing_common._html_stripper.transformString(tokens[0]) + + _commasepitem = Combine(OneOrMore(~Literal(",") + ~LineEnd() + Word(printables, excludeChars=',') + + Optional( White(" \t") ) ) ).streamline().setName("commaItem") + comma_separated_list = delimitedList( Optional( quotedString.copy() | _commasepitem, default="") ).setName("comma separated list") + """Predefined expression of 1 or more printable words or quoted strings, separated by commas.""" + + upcaseTokens = staticmethod(tokenMap(lambda t: _ustr(t).upper())) + """Parse action to convert tokens to upper case.""" + + downcaseTokens = staticmethod(tokenMap(lambda t: _ustr(t).lower())) + """Parse action to convert tokens to lower case.""" if __name__ == "__main__": - selectToken = CaselessLiteral( "select" ) - fromToken = CaselessLiteral( "from" ) + selectToken = CaselessLiteral("select") + fromToken = CaselessLiteral("from") - ident = Word( alphas, alphanums + "_$" ) - columnName = delimitedList( ident, ".", combine=True ).setParseAction( upcaseTokens ) - columnNameList = Group( delimitedList( columnName ) ).setName("columns") - tableName = delimitedList( ident, ".", combine=True ).setParseAction( upcaseTokens ) - tableNameList = Group( delimitedList( tableName ) ).setName("tables") - simpleSQL = ( selectToken + \ - ( '*' | columnNameList ).setResultsName( "columns" ) + \ - fromToken + \ - tableNameList.setResultsName( "tables" ) ) + ident = Word(alphas, alphanums + "_$") - simpleSQL.runTests("""\ - SELECT * from XYZZY, ABC - select * from SYS.XYZZY - Select A from Sys.dual - Select AA,BB,CC from Sys.dual - Select A, B, C from Sys.dual - Select A, B, C from Sys.dual - Xelect A, B, C from Sys.dual - Select A, B, C frox Sys.dual - Select - Select ^^^ frox Sys.dual - Select A, B, C from Sys.dual, Table2""") + columnName = delimitedList(ident, ".", combine=True).setParseAction(upcaseTokens) + columnNameList = Group(delimitedList(columnName)).setName("columns") + columnSpec = ('*' | columnNameList) + + tableName = delimitedList(ident, ".", combine=True).setParseAction(upcaseTokens) + tableNameList = Group(delimitedList(tableName)).setName("tables") + simpleSQL = selectToken("command") + columnSpec("columns") + fromToken + tableNameList("tables") + + # demo runTests method, including embedded comments in test string + simpleSQL.runTests(""" + # '*' as column list and dotted table name + select * from SYS.XYZZY + + # caseless match on "SELECT", and casts back to "select" + SELECT * from XYZZY, ABC + + # list of column names, and mixed case SELECT keyword + Select AA,BB,CC from Sys.dual + + # multiple tables + Select A, B, C from Sys.dual, Table2 + + # invalid SELECT keyword - should fail + Xelect A, B, C from Sys.dual + + # incomplete command - should fail + Select + + # invalid column name - should fail + Select ^^^ frox Sys.dual + + """) + + pyparsing_common.number.runTests(""" + 100 + -100 + +100 + 3.14159 + 6.02e23 + 1e-12 + """) + + # any int or real number, returned as float + pyparsing_common.fnumber.runTests(""" + 100 + -100 + +100 + 3.14159 + 6.02e23 + 1e-12 + """) + + pyparsing_common.hex_integer.runTests(""" + 100 + FF + """) + + import uuid + pyparsing_common.uuid.setParseAction(tokenMap(uuid.UUID)) + pyparsing_common.uuid.runTests(""" + 
12345678-1234-5678-1234-567812345678 + """) diff --git a/lib/python3.4/site-packages/pkg_resources/extern/__init__.py b/lib/python3.4/site-packages/pkg_resources/extern/__init__.py index 317f4b8..b4156fe 100644 --- a/lib/python3.4/site-packages/pkg_resources/extern/__init__.py +++ b/lib/python3.4/site-packages/pkg_resources/extern/__init__.py @@ -6,6 +6,7 @@ class VendorImporter: A PEP 302 meta path importer for finding optionally-vendored or otherwise naturally-installed packages from root_name. """ + def __init__(self, root_name, vendored_names=(), vendor_pkg=None): self.root_name = root_name self.vendored_names = set(vendored_names) @@ -67,5 +68,6 @@ class VendorImporter: if self not in sys.meta_path: sys.meta_path.append(self) -names = 'packaging', 'six' + +names = 'packaging', 'pyparsing', 'six', 'appdirs' VendorImporter(__name__, names).install() diff --git a/lib/python3.4/site-packages/pkg_resources/py31compat.py b/lib/python3.4/site-packages/pkg_resources/py31compat.py new file mode 100644 index 0000000..331a51b --- /dev/null +++ b/lib/python3.4/site-packages/pkg_resources/py31compat.py @@ -0,0 +1,22 @@ +import os +import errno +import sys + + +def _makedirs_31(path, exist_ok=False): + try: + os.makedirs(path) + except OSError as exc: + if not exist_ok or exc.errno != errno.EEXIST: + raise + + +# rely on compatibility behavior until mode considerations +# and exists_ok considerations are disentangled. +# See https://github.com/pypa/setuptools/pull/1083#issuecomment-315168663 +needs_makedirs = ( + sys.version_info < (3, 2, 5) or + (3, 3) <= sys.version_info < (3, 3, 6) or + (3, 4) <= sys.version_info < (3, 4, 1) +) +makedirs = _makedirs_31 if needs_makedirs else os.makedirs diff --git a/lib/python3.4/site-packages/setuptools-20.3.1.dist-info/DESCRIPTION.rst b/lib/python3.4/site-packages/setuptools-20.3.1.dist-info/DESCRIPTION.rst deleted file mode 100644 index c6b6a1c..0000000 --- a/lib/python3.4/site-packages/setuptools-20.3.1.dist-info/DESCRIPTION.rst +++ /dev/null @@ -1,238 +0,0 @@ -=============================== -Installing and Using Setuptools -=============================== - -.. contents:: **Table of Contents** - - -`Change History `_. - -------------------------- -Installation Instructions -------------------------- - -The recommended way to bootstrap setuptools on any system is to download -`ez_setup.py`_ and run it using the target Python environment. Different -operating systems have different recommended techniques to accomplish this -basic routine, so below are some examples to get you started. - -Setuptools requires Python 2.6 or later. To install setuptools -on Python 2.4 or Python 2.5, use the `bootstrap script for Setuptools 1.x -`_. - -The link provided to ez_setup.py is a bookmark to bootstrap script for the -latest known stable release. - -.. _ez_setup.py: https://bootstrap.pypa.io/ez_setup.py - -Windows (Powershell 3 or later) -=============================== - -For best results, uninstall previous versions FIRST (see `Uninstalling`_). - -Using Windows 8 (which includes PowerShell 3) or earlier versions of Windows -with PowerShell 3 installed, it's possible to install with one simple -Powershell command. 
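The pyparsing hunk that closes above vendors the `pyparsing_common` helper namespace (numeric expressions, ISO8601/UUID regexes, and date/datetime parse actions) into `pkg_resources/_vendor/pyparsing.py`. A minimal usage sketch follows; the import path is an assumption for this vendored tree, and a standalone `pyparsing` 2.1+ install exposes the same names. The `runTests` calls in the `__main__` block above exercise the same expressions from the command line.

```python
# Usage sketch for the pyparsing_common helpers added in the hunk above.
# Assumption: importing the vendored copy that ships inside pkg_resources;
# `pip install pyparsing` (2.1+) provides an identical pyparsing_common.
from pkg_resources._vendor.pyparsing import pyparsing_common

# number returns the matching Python type: int for integers, float otherwise
print(pyparsing_common.number.parseString("6.02e23")[0])       # 6.02e+23 (float)
print(pyparsing_common.signed_integer.parseString("-42")[0])   # -42 (int)

# convertToDate() builds a parse action that turns the matched text into datetime.date
date_expr = pyparsing_common.iso8601_date.copy()
date_expr.setParseAction(pyparsing_common.convertToDate())
print(date_expr.parseString("1999-12-31")[0])                  # datetime.date(1999, 12, 31)
```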
Start up Powershell and paste this command:: - - > (Invoke-WebRequest https://bootstrap.pypa.io/ez_setup.py).Content | python - - -You must start the Powershell with Administrative privileges or you may choose -to install a user-local installation:: - - > (Invoke-WebRequest https://bootstrap.pypa.io/ez_setup.py).Content | python - --user - -If you have Python 3.3 or later, you can use the ``py`` command to install to -different Python versions. For example, to install to Python 3.3 if you have -Python 2.7 installed:: - - > (Invoke-WebRequest https://bootstrap.pypa.io/ez_setup.py).Content | py -3 - - -The recommended way to install setuptools on Windows is to download -`ez_setup.py`_ and run it. The script will download the appropriate -distribution file and install it for you. - -Once installation is complete, you will find an ``easy_install`` program in -your Python ``Scripts`` subdirectory. For simple invocation and best results, -add this directory to your ``PATH`` environment variable, if it is not already -present. If you did a user-local install, the ``Scripts`` subdirectory is -``$env:APPDATA\Python\Scripts``. - - -Windows (simplified) -==================== - -For Windows without PowerShell 3 or for installation without a command-line, -download `ez_setup.py`_ using your preferred web browser or other technique -and "run" that file. - - -Unix (wget) -=========== - -Most Linux distributions come with wget. - -Download `ez_setup.py`_ and run it using the target Python version. The script -will download the appropriate version and install it for you:: - - > wget https://bootstrap.pypa.io/ez_setup.py -O - | python - -Note that you will may need to invoke the command with superuser privileges to -install to the system Python:: - - > wget https://bootstrap.pypa.io/ez_setup.py -O - | sudo python - -Alternatively, Setuptools may be installed to a user-local path:: - - > wget https://bootstrap.pypa.io/ez_setup.py -O - | python - --user - -Note that on some older systems (noted on Debian 6 and CentOS 5 installations), -`wget` may refuse to download `ez_setup.py`, complaining that the certificate common name `*.c.ssl.fastly.net` -does not match the host name `bootstrap.pypa.io`. In addition, the `ez_setup.py` script may then encounter similar problems using -`wget` internally to download `setuptools-x.y.zip`, complaining that the certificate common name of `www.python.org` does not match the -host name `pypi.python.org`. Those are known issues, related to a bug in the older versions of `wget` -(see `Issue 59 `_). If you happen to encounter them, -install Setuptools as follows:: - - > wget --no-check-certificate https://bootstrap.pypa.io/ez_setup.py - > python ez_setup.py --insecure - - -Unix including Mac OS X (curl) -============================== - -If your system has curl installed, follow the ``wget`` instructions but -replace ``wget`` with ``curl`` and ``-O`` with ``-o``. For example:: - - > curl https://bootstrap.pypa.io/ez_setup.py -o - | python - - -Advanced Installation -===================== - -For more advanced installation options, such as installing to custom -locations or prefixes, download and extract the source -tarball from `Setuptools on PyPI `_ -and run setup.py with any supported distutils and Setuptools options. 
-For example:: - - setuptools-x.x$ python setup.py install --prefix=/opt/setuptools - -Use ``--help`` to get a full options list, but we recommend consulting -the `EasyInstall manual`_ for detailed instructions, especially `the section -on custom installation locations`_. - -.. _EasyInstall manual: https://pythonhosted.org/setuptools/EasyInstall -.. _the section on custom installation locations: https://pythonhosted.org/setuptools/EasyInstall#custom-installation-locations - - -Downloads -========= - -All setuptools downloads can be found at `the project's home page in the Python -Package Index`_. Scroll to the very bottom of the page to find the links. - -.. _the project's home page in the Python Package Index: https://pypi.python.org/pypi/setuptools - -In addition to the PyPI downloads, the development version of ``setuptools`` -is available from the `Bitbucket repo`_, and in-development versions of the -`0.6 branch`_ are available as well. - -.. _Bitbucket repo: https://bitbucket.org/pypa/setuptools/get/default.tar.gz#egg=setuptools-dev -.. _0.6 branch: http://svn.python.org/projects/sandbox/branches/setuptools-0.6/#egg=setuptools-dev06 - -Uninstalling -============ - -On Windows, if Setuptools was installed using an ``.exe`` or ``.msi`` -installer, simply use the uninstall feature of "Add/Remove Programs" in the -Control Panel. - -Otherwise, to uninstall Setuptools or Distribute, regardless of the Python -version, delete all ``setuptools*`` and ``distribute*`` files and -directories from your system's ``site-packages`` directory -(and any other ``sys.path`` directories) FIRST. - -If you are upgrading or otherwise plan to re-install Setuptools or Distribute, -nothing further needs to be done. If you want to completely remove Setuptools, -you may also want to remove the 'easy_install' and 'easy_install-x.x' scripts -and associated executables installed to the Python scripts directory. - --------------------------------- -Using Setuptools and EasyInstall --------------------------------- - -Here are some of the available manuals, tutorials, and other resources for -learning about Setuptools, Python Eggs, and EasyInstall: - -* `The EasyInstall user's guide and reference manual`_ -* `The setuptools Developer's Guide`_ -* `The pkg_resources API reference`_ -* `The Internal Structure of Python Eggs`_ - -Questions, comments, and bug reports should be directed to the `distutils-sig -mailing list`_. If you have written (or know of) any tutorials, documentation, -plug-ins, or other resources for setuptools users, please let us know about -them there, so this reference list can be updated. If you have working, -*tested* patches to correct problems or add features, you may submit them to -the `setuptools bug tracker`_. - -.. _setuptools bug tracker: https://bitbucket.org/pypa/setuptools/issues -.. _The Internal Structure of Python Eggs: https://pythonhosted.org/setuptools/formats.html -.. _The setuptools Developer's Guide: https://pythonhosted.org/setuptools/setuptools.html -.. _The pkg_resources API reference: https://pythonhosted.org/setuptools/pkg_resources.html -.. _The EasyInstall user's guide and reference manual: https://pythonhosted.org/setuptools/easy_install.html -.. _distutils-sig mailing list: http://mail.python.org/pipermail/distutils-sig/ - - -------- -Credits -------- - -* The original design for the ``.egg`` format and the ``pkg_resources`` API was - co-created by Phillip Eby and Bob Ippolito. 
Bob also implemented the first - version of ``pkg_resources``, and supplied the OS X operating system version - compatibility algorithm. - -* Ian Bicking implemented many early "creature comfort" features of - easy_install, including support for downloading via Sourceforge and - Subversion repositories. Ian's comments on the Web-SIG about WSGI - application deployment also inspired the concept of "entry points" in eggs, - and he has given talks at PyCon and elsewhere to inform and educate the - community about eggs and setuptools. - -* Jim Fulton contributed time and effort to build automated tests of various - aspects of ``easy_install``, and supplied the doctests for the command-line - ``.exe`` wrappers on Windows. - -* Phillip J. Eby is the seminal author of setuptools, and - first proposed the idea of an importable binary distribution format for - Python application plug-ins. - -* Significant parts of the implementation of setuptools were funded by the Open - Source Applications Foundation, to provide a plug-in infrastructure for the - Chandler PIM application. In addition, many OSAF staffers (such as Mike - "Code Bear" Taylor) contributed their time and stress as guinea pigs for the - use of eggs and setuptools, even before eggs were "cool". (Thanks, guys!) - -* Tarek Ziadé is the principal author of the Distribute fork, which - re-invigorated the community on the project, encouraged renewed innovation, - and addressed many defects. - -* Since the merge with Distribute, Jason R. Coombs is the - maintainer of setuptools. The project is maintained in coordination with - the Python Packaging Authority (PyPA) and the larger Python community. - -.. _files: - - ---------------- -Code of Conduct ---------------- - -Everyone interacting in the setuptools project's codebases, issue trackers, -chat rooms, and mailing lists is expected to follow the -`PyPA Code of Conduct`_. - -.. 
_PyPA Code of Conduct: https://www.pypa.io/en/latest/code-of-conduct/ - - diff --git a/lib/python3.4/site-packages/setuptools-20.3.1.dist-info/METADATA b/lib/python3.4/site-packages/setuptools-20.3.1.dist-info/METADATA deleted file mode 100644 index 16d3c14..0000000 --- a/lib/python3.4/site-packages/setuptools-20.3.1.dist-info/METADATA +++ /dev/null @@ -1,267 +0,0 @@ -Metadata-Version: 2.0 -Name: setuptools -Version: 20.3.1 -Summary: Easily download, build, install, upgrade, and uninstall Python packages -Home-page: https://bitbucket.org/pypa/setuptools -Author: Python Packaging Authority -Author-email: distutils-sig@python.org -License: UNKNOWN -Keywords: CPAN PyPI distutils eggs package management -Platform: UNKNOWN -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python :: 2.6 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: System :: Archiving :: Packaging -Classifier: Topic :: System :: Systems Administration -Classifier: Topic :: Utilities -Provides-Extra: certs -Requires-Dist: certifi (==2015.11.20); extra == 'certs' -Provides-Extra: ssl -Requires-Dist: wincertstore (==0.2); sys_platform=='win32' and extra == 'ssl' - -=============================== -Installing and Using Setuptools -=============================== - -.. contents:: **Table of Contents** - - -`Change History `_. - -------------------------- -Installation Instructions -------------------------- - -The recommended way to bootstrap setuptools on any system is to download -`ez_setup.py`_ and run it using the target Python environment. Different -operating systems have different recommended techniques to accomplish this -basic routine, so below are some examples to get you started. - -Setuptools requires Python 2.6 or later. To install setuptools -on Python 2.4 or Python 2.5, use the `bootstrap script for Setuptools 1.x -`_. - -The link provided to ez_setup.py is a bookmark to bootstrap script for the -latest known stable release. - -.. _ez_setup.py: https://bootstrap.pypa.io/ez_setup.py - -Windows (Powershell 3 or later) -=============================== - -For best results, uninstall previous versions FIRST (see `Uninstalling`_). - -Using Windows 8 (which includes PowerShell 3) or earlier versions of Windows -with PowerShell 3 installed, it's possible to install with one simple -Powershell command. Start up Powershell and paste this command:: - - > (Invoke-WebRequest https://bootstrap.pypa.io/ez_setup.py).Content | python - - -You must start the Powershell with Administrative privileges or you may choose -to install a user-local installation:: - - > (Invoke-WebRequest https://bootstrap.pypa.io/ez_setup.py).Content | python - --user - -If you have Python 3.3 or later, you can use the ``py`` command to install to -different Python versions. For example, to install to Python 3.3 if you have -Python 2.7 installed:: - - > (Invoke-WebRequest https://bootstrap.pypa.io/ez_setup.py).Content | py -3 - - -The recommended way to install setuptools on Windows is to download -`ez_setup.py`_ and run it. 
The script will download the appropriate -distribution file and install it for you. - -Once installation is complete, you will find an ``easy_install`` program in -your Python ``Scripts`` subdirectory. For simple invocation and best results, -add this directory to your ``PATH`` environment variable, if it is not already -present. If you did a user-local install, the ``Scripts`` subdirectory is -``$env:APPDATA\Python\Scripts``. - - -Windows (simplified) -==================== - -For Windows without PowerShell 3 or for installation without a command-line, -download `ez_setup.py`_ using your preferred web browser or other technique -and "run" that file. - - -Unix (wget) -=========== - -Most Linux distributions come with wget. - -Download `ez_setup.py`_ and run it using the target Python version. The script -will download the appropriate version and install it for you:: - - > wget https://bootstrap.pypa.io/ez_setup.py -O - | python - -Note that you will may need to invoke the command with superuser privileges to -install to the system Python:: - - > wget https://bootstrap.pypa.io/ez_setup.py -O - | sudo python - -Alternatively, Setuptools may be installed to a user-local path:: - - > wget https://bootstrap.pypa.io/ez_setup.py -O - | python - --user - -Note that on some older systems (noted on Debian 6 and CentOS 5 installations), -`wget` may refuse to download `ez_setup.py`, complaining that the certificate common name `*.c.ssl.fastly.net` -does not match the host name `bootstrap.pypa.io`. In addition, the `ez_setup.py` script may then encounter similar problems using -`wget` internally to download `setuptools-x.y.zip`, complaining that the certificate common name of `www.python.org` does not match the -host name `pypi.python.org`. Those are known issues, related to a bug in the older versions of `wget` -(see `Issue 59 `_). If you happen to encounter them, -install Setuptools as follows:: - - > wget --no-check-certificate https://bootstrap.pypa.io/ez_setup.py - > python ez_setup.py --insecure - - -Unix including Mac OS X (curl) -============================== - -If your system has curl installed, follow the ``wget`` instructions but -replace ``wget`` with ``curl`` and ``-O`` with ``-o``. For example:: - - > curl https://bootstrap.pypa.io/ez_setup.py -o - | python - - -Advanced Installation -===================== - -For more advanced installation options, such as installing to custom -locations or prefixes, download and extract the source -tarball from `Setuptools on PyPI `_ -and run setup.py with any supported distutils and Setuptools options. -For example:: - - setuptools-x.x$ python setup.py install --prefix=/opt/setuptools - -Use ``--help`` to get a full options list, but we recommend consulting -the `EasyInstall manual`_ for detailed instructions, especially `the section -on custom installation locations`_. - -.. _EasyInstall manual: https://pythonhosted.org/setuptools/EasyInstall -.. _the section on custom installation locations: https://pythonhosted.org/setuptools/EasyInstall#custom-installation-locations - - -Downloads -========= - -All setuptools downloads can be found at `the project's home page in the Python -Package Index`_. Scroll to the very bottom of the page to find the links. - -.. _the project's home page in the Python Package Index: https://pypi.python.org/pypi/setuptools - -In addition to the PyPI downloads, the development version of ``setuptools`` -is available from the `Bitbucket repo`_, and in-development versions of the -`0.6 branch`_ are available as well. - -.. 
_Bitbucket repo: https://bitbucket.org/pypa/setuptools/get/default.tar.gz#egg=setuptools-dev -.. _0.6 branch: http://svn.python.org/projects/sandbox/branches/setuptools-0.6/#egg=setuptools-dev06 - -Uninstalling -============ - -On Windows, if Setuptools was installed using an ``.exe`` or ``.msi`` -installer, simply use the uninstall feature of "Add/Remove Programs" in the -Control Panel. - -Otherwise, to uninstall Setuptools or Distribute, regardless of the Python -version, delete all ``setuptools*`` and ``distribute*`` files and -directories from your system's ``site-packages`` directory -(and any other ``sys.path`` directories) FIRST. - -If you are upgrading or otherwise plan to re-install Setuptools or Distribute, -nothing further needs to be done. If you want to completely remove Setuptools, -you may also want to remove the 'easy_install' and 'easy_install-x.x' scripts -and associated executables installed to the Python scripts directory. - --------------------------------- -Using Setuptools and EasyInstall --------------------------------- - -Here are some of the available manuals, tutorials, and other resources for -learning about Setuptools, Python Eggs, and EasyInstall: - -* `The EasyInstall user's guide and reference manual`_ -* `The setuptools Developer's Guide`_ -* `The pkg_resources API reference`_ -* `The Internal Structure of Python Eggs`_ - -Questions, comments, and bug reports should be directed to the `distutils-sig -mailing list`_. If you have written (or know of) any tutorials, documentation, -plug-ins, or other resources for setuptools users, please let us know about -them there, so this reference list can be updated. If you have working, -*tested* patches to correct problems or add features, you may submit them to -the `setuptools bug tracker`_. - -.. _setuptools bug tracker: https://bitbucket.org/pypa/setuptools/issues -.. _The Internal Structure of Python Eggs: https://pythonhosted.org/setuptools/formats.html -.. _The setuptools Developer's Guide: https://pythonhosted.org/setuptools/setuptools.html -.. _The pkg_resources API reference: https://pythonhosted.org/setuptools/pkg_resources.html -.. _The EasyInstall user's guide and reference manual: https://pythonhosted.org/setuptools/easy_install.html -.. _distutils-sig mailing list: http://mail.python.org/pipermail/distutils-sig/ - - -------- -Credits -------- - -* The original design for the ``.egg`` format and the ``pkg_resources`` API was - co-created by Phillip Eby and Bob Ippolito. Bob also implemented the first - version of ``pkg_resources``, and supplied the OS X operating system version - compatibility algorithm. - -* Ian Bicking implemented many early "creature comfort" features of - easy_install, including support for downloading via Sourceforge and - Subversion repositories. Ian's comments on the Web-SIG about WSGI - application deployment also inspired the concept of "entry points" in eggs, - and he has given talks at PyCon and elsewhere to inform and educate the - community about eggs and setuptools. - -* Jim Fulton contributed time and effort to build automated tests of various - aspects of ``easy_install``, and supplied the doctests for the command-line - ``.exe`` wrappers on Windows. - -* Phillip J. Eby is the seminal author of setuptools, and - first proposed the idea of an importable binary distribution format for - Python application plug-ins. 
- -* Significant parts of the implementation of setuptools were funded by the Open - Source Applications Foundation, to provide a plug-in infrastructure for the - Chandler PIM application. In addition, many OSAF staffers (such as Mike - "Code Bear" Taylor) contributed their time and stress as guinea pigs for the - use of eggs and setuptools, even before eggs were "cool". (Thanks, guys!) - -* Tarek Ziadé is the principal author of the Distribute fork, which - re-invigorated the community on the project, encouraged renewed innovation, - and addressed many defects. - -* Since the merge with Distribute, Jason R. Coombs is the - maintainer of setuptools. The project is maintained in coordination with - the Python Packaging Authority (PyPA) and the larger Python community. - -.. _files: - - ---------------- -Code of Conduct ---------------- - -Everyone interacting in the setuptools project's codebases, issue trackers, -chat rooms, and mailing lists is expected to follow the -`PyPA Code of Conduct`_. - -.. _PyPA Code of Conduct: https://www.pypa.io/en/latest/code-of-conduct/ - - diff --git a/lib/python3.4/site-packages/setuptools-20.3.1.dist-info/RECORD b/lib/python3.4/site-packages/setuptools-20.3.1.dist-info/RECORD deleted file mode 100644 index c9d3645..0000000 --- a/lib/python3.4/site-packages/setuptools-20.3.1.dist-info/RECORD +++ /dev/null @@ -1,137 +0,0 @@ -easy_install.py,sha256=MDC9vt5AxDsXX5qcKlBz2TnW6Tpuv_AobnfhCJ9X3PM,126 -pkg_resources/__init__.py,sha256=QyiRSbbmXDCq2O3AIG9V-AjGJinvd3ImWCKsaucVneA,100439 -pkg_resources/_vendor/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -pkg_resources/_vendor/pyparsing.py,sha256=ic8qmDPiq8Li-Y0PeZcI56rEyMqevKNBK6hr6FbyVBc,160425 -pkg_resources/_vendor/six.py,sha256=A6hdJZVjI3t_geebZ9BzUvwRrIXo0lfwzQlM2LcKyas,30098 -pkg_resources/_vendor/packaging/__about__.py,sha256=AEwkfVSNgMMAAugtYao7b7wah9XryokeoXBuIw4h6d8,720 -pkg_resources/_vendor/packaging/__init__.py,sha256=_vNac5TrzwsrzbOFIbF-5cHqc_Y2aPT2D7zrIR06BOo,513 -pkg_resources/_vendor/packaging/_compat.py,sha256=Vi_A0rAQeHbU-a9X0tt1yQm9RqkgQbDSxzRw8WlU9kA,860 -pkg_resources/_vendor/packaging/_structures.py,sha256=RImECJ4c_wTlaTYYwZYLHEiebDMaAJmK1oPARhw1T5o,1416 -pkg_resources/_vendor/packaging/markers.py,sha256=0Z2in1kNfYn93n9uJj0hNEmu-sJpEQpa_qAbxpYXdS4,7359 -pkg_resources/_vendor/packaging/requirements.py,sha256=SikL2UynbsT0qtY9ltqngndha_sfo0w6XGFhAhoSoaQ,4355 -pkg_resources/_vendor/packaging/specifiers.py,sha256=SAMRerzO3fK2IkFZCaZkuwZaL_EGqHNOz4pni4vhnN0,28025 -pkg_resources/_vendor/packaging/utils.py,sha256=3m6WvPm6NNxE8rkTGmn0r75B_GZSGg7ikafxHsBN1WA,421 -pkg_resources/_vendor/packaging/version.py,sha256=OwGnxYfr2ghNzYx59qWIBkrK3SnB6n-Zfd1XaLpnnM0,11556 -pkg_resources/extern/__init__.py,sha256=rMBTxKimjNg8plSH94cB-y52pKO0zmM-AkFL30lZGfY,2474 -setuptools/__init__.py,sha256=WEGb6BRGN2dz3eJTbNRUfInUAhb6_OZJyYAndPGJm6w,5440 -setuptools/archive_util.py,sha256=N30WE5ZQjkytzhAodAXw4FkK-9J5AP1ChrClHnZthOA,6609 -setuptools/cli-32.exe,sha256=dfEuovMNnA2HLa3jRfMPVi5tk4R7alCbpTvuxtCyw0Y,65536 -setuptools/cli-64.exe,sha256=KLABu5pyrnokJCv6skjXZ6GsXeyYHGcqOUT3oHI3Xpo,74752 -setuptools/cli-arm-32.exe,sha256=0pFNIi2SmY2gdY91Y4LRhj1wuBsnv5cG1fus3iBJv40,69120 -setuptools/cli.exe,sha256=dfEuovMNnA2HLa3jRfMPVi5tk4R7alCbpTvuxtCyw0Y,65536 -setuptools/depends.py,sha256=WyJIhjIX7D5-JpGSnMAPHEoDcVPQxaO0405keTQT6jM,6418 -setuptools/dist.py,sha256=txOleyyt2xCSTkUjCGW4MYZB8a1xsbC8MulDhSnoivQ,35701 -setuptools/extension.py,sha256=YvsyGHWVWzhNOXMHU239FR14wxw2WwdMLLzWsRP6_IY,1694 
-setuptools/gui-32.exe,sha256=XBr0bHMA6Hpz2s9s9Bzjl-PwXfa9nH4ie0rFn4V2kWA,65536 -setuptools/gui-64.exe,sha256=aYKMhX1IJLn4ULHgWX0sE0yREUt6B3TEHf_jOw6yNyE,75264 -setuptools/gui-arm-32.exe,sha256=R5gRWLkY7wvO_CVGxoi7LZVTv0h-DKsKScy6fkbp4XI,69120 -setuptools/gui.exe,sha256=XBr0bHMA6Hpz2s9s9Bzjl-PwXfa9nH4ie0rFn4V2kWA,65536 -setuptools/launch.py,sha256=hP3qZxDNu5Hf9C-VAkEP4IC_YYfR1XfxMTj6EguxxCg,730 -setuptools/lib2to3_ex.py,sha256=6jPF9sJuHiz0cyg4cwIBLl2VMAxcl3GYSZwWAOuJplU,1998 -setuptools/msvc9_support.py,sha256=fo2vjb-dna1SEuHezQCTuelCo6XFBv5cqaI56ABJ1vw,2187 -setuptools/package_index.py,sha256=T6tZGPHApup6Gl3kz1sCLtY7kmMUXLBKweSAORYS2Qc,39490 -setuptools/py26compat.py,sha256=1Vvuf-hj5bTM3OAXv6vgJQImulne12ann053caOgikU,481 -setuptools/py27compat.py,sha256=CGj-jZcFgHUkrEdLvArkxHj96tAaMbG2-yJtUVU7QVI,306 -setuptools/py31compat.py,sha256=cqYSVBd2pxvKl75185z40htfEr6EKC29KvSBiSoqHOA,1636 -setuptools/sandbox.py,sha256=tuMRu_8R0_w6Qer9VqDiOTqKy1qr_GjHi-2QAg7TMz0,14210 -setuptools/script (dev).tmpl,sha256=f7MR17dTkzaqkCMSVseyOCMVrPVSMdmTQsaB8cZzfuI,201 -setuptools/script.tmpl,sha256=WGTt5piezO27c-Dbx6l5Q4T3Ff20A5z7872hv3aAhYY,138 -setuptools/site-patch.py,sha256=K-0-cAx36mX_PG-qPZwosG9ZLCliRjquKQ4nHiJvvzg,2389 -setuptools/ssl_support.py,sha256=tAFeeyFPVle_GgarPkNrdfnCJgP9PyN_QYGXTgypoyc,8119 -setuptools/unicode_utils.py,sha256=8zVyrL_MFc6P5AmErs21rr7z-3N1pZ_NkOcDC7BPElU,995 -setuptools/utils.py,sha256=08Z7mt-9mvrx-XvmS5EyKoRn2lxNTlgFsUwBU3Eq9JQ,293 -setuptools/version.py,sha256=xArPY4wg3eOL9kWjMciQnbhiJsQ73H8SieeRtCDrSsU,23 -setuptools/windows_support.py,sha256=5GrfqSP2-dLGJoZTq2g6dCKkyQxxa2n5IQiXlJCoYEE,714 -setuptools/command/__init__.py,sha256=1AM3hv_zCixE7kTXA-onWfK_2KF8GC8fUw3WSxzi5Fg,564 -setuptools/command/alias.py,sha256=KjpE0sz_SDIHv3fpZcIQK-sCkJz-SrC6Gmug6b9Nkc8,2426 -setuptools/command/bdist_egg.py,sha256=Km4CsGbevhvej6kKEfvTYxfkPoQijUyXmImNifrO4Tg,17184 -setuptools/command/bdist_rpm.py,sha256=B7l0TnzCGb-0nLlm6rS00jWLkojASwVmdhW2w5Qz_Ak,1508 -setuptools/command/bdist_wininst.py,sha256=_6dz3lpB1tY200LxKPLM7qgwTCceOMgaWFF-jW2-pm0,637 -setuptools/command/build_ext.py,sha256=pkQ8xp3YPVGGLkGv-SvfxC_GqFpboph1AFEoMFOgQMo,11964 -setuptools/command/build_py.py,sha256=HvJ88JuougDccaowYlfMV12kYtd0GLahg2DR2vQRqL4,7983 -setuptools/command/develop.py,sha256=VxSYbpM2jQqtRBn5klIjPVBo3sWKNZMlSbHHiRLUlZo,7383 -setuptools/command/easy_install.py,sha256=_rqkt0iWctAjMn7Z2wEYTICMntcUaV28iN6cfKrpQ28,85913 -setuptools/command/egg_info.py,sha256=0_8eI8hgLAlGt8Xk5kiodY_d9lxG6_RSescJISKBJgA,16890 -setuptools/command/install.py,sha256=QwaFiZRU3ytIHoPh8uJ9EqV3Fu9C4ca4B7UGAo95tws,4685 -setuptools/command/install_egg_info.py,sha256=8J_cH4VbOJv-9Wey8Ijw5SnNI7YS_CA2IKYX569mP5Q,4035 -setuptools/command/install_lib.py,sha256=rWcysInRJHVzgneY41EKW3kW3-dR2q2CvyPzul5ASAk,3839 -setuptools/command/install_scripts.py,sha256=vX2JC6v7l090N7CrTfihWBklNbPvfNKAY2LRtukM9XE,2231 -setuptools/command/launcher manifest.xml,sha256=xlLbjWrB01tKC0-hlVkOKkiSPbzMml2eOPtJ_ucCnbE,628 -setuptools/command/register.py,sha256=bHlMm1qmBbSdahTOT8w6UhA-EgeQIz7p6cD-qOauaiI,270 -setuptools/command/rotate.py,sha256=QGZS2t4CmBl7t79KQijNCjRMU50lu3nRhu4FXWB5LIE,2038 -setuptools/command/saveopts.py,sha256=za7QCBcQimKKriWcoCcbhxPjUz30gSB74zuTL47xpP4,658 -setuptools/command/sdist.py,sha256=kQetnPMw6ao3nurWGJZgS4HkOH4AknzMOSvqbVA6jGA,7050 -setuptools/command/setopt.py,sha256=cygJaJWJmiVhR0e_Uh_0_fWyCxMJIqK-Bu6K0LyYUtU,5086 -setuptools/command/test.py,sha256=N2f5RwxkjwU3YQzFYHtzHr636-pdX9XJDuPg5Y92kSo,6888 
-setuptools/command/upload.py,sha256=OjAryq4ZoARZiaTN_MpuG1X8Pu9CJNCKmmbMg-gab5I,649 -setuptools/command/upload_docs.py,sha256=htXpASci5gKP0RIrGZRRmbll7RnTRuwvKWZkYsBlDMM,6815 -setuptools/extern/__init__.py,sha256=mTrrj4yLMdFeEwwnqKnSuvZM5RM-HPZ1iXLgaYDlB9o,132 -setuptools-20.3.1.dist-info/DESCRIPTION.rst,sha256=MDsJej8DPV2OKpAKpu74g-2xksRd-uGTeZn4W7D1dnI,9940 -setuptools-20.3.1.dist-info/METADATA,sha256=c4yrbKrKA0BxICkR7iP79IHXigHPhWNy70y-Fq-ybKs,11173 -setuptools-20.3.1.dist-info/RECORD,, -setuptools-20.3.1.dist-info/WHEEL,sha256=o2k-Qa-RMNIJmUdIc7KU6VWR_ErNRbWNlxDIpl7lm34,110 -setuptools-20.3.1.dist-info/dependency_links.txt,sha256=oUNXJEArClXFiSSvfFwUKY8TYjeIXhuFfCpXn5K0DCE,226 -setuptools-20.3.1.dist-info/entry_points.txt,sha256=revbaRBbkZ2b1B-hZlAXo_18J9GjdYHgA4DoW8wdTOU,2835 -setuptools-20.3.1.dist-info/metadata.json,sha256=Kj9x1vkm8jQKLu3ywdXgeNTtyBmWFbp6KvF528_C5N4,4636 -setuptools-20.3.1.dist-info/top_level.txt,sha256=2HUXVVwA4Pff1xgTFr3GsTXXKaPaO6vlG6oNJ_4u4Tg,38 -setuptools-20.3.1.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1 -../../../bin/easy_install,sha256=LopKneUXvnG_dqCSZe6amF-Bf4WEh1ny0uo28w2_nQ0,299 -../../../bin/easy_install-3.4,sha256=LopKneUXvnG_dqCSZe6amF-Bf4WEh1ny0uo28w2_nQ0,299 -setuptools-20.3.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -setuptools/command/__pycache__/bdist_wininst.cpython-34.pyc,, -setuptools/__pycache__/dist.cpython-34.pyc,, -pkg_resources/_vendor/__pycache__/__init__.cpython-34.pyc,, -setuptools/command/__pycache__/sdist.cpython-34.pyc,, -setuptools/__pycache__/launch.cpython-34.pyc,, -setuptools/__pycache__/version.cpython-34.pyc,, -setuptools/extern/__pycache__/__init__.cpython-34.pyc,, -setuptools/command/__pycache__/saveopts.cpython-34.pyc,, -setuptools/command/__pycache__/easy_install.cpython-34.pyc,, -setuptools/command/__pycache__/rotate.cpython-34.pyc,, -setuptools/command/__pycache__/develop.cpython-34.pyc,, -pkg_resources/_vendor/packaging/__pycache__/markers.cpython-34.pyc,, -setuptools/command/__pycache__/build_ext.cpython-34.pyc,, -__pycache__/easy_install.cpython-34.pyc,, -setuptools/command/__pycache__/egg_info.cpython-34.pyc,, -setuptools/__pycache__/utils.cpython-34.pyc,, -pkg_resources/extern/__pycache__/__init__.cpython-34.pyc,, -setuptools/__pycache__/windows_support.cpython-34.pyc,, -setuptools/__pycache__/py26compat.cpython-34.pyc,, -setuptools/command/__pycache__/bdist_rpm.cpython-34.pyc,, -setuptools/__pycache__/__init__.cpython-34.pyc,, -pkg_resources/_vendor/packaging/__pycache__/specifiers.cpython-34.pyc,, -setuptools/__pycache__/extension.cpython-34.pyc,, -setuptools/command/__pycache__/__init__.cpython-34.pyc,, -setuptools/__pycache__/msvc9_support.cpython-34.pyc,, -setuptools/__pycache__/py27compat.cpython-34.pyc,, -pkg_resources/_vendor/__pycache__/six.cpython-34.pyc,, -setuptools/__pycache__/ssl_support.cpython-34.pyc,, -setuptools/command/__pycache__/upload_docs.cpython-34.pyc,, -setuptools/command/__pycache__/install_scripts.cpython-34.pyc,, -pkg_resources/_vendor/packaging/__pycache__/__init__.cpython-34.pyc,, -setuptools/command/__pycache__/build_py.cpython-34.pyc,, -setuptools/__pycache__/sandbox.cpython-34.pyc,, -pkg_resources/__pycache__/__init__.cpython-34.pyc,, -setuptools/command/__pycache__/install.cpython-34.pyc,, -pkg_resources/_vendor/packaging/__pycache__/_structures.cpython-34.pyc,, -setuptools/command/__pycache__/alias.cpython-34.pyc,, -setuptools/command/__pycache__/install_egg_info.cpython-34.pyc,, 
-pkg_resources/_vendor/packaging/__pycache__/requirements.cpython-34.pyc,, -setuptools/__pycache__/unicode_utils.cpython-34.pyc,, -pkg_resources/_vendor/__pycache__/pyparsing.cpython-34.pyc,, -setuptools/__pycache__/depends.cpython-34.pyc,, -setuptools/__pycache__/site-patch.cpython-34.pyc,, -setuptools/command/__pycache__/setopt.cpython-34.pyc,, -setuptools/command/__pycache__/register.cpython-34.pyc,, -pkg_resources/_vendor/packaging/__pycache__/__about__.cpython-34.pyc,, -pkg_resources/_vendor/packaging/__pycache__/_compat.cpython-34.pyc,, -setuptools/command/__pycache__/upload.cpython-34.pyc,, -setuptools/__pycache__/lib2to3_ex.cpython-34.pyc,, -setuptools/command/__pycache__/install_lib.cpython-34.pyc,, -pkg_resources/_vendor/packaging/__pycache__/version.cpython-34.pyc,, -setuptools/__pycache__/py31compat.cpython-34.pyc,, -setuptools/command/__pycache__/test.cpython-34.pyc,, -setuptools/__pycache__/archive_util.cpython-34.pyc,, -setuptools/__pycache__/package_index.cpython-34.pyc,, -setuptools/command/__pycache__/bdist_egg.cpython-34.pyc,, -pkg_resources/_vendor/packaging/__pycache__/utils.cpython-34.pyc,, diff --git a/lib/python3.4/site-packages/setuptools-20.3.1.dist-info/dependency_links.txt b/lib/python3.4/site-packages/setuptools-20.3.1.dist-info/dependency_links.txt deleted file mode 100644 index 47d1e81..0000000 --- a/lib/python3.4/site-packages/setuptools-20.3.1.dist-info/dependency_links.txt +++ /dev/null @@ -1,2 +0,0 @@ -https://pypi.python.org/packages/source/c/certifi/certifi-2015.11.20.tar.gz#md5=25134646672c695c1ff1593c2dd75d08 -https://pypi.python.org/packages/source/w/wincertstore/wincertstore-0.2.zip#md5=ae728f2f007185648d0c7a8679b361e2 diff --git a/lib/python3.4/site-packages/setuptools-20.3.1.dist-info/entry_points.txt b/lib/python3.4/site-packages/setuptools-20.3.1.dist-info/entry_points.txt deleted file mode 100644 index 924fed8..0000000 --- a/lib/python3.4/site-packages/setuptools-20.3.1.dist-info/entry_points.txt +++ /dev/null @@ -1,62 +0,0 @@ -[console_scripts] -easy_install = setuptools.command.easy_install:main -easy_install-3.5 = setuptools.command.easy_install:main - -[distutils.commands] -alias = setuptools.command.alias:alias -bdist_egg = setuptools.command.bdist_egg:bdist_egg -bdist_rpm = setuptools.command.bdist_rpm:bdist_rpm -bdist_wininst = setuptools.command.bdist_wininst:bdist_wininst -build_ext = setuptools.command.build_ext:build_ext -build_py = setuptools.command.build_py:build_py -develop = setuptools.command.develop:develop -easy_install = setuptools.command.easy_install:easy_install -egg_info = setuptools.command.egg_info:egg_info -install = setuptools.command.install:install -install_egg_info = setuptools.command.install_egg_info:install_egg_info -install_lib = setuptools.command.install_lib:install_lib -install_scripts = setuptools.command.install_scripts:install_scripts -register = setuptools.command.register:register -rotate = setuptools.command.rotate:rotate -saveopts = setuptools.command.saveopts:saveopts -sdist = setuptools.command.sdist:sdist -setopt = setuptools.command.setopt:setopt -test = setuptools.command.test:test -upload = setuptools.command.upload:upload -upload_docs = setuptools.command.upload_docs:upload_docs - -[distutils.setup_keywords] -convert_2to3_doctests = setuptools.dist:assert_string_list -dependency_links = setuptools.dist:assert_string_list -eager_resources = setuptools.dist:assert_string_list -entry_points = setuptools.dist:check_entry_points -exclude_package_data = setuptools.dist:check_package_data 
-extras_require = setuptools.dist:check_extras -include_package_data = setuptools.dist:assert_bool -install_requires = setuptools.dist:check_requirements -namespace_packages = setuptools.dist:check_nsp -package_data = setuptools.dist:check_package_data -packages = setuptools.dist:check_packages -setup_requires = setuptools.dist:check_requirements -test_loader = setuptools.dist:check_importable -test_runner = setuptools.dist:check_importable -test_suite = setuptools.dist:check_test_suite -tests_require = setuptools.dist:check_requirements -use_2to3 = setuptools.dist:assert_bool -use_2to3_exclude_fixers = setuptools.dist:assert_string_list -use_2to3_fixers = setuptools.dist:assert_string_list -zip_safe = setuptools.dist:assert_bool - -[egg_info.writers] -PKG-INFO = setuptools.command.egg_info:write_pkg_info -dependency_links.txt = setuptools.command.egg_info:overwrite_arg -depends.txt = setuptools.command.egg_info:warn_depends_obsolete -eager_resources.txt = setuptools.command.egg_info:overwrite_arg -entry_points.txt = setuptools.command.egg_info:write_entries -namespace_packages.txt = setuptools.command.egg_info:overwrite_arg -requires.txt = setuptools.command.egg_info:write_requirements -top_level.txt = setuptools.command.egg_info:write_toplevel_names - -[setuptools.installation] -eggsecutable = setuptools.command.easy_install:bootstrap - diff --git a/lib/python3.4/site-packages/setuptools-20.3.1.dist-info/metadata.json b/lib/python3.4/site-packages/setuptools-20.3.1.dist-info/metadata.json deleted file mode 100644 index 1fbf63f..0000000 --- a/lib/python3.4/site-packages/setuptools-20.3.1.dist-info/metadata.json +++ /dev/null @@ -1 +0,0 @@ -{"classifiers": ["Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Topic :: Software Development :: Libraries :: Python Modules", "Topic :: System :: Archiving :: Packaging", "Topic :: System :: Systems Administration", "Topic :: Utilities"], "extensions": {"python.commands": {"wrap_console": {"easy_install": "setuptools.command.easy_install:main", "easy_install-3.5": "setuptools.command.easy_install:main"}}, "python.details": {"contacts": [{"email": "distutils-sig@python.org", "name": "Python Packaging Authority", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "https://bitbucket.org/pypa/setuptools"}}, "python.exports": {"console_scripts": {"easy_install": "setuptools.command.easy_install:main", "easy_install-3.5": "setuptools.command.easy_install:main"}, "distutils.commands": {"alias": "setuptools.command.alias:alias", "bdist_egg": "setuptools.command.bdist_egg:bdist_egg", "bdist_rpm": "setuptools.command.bdist_rpm:bdist_rpm", "bdist_wininst": "setuptools.command.bdist_wininst:bdist_wininst", "build_ext": "setuptools.command.build_ext:build_ext", "build_py": "setuptools.command.build_py:build_py", "develop": "setuptools.command.develop:develop", "easy_install": "setuptools.command.easy_install:easy_install", "egg_info": "setuptools.command.egg_info:egg_info", "install": "setuptools.command.install:install", "install_egg_info": "setuptools.command.install_egg_info:install_egg_info", "install_lib": "setuptools.command.install_lib:install_lib", 
"install_scripts": "setuptools.command.install_scripts:install_scripts", "register": "setuptools.command.register:register", "rotate": "setuptools.command.rotate:rotate", "saveopts": "setuptools.command.saveopts:saveopts", "sdist": "setuptools.command.sdist:sdist", "setopt": "setuptools.command.setopt:setopt", "test": "setuptools.command.test:test", "upload": "setuptools.command.upload:upload", "upload_docs": "setuptools.command.upload_docs:upload_docs"}, "distutils.setup_keywords": {"convert_2to3_doctests": "setuptools.dist:assert_string_list", "dependency_links": "setuptools.dist:assert_string_list", "eager_resources": "setuptools.dist:assert_string_list", "entry_points": "setuptools.dist:check_entry_points", "exclude_package_data": "setuptools.dist:check_package_data", "extras_require": "setuptools.dist:check_extras", "include_package_data": "setuptools.dist:assert_bool", "install_requires": "setuptools.dist:check_requirements", "namespace_packages": "setuptools.dist:check_nsp", "package_data": "setuptools.dist:check_package_data", "packages": "setuptools.dist:check_packages", "setup_requires": "setuptools.dist:check_requirements", "test_loader": "setuptools.dist:check_importable", "test_runner": "setuptools.dist:check_importable", "test_suite": "setuptools.dist:check_test_suite", "tests_require": "setuptools.dist:check_requirements", "use_2to3": "setuptools.dist:assert_bool", "use_2to3_exclude_fixers": "setuptools.dist:assert_string_list", "use_2to3_fixers": "setuptools.dist:assert_string_list", "zip_safe": "setuptools.dist:assert_bool"}, "egg_info.writers": {"PKG-INFO": "setuptools.command.egg_info:write_pkg_info", "dependency_links.txt": "setuptools.command.egg_info:overwrite_arg", "depends.txt": "setuptools.command.egg_info:warn_depends_obsolete", "eager_resources.txt": "setuptools.command.egg_info:overwrite_arg", "entry_points.txt": "setuptools.command.egg_info:write_entries", "namespace_packages.txt": "setuptools.command.egg_info:overwrite_arg", "requires.txt": "setuptools.command.egg_info:write_requirements", "top_level.txt": "setuptools.command.egg_info:write_toplevel_names"}, "setuptools.installation": {"eggsecutable": "setuptools.command.easy_install:bootstrap"}}}, "extras": ["certs", "ssl"], "generator": "bdist_wheel (0.29.0)", "keywords": ["CPAN", "PyPI", "distutils", "eggs", "package", "management"], "metadata_version": "2.0", "name": "setuptools", "run_requires": [{"extra": "certs", "requires": ["certifi (==2015.11.20)"]}, {"environment": "sys_platform=='win32'", "extra": "ssl", "requires": ["wincertstore (==0.2)"]}], "summary": "Easily download, build, install, upgrade, and uninstall Python packages", "test_requires": [{"requires": ["pytest (>=2.8)", "setuptools[ssl]"]}], "version": "20.3.1"} \ No newline at end of file diff --git a/lib/python3.4/site-packages/setuptools-20.3.1.dist-info/zip-safe b/lib/python3.4/site-packages/setuptools-20.3.1.dist-info/zip-safe deleted file mode 100644 index 8b13789..0000000 --- a/lib/python3.4/site-packages/setuptools-20.3.1.dist-info/zip-safe +++ /dev/null @@ -1 +0,0 @@ - diff --git a/lib/python3.4/site-packages/setuptools-36.6.0.dist-info/DESCRIPTION.rst b/lib/python3.4/site-packages/setuptools-36.6.0.dist-info/DESCRIPTION.rst new file mode 100644 index 0000000..027d10d --- /dev/null +++ b/lib/python3.4/site-packages/setuptools-36.6.0.dist-info/DESCRIPTION.rst @@ -0,0 +1,36 @@ +.. image:: https://img.shields.io/pypi/v/setuptools.svg + :target: https://pypi.org/project/setuptools + +.. 
image:: https://readthedocs.org/projects/setuptools/badge/?version=latest + :target: https://setuptools.readthedocs.io + +.. image:: https://img.shields.io/travis/pypa/setuptools/master.svg?label=Linux%20build%20%40%20Travis%20CI + :target: http://travis-ci.org/pypa/setuptools + +.. image:: https://img.shields.io/appveyor/ci/jaraco/setuptools/master.svg?label=Windows%20build%20%40%20Appveyor + :target: https://ci.appveyor.com/project/jaraco/setuptools/branch/master + +.. image:: https://img.shields.io/pypi/pyversions/setuptools.svg + +See the `Installation Instructions +`_ in the Python Packaging +User's Guide for instructions on installing, upgrading, and uninstalling +Setuptools. + +The project is `maintained at GitHub `_. + +Questions and comments should be directed to the `distutils-sig +mailing list `_. +Bug reports and especially tested patches may be +submitted directly to the `bug tracker +`_. + + +Code of Conduct +--------------- + +Everyone interacting in the setuptools project's codebases, issue trackers, +chat rooms, and mailing lists is expected to follow the +`PyPA Code of Conduct `_. + + diff --git a/lib/python3.4/site-packages/pip-8.1.1.dist-info/INSTALLER b/lib/python3.4/site-packages/setuptools-36.6.0.dist-info/INSTALLER similarity index 100% rename from lib/python3.4/site-packages/pip-8.1.1.dist-info/INSTALLER rename to lib/python3.4/site-packages/setuptools-36.6.0.dist-info/INSTALLER diff --git a/lib/python3.4/site-packages/setuptools-36.6.0.dist-info/METADATA b/lib/python3.4/site-packages/setuptools-36.6.0.dist-info/METADATA new file mode 100644 index 0000000..725401a --- /dev/null +++ b/lib/python3.4/site-packages/setuptools-36.6.0.dist-info/METADATA @@ -0,0 +1,69 @@ +Metadata-Version: 2.0 +Name: setuptools +Version: 36.6.0 +Summary: Easily download, build, install, upgrade, and uninstall Python packages +Home-page: https://github.com/pypa/setuptools +Author: Python Packaging Authority +Author-email: distutils-sig@python.org +License: UNKNOWN +Description-Content-Type: text/x-rst; charset=UTF-8 +Keywords: CPAN PyPI distutils eggs package management +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Topic :: System :: Archiving :: Packaging +Classifier: Topic :: System :: Systems Administration +Classifier: Topic :: Utilities +Requires-Python: >=2.6,!=3.0.*,!=3.1.*,!=3.2.* +Provides-Extra: certs +Requires-Dist: certifi (==2016.9.26); extra == 'certs' +Provides-Extra: ssl +Requires-Dist: wincertstore (==0.2); sys_platform=='win32' and extra == 'ssl' + +.. image:: https://img.shields.io/pypi/v/setuptools.svg + :target: https://pypi.org/project/setuptools + +.. image:: https://readthedocs.org/projects/setuptools/badge/?version=latest + :target: https://setuptools.readthedocs.io + +.. 
image:: https://img.shields.io/travis/pypa/setuptools/master.svg?label=Linux%20build%20%40%20Travis%20CI + :target: http://travis-ci.org/pypa/setuptools + +.. image:: https://img.shields.io/appveyor/ci/jaraco/setuptools/master.svg?label=Windows%20build%20%40%20Appveyor + :target: https://ci.appveyor.com/project/jaraco/setuptools/branch/master + +.. image:: https://img.shields.io/pypi/pyversions/setuptools.svg + +See the `Installation Instructions +`_ in the Python Packaging +User's Guide for instructions on installing, upgrading, and uninstalling +Setuptools. + +The project is `maintained at GitHub `_. + +Questions and comments should be directed to the `distutils-sig +mailing list `_. +Bug reports and especially tested patches may be +submitted directly to the `bug tracker +`_. + + +Code of Conduct +--------------- + +Everyone interacting in the setuptools project's codebases, issue trackers, +chat rooms, and mailing lists is expected to follow the +`PyPA Code of Conduct `_. + + diff --git a/lib/python3.4/site-packages/setuptools-36.6.0.dist-info/RECORD b/lib/python3.4/site-packages/setuptools-36.6.0.dist-info/RECORD new file mode 100644 index 0000000..2b060aa --- /dev/null +++ b/lib/python3.4/site-packages/setuptools-36.6.0.dist-info/RECORD @@ -0,0 +1,159 @@ +easy_install.py,sha256=MDC9vt5AxDsXX5qcKlBz2TnW6Tpuv_AobnfhCJ9X3PM,126 +pkg_resources/__init__.py,sha256=0q4Rx1CSzw9caT4ewfrQmAAC60NZCjSQU-9vQjP34yo,106202 +pkg_resources/py31compat.py,sha256=-ysVqoxLetAnL94uM0kHkomKQTC1JZLN2ZUjqUhMeKE,600 +pkg_resources/_vendor/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pkg_resources/_vendor/appdirs.py,sha256=tgGaL0m4Jo2VeuGfoOOifLv7a7oUEJu2n1vRkqoPw-0,22374 +pkg_resources/_vendor/pyparsing.py,sha256=PifeLY3-WhIcBVzLtv0U4T_pwDtPruBhBCkg5vLqa28,229867 +pkg_resources/_vendor/six.py,sha256=A6hdJZVjI3t_geebZ9BzUvwRrIXo0lfwzQlM2LcKyas,30098 +pkg_resources/_vendor/packaging/__about__.py,sha256=zkcCPTN_6TcLW0Nrlg0176-R1QQ_WVPTm8sz1R4-HjM,720 +pkg_resources/_vendor/packaging/__init__.py,sha256=_vNac5TrzwsrzbOFIbF-5cHqc_Y2aPT2D7zrIR06BOo,513 +pkg_resources/_vendor/packaging/_compat.py,sha256=Vi_A0rAQeHbU-a9X0tt1yQm9RqkgQbDSxzRw8WlU9kA,860 +pkg_resources/_vendor/packaging/_structures.py,sha256=RImECJ4c_wTlaTYYwZYLHEiebDMaAJmK1oPARhw1T5o,1416 +pkg_resources/_vendor/packaging/markers.py,sha256=uEcBBtGvzqltgnArqb9c4RrcInXezDLos14zbBHhWJo,8248 +pkg_resources/_vendor/packaging/requirements.py,sha256=SikL2UynbsT0qtY9ltqngndha_sfo0w6XGFhAhoSoaQ,4355 +pkg_resources/_vendor/packaging/specifiers.py,sha256=SAMRerzO3fK2IkFZCaZkuwZaL_EGqHNOz4pni4vhnN0,28025 +pkg_resources/_vendor/packaging/utils.py,sha256=3m6WvPm6NNxE8rkTGmn0r75B_GZSGg7ikafxHsBN1WA,421 +pkg_resources/_vendor/packaging/version.py,sha256=OwGnxYfr2ghNzYx59qWIBkrK3SnB6n-Zfd1XaLpnnM0,11556 +pkg_resources/extern/__init__.py,sha256=JUtlHHvlxHSNuB4pWqNjcx7n6kG-fwXg7qmJ2zNJlIY,2487 +setuptools/__init__.py,sha256=MsRcLyrl8E49pBeFZ-PSwST-I2adqjvkfCC1h9gl0TQ,5037 +setuptools/archive_util.py,sha256=Z58-gbZQ0j92UJy7X7uZevwI28JTVEXd__AjKy4aw78,6613 +setuptools/build_meta.py,sha256=Z8fCFFJooVDcBuSUlVBWgwV41B9raH1sINpOP5-4o2Y,4756 +setuptools/cli-32.exe,sha256=dfEuovMNnA2HLa3jRfMPVi5tk4R7alCbpTvuxtCyw0Y,65536 +setuptools/cli-64.exe,sha256=KLABu5pyrnokJCv6skjXZ6GsXeyYHGcqOUT3oHI3Xpo,74752 +setuptools/cli.exe,sha256=dfEuovMNnA2HLa3jRfMPVi5tk4R7alCbpTvuxtCyw0Y,65536 +setuptools/config.py,sha256=ms8JAS3aHsOun-OO-jyvrQq3txyRE2AwKOiZP1aTan8,16317 +setuptools/dep_util.py,sha256=fgixvC1R7sH3r13ktyf7N0FALoqEXL1cBarmNpSEoWg,935 
+setuptools/depends.py,sha256=hC8QIDcM3VDpRXvRVA6OfL9AaQfxvhxHcN_w6sAyNq8,5837 +setuptools/dist.py,sha256=PZjofGBK1ZzA-VpbwuTlxf9XMkvwmGYPSIqUl8FpE2k,40364 +setuptools/extension.py,sha256=uc6nHI-MxwmNCNPbUiBnybSyqhpJqjbhvOQ-emdvt_E,1729 +setuptools/glob.py,sha256=Y-fpv8wdHZzv9DPCaGACpMSBWJ6amq_1e0R_i8_el4w,5207 +setuptools/gui-32.exe,sha256=XBr0bHMA6Hpz2s9s9Bzjl-PwXfa9nH4ie0rFn4V2kWA,65536 +setuptools/gui-64.exe,sha256=aYKMhX1IJLn4ULHgWX0sE0yREUt6B3TEHf_jOw6yNyE,75264 +setuptools/gui.exe,sha256=XBr0bHMA6Hpz2s9s9Bzjl-PwXfa9nH4ie0rFn4V2kWA,65536 +setuptools/launch.py,sha256=sd7ejwhBocCDx_wG9rIs0OaZ8HtmmFU8ZC6IR_S0Lvg,787 +setuptools/lib2to3_ex.py,sha256=t5e12hbR2pi9V4ezWDTB4JM-AISUnGOkmcnYHek3xjg,2013 +setuptools/monkey.py,sha256=s-yH6vfMFxXMrfVInT9_3gnEyAn-TYMHtXVNUOVI4T8,5791 +setuptools/msvc.py,sha256=AEbWNLJ0pTuHJSkQuBZET6wr_d2-yGGPkdHCMdIKWB4,40884 +setuptools/namespaces.py,sha256=F0Nrbv8KCT2OrO7rwa03om4N4GZKAlnce-rr-cgDQa8,3199 +setuptools/package_index.py,sha256=ELInXIlJZqNbeAKAHYZVDLbwOkYZt-o-vyaFK_eS_N0,39970 +setuptools/py26compat.py,sha256=VRGHC7z2gliR4_uICJsQNodUcNUzybpus3BrJkWbnK4,679 +setuptools/py27compat.py,sha256=3mwxRMDk5Q5O1rSXOERbQDXhFqwDJhhUitfMW_qpUCo,536 +setuptools/py31compat.py,sha256=qGRk3tefux8HbhNzhM0laR3mD8vhAZtffZgzLkBMXJs,1645 +setuptools/py33compat.py,sha256=W8_JFZr8WQbJT_7-JFWjc_6lHGtoMK-4pCrHIwk5JN0,998 +setuptools/py36compat.py,sha256=VUDWxmu5rt4QHlGTRtAFu6W5jvfL6WBjeDAzeoBy0OM,2891 +setuptools/sandbox.py,sha256=hkGRod5_yt3EBHkGnRBf7uK1YceoqFpTT4b__9ZZ5UU,14549 +setuptools/script (dev).tmpl,sha256=f7MR17dTkzaqkCMSVseyOCMVrPVSMdmTQsaB8cZzfuI,201 +setuptools/script.tmpl,sha256=WGTt5piezO27c-Dbx6l5Q4T3Ff20A5z7872hv3aAhYY,138 +setuptools/site-patch.py,sha256=BVt6yIrDMXJoflA5J6DJIcsJUfW_XEeVhOzelTTFDP4,2307 +setuptools/ssl_support.py,sha256=Axo1QtiAtsvuENZq_BvhW5PeWw2nrX39-4qoSiVpB6w,8220 +setuptools/unicode_utils.py,sha256=NOiZ_5hD72A6w-4wVj8awHFM3n51Kmw1Ic_vx15XFqw,996 +setuptools/version.py,sha256=og_cuZQb0QI6ukKZFfZWPlr1HgJBPPn2vO2m_bI9ZTE,144 +setuptools/windows_support.py,sha256=5GrfqSP2-dLGJoZTq2g6dCKkyQxxa2n5IQiXlJCoYEE,714 +setuptools/command/__init__.py,sha256=-X7tSQahlz8sbGu_Xq9bqumFE117jU56E96tDDufNqw,590 +setuptools/command/alias.py,sha256=KjpE0sz_SDIHv3fpZcIQK-sCkJz-SrC6Gmug6b9Nkc8,2426 +setuptools/command/bdist_egg.py,sha256=TGN1XVQb9V8Rf-msDKaIZWmeGQf81HT83oqXJ_3M0gg,17441 +setuptools/command/bdist_rpm.py,sha256=B7l0TnzCGb-0nLlm6rS00jWLkojASwVmdhW2w5Qz_Ak,1508 +setuptools/command/bdist_wininst.py,sha256=_6dz3lpB1tY200LxKPLM7qgwTCceOMgaWFF-jW2-pm0,637 +setuptools/command/build_clib.py,sha256=bQ9aBr-5ZSO-9fGsGsDLz0mnnFteHUZnftVLkhvHDq0,4484 +setuptools/command/build_ext.py,sha256=dO89j-IC0dAjSty1sSZxvi0LSdkPGR_ZPXFuAAFDZj4,13049 +setuptools/command/build_py.py,sha256=yWyYaaS9F3o9JbIczn064A5g1C5_UiKRDxGaTqYbtLE,9596 +setuptools/command/develop.py,sha256=PuVOjmGWGfvHZmOBMj_bdeU087kl0jhnMHqKcDODBDE,8024 +setuptools/command/dist_info.py,sha256=7Ewmog46orGjzME5UA_GQvqewRd1s25aCLxsfHCKqq8,924 +setuptools/command/easy_install.py,sha256=eruE4R4JfOTx0_0hDYMMElpup33Qkn0P44lclgP8dA0,85973 +setuptools/command/egg_info.py,sha256=HNUt2tQAAp8dULFS_6Qk9vflESI7jdqlCqq-VVQi7AA,25016 +setuptools/command/install.py,sha256=a0EZpL_A866KEdhicTGbuyD_TYl1sykfzdrri-zazT4,4683 +setuptools/command/install_egg_info.py,sha256=bMgeIeRiXzQ4DAGPV1328kcjwQjHjOWU4FngAWLV78Q,2203 +setuptools/command/install_lib.py,sha256=11mxf0Ch12NsuYwS8PHwXBRvyh671QAM4cTRh7epzG0,3840 +setuptools/command/install_scripts.py,sha256=UD0rEZ6861mTYhIdzcsqKnUl8PozocXWl9VBQ1VTWnc,2439 
+setuptools/command/launcher manifest.xml,sha256=xlLbjWrB01tKC0-hlVkOKkiSPbzMml2eOPtJ_ucCnbE,628 +setuptools/command/py36compat.py,sha256=SzjZcOxF7zdFUT47Zv2n7AM3H8koDys_0OpS-n9gIfc,4986 +setuptools/command/register.py,sha256=bHlMm1qmBbSdahTOT8w6UhA-EgeQIz7p6cD-qOauaiI,270 +setuptools/command/rotate.py,sha256=co5C1EkI7P0GGT6Tqz-T2SIj2LBJTZXYELpmao6d4KQ,2164 +setuptools/command/saveopts.py,sha256=za7QCBcQimKKriWcoCcbhxPjUz30gSB74zuTL47xpP4,658 +setuptools/command/sdist.py,sha256=VldpcHRSlDrvvK2uV9O6HjQA2OtHCUa4QaMkYCYwTrA,6919 +setuptools/command/setopt.py,sha256=NTWDyx-gjDF-txf4dO577s7LOzHVoKR0Mq33rFxaRr8,5085 +setuptools/command/test.py,sha256=koi5lqjhXHlt0B3egYb98qRVETzKXKhWDD5OQY-AKuA,9044 +setuptools/command/upload.py,sha256=i1gfItZ3nQOn5FKXb8tLC2Kd7eKC8lWO4bdE6NqGpE4,1172 +setuptools/command/upload_docs.py,sha256=oXiGplM_cUKLwE4CWWw98RzCufAu8tBhMC97GegFcms,7311 +setuptools/extern/__init__.py,sha256=ZtCLYQ8JTtOtm7SYoxekZw-UzY3TR50SRIUaeqr2ROk,131 +setuptools-36.6.0.dist-info/DESCRIPTION.rst,sha256=1sSNG6a5L3fSMo1x9uE3jvumlEODgeqBUtSaYp_VVLw,1421 +setuptools-36.6.0.dist-info/METADATA,sha256=GLuJ3zbtJdt_nwgq9UIpUoXOis1Ub4tWeOTKQIZHT1s,2847 +setuptools-36.6.0.dist-info/RECORD,, +setuptools-36.6.0.dist-info/WHEEL,sha256=kdsN-5OJAZIiHN-iO4Rhl82KyS0bDWf4uBwMbkNafr8,110 +setuptools-36.6.0.dist-info/dependency_links.txt,sha256=HlkCFkoK5TbZ5EMLbLKYhLcY_E31kBWD8TqW2EgmatQ,239 +setuptools-36.6.0.dist-info/entry_points.txt,sha256=jBqCYDlVjl__sjYFGXo1JQGIMAYFJE-prYWUtnMZEew,2990 +setuptools-36.6.0.dist-info/metadata.json,sha256=4yqt7_oaFRn8AA20H0H5W2AByP8z-0HuDpwGyiQH6UU,4916 +setuptools-36.6.0.dist-info/top_level.txt,sha256=2HUXVVwA4Pff1xgTFr3GsTXXKaPaO6vlG6oNJ_4u4Tg,38 +setuptools-36.6.0.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1 +../../../bin/easy_install,sha256=tsci1id0sS7h2uWc2NQJYflZoKSI8AR-W02mXYmf7Es,300 +../../../bin/easy_install-3.4,sha256=tsci1id0sS7h2uWc2NQJYflZoKSI8AR-W02mXYmf7Es,300 +setuptools-36.6.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +setuptools/__pycache__/py31compat.cpython-34.pyc,, +setuptools/command/__pycache__/bdist_egg.cpython-34.pyc,, +setuptools/__pycache__/sandbox.cpython-34.pyc,, +setuptools/__pycache__/__init__.cpython-34.pyc,, +setuptools/extern/__pycache__/__init__.cpython-34.pyc,, +setuptools/__pycache__/site-patch.cpython-34.pyc,, +setuptools/__pycache__/config.cpython-34.pyc,, +setuptools/__pycache__/py26compat.cpython-34.pyc,, +setuptools/command/__pycache__/bdist_rpm.cpython-34.pyc,, +setuptools/command/__pycache__/test.cpython-34.pyc,, +pkg_resources/extern/__pycache__/__init__.cpython-34.pyc,, +setuptools/command/__pycache__/upload_docs.cpython-34.pyc,, +setuptools/__pycache__/ssl_support.cpython-34.pyc,, +setuptools/command/__pycache__/alias.cpython-34.pyc,, +pkg_resources/_vendor/__pycache__/pyparsing.cpython-34.pyc,, +pkg_resources/_vendor/packaging/__pycache__/__about__.cpython-34.pyc,, +setuptools/__pycache__/py33compat.cpython-34.pyc,, +setuptools/command/__pycache__/build_py.cpython-34.pyc,, +pkg_resources/_vendor/packaging/__pycache__/_compat.cpython-34.pyc,, +setuptools/command/__pycache__/install_lib.cpython-34.pyc,, +setuptools/command/__pycache__/dist_info.cpython-34.pyc,, +setuptools/__pycache__/build_meta.cpython-34.pyc,, +setuptools/command/__pycache__/bdist_wininst.cpython-34.pyc,, +setuptools/__pycache__/extension.cpython-34.pyc,, +pkg_resources/_vendor/packaging/__pycache__/_structures.cpython-34.pyc,, +pkg_resources/_vendor/packaging/__pycache__/__init__.cpython-34.pyc,, 
+setuptools/command/__pycache__/install_scripts.cpython-34.pyc,, +setuptools/command/__pycache__/install.cpython-34.pyc,, +setuptools/__pycache__/py27compat.cpython-34.pyc,, +setuptools/__pycache__/py36compat.cpython-34.pyc,, +setuptools/command/__pycache__/sdist.cpython-34.pyc,, +setuptools/__pycache__/package_index.cpython-34.pyc,, +setuptools/__pycache__/msvc.cpython-34.pyc,, +setuptools/__pycache__/archive_util.cpython-34.pyc,, +setuptools/command/__pycache__/egg_info.cpython-34.pyc,, +pkg_resources/_vendor/packaging/__pycache__/requirements.cpython-34.pyc,, +setuptools/__pycache__/lib2to3_ex.cpython-34.pyc,, +setuptools/command/__pycache__/install_egg_info.cpython-34.pyc,, +setuptools/command/__pycache__/upload.cpython-34.pyc,, +setuptools/command/__pycache__/build_ext.cpython-34.pyc,, +pkg_resources/__pycache__/__init__.cpython-34.pyc,, +pkg_resources/_vendor/__pycache__/appdirs.cpython-34.pyc,, +setuptools/__pycache__/namespaces.cpython-34.pyc,, +setuptools/__pycache__/monkey.cpython-34.pyc,, +setuptools/command/__pycache__/build_clib.cpython-34.pyc,, +pkg_resources/_vendor/__pycache__/six.cpython-34.pyc,, +pkg_resources/__pycache__/py31compat.cpython-34.pyc,, +pkg_resources/_vendor/packaging/__pycache__/specifiers.cpython-34.pyc,, +setuptools/__pycache__/dist.cpython-34.pyc,, +setuptools/__pycache__/depends.cpython-34.pyc,, +__pycache__/easy_install.cpython-34.pyc,, +setuptools/__pycache__/dep_util.cpython-34.pyc,, +setuptools/command/__pycache__/setopt.cpython-34.pyc,, +setuptools/__pycache__/version.cpython-34.pyc,, +setuptools/__pycache__/windows_support.cpython-34.pyc,, +pkg_resources/_vendor/packaging/__pycache__/utils.cpython-34.pyc,, +setuptools/__pycache__/glob.cpython-34.pyc,, +setuptools/command/__pycache__/develop.cpython-34.pyc,, +pkg_resources/_vendor/packaging/__pycache__/markers.cpython-34.pyc,, +setuptools/__pycache__/launch.cpython-34.pyc,, +setuptools/command/__pycache__/rotate.cpython-34.pyc,, +pkg_resources/_vendor/packaging/__pycache__/version.cpython-34.pyc,, +setuptools/command/__pycache__/py36compat.cpython-34.pyc,, +setuptools/command/__pycache__/__init__.cpython-34.pyc,, +setuptools/command/__pycache__/register.cpython-34.pyc,, +setuptools/__pycache__/unicode_utils.cpython-34.pyc,, +pkg_resources/_vendor/__pycache__/__init__.cpython-34.pyc,, +setuptools/command/__pycache__/easy_install.cpython-34.pyc,, +setuptools/command/__pycache__/saveopts.cpython-34.pyc,, diff --git a/lib/python3.4/site-packages/setuptools-20.3.1.dist-info/WHEEL b/lib/python3.4/site-packages/setuptools-36.6.0.dist-info/WHEEL similarity index 70% rename from lib/python3.4/site-packages/setuptools-20.3.1.dist-info/WHEEL rename to lib/python3.4/site-packages/setuptools-36.6.0.dist-info/WHEEL index 8b6dd1b..7332a41 100644 --- a/lib/python3.4/site-packages/setuptools-20.3.1.dist-info/WHEEL +++ b/lib/python3.4/site-packages/setuptools-36.6.0.dist-info/WHEEL @@ -1,5 +1,5 @@ Wheel-Version: 1.0 -Generator: bdist_wheel (0.29.0) +Generator: bdist_wheel (0.30.0) Root-Is-Purelib: true Tag: py2-none-any Tag: py3-none-any diff --git a/lib/python3.4/site-packages/setuptools-36.6.0.dist-info/dependency_links.txt b/lib/python3.4/site-packages/setuptools-36.6.0.dist-info/dependency_links.txt new file mode 100644 index 0000000..e87d021 --- /dev/null +++ b/lib/python3.4/site-packages/setuptools-36.6.0.dist-info/dependency_links.txt @@ -0,0 +1,2 @@ +https://files.pythonhosted.org/packages/source/c/certifi/certifi-2016.9.26.tar.gz#md5=baa81e951a29958563689d868ef1064d 
+https://files.pythonhosted.org/packages/source/w/wincertstore/wincertstore-0.2.zip#md5=ae728f2f007185648d0c7a8679b361e2 diff --git a/lib/python3.4/site-packages/_markerlib-0.0.0.dist-info/entry_points.txt b/lib/python3.4/site-packages/setuptools-36.6.0.dist-info/entry_points.txt similarity index 92% rename from lib/python3.4/site-packages/_markerlib-0.0.0.dist-info/entry_points.txt rename to lib/python3.4/site-packages/setuptools-36.6.0.dist-info/entry_points.txt index 5270e4a..4159fd0 100644 --- a/lib/python3.4/site-packages/_markerlib-0.0.0.dist-info/entry_points.txt +++ b/lib/python3.4/site-packages/setuptools-36.6.0.dist-info/entry_points.txt @@ -1,14 +1,17 @@ [console_scripts] easy_install = setuptools.command.easy_install:main +easy_install-3.6 = setuptools.command.easy_install:main [distutils.commands] alias = setuptools.command.alias:alias bdist_egg = setuptools.command.bdist_egg:bdist_egg bdist_rpm = setuptools.command.bdist_rpm:bdist_rpm bdist_wininst = setuptools.command.bdist_wininst:bdist_wininst +build_clib = setuptools.command.build_clib:build_clib build_ext = setuptools.command.build_ext:build_ext build_py = setuptools.command.build_py:build_py develop = setuptools.command.develop:develop +dist_info = setuptools.command.dist_info:dist_info easy_install = setuptools.command.easy_install:easy_install egg_info = setuptools.command.egg_info:egg_info install = setuptools.command.install:install @@ -36,6 +39,7 @@ install_requires = setuptools.dist:check_requirements namespace_packages = setuptools.dist:check_nsp package_data = setuptools.dist:check_package_data packages = setuptools.dist:check_packages +python_requires = setuptools.dist:check_specifier setup_requires = setuptools.dist:check_requirements test_loader = setuptools.dist:check_importable test_runner = setuptools.dist:check_importable diff --git a/lib/python3.4/site-packages/setuptools-36.6.0.dist-info/metadata.json b/lib/python3.4/site-packages/setuptools-36.6.0.dist-info/metadata.json new file mode 100644 index 0000000..cb17d38 --- /dev/null +++ b/lib/python3.4/site-packages/setuptools-36.6.0.dist-info/metadata.json @@ -0,0 +1 @@ +{"classifiers": ["Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Topic :: Software Development :: Libraries :: Python Modules", "Topic :: System :: Archiving :: Packaging", "Topic :: System :: Systems Administration", "Topic :: Utilities"], "description_content_type": "text/x-rst; charset=UTF-8", "extensions": {"python.commands": {"wrap_console": {"easy_install": "setuptools.command.easy_install:main", "easy_install-3.6": "setuptools.command.easy_install:main"}}, "python.details": {"contacts": [{"email": "distutils-sig@python.org", "name": "Python Packaging Authority", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "https://github.com/pypa/setuptools"}}, "python.exports": {"console_scripts": {"easy_install": "setuptools.command.easy_install:main", "easy_install-3.6": "setuptools.command.easy_install:main"}, "distutils.commands": {"alias": "setuptools.command.alias:alias", "bdist_egg": 
"setuptools.command.bdist_egg:bdist_egg", "bdist_rpm": "setuptools.command.bdist_rpm:bdist_rpm", "bdist_wininst": "setuptools.command.bdist_wininst:bdist_wininst", "build_clib": "setuptools.command.build_clib:build_clib", "build_ext": "setuptools.command.build_ext:build_ext", "build_py": "setuptools.command.build_py:build_py", "develop": "setuptools.command.develop:develop", "dist_info": "setuptools.command.dist_info:dist_info", "easy_install": "setuptools.command.easy_install:easy_install", "egg_info": "setuptools.command.egg_info:egg_info", "install": "setuptools.command.install:install", "install_egg_info": "setuptools.command.install_egg_info:install_egg_info", "install_lib": "setuptools.command.install_lib:install_lib", "install_scripts": "setuptools.command.install_scripts:install_scripts", "register": "setuptools.command.register:register", "rotate": "setuptools.command.rotate:rotate", "saveopts": "setuptools.command.saveopts:saveopts", "sdist": "setuptools.command.sdist:sdist", "setopt": "setuptools.command.setopt:setopt", "test": "setuptools.command.test:test", "upload": "setuptools.command.upload:upload", "upload_docs": "setuptools.command.upload_docs:upload_docs"}, "distutils.setup_keywords": {"convert_2to3_doctests": "setuptools.dist:assert_string_list", "dependency_links": "setuptools.dist:assert_string_list", "eager_resources": "setuptools.dist:assert_string_list", "entry_points": "setuptools.dist:check_entry_points", "exclude_package_data": "setuptools.dist:check_package_data", "extras_require": "setuptools.dist:check_extras", "include_package_data": "setuptools.dist:assert_bool", "install_requires": "setuptools.dist:check_requirements", "namespace_packages": "setuptools.dist:check_nsp", "package_data": "setuptools.dist:check_package_data", "packages": "setuptools.dist:check_packages", "python_requires": "setuptools.dist:check_specifier", "setup_requires": "setuptools.dist:check_requirements", "test_loader": "setuptools.dist:check_importable", "test_runner": "setuptools.dist:check_importable", "test_suite": "setuptools.dist:check_test_suite", "tests_require": "setuptools.dist:check_requirements", "use_2to3": "setuptools.dist:assert_bool", "use_2to3_exclude_fixers": "setuptools.dist:assert_string_list", "use_2to3_fixers": "setuptools.dist:assert_string_list", "zip_safe": "setuptools.dist:assert_bool"}, "egg_info.writers": {"PKG-INFO": "setuptools.command.egg_info:write_pkg_info", "dependency_links.txt": "setuptools.command.egg_info:overwrite_arg", "depends.txt": "setuptools.command.egg_info:warn_depends_obsolete", "eager_resources.txt": "setuptools.command.egg_info:overwrite_arg", "entry_points.txt": "setuptools.command.egg_info:write_entries", "namespace_packages.txt": "setuptools.command.egg_info:overwrite_arg", "requires.txt": "setuptools.command.egg_info:write_requirements", "top_level.txt": "setuptools.command.egg_info:write_toplevel_names"}, "setuptools.installation": {"eggsecutable": "setuptools.command.easy_install:bootstrap"}}}, "extras": ["certs", "ssl"], "generator": "bdist_wheel (0.30.0)", "keywords": ["CPAN", "PyPI", "distutils", "eggs", "package", "management"], "metadata_version": "2.0", "name": "setuptools", "requires_python": ">=2.6,!=3.0.*,!=3.1.*,!=3.2.*", "run_requires": [{"extra": "certs", "requires": ["certifi (==2016.9.26)"]}, {"environment": "sys_platform=='win32'", "extra": "ssl", "requires": ["wincertstore (==0.2)"]}], "summary": "Easily download, build, install, upgrade, and uninstall Python packages", "version": "36.6.0"} \ No newline at end of 
file diff --git a/lib/python3.4/site-packages/setuptools-20.3.1.dist-info/top_level.txt b/lib/python3.4/site-packages/setuptools-36.6.0.dist-info/top_level.txt similarity index 100% rename from lib/python3.4/site-packages/setuptools-20.3.1.dist-info/top_level.txt rename to lib/python3.4/site-packages/setuptools-36.6.0.dist-info/top_level.txt diff --git a/lib/python3.4/site-packages/netifaces-0.10.4.dist-info/zip-safe b/lib/python3.4/site-packages/setuptools-36.6.0.dist-info/zip-safe similarity index 100% rename from lib/python3.4/site-packages/netifaces-0.10.4.dist-info/zip-safe rename to lib/python3.4/site-packages/setuptools-36.6.0.dist-info/zip-safe diff --git a/lib/python3.4/site-packages/setuptools/__init__.py b/lib/python3.4/site-packages/setuptools/__init__.py index 67b57e4..04f7674 100644 --- a/lib/python3.4/site-packages/setuptools/__init__.py +++ b/lib/python3.4/site-packages/setuptools/__init__.py @@ -4,20 +4,20 @@ import os import functools import distutils.core import distutils.filelist -from distutils.core import Command as _Command from distutils.util import convert_path from fnmatch import fnmatchcase -from setuptools.extern.six.moves import filterfalse, map +from setuptools.extern.six.moves import filter, map import setuptools.version from setuptools.extension import Extension -from setuptools.dist import Distribution, Feature, _get_unpatched +from setuptools.dist import Distribution, Feature from setuptools.depends import Require +from . import monkey __all__ = [ 'setup', 'Distribution', 'Feature', 'Command', 'Extension', 'Require', - 'find_packages' + 'find_packages', ] __version__ = setuptools.version.__version__ @@ -32,12 +32,18 @@ lib2to3_fixer_packages = ['lib2to3.fixes'] class PackageFinder(object): + """ + Generate a list of all Python packages found within a directory + """ + @classmethod def find(cls, where='.', exclude=(), include=('*',)): """Return a list all Python packages found within directory 'where' - 'where' should be supplied as a "cross-platform" (i.e. URL-style) - path; it will be converted to the appropriate local path syntax. + 'where' is the root directory which will be searched for packages. It + should be supplied as a "cross-platform" (i.e. URL-style) path; it will + be converted to the appropriate local path syntax. + 'exclude' is a sequence of package names to exclude; '*' can be used as a wildcard in the names, such that 'foo.*' will exclude all subpackages of 'foo' (but not 'foo' itself). @@ -46,78 +52,67 @@ class PackageFinder(object): specified, only the named packages will be included. If it's not specified, all found packages will be included. 'include' can contain shell style wildcard patterns just like 'exclude'. + """ - The list of included packages is built up first and then any - explicitly excluded packages are removed from it. - """ - out = cls._find_packages_iter(convert_path(where)) - out = cls.require_parents(out) - includes = cls._build_filter(*include) - excludes = cls._build_filter('ez_setup', '*__pycache__', *exclude) - out = filter(includes, out) - out = filterfalse(excludes, out) - return list(out) - - @staticmethod - def require_parents(packages): - """ - Exclude any apparent package that apparently doesn't include its - parent. - - For example, exclude 'foo.bar' if 'foo' is not present. 
- """ - found = [] - for pkg in packages: - base, sep, child = pkg.rpartition('.') - if base and base not in found: - continue - found.append(pkg) - yield pkg - - @staticmethod - def _candidate_dirs(base_path): - """ - Return all dirs in base_path that might be packages. - """ - has_dot = lambda name: '.' in name - for root, dirs, files in os.walk(base_path, followlinks=True): - # Exclude directories that contain a period, as they cannot be - # packages. Mutate the list to avoid traversal. - dirs[:] = filterfalse(has_dot, dirs) - for dir in dirs: - yield os.path.relpath(os.path.join(root, dir), base_path) + return list(cls._find_packages_iter( + convert_path(where), + cls._build_filter('ez_setup', '*__pycache__', *exclude), + cls._build_filter(*include))) @classmethod - def _find_packages_iter(cls, base_path): - candidates = cls._candidate_dirs(base_path) - return ( - path.replace(os.path.sep, '.') - for path in candidates - if cls._looks_like_package(os.path.join(base_path, path)) - ) + def _find_packages_iter(cls, where, exclude, include): + """ + All the packages found in 'where' that pass the 'include' filter, but + not the 'exclude' filter. + """ + for root, dirs, files in os.walk(where, followlinks=True): + # Copy dirs to iterate over it, then empty dirs. + all_dirs = dirs[:] + dirs[:] = [] + + for dir in all_dirs: + full_path = os.path.join(root, dir) + rel_path = os.path.relpath(full_path, where) + package = rel_path.replace(os.path.sep, '.') + + # Skip directory trees that are not valid packages + if ('.' in dir or not cls._looks_like_package(full_path)): + continue + + # Should this package be included? + if include(package) and not exclude(package): + yield package + + # Keep searching subdirectories, as there may be more packages + # down there, even if the parent was excluded. + dirs.append(dir) @staticmethod def _looks_like_package(path): + """Does a directory look like a package?""" return os.path.isfile(os.path.join(path, '__init__.py')) @staticmethod def _build_filter(*patterns): """ Given a list of patterns, return a callable that will be true only if - the input matches one of the patterns. + the input matches at least one of the patterns. 
""" return lambda name: any(fnmatchcase(name, pat=pat) for pat in patterns) + class PEP420PackageFinder(PackageFinder): @staticmethod def _looks_like_package(path): return True + find_packages = PackageFinder.find setup = distutils.core.setup -_Command = _get_unpatched(_Command) +_Command = monkey.get_unpatched(distutils.core.Command) + class Command(_Command): __doc__ = _Command.__doc__ @@ -137,9 +132,6 @@ class Command(_Command): vars(cmd).update(kw) return cmd -# we can't patch distutils.cmd, alas -distutils.core.Command = Command - def _find_all_simple(path): """ @@ -165,5 +157,4 @@ def findall(dir=os.curdir): return list(files) -# fix findall bug in distutils (http://bugs.python.org/issue12885) -distutils.filelist.findall = findall +monkey.patch_all() diff --git a/lib/python3.4/site-packages/setuptools/archive_util.py b/lib/python3.4/site-packages/setuptools/archive_util.py index b3c9fa5..cc82b3d 100644 --- a/lib/python3.4/site-packages/setuptools/archive_util.py +++ b/lib/python3.4/site-packages/setuptools/archive_util.py @@ -1,24 +1,26 @@ """Utilities for extracting common archive formats""" - -__all__ = [ - "unpack_archive", "unpack_zipfile", "unpack_tarfile", "default_filter", - "UnrecognizedFormat", "extraction_drivers", "unpack_directory", -] - import zipfile import tarfile import os import shutil import posixpath import contextlib -from pkg_resources import ensure_directory, ContextualZipFile from distutils.errors import DistutilsError +from pkg_resources import ensure_directory, ContextualZipFile + +__all__ = [ + "unpack_archive", "unpack_zipfile", "unpack_tarfile", "default_filter", + "UnrecognizedFormat", "extraction_drivers", "unpack_directory", +] + + class UnrecognizedFormat(DistutilsError): """Couldn't recognize the archive type""" -def default_filter(src,dst): + +def default_filter(src, dst): """The default progress/filter callback; returns True for all files""" return dst @@ -167,4 +169,5 @@ def unpack_tarfile(filename, extract_dir, progress_filter=default_filter): pass return True + extraction_drivers = unpack_directory, unpack_zipfile, unpack_tarfile diff --git a/lib/python3.4/site-packages/setuptools/build_meta.py b/lib/python3.4/site-packages/setuptools/build_meta.py new file mode 100644 index 0000000..54f2987 --- /dev/null +++ b/lib/python3.4/site-packages/setuptools/build_meta.py @@ -0,0 +1,148 @@ +"""A PEP 517 interface to setuptools + +Previously, when a user or a command line tool (let's call it a "frontend") +needed to make a request of setuptools to take a certain action, for +example, generating a list of installation requirements, the frontend would +would call "setup.py egg_info" or "setup.py bdist_wheel" on the command line. + +PEP 517 defines a different method of interfacing with setuptools. Rather +than calling "setup.py" directly, the frontend should: + + 1. Set the current directory to the directory with a setup.py file + 2. Import this module into a safe python interpreter (one in which + setuptools can potentially set global variables or crash hard). + 3. Call one of the functions defined in PEP 517. + +What each function does is defined in PEP 517. 
However, here is a "casual" +definition of the functions (this definition should not be relied on for +bug reports or API stability): + + - `build_wheel`: build a wheel in the folder and return the basename + - `get_requires_for_build_wheel`: get the `setup_requires` to build + - `prepare_metadata_for_build_wheel`: get the `install_requires` + - `build_sdist`: build an sdist in the folder and return the basename + - `get_requires_for_build_sdist`: get the `setup_requires` to build + +Again, this is not a formal definition! Just a "taste" of the module. +""" + +import os +import sys +import tokenize +import shutil +import contextlib + +import setuptools +import distutils + + +class SetupRequirementsError(BaseException): + def __init__(self, specifiers): + self.specifiers = specifiers + + +class Distribution(setuptools.dist.Distribution): + def fetch_build_eggs(self, specifiers): + raise SetupRequirementsError(specifiers) + + @classmethod + @contextlib.contextmanager + def patch(cls): + """ + Replace + distutils.dist.Distribution with this class + for the duration of this context. + """ + orig = distutils.core.Distribution + distutils.core.Distribution = cls + try: + yield + finally: + distutils.core.Distribution = orig + + +def _run_setup(setup_script='setup.py'): + # Note that we can reuse our build directory between calls + # Correctness comes first, then optimization later + __file__ = setup_script + f = getattr(tokenize, 'open', open)(__file__) + code = f.read().replace('\\r\\n', '\\n') + f.close() + exec(compile(code, __file__, 'exec')) + + +def _fix_config(config_settings): + config_settings = config_settings or {} + config_settings.setdefault('--global-option', []) + return config_settings + + +def _get_build_requires(config_settings): + config_settings = _fix_config(config_settings) + requirements = ['setuptools', 'wheel'] + + sys.argv = sys.argv[:1] + ['egg_info'] + \ + config_settings["--global-option"] + try: + with Distribution.patch(): + _run_setup() + except SetupRequirementsError as e: + requirements += e.specifiers + + return requirements + + +def get_requires_for_build_wheel(config_settings=None): + config_settings = _fix_config(config_settings) + return _get_build_requires(config_settings) + + +def get_requires_for_build_sdist(config_settings=None): + config_settings = _fix_config(config_settings) + return _get_build_requires(config_settings) + + +def prepare_metadata_for_build_wheel(metadata_directory, config_settings=None): + sys.argv = sys.argv[:1] + ['dist_info', '--egg-base', metadata_directory] + _run_setup() + + dist_infos = [f for f in os.listdir(metadata_directory) + if f.endswith('.dist-info')] + + assert len(dist_infos) == 1 + return dist_infos[0] + + +def build_wheel(wheel_directory, config_settings=None, + metadata_directory=None): + config_settings = _fix_config(config_settings) + wheel_directory = os.path.abspath(wheel_directory) + sys.argv = sys.argv[:1] + ['bdist_wheel'] + \ + config_settings["--global-option"] + _run_setup() + if wheel_directory != 'dist': + shutil.rmtree(wheel_directory) + shutil.copytree('dist', wheel_directory) + + wheels = [f for f in os.listdir(wheel_directory) + if f.endswith('.whl')] + + assert len(wheels) == 1 + return wheels[0] + + +def build_sdist(sdist_directory, config_settings=None): + config_settings = _fix_config(config_settings) + sdist_directory = os.path.abspath(sdist_directory) + sys.argv = sys.argv[:1] + ['sdist'] + \ + config_settings["--global-option"] + _run_setup() + if sdist_directory != 'dist': + 
shutil.rmtree(sdist_directory) + shutil.copytree('dist', sdist_directory) + + sdists = [f for f in os.listdir(sdist_directory) + if f.endswith('.tar.gz')] + + assert len(sdists) == 1 + return sdists[0] diff --git a/lib/python3.4/site-packages/setuptools/cli-arm-32.exe b/lib/python3.4/site-packages/setuptools/cli-arm-32.exe deleted file mode 100644 index 2f40402..0000000 Binary files a/lib/python3.4/site-packages/setuptools/cli-arm-32.exe and /dev/null differ diff --git a/lib/python3.4/site-packages/setuptools/command/__init__.py b/lib/python3.4/site-packages/setuptools/command/__init__.py index 3fb2f6d..4fe3bb5 100644 --- a/lib/python3.4/site-packages/setuptools/command/__init__.py +++ b/lib/python3.4/site-packages/setuptools/command/__init__.py @@ -2,7 +2,7 @@ __all__ = [ 'alias', 'bdist_egg', 'bdist_rpm', 'build_ext', 'build_py', 'develop', 'easy_install', 'egg_info', 'install', 'install_lib', 'rotate', 'saveopts', 'sdist', 'setopt', 'test', 'install_egg_info', 'install_scripts', - 'register', 'bdist_wininst', 'upload_docs', 'upload', + 'register', 'bdist_wininst', 'upload_docs', 'upload', 'build_clib', 'dist_info', ] from distutils.command.bdist import bdist @@ -10,7 +10,6 @@ import sys from setuptools.command import install_scripts - if 'egg' not in bdist.format_commands: bdist.format_command['egg'] = ('bdist_egg', "Python .egg file") bdist.format_commands.append('egg') diff --git a/lib/python3.4/site-packages/setuptools/command/bdist_egg.py b/lib/python3.4/site-packages/setuptools/command/bdist_egg.py index 9cebd7f..51755d5 100644 --- a/lib/python3.4/site-packages/setuptools/command/bdist_egg.py +++ b/lib/python3.4/site-packages/setuptools/command/bdist_egg.py @@ -8,8 +8,8 @@ from distutils import log from types import CodeType import sys import os -import marshal import textwrap +import marshal from setuptools.extern import six @@ -38,6 +38,14 @@ def strip_module(filename): filename = filename[:-6] return filename +def sorted_walk(dir): + """Do os.walk in a reproducible way, + independent of indeterministic filesystem readdir order + """ + for base, dirs, files in os.walk(dir): + dirs.sort() + files.sort() + yield base, dirs, files def write_stub(resource, pyfile): _stub_template = textwrap.dedent(""" @@ -129,7 +137,7 @@ class bdist_egg(Command): self.distribution.data_files.append(item) try: - log.info("installing package data to %s" % self.bdist_dir) + log.info("installing package data to %s", self.bdist_dir) self.call_command('install_data', force=0, root=None) finally: self.distribution.data_files = old @@ -152,7 +160,7 @@ class bdist_egg(Command): self.run_command("egg_info") # We run install_lib before install_data, because some data hacks # pull their data path from the install_lib command. 
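# --- Illustrative aside (editor's sketch, not part of the patch) ---------------
# How a PEP 517 frontend might drive the new setuptools.build_meta backend added
# above: import the module with the project directory as the current directory
# and call the hooks. The project path and output directory are hypothetical, and
# a real frontend would first install the returned build requirements into an
# isolated environment.
import os
import setuptools.build_meta as backend

os.chdir('/path/to/project')                     # directory containing setup.py
reqs = backend.get_requires_for_build_wheel()    # e.g. ['setuptools', 'wheel', ...]
# note: in this version build_wheel() replaces the target directory with a copy
# of ./dist, so point it at a scratch location
wheel_basename = backend.build_wheel('/tmp/wheelhouse')
print(reqs, wheel_basename)
# --------------------------------------------------------------------------------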
- log.info("installing library code to %s" % self.bdist_dir) + log.info("installing library code to %s", self.bdist_dir) instcmd = self.get_finalized_command('install') old_root = instcmd.root instcmd.root = None @@ -169,7 +177,7 @@ class bdist_egg(Command): pyfile = os.path.join(self.bdist_dir, strip_module(filename) + '.py') self.stubs.append(pyfile) - log.info("creating stub loader for %s" % ext_name) + log.info("creating stub loader for %s", ext_name) if not self.dry_run: write_stub(os.path.basename(ext_name), pyfile) to_compile.append(pyfile) @@ -186,14 +194,14 @@ class bdist_egg(Command): self.mkpath(egg_info) if self.distribution.scripts: script_dir = os.path.join(egg_info, 'scripts') - log.info("installing scripts to %s" % script_dir) + log.info("installing scripts to %s", script_dir) self.call_command('install_scripts', install_dir=script_dir, no_ep=1) self.copy_metadata_to(egg_info) native_libs = os.path.join(egg_info, "native_libs.txt") if all_outputs: - log.info("writing %s" % native_libs) + log.info("writing %s", native_libs) if not self.dry_run: ensure_directory(native_libs) libs_file = open(native_libs, 'wt') @@ -201,7 +209,7 @@ class bdist_egg(Command): libs_file.write('\n') libs_file.close() elif os.path.isfile(native_libs): - log.info("removing %s" % native_libs) + log.info("removing %s", native_libs) if not self.dry_run: os.unlink(native_libs) @@ -302,7 +310,7 @@ class bdist_egg(Command): ext_outputs = [] paths = {self.bdist_dir: ''} - for base, dirs, files in os.walk(self.bdist_dir): + for base, dirs, files in sorted_walk(self.bdist_dir): for filename in files: if os.path.splitext(filename)[1].lower() in NATIVE_EXTENSIONS: all_outputs.append(paths[base] + filename) @@ -329,7 +337,7 @@ NATIVE_EXTENSIONS = dict.fromkeys('.dll .so .dylib .pyd'.split()) def walk_egg(egg_dir): """Walk an unpacked egg's contents, skipping the metadata directory""" - walker = os.walk(egg_dir) + walker = sorted_walk(egg_dir) base, dirs, files = next(walker) if 'EGG-INFO' in dirs: dirs.remove('EGG-INFO') @@ -429,6 +437,7 @@ def can_scan(): log.warn("Please ask the author to include a 'zip_safe'" " setting (either True or False) in the package's setup.py") + # Attribute names of options for commands that might need to be convinced to # install to the egg build directory @@ -457,15 +466,15 @@ def make_zipfile(zip_filename, base_dir, verbose=0, dry_run=0, compress=True, p = path[len(base_dir) + 1:] if not dry_run: z.write(path, p) - log.debug("adding '%s'" % p) + log.debug("adding '%s'", p) compression = zipfile.ZIP_DEFLATED if compress else zipfile.ZIP_STORED if not dry_run: z = zipfile.ZipFile(zip_filename, mode, compression=compression) - for dirname, dirs, files in os.walk(base_dir): + for dirname, dirs, files in sorted_walk(base_dir): visit(z, dirname, files) z.close() else: - for dirname, dirs, files in os.walk(base_dir): + for dirname, dirs, files in sorted_walk(base_dir): visit(None, dirname, files) return zip_filename diff --git a/lib/python3.4/site-packages/setuptools/command/build_clib.py b/lib/python3.4/site-packages/setuptools/command/build_clib.py new file mode 100644 index 0000000..09caff6 --- /dev/null +++ b/lib/python3.4/site-packages/setuptools/command/build_clib.py @@ -0,0 +1,98 @@ +import distutils.command.build_clib as orig +from distutils.errors import DistutilsSetupError +from distutils import log +from setuptools.dep_util import newer_pairwise_group + + +class build_clib(orig.build_clib): + """ + Override the default build_clib behaviour to do the following: + + 1. 
Implement a rudimentary timestamp-based dependency system + so 'compile()' doesn't run every time. + 2. Add more keys to the 'build_info' dictionary: + * obj_deps - specify dependencies for each object compiled. + this should be a dictionary mapping a key + with the source filename to a list of + dependencies. Use an empty string for global + dependencies. + * cflags - specify a list of additional flags to pass to + the compiler. + """ + + def build_libraries(self, libraries): + for (lib_name, build_info) in libraries: + sources = build_info.get('sources') + if sources is None or not isinstance(sources, (list, tuple)): + raise DistutilsSetupError( + "in 'libraries' option (library '%s'), " + "'sources' must be present and must be " + "a list of source filenames" % lib_name) + sources = list(sources) + + log.info("building '%s' library", lib_name) + + # Make sure everything is the correct type. + # obj_deps should be a dictionary of keys as sources + # and a list/tuple of files that are its dependencies. + obj_deps = build_info.get('obj_deps', dict()) + if not isinstance(obj_deps, dict): + raise DistutilsSetupError( + "in 'libraries' option (library '%s'), " + "'obj_deps' must be a dictionary of " + "type 'source: list'" % lib_name) + dependencies = [] + + # Get the global dependencies that are specified by the '' key. + # These will go into every source's dependency list. + global_deps = obj_deps.get('', list()) + if not isinstance(global_deps, (list, tuple)): + raise DistutilsSetupError( + "in 'libraries' option (library '%s'), " + "'obj_deps' must be a dictionary of " + "type 'source: list'" % lib_name) + + # Build the list to be used by newer_pairwise_group + # each source will be auto-added to its dependencies. + for source in sources: + src_deps = [source] + src_deps.extend(global_deps) + extra_deps = obj_deps.get(source, list()) + if not isinstance(extra_deps, (list, tuple)): + raise DistutilsSetupError( + "in 'libraries' option (library '%s'), " + "'obj_deps' must be a dictionary of " + "type 'source: list'" % lib_name) + src_deps.extend(extra_deps) + dependencies.append(src_deps) + + expected_objects = self.compiler.object_filenames( + sources, + output_dir=self.build_temp + ) + + if newer_pairwise_group(dependencies, expected_objects) != ([], []): + # First, compile the source code to object files in the library + # directory. (This should probably change to putting object + # files in a temporary build directory.) + macros = build_info.get('macros') + include_dirs = build_info.get('include_dirs') + cflags = build_info.get('cflags') + objects = self.compiler.compile( + sources, + output_dir=self.build_temp, + macros=macros, + include_dirs=include_dirs, + extra_postargs=cflags, + debug=self.debug + ) + + # Now "link" the object files together into a static library. + # (On Unix at least, this isn't really linking -- it just + # builds an archive. Whatever.) 
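# --- Illustrative aside (editor's sketch, not part of the patch) ---------------
# What the extended 'build_info' dictionary described in the docstring above might
# look like in a setup() call. Library name, file names and flags are hypothetical.
from setuptools import setup

setup(
    name='example',
    libraries=[
        ('foo', {
            'sources': ['foo/a.c', 'foo/b.c'],
            'include_dirs': ['foo/include'],
            'macros': [('NDEBUG', '1')],
            'cflags': ['-O2'],                    # extra compiler flags (new key)
            'obj_deps': {                         # per-object dependencies (new key)
                '': ['foo/include/common.h'],     # '' = dependencies of every object
                'foo/a.c': ['foo/include/a.h'],
            },
        }),
    ],
)
# --------------------------------------------------------------------------------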
+ self.compiler.create_static_lib( + expected_objects, + lib_name, + output_dir=self.build_clib, + debug=self.debug + ) diff --git a/lib/python3.4/site-packages/setuptools/command/build_ext.py b/lib/python3.4/site-packages/setuptools/command/build_ext.py index 92e4a18..36f53f0 100644 --- a/lib/python3.4/site-packages/setuptools/command/build_ext.py +++ b/lib/python3.4/site-packages/setuptools/command/build_ext.py @@ -1,14 +1,16 @@ -from distutils.command.build_ext import build_ext as _du_build_ext -from distutils.file_util import copy_file -from distutils.ccompiler import new_compiler -from distutils.sysconfig import customize_compiler -from distutils.errors import DistutilsError -from distutils import log import os import sys import itertools +import imp +from distutils.command.build_ext import build_ext as _du_build_ext +from distutils.file_util import copy_file +from distutils.ccompiler import new_compiler +from distutils.sysconfig import customize_compiler, get_config_var +from distutils.errors import DistutilsError +from distutils import log from setuptools.extension import Library +from setuptools.extern import six try: # Attempt to use Cython for building extensions, if available @@ -16,15 +18,30 @@ try: except ImportError: _build_ext = _du_build_ext -try: - # Python 2.7 or >=3.2 - from sysconfig import _CONFIG_VARS -except ImportError: - from distutils.sysconfig import get_config_var +# make sure _config_vars is initialized +get_config_var("LDSHARED") +from distutils.sysconfig import _config_vars as _CONFIG_VARS + + +def _customize_compiler_for_shlib(compiler): + if sys.platform == "darwin": + # building .dylib requires additional compiler flags on OSX; here we + # temporarily substitute the pyconfig.h variables so that distutils' + # 'customize_compiler' uses them before we build the shared libraries. + tmp = _CONFIG_VARS.copy() + try: + # XXX Help! I don't have any idea whether these are right... 
+ _CONFIG_VARS['LDSHARED'] = ( + "gcc -Wl,-x -dynamiclib -undefined dynamic_lookup") + _CONFIG_VARS['CCSHARED'] = " -dynamiclib" + _CONFIG_VARS['SO'] = ".dylib" + customize_compiler(compiler) + finally: + _CONFIG_VARS.clear() + _CONFIG_VARS.update(tmp) + else: + customize_compiler(compiler) - get_config_var("LDSHARED") # make sure _config_vars is initialized - del get_config_var - from distutils.sysconfig import _config_vars as _CONFIG_VARS have_rtld = False use_stubs = False @@ -39,9 +56,18 @@ elif os.name != 'nt': except ImportError: pass - if_dl = lambda s: s if have_rtld else '' + +def get_abi3_suffix(): + """Return the file extension for an abi3-compliant Extension()""" + for suffix, _, _ in (s for s in imp.get_suffixes() if s[2] == imp.C_EXTENSION): + if '.abi3' in suffix: # Unix + return suffix + elif suffix == '.pyd': # Windows + return suffix + + class build_ext(_build_ext): def run(self): """Build extensions in build directory, then copy if --inplace""" @@ -77,6 +103,15 @@ class build_ext(_build_ext): filename = _build_ext.get_ext_filename(self, fullname) if fullname in self.ext_map: ext = self.ext_map[fullname] + use_abi3 = ( + six.PY3 + and getattr(ext, 'py_limited_api') + and get_abi3_suffix() + ) + if use_abi3: + so_ext = _get_config_var_837('EXT_SUFFIX') + filename = filename[:-len(so_ext)] + filename = filename + get_abi3_suffix() if isinstance(ext, Library): fn, ext = os.path.splitext(filename) return self.shlib_compiler.library_filename(fn, libtype) @@ -124,20 +159,7 @@ class build_ext(_build_ext): compiler = self.shlib_compiler = new_compiler( compiler=self.compiler, dry_run=self.dry_run, force=self.force ) - if sys.platform == "darwin": - tmp = _CONFIG_VARS.copy() - try: - # XXX Help! I don't have any idea whether these are right... - _CONFIG_VARS['LDSHARED'] = ( - "gcc -Wl,-x -dynamiclib -undefined dynamic_lookup") - _CONFIG_VARS['CCSHARED'] = " -dynamiclib" - _CONFIG_VARS['SO'] = ".dylib" - customize_compiler(compiler) - finally: - _CONFIG_VARS.clear() - _CONFIG_VARS.update(tmp) - else: - customize_compiler(compiler) + _customize_compiler_for_shlib(compiler) if self.include_dirs is not None: compiler.set_include_dirs(self.include_dirs) @@ -294,3 +316,13 @@ else: self.create_static_lib( objects, basename, output_dir, debug, target_lang ) + + +def _get_config_var_837(name): + """ + In https://github.com/pypa/setuptools/pull/837, we discovered + Python 3.3.0 exposes the extension suffix under the name 'SO'. 
+ """ + if sys.version_info < (3, 3, 1): + name = 'SO' + return get_config_var(name) diff --git a/lib/python3.4/site-packages/setuptools/command/build_py.py b/lib/python3.4/site-packages/setuptools/command/build_py.py index 8623c77..b0314fd 100644 --- a/lib/python3.4/site-packages/setuptools/command/build_py.py +++ b/lib/python3.4/site-packages/setuptools/command/build_py.py @@ -6,14 +6,15 @@ import fnmatch import textwrap import io import distutils.errors -import collections import itertools -from setuptools.extern.six.moves import map +from setuptools.extern import six +from setuptools.extern.six.moves import map, filter, filterfalse try: from setuptools.lib2to3_ex import Mixin2to3 except ImportError: + class Mixin2to3: def run_2to3(self, files, doctests=True): "do nothing" @@ -67,6 +68,9 @@ class build_py(orig.build_py, Mixin2to3): return orig.build_py.__getattr__(self, attr) def build_module(self, module, module_file, package): + if six.PY2 and isinstance(package, six.string_types): + # avoid errors on Python 2 when unicode is passed (#190) + package = package.split('.') outfile, copied = orig.build_py.build_module(self, module, module_file, package) if copied: @@ -94,12 +98,19 @@ class build_py(orig.build_py, Mixin2to3): def find_data_files(self, package, src_dir): """Return filenames for package's data files in 'src_dir'""" - globs = (self.package_data.get('', []) - + self.package_data.get(package, [])) - files = self.manifest_files.get(package, [])[:] - for pattern in globs: - # Each pattern has to be converted to a platform-specific path - files.extend(glob(os.path.join(src_dir, convert_path(pattern)))) + patterns = self._get_platform_patterns( + self.package_data, + package, + src_dir, + ) + globs_expanded = map(glob, patterns) + # flatten the expanded globs into an iterable of matches + globs_matches = itertools.chain.from_iterable(globs_expanded) + glob_files = filter(os.path.isfile, globs_matches) + files = itertools.chain( + self.manifest_files.get(package, []), + glob_files, + ) return self.exclude_data_files(package, src_dir, files) def build_package_data(self): @@ -184,26 +195,63 @@ class build_py(orig.build_py, Mixin2to3): def exclude_data_files(self, package, src_dir, files): """Filter filenames for package's data files in 'src_dir'""" - globs = ( - self.exclude_package_data.get('', []) - + self.exclude_package_data.get(package, []) + files = list(files) + patterns = self._get_platform_patterns( + self.exclude_package_data, + package, + src_dir, ) - bad = set( - item - for pattern in globs - for item in fnmatch.filter( - files, - os.path.join(src_dir, convert_path(pattern)), - ) + match_groups = ( + fnmatch.filter(files, pattern) + for pattern in patterns ) - seen = collections.defaultdict(itertools.count) - return [ + # flatten the groups of matches into an iterable of matches + matches = itertools.chain.from_iterable(match_groups) + bad = set(matches) + keepers = ( fn for fn in files if fn not in bad - # ditch dupes - and not next(seen[fn]) - ] + ) + # ditch dupes + return list(_unique_everseen(keepers)) + + @staticmethod + def _get_platform_patterns(spec, package, src_dir): + """ + yield platform-specific path patterns (suitable for glob + or fn_match) from a glob-based spec (such as + self.package_data or self.exclude_package_data) + matching package in src_dir. 
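# --- Illustrative aside (editor's sketch, not part of the patch) ---------------
# The 'spec' consumed by _get_platform_patterns() above is the ordinary
# package_data / exclude_package_data mapping passed to setup(); the '' key holds
# patterns applied to every package. Package and file names here are hypothetical.
from setuptools import setup, find_packages

setup(
    name='example',
    packages=find_packages(),
    package_data={
        '': ['*.txt'],                     # matched for every package
        'mypkg': ['data/*.json'],
    },
    exclude_package_data={
        'mypkg': ['data/secret-*.json'],   # filtered back out by exclude_data_files()
    },
)
# --------------------------------------------------------------------------------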
+ """ + raw_patterns = itertools.chain( + spec.get('', []), + spec.get(package, []), + ) + return ( + # Each pattern has to be converted to a platform-specific path + os.path.join(src_dir, convert_path(pattern)) + for pattern in raw_patterns + ) + + +# from Python docs +def _unique_everseen(iterable, key=None): + "List unique elements, preserving order. Remember all elements ever seen." + # unique_everseen('AAAABBBCCDAABBB') --> A B C D + # unique_everseen('ABBCcAD', str.lower) --> A B C D + seen = set() + seen_add = seen.add + if key is None: + for element in filterfalse(seen.__contains__, iterable): + seen_add(element) + yield element + else: + for element in iterable: + k = key(element) + if k not in seen: + seen_add(k) + yield element def assert_relative(path): diff --git a/lib/python3.4/site-packages/setuptools/command/develop.py b/lib/python3.4/site-packages/setuptools/command/develop.py index 11b5df1..85b23c6 100644 --- a/lib/python3.4/site-packages/setuptools/command/develop.py +++ b/lib/python3.4/site-packages/setuptools/command/develop.py @@ -9,10 +9,11 @@ from setuptools.extern import six from pkg_resources import Distribution, PathMetadata, normalize_path from setuptools.command.easy_install import easy_install +from setuptools import namespaces import setuptools -class develop(easy_install): +class develop(namespaces.DevelopInstaller, easy_install): """Set up package for development""" description = "install package in 'development mode'" @@ -30,6 +31,7 @@ class develop(easy_install): if self.uninstall: self.multi_version = True self.uninstall_link() + self.uninstall_namespaces() else: self.install_for_development() self.warn_deprecated_options() @@ -77,15 +79,28 @@ class develop(easy_install): project_name=ei.egg_name ) - p = self.egg_base.replace(os.sep, '/') - if p != os.curdir: - p = '../' * (p.count('/') + 1) - self.setup_path = p - p = normalize_path(os.path.join(self.install_dir, self.egg_path, p)) - if p != normalize_path(os.curdir): + self.setup_path = self._resolve_setup_path( + self.egg_base, + self.install_dir, + self.egg_path, + ) + + @staticmethod + def _resolve_setup_path(egg_base, install_dir, egg_path): + """ + Generate a path from egg_base back to '.' where the + setup script resides and ensure that path points to the + setup path from $install_dir/$egg_path. 
+ """ + path_to_setup = egg_base.replace(os.sep, '/').rstrip('/') + if path_to_setup != os.curdir: + path_to_setup = '../' * (path_to_setup.count('/') + 1) + resolved = normalize_path(os.path.join(install_dir, egg_path, path_to_setup)) + if resolved != normalize_path(os.curdir): raise DistutilsOptionError( "Can't get a consistent path to setup script from" - " installation directory", p, normalize_path(os.curdir)) + " installation directory", resolved, normalize_path(os.curdir)) + return path_to_setup def install_for_development(self): if six.PY3 and getattr(self.distribution, 'use_2to3', False): @@ -123,6 +138,8 @@ class develop(easy_install): self.easy_install(setuptools.bootstrap_install_from) setuptools.bootstrap_install_from = None + self.install_namespaces() + # create an .egg-link in the installation dir, pointing to our egg log.info("Creating %s (link to %s)", self.egg_link, self.egg_base) if not self.dry_run: @@ -186,6 +203,7 @@ class VersionlessRequirement(object): >>> str(adapted_dist.as_requirement()) 'foo' """ + def __init__(self, dist): self.__dist = dist diff --git a/lib/python3.4/site-packages/setuptools/command/dist_info.py b/lib/python3.4/site-packages/setuptools/command/dist_info.py new file mode 100644 index 0000000..c8dc659 --- /dev/null +++ b/lib/python3.4/site-packages/setuptools/command/dist_info.py @@ -0,0 +1,37 @@ +""" +Create a dist_info directory +As defined in the wheel specification +""" + +import os +import shutil + +from distutils.core import Command + + +class dist_info(Command): + + description = 'create a .dist-info directory' + + user_options = [ + ('egg-base=', 'e', "directory containing .egg-info directories" + " (default: top of the source tree)"), + ] + + def initialize_options(self): + self.egg_base = None + + def finalize_options(self): + pass + + def run(self): + egg_info = self.get_finalized_command('egg_info') + egg_info.run() + dist_info_dir = egg_info.egg_info[:-len('.egg-info')] + '.dist-info' + + bdist_wheel = self.get_finalized_command('bdist_wheel') + bdist_wheel.egg2dist(egg_info.egg_info, dist_info_dir) + + if self.egg_base: + shutil.move(dist_info_dir, os.path.join( + self.egg_base, dist_info_dir)) diff --git a/lib/python3.4/site-packages/setuptools/command/easy_install.py b/lib/python3.4/site-packages/setuptools/command/easy_install.py index 9fc287e..8fba7b4 100644 --- a/lib/python3.4/site-packages/setuptools/command/easy_install.py +++ b/lib/python3.4/site-packages/setuptools/command/easy_install.py @@ -1,5 +1,4 @@ #!/usr/bin/env python - """ Easy Install ------------ @@ -8,15 +7,17 @@ A tool for doing automatic download/extract/build of distutils-based Python packages. For detailed documentation, see the accompanying EasyInstall.txt file, or visit the `EasyInstall home page`__. 
-__ https://pythonhosted.org/setuptools/easy_install.html +__ https://setuptools.readthedocs.io/en/latest/easy_install.html """ from glob import glob from distutils.util import get_platform from distutils.util import convert_path, subst_vars -from distutils.errors import DistutilsArgError, DistutilsOptionError, \ - DistutilsError, DistutilsPlatformError +from distutils.errors import ( + DistutilsArgError, DistutilsOptionError, + DistutilsError, DistutilsPlatformError, +) from distutils.command.install import INSTALL_SCHEMES, SCHEME_KEYS from distutils import log, dir_util from distutils.command.build_scripts import first_line_re @@ -30,7 +31,6 @@ import zipfile import re import stat import random -import platform import textwrap import warnings import site @@ -46,10 +46,12 @@ from setuptools.extern.six.moves import configparser, map from setuptools import Command from setuptools.sandbox import run_setup from setuptools.py31compat import get_path, get_config_vars +from setuptools.py27compat import rmtree_safe from setuptools.command import setopt from setuptools.archive_util import unpack_archive -from setuptools.package_index import PackageIndex -from setuptools.package_index import URL_SCHEME +from setuptools.package_index import ( + PackageIndex, parse_requirement_arg, URL_SCHEME, +) from setuptools.command import bdist_egg, egg_info from pkg_resources import ( yield_lines, normalize_path, resource_string, ensure_directory, @@ -57,12 +59,11 @@ from pkg_resources import ( Distribution, PathMetadata, EggMetadata, WorkingSet, DistributionNotFound, VersionConflict, DEVELOP_DIST, ) -import pkg_resources +import pkg_resources.py31compat # Turn on PEP440Warnings warnings.filterwarnings("default", category=pkg_resources.PEP440Warning) - __all__ = [ 'samefile', 'easy_install', 'PthDistributions', 'extract_wininst_cfg', 'main', 'get_exe_prefixes', @@ -74,6 +75,12 @@ def is_64bit(): def samefile(p1, p2): + """ + Determine if two paths reference the same file. + + Augments os.path.samefile to work on Windows and + suppresses errors if the path doesn't exist. 
+ """ both_exist = os.path.exists(p1) and os.path.exists(p2) use_samefile = hasattr(os.path, 'samefile') and both_exist if use_samefile: @@ -84,6 +91,7 @@ def samefile(p1, p2): if six.PY2: + def _to_ascii(s): return s @@ -94,6 +102,7 @@ if six.PY2: except UnicodeError: return False else: + def _to_ascii(s): return s.encode('ascii') @@ -105,6 +114,9 @@ else: return False +_one_liner = lambda text: textwrap.dedent(text).strip().replace('\n', '; ') + + class easy_install(Command): """Manage a download/build/install process""" description = "Find/get/install Python packages" @@ -136,15 +148,13 @@ class easy_install(Command): ('local-snapshots-ok', 'l', "allow building eggs from local checkouts"), ('version', None, "print version information and exit"), - ('install-layout=', None, "installation layout to choose (known values: deb)"), - ('force-installation-into-system-dir', '0', "force installation into /usr"), ('no-find-links', None, "Don't load find-links defined in packages being installed") ] boolean_options = [ 'zip-ok', 'multi-version', 'exclude-scripts', 'upgrade', 'always-copy', 'editable', - 'no-deps', 'local-snapshots-ok', 'version', 'force-installation-into-system-dir' + 'no-deps', 'local-snapshots-ok', 'version' ] if site.ENABLE_USER_SITE: @@ -192,11 +202,6 @@ class easy_install(Command): self.site_dirs = None self.installed_projects = {} self.sitepy_installed = False - # enable custom installation, known values: deb - self.install_layout = None - self.force_installation_into_system_dir = None - self.multiarch = None - # Always read easy_install options, even if we are subclassed, or have # an independent instance created. This ensures that defaults will # always come from the standard configuration file(s)' "easy_install" @@ -265,15 +270,10 @@ class easy_install(Command): self.expand_basedirs() self.expand_dirs() - if self.install_layout: - if not self.install_layout.lower() in ['deb']: - raise DistutilsOptionError("unknown value for --install-layout") - self.install_layout = self.install_layout.lower() - import sysconfig - if sys.version_info[:2] >= (3, 3): - self.multiarch = sysconfig.get_config_var('MULTIARCH') - self._expand('install_dir', 'script_dir', 'build_directory', - 'site_dirs') + self._expand( + 'install_dir', 'script_dir', 'build_directory', + 'site_dirs', + ) # If a non-default installation directory was specified, default the # script directory to match it. if self.script_dir is None: @@ -296,15 +296,6 @@ class easy_install(Command): if self.user and self.install_purelib: self.install_dir = self.install_purelib self.script_dir = self.install_scripts - - if self.prefix == '/usr' and not self.force_installation_into_system_dir: - raise DistutilsOptionError("""installation into /usr - -Trying to install into the system managed parts of the file system. Please -consider to install to another location, or use the option ---force-installation-into-system-dir to overwrite this warning. -""") - # default --record from the install command self.set_undefined_options('install', ('record', 'record')) # Should this be moved to the if statement below? 
It's not used @@ -402,9 +393,15 @@ consider to install to another location, or use the option def expand_dirs(self): """Calls `os.path.expanduser` on install dirs.""" - self._expand_attrs(['install_purelib', 'install_platlib', - 'install_lib', 'install_headers', - 'install_scripts', 'install_data', ]) + dirs = [ + 'install_purelib', + 'install_platlib', + 'install_lib', + 'install_headers', + 'install_scripts', + 'install_data', + ] + self._expand_attrs(dirs) def run(self): if self.verbose != self.distribution.verbose: @@ -436,7 +433,7 @@ consider to install to another location, or use the option """ try: pid = os.getpid() - except: + except Exception: pid = random.randint(0, sys.maxsize) return os.path.join(self.install_dir, "test-easy-install-%s" % pid) @@ -477,8 +474,7 @@ consider to install to another location, or use the option else: self.pth_file = None - PYTHONPATH = os.environ.get('PYTHONPATH', '').split(os.pathsep) - if instdir not in map(normalize_path, filter(None, PYTHONPATH)): + if instdir not in map(normalize_path, _pythonpath()): # only PYTHONPATH dirs need a site.py, so pretend it's there self.sitepy_installed = True elif self.multi_version and not os.path.exists(pth_file): @@ -517,7 +513,7 @@ consider to install to another location, or use the option For information on other options, you may wish to consult the documentation at: - https://pythonhosted.org/setuptools/easy_install.html + https://setuptools.readthedocs.io/en/latest/easy_install.html Please make the appropriate changes for your system and try again. """).lstrip() @@ -538,27 +534,34 @@ consider to install to another location, or use the option pth_file = self.pseudo_tempname() + ".pth" ok_file = pth_file + '.ok' ok_exists = os.path.exists(ok_file) + tmpl = _one_liner(""" + import os + f = open({ok_file!r}, 'w') + f.write('OK') + f.close() + """) + '\n' try: if ok_exists: os.unlink(ok_file) dirname = os.path.dirname(ok_file) - if not os.path.exists(dirname): - os.makedirs(dirname) + pkg_resources.py31compat.makedirs(dirname, exist_ok=True) f = open(pth_file, 'w') except (OSError, IOError): self.cant_write_to_target() else: try: - f.write("import os; f = open(%r, 'w'); f.write('OK'); " - "f.close()\n" % (ok_file,)) + f.write(tmpl.format(**locals())) f.close() f = None executable = sys.executable if os.name == 'nt': dirname, basename = os.path.split(executable) alt = os.path.join(dirname, 'pythonw.exe') - if (basename.lower() == 'python.exe' and - os.path.exists(alt)): + use_alt = ( + basename.lower() == 'python.exe' and + os.path.exists(alt) + ) + if use_alt: # use pythonw.exe to avoid opening a console window executable = alt @@ -623,20 +626,26 @@ consider to install to another location, or use the option (spec.key, self.build_directory) ) + @contextlib.contextmanager + def _tmpdir(self): + tmpdir = tempfile.mkdtemp(prefix=six.u("easy_install-")) + try: + # cast to str as workaround for #709 and #710 and #712 + yield str(tmpdir) + finally: + os.path.exists(tmpdir) and rmtree(rmtree_safe(tmpdir)) + def easy_install(self, spec, deps=False): - tmpdir = tempfile.mkdtemp(prefix="easy_install-") - download = None if not self.editable: self.install_site_py() - try: + with self._tmpdir() as tmpdir: if not isinstance(spec, Requirement): if URL_SCHEME(spec): # It's a url, download it to tmpdir and process self.not_editable(spec) - download = self.package_index.download(spec, tmpdir) - return self.install_item(None, download, tmpdir, deps, - True) + dl = self.package_index.download(spec, tmpdir) + return 
self.install_item(None, dl, tmpdir, deps, True) elif os.path.exists(spec): # Existing file or directory, just process it directly @@ -662,10 +671,6 @@ consider to install to another location, or use the option else: return self.install_item(spec, dist.location, tmpdir, deps) - finally: - if os.path.exists(tmpdir): - rmtree(tmpdir) - def install_item(self, spec, download, tmpdir, deps, install_needed=False): # Installation is also needed if file in tmpdir or is not an egg @@ -733,10 +738,7 @@ consider to install to another location, or use the option elif requirement is None or dist not in requirement: # if we wound up with a different version, resolve what we've got distreq = dist.as_requirement() - requirement = requirement or distreq - requirement = Requirement( - distreq.project_name, distreq.specs, requirement.extras - ) + requirement = Requirement(str(distreq)) log.info("Processing dependencies for %s", requirement) try: distros = WorkingSet([]).resolve( @@ -765,8 +767,9 @@ consider to install to another location, or use the option def maybe_move(self, spec, dist_filename, setup_base): dst = os.path.join(self.build_directory, spec.key) if os.path.exists(dst): - msg = ("%r already exists in %s; build directory %s will not be " - "kept") + msg = ( + "%r already exists in %s; build directory %s will not be kept" + ) log.warn(msg, spec.key, self.build_directory, setup_base) return setup_base if os.path.isdir(dist_filename): @@ -806,7 +809,7 @@ consider to install to another location, or use the option There are a couple of template scripts in the package. This function loads one of them and prepares it for use. """ - # See https://bitbucket.org/pypa/setuptools/issue/134 for info + # See https://github.com/pypa/setuptools/issues/134 for info # on script file naming and downstream issues with SVR4 name = 'script.tmpl' if dev_path: @@ -884,8 +887,10 @@ consider to install to another location, or use the option return Distribution.from_filename(egg_path, metadata=metadata) def install_egg(self, egg_path, tmpdir): - destination = os.path.join(self.install_dir, - os.path.basename(egg_path)) + destination = os.path.join( + self.install_dir, + os.path.basename(egg_path), + ) destination = os.path.abspath(destination) if not self.dry_run: ensure_directory(destination) @@ -895,8 +900,11 @@ consider to install to another location, or use the option if os.path.isdir(destination) and not os.path.islink(destination): dir_util.remove_tree(destination, dry_run=self.dry_run) elif os.path.exists(destination): - self.execute(os.unlink, (destination,), "Removing " + - destination) + self.execute( + os.unlink, + (destination,), + "Removing " + destination, + ) try: new_dist_is_zipped = False if os.path.isdir(egg_path): @@ -913,13 +921,19 @@ consider to install to another location, or use the option f, m = shutil.move, "Moving" else: f, m = shutil.copy2, "Copying" - self.execute(f, (egg_path, destination), - (m + " %s to %s") % - (os.path.basename(egg_path), - os.path.dirname(destination))) - update_dist_caches(destination, - fix_zipimporter_caches=new_dist_is_zipped) - except: + self.execute( + f, + (egg_path, destination), + (m + " %s to %s") % ( + os.path.basename(egg_path), + os.path.dirname(destination) + ), + ) + update_dist_caches( + destination, + fix_zipimporter_caches=new_dist_is_zipped, + ) + except Exception: update_dist_caches(destination, fix_zipimporter_caches=False) raise @@ -941,8 +955,8 @@ consider to install to another location, or use the option ) # Convert the .exe to an unpacked egg - 
egg_path = dist.location = os.path.join(tmpdir, dist.egg_name() + - '.egg') + egg_path = os.path.join(tmpdir, dist.egg_name() + '.egg') + dist.location = egg_path egg_tmp = egg_path + '.tmp' _egg_info = os.path.join(egg_tmp, 'EGG-INFO') pkg_inf = os.path.join(_egg_info, 'PKG-INFO') @@ -960,13 +974,13 @@ consider to install to another location, or use the option f.close() script_dir = os.path.join(_egg_info, 'scripts') # delete entry-point scripts to avoid duping - self.delete_blockers( - [os.path.join(script_dir, args[0]) for args in - ScriptWriter.get_args(dist)] - ) + self.delete_blockers([ + os.path.join(script_dir, args[0]) + for args in ScriptWriter.get_args(dist) + ]) # Build .egg file from tmpdir bdist_egg.make_zipfile( - egg_path, egg_tmp, verbose=self.verbose, dry_run=self.dry_run + egg_path, egg_tmp, verbose=self.verbose, dry_run=self.dry_run, ) # install the .egg return self.install_egg(egg_path, tmpdir) @@ -1154,7 +1168,7 @@ consider to install to another location, or use the option if dist.location in self.pth_file.paths: log.info( "%s is already the active version in easy-install.pth", - dist + dist, ) else: log.info("Adding %s to easy-install.pth file", dist) @@ -1215,7 +1229,7 @@ consider to install to another location, or use the option if self.optimize: byte_compile( to_compile, optimize=self.optimize, force=1, - dry_run=self.dry_run + dry_run=self.dry_run, ) finally: log.set_verbosity(self.verbose) # restore original verbosity @@ -1246,7 +1260,8 @@ consider to install to another location, or use the option * You can set up the installation directory to support ".pth" files by using one of the approaches described here: - https://pythonhosted.org/setuptools/easy_install.html#custom-installation-locations + https://setuptools.readthedocs.io/en/latest/easy_install.html#custom-installation-locations + Please make the appropriate changes for your system and try again.""").lstrip() @@ -1262,17 +1277,14 @@ consider to install to another location, or use the option sitepy = os.path.join(self.install_dir, "site.py") source = resource_string("setuptools", "site-patch.py") + source = source.decode('utf-8') current = "" if os.path.exists(sitepy): log.debug("Checking existing site.py in %s", self.install_dir) - f = open(sitepy, 'rb') - current = f.read() - # we want str, not bytes - if six.PY3: - current = current.decode() + with io.open(sitepy) as strm: + current = strm.read() - f.close() if not current.startswith('def __boot():'): raise DistutilsError( "%s is not a setuptools-generated site.py; please" @@ -1283,9 +1295,8 @@ consider to install to another location, or use the option log.info("Creating %s", sitepy) if not self.dry_run: ensure_directory(sitepy) - f = open(sitepy, 'wb') - f.write(source) - f.close() + with io.open(sitepy, 'w', encoding='utf-8') as strm: + strm.write(source) self.byte_compile([sitepy]) self.sitepy_installed = True @@ -1300,28 +1311,11 @@ consider to install to another location, or use the option self.debug_print("os.makedirs('%s', 0o700)" % path) os.makedirs(path, 0o700) - if sys.version[:3] in ('2.3', '2.4', '2.5') or 'real_prefix' in sys.__dict__: - sitedir_name = 'site-packages' - else: - sitedir_name = 'dist-packages' - INSTALL_SCHEMES = dict( posix=dict( install_dir='$base/lib/python$py_version_short/site-packages', script_dir='$base/bin', ), - unix_local = dict( - install_dir = '$base/local/lib/python$py_version_short/%s' % sitedir_name, - script_dir = '$base/local/bin', - ), - posix_local = dict( - install_dir = 
'$base/local/lib/python$py_version_short/%s' % sitedir_name, - script_dir = '$base/local/bin', - ), - deb_system = dict( - install_dir = '$base/lib/python3/%s' % sitedir_name, - script_dir = '$base/bin', - ), ) DEFAULT_SCHEME = dict( @@ -1332,18 +1326,11 @@ consider to install to another location, or use the option def _expand(self, *attrs): config_vars = self.get_finalized_command('install').config_vars - if self.prefix or self.install_layout: - if self.install_layout and self.install_layout in ['deb']: - scheme_name = "deb_system" - self.prefix = '/usr' - elif self.prefix or 'real_prefix' in sys.__dict__: - scheme_name = os.name - else: - scheme_name = "posix_local" + if self.prefix: # Set default install_dir/scripts from --prefix config_vars = config_vars.copy() config_vars['base'] = self.prefix - scheme = self.INSTALL_SCHEMES.get(scheme_name,self.DEFAULT_SCHEME) + scheme = self.INSTALL_SCHEMES.get(os.name, self.DEFAULT_SCHEME) for attr, val in scheme.items(): if getattr(self, attr, None) is None: setattr(self, attr, val) @@ -1359,10 +1346,21 @@ consider to install to another location, or use the option setattr(self, attr, val) +def _pythonpath(): + items = os.environ.get('PYTHONPATH', '').split(os.pathsep) + return filter(None, items) + + def get_site_dirs(): - # return a list of 'site' dirs - sitedirs = [_f for _f in os.environ.get('PYTHONPATH', - '').split(os.pathsep) if _f] + """ + Return a list of 'site' dirs + """ + + sitedirs = [] + + # start with PYTHONPATH + sitedirs.extend(_pythonpath()) + prefixes = [sys.prefix] if sys.exec_prefix != sys.prefix: prefixes.append(sys.exec_prefix) @@ -1371,20 +1369,20 @@ def get_site_dirs(): if sys.platform in ('os2emx', 'riscos'): sitedirs.append(os.path.join(prefix, "Lib", "site-packages")) elif os.sep == '/': - sitedirs.extend([os.path.join(prefix, - "lib", - "python" + sys.version[:3], - "site-packages"), - os.path.join(prefix, "lib", "site-python")]) + sitedirs.extend([ + os.path.join( + prefix, + "lib", + "python" + sys.version[:3], + "site-packages", + ), + os.path.join(prefix, "lib", "site-python"), + ]) else: - if sys.version[:3] in ('2.3', '2.4', '2.5'): - sdir = "site-packages" - else: - sdir = "dist-packages" - sitedirs.extend( - [os.path.join(prefix, "local/lib", "python" + sys.version[:3], sdir), - os.path.join(prefix, "lib", "python" + sys.version[:3], sdir)] - ) + sitedirs.extend([ + prefix, + os.path.join(prefix, "lib", "site-packages"), + ]) if sys.platform == 'darwin': # for framework builds *only* we add the standard Apple # locations. 
Currently only per-user, but /Library and @@ -1392,12 +1390,14 @@ def get_site_dirs(): if 'Python.framework' in prefix: home = os.environ.get('HOME') if home: - sitedirs.append( - os.path.join(home, - 'Library', - 'Python', - sys.version[:3], - 'site-packages')) + home_sp = os.path.join( + home, + 'Library', + 'Python', + sys.version[:3], + 'site-packages', + ) + sitedirs.append(home_sp) lib_paths = get_path('purelib'), get_path('platlib') for site_lib in lib_paths: if site_lib not in sitedirs: @@ -1406,6 +1406,11 @@ def get_site_dirs(): if site.ENABLE_USER_SITE: sitedirs.append(site.USER_SITE) + try: + sitedirs.extend(site.getsitepackages()) + except AttributeError: + pass + sitedirs = list(map(normalize_path, sitedirs)) return sitedirs @@ -1473,8 +1478,8 @@ def extract_wininst_cfg(dist_filename): return None # not a valid tag f.seek(prepended - (12 + cfglen)) - cfg = configparser.RawConfigParser( - {'version': '', 'target_version': ''}) + init = {'version': '', 'target_version': ''} + cfg = configparser.RawConfigParser(init) try: part = f.read(cfglen) # Read up to the first null byte. @@ -1497,7 +1502,8 @@ def get_exe_prefixes(exe_filename): """Get exe->egg path translations for a given .exe file""" prefixes = [ - ('PURELIB/', ''), ('PLATLIB/pywin32_system32', ''), + ('PURELIB/', ''), + ('PLATLIB/pywin32_system32', ''), ('PLATLIB/', ''), ('SCRIPTS/', 'EGG-INFO/scripts/'), ('DATA/lib/site-packages', ''), @@ -1531,15 +1537,6 @@ def get_exe_prefixes(exe_filename): return prefixes -def parse_requirement_arg(spec): - try: - return Requirement.parse(spec) - except ValueError: - raise DistutilsError( - "Not a URL, existing file, or requirement spec: %r" % (spec,) - ) - - class PthDistributions(Environment): """A .pth file with Distribution paths in it""" @@ -1649,7 +1646,6 @@ class PthDistributions(Environment): class RewritePthDistributions(PthDistributions): - @classmethod def _wrap_lines(cls, lines): yield cls.prelude @@ -1657,12 +1653,11 @@ class RewritePthDistributions(PthDistributions): yield line yield cls.postlude - _inline = lambda text: textwrap.dedent(text).strip().replace('\n', '; ') - prelude = _inline(""" + prelude = _one_liner(""" import sys sys.__plen = len(sys.path) """) - postlude = _inline(""" + postlude = _one_liner(""" import sys new = sys.path[sys.__plen:] del sys.path[sys.__plen:] @@ -1672,7 +1667,7 @@ class RewritePthDistributions(PthDistributions): """) -if os.environ.get('SETUPTOOLS_SYS_PATH_TECHNIQUE', 'rewrite') == 'rewrite': +if os.environ.get('SETUPTOOLS_SYS_PATH_TECHNIQUE', 'raw') == 'rewrite': PthDistributions = RewritePthDistributions @@ -1689,7 +1684,7 @@ def _first_line_re(): def auto_chmod(func, arg, exc): - if func is os.remove and os.name == 'nt': + if func in [os.unlink, os.remove] and os.name == 'nt': chmod(arg, stat.S_IWRITE) return func(arg) et, ev, _ = sys.exc_info() @@ -1821,7 +1816,7 @@ def _update_zipimporter_cache(normalized_path, cache, updater=None): # * Does not support the dict.pop() method, forcing us to use the # get/del patterns instead. 
For more detailed information see the # following links: - # https://bitbucket.org/pypa/setuptools/issue/202/more-robust-zipimporter-cache-invalidation#comment-10495960 + # https://github.com/pypa/setuptools/issues/202#issuecomment-202913420 # https://bitbucket.org/pypy/pypy/src/dd07756a34a41f674c0cacfbc8ae1d4cc9ea2ae4/pypy/module/zipimport/interp_zipimport.py#cl-99 old_entry = cache[p] del cache[p] @@ -1842,6 +1837,7 @@ def _remove_and_clear_zip_directory_cache_data(normalized_path): normalized_path, zipimport._zip_directory_cache, updater=clear_and_remove_cached_zip_archive_directory_data) + # PyPy Python implementation does not allow directly writing to the # zipimport._zip_directory_cache and so prevents us from attempting to correct # its content. The best we can do there is clear the problematic cache content @@ -1854,6 +1850,7 @@ if '__pypy__' in sys.builtin_module_names: _replace_zip_directory_cache_data = \ _remove_and_clear_zip_directory_cache_data else: + def _replace_zip_directory_cache_data(normalized_path): def replace_cached_zip_archive_directory_data(path, old_entry): # N.B. In theory, we could load the zip directory information just @@ -1996,11 +1993,21 @@ class CommandSpec(list): def as_header(self): return self._render(self + list(self.options)) + @staticmethod + def _strip_quotes(item): + _QUOTES = '"\'' + for q in _QUOTES: + if item.startswith(q) and item.endswith(q): + return item[1:-1] + return item + @staticmethod def _render(items): - cmdline = subprocess.list2cmdline(items) + cmdline = subprocess.list2cmdline( + CommandSpec._strip_quotes(item.strip()) for item in items) return '#!' + cmdline + '\n' + # For pbr compat; will be removed in a future version. sys_executable = CommandSpec._sys_executable() @@ -2015,13 +2022,15 @@ class ScriptWriter(object): gui apps. """ - template = textwrap.dedent(""" + template = textwrap.dedent(r""" # EASY-INSTALL-ENTRY-SCRIPT: %(spec)r,%(group)r,%(name)r __requires__ = %(spec)r + import re import sys from pkg_resources import load_entry_point if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) sys.exit( load_entry_point(%(spec)r, %(group)r, %(name)r)() ) @@ -2130,8 +2139,11 @@ class WindowsScriptWriter(ScriptWriter): "For Windows, add a .py extension" ext = dict(console='.pya', gui='.pyw')[type_] if ext not in os.environ['PATHEXT'].lower().split(';'): - warnings.warn("%s not listed in PATHEXT; scripts will not be " - "recognized as executables." % ext, UserWarning) + msg = ( + "{ext} not listed in PATHEXT; scripts will not be " + "recognized as executables." + ).format(**locals()) + warnings.warn(msg, UserWarning) old = ['.pya', '.py', '-script.py', '.pyc', '.pyo', '.pyw', '.exe'] old.remove(ext) header = cls._adjust_header(type_, header) @@ -2210,8 +2222,6 @@ def get_win_launcher(type): Returns the executable as a byte string. """ launcher_fn = '%s.exe' % type - if platform.machine().lower() == 'arm': - launcher_fn = launcher_fn.replace(".", "-arm.") if is_64bit(): launcher_fn = launcher_fn.replace(".", "-64.") else: @@ -2228,39 +2238,7 @@ def load_launcher_manifest(name): def rmtree(path, ignore_errors=False, onerror=auto_chmod): - """Recursively delete a directory tree. - - This code is taken from the Python 2.4 version of 'shutil', because - the 2.3 version doesn't really work right. 
- """ - if ignore_errors: - def onerror(*args): - pass - elif onerror is None: - def onerror(*args): - raise - names = [] - try: - names = os.listdir(path) - except os.error: - onerror(os.listdir, path, sys.exc_info()) - for name in names: - fullname = os.path.join(path, name) - try: - mode = os.lstat(fullname).st_mode - except os.error: - mode = 0 - if stat.S_ISDIR(mode): - rmtree(fullname, ignore_errors, onerror) - else: - try: - os.remove(fullname) - except os.error: - onerror(os.remove, fullname, sys.exc_info()) - try: - os.rmdir(path) - except os.error: - onerror(os.rmdir, path, sys.exc_info()) + return shutil.rmtree(path, ignore_errors, onerror) def current_umask(): @@ -2297,7 +2275,8 @@ def main(argv=None, **kw): setup( script_args=['-q', 'easy_install', '-v'] + argv, script_name=sys.argv[0] or 'easy_install', - distclass=DistributionWithoutHelpCommands, **kw + distclass=DistributionWithoutHelpCommands, + **kw ) diff --git a/lib/python3.4/site-packages/setuptools/command/egg_info.py b/lib/python3.4/site-packages/setuptools/command/egg_info.py index d1bd9b0..a183d15 100644 --- a/lib/python3.4/site-packages/setuptools/command/egg_info.py +++ b/lib/python3.4/site-packages/setuptools/command/egg_info.py @@ -3,6 +3,7 @@ Create a distribution's .egg-info directory and contents""" from distutils.filelist import FileList as _FileList +from distutils.errors import DistutilsInternalError from distutils.util import convert_path from distutils import log import distutils.errors @@ -13,6 +14,7 @@ import sys import io import warnings import time +import collections from setuptools.extern import six from setuptools.extern.six.moves import map @@ -26,13 +28,92 @@ from pkg_resources import ( parse_requirements, safe_name, parse_version, safe_version, yield_lines, EntryPoint, iter_entry_points, to_filename) import setuptools.unicode_utils as unicode_utils +from setuptools.glob import glob from pkg_resources.extern import packaging -try: - from setuptools_svn import svn_utils -except ImportError: - pass + +def translate_pattern(glob): + """ + Translate a file path glob like '*.txt' in to a regular expression. + This differs from fnmatch.translate which allows wildcards to match + directory separators. It also knows about '**/' which matches any number of + directories. + """ + pat = '' + + # This will split on '/' within [character classes]. This is deliberate. + chunks = glob.split(os.path.sep) + + sep = re.escape(os.sep) + valid_char = '[^%s]' % (sep,) + + for c, chunk in enumerate(chunks): + last_chunk = c == len(chunks) - 1 + + # Chunks that are a literal ** are globstars. They match anything. 
+ if chunk == '**': + if last_chunk: + # Match anything if this is the last component + pat += '.*' + else: + # Match '(name/)*' + pat += '(?:%s+%s)*' % (valid_char, sep) + continue # Break here as the whole path component has been handled + + # Find any special characters in the remainder + i = 0 + chunk_len = len(chunk) + while i < chunk_len: + char = chunk[i] + if char == '*': + # Match any number of name characters + pat += valid_char + '*' + elif char == '?': + # Match a name character + pat += valid_char + elif char == '[': + # Character class + inner_i = i + 1 + # Skip initial !/] chars + if inner_i < chunk_len and chunk[inner_i] == '!': + inner_i = inner_i + 1 + if inner_i < chunk_len and chunk[inner_i] == ']': + inner_i = inner_i + 1 + + # Loop till the closing ] is found + while inner_i < chunk_len and chunk[inner_i] != ']': + inner_i = inner_i + 1 + + if inner_i >= chunk_len: + # Got to the end of the string without finding a closing ] + # Do not treat this as a matching group, but as a literal [ + pat += re.escape(char) + else: + # Grab the insides of the [brackets] + inner = chunk[i + 1:inner_i] + char_class = '' + + # Class negation + if inner[0] == '!': + char_class = '^' + inner = inner[1:] + + char_class += re.escape(inner) + pat += '[%s]' % (char_class,) + + # Skip to the end ] + i = inner_i + else: + pat += re.escape(char) + i += 1 + + # Join each chunk with the dir separator + if not last_chunk: + pat += sep + + pat += r'\Z' + return re.compile(pat, flags=re.MULTILINE|re.DOTALL) class egg_info(Command): @@ -41,18 +122,15 @@ class egg_info(Command): user_options = [ ('egg-base=', 'e', "directory containing .egg-info directories" " (default: top of the source tree)"), - ('tag-svn-revision', 'r', - "Add subversion revision ID to version number"), ('tag-date', 'd', "Add date stamp (e.g. 20050528) to version number"), ('tag-build=', 'b', "Specify explicit tag to add to version number"), - ('no-svn-revision', 'R', - "Don't add subversion revision ID [default]"), ('no-date', 'D', "Don't include date stamp [default]"), ] - boolean_options = ['tag-date', 'tag-svn-revision'] - negative_opt = {'no-svn-revision': 'tag-svn-revision', - 'no-date': 'tag-date'} + boolean_options = ['tag-date'] + negative_opt = { + 'no-date': 'tag-date', + } def initialize_options(self): self.egg_name = None @@ -60,20 +138,36 @@ class egg_info(Command): self.egg_base = None self.egg_info = None self.tag_build = None - self.tag_svn_revision = 0 self.tag_date = 0 self.broken_egg_info = False self.vtags = None + #################################### + # allow the 'tag_svn_revision' to be detected and + # set, supporting sdists built on older Setuptools. + @property + def tag_svn_revision(self): + pass + + @tag_svn_revision.setter + def tag_svn_revision(self, value): + pass + #################################### + def save_version_info(self, filename): - values = dict( - egg_info=dict( - tag_svn_revision=0, - tag_date=0, - tag_build=self.tags(), - ) - ) - edit_config(filename, values) + """ + Materialize the value of date into the + build tag. Install build keys in a deterministic order + to avoid arbitrary reordering on subsequent builds. 
+ """ + # python 2.6 compatibility + odict = getattr(collections, 'OrderedDict', dict) + egg_info = odict() + # follow the order these keys would have been added + # when PYTHONHASHSEED=0 + egg_info['tag_build'] = self.tags() + egg_info['tag_date'] = 0 + edit_config(filename, dict(egg_info=egg_info)) def finalize_options(self): self.egg_name = safe_name(self.distribution.get_name()) @@ -189,18 +283,10 @@ class egg_info(Command): version = '' if self.tag_build: version += self.tag_build - if self.tag_svn_revision: - version += '-r%s' % self.get_svn_revision() if self.tag_date: version += time.strftime("-%Y%m%d") return version - @staticmethod - def get_svn_revision(): - if 'svn_utils' not in globals(): - return "0" - return str(svn_utils.SvnInfo.load(os.curdir).get_revision()) - def find_sources(self): """Generate SOURCES.txt manifest file""" manifest_filename = os.path.join(self.egg_info, "SOURCES.txt") @@ -226,7 +312,155 @@ class egg_info(Command): class FileList(_FileList): - """File list that accepts only existing, platform-independent paths""" + # Implementations of the various MANIFEST.in commands + + def process_template_line(self, line): + # Parse the line: split it up, make sure the right number of words + # is there, and return the relevant words. 'action' is always + # defined: it's the first word of the line. Which of the other + # three are defined depends on the action; it'll be either + # patterns, (dir and patterns), or (dir_pattern). + (action, patterns, dir, dir_pattern) = self._parse_template_line(line) + + # OK, now we know that the action is valid and we have the + # right number of words on the line for that action -- so we + # can proceed with minimal error-checking. + if action == 'include': + self.debug_print("include " + ' '.join(patterns)) + for pattern in patterns: + if not self.include(pattern): + log.warn("warning: no files found matching '%s'", pattern) + + elif action == 'exclude': + self.debug_print("exclude " + ' '.join(patterns)) + for pattern in patterns: + if not self.exclude(pattern): + log.warn(("warning: no previously-included files " + "found matching '%s'"), pattern) + + elif action == 'global-include': + self.debug_print("global-include " + ' '.join(patterns)) + for pattern in patterns: + if not self.global_include(pattern): + log.warn(("warning: no files found matching '%s' " + "anywhere in distribution"), pattern) + + elif action == 'global-exclude': + self.debug_print("global-exclude " + ' '.join(patterns)) + for pattern in patterns: + if not self.global_exclude(pattern): + log.warn(("warning: no previously-included files matching " + "'%s' found anywhere in distribution"), + pattern) + + elif action == 'recursive-include': + self.debug_print("recursive-include %s %s" % + (dir, ' '.join(patterns))) + for pattern in patterns: + if not self.recursive_include(dir, pattern): + log.warn(("warning: no files found matching '%s' " + "under directory '%s'"), + pattern, dir) + + elif action == 'recursive-exclude': + self.debug_print("recursive-exclude %s %s" % + (dir, ' '.join(patterns))) + for pattern in patterns: + if not self.recursive_exclude(dir, pattern): + log.warn(("warning: no previously-included files matching " + "'%s' found under directory '%s'"), + pattern, dir) + + elif action == 'graft': + self.debug_print("graft " + dir_pattern) + if not self.graft(dir_pattern): + log.warn("warning: no directories found matching '%s'", + dir_pattern) + + elif action == 'prune': + self.debug_print("prune " + dir_pattern) + if not self.prune(dir_pattern): + 
log.warn(("no previously-included directories found " + "matching '%s'"), dir_pattern) + + else: + raise DistutilsInternalError( + "this cannot happen: invalid action '%s'" % action) + + def _remove_files(self, predicate): + """ + Remove all files from the file list that match the predicate. + Return True if any matching files were removed + """ + found = False + for i in range(len(self.files) - 1, -1, -1): + if predicate(self.files[i]): + self.debug_print(" removing " + self.files[i]) + del self.files[i] + found = True + return found + + def include(self, pattern): + """Include files that match 'pattern'.""" + found = [f for f in glob(pattern) if not os.path.isdir(f)] + self.extend(found) + return bool(found) + + def exclude(self, pattern): + """Exclude files that match 'pattern'.""" + match = translate_pattern(pattern) + return self._remove_files(match.match) + + def recursive_include(self, dir, pattern): + """ + Include all files anywhere in 'dir/' that match the pattern. + """ + full_pattern = os.path.join(dir, '**', pattern) + found = [f for f in glob(full_pattern, recursive=True) + if not os.path.isdir(f)] + self.extend(found) + return bool(found) + + def recursive_exclude(self, dir, pattern): + """ + Exclude any file anywhere in 'dir/' that match the pattern. + """ + match = translate_pattern(os.path.join(dir, '**', pattern)) + return self._remove_files(match.match) + + def graft(self, dir): + """Include all files from 'dir/'.""" + found = [ + item + for match_dir in glob(dir) + for item in distutils.filelist.findall(match_dir) + ] + self.extend(found) + return bool(found) + + def prune(self, dir): + """Filter out files from 'dir/'.""" + match = translate_pattern(os.path.join(dir, '**')) + return self._remove_files(match.match) + + def global_include(self, pattern): + """ + Include all files anywhere in the current directory that match the + pattern. This is very inefficient on large file trees. + """ + if self.allfiles is None: + self.findall() + match = translate_pattern(os.path.join('**', pattern)) + found = [f for f in self.allfiles if match.match(f)] + self.extend(found) + return bool(found) + + def global_exclude(self, pattern): + """ + Exclude all files anywhere that match the pattern. 
+ """ + match = translate_pattern(os.path.join('**', pattern)) + return self._remove_files(match.match) def append(self, item): if item.endswith('\r'): # Fix older sdists built on Windows @@ -289,7 +523,6 @@ class manifest_maker(sdist): self.filelist = FileList() if not os.path.exists(self.manifest): self.write_manifest() # it must exist so it'll get in the list - self.filelist.findall() self.add_defaults() if os.path.exists(self.template): self.read_template() @@ -314,10 +547,17 @@ class manifest_maker(sdist): msg = "writing manifest file '%s'" % self.manifest self.execute(write_file, (self.manifest, files), msg) - def warn(self, msg): # suppress missing-file warnings from sdist - if not msg.startswith("standard file not found:"): + def warn(self, msg): + if not self._should_suppress_warning(msg): sdist.warn(self, msg) + @staticmethod + def _should_suppress_warning(msg): + """ + suppress missing-file warnings from sdist + """ + return re.match(r"standard file .*not found", msg) + def add_defaults(self): sdist.add_defaults(self) self.filelist.append(self.template) @@ -328,38 +568,13 @@ class manifest_maker(sdist): elif os.path.exists(self.manifest): self.read_manifest() ei_cmd = self.get_finalized_command('egg_info') - self._add_egg_info(cmd=ei_cmd) - self.filelist.include_pattern("*", prefix=ei_cmd.egg_info) - - def _add_egg_info(self, cmd): - """ - Add paths for egg-info files for an external egg-base. - - The egg-info files are written to egg-base. If egg-base is - outside the current working directory, this method - searchs the egg-base directory for files to include - in the manifest. Uses distutils.filelist.findall (which is - really the version monkeypatched in by setuptools/__init__.py) - to perform the search. - - Since findall records relative paths, prefix the returned - paths with cmd.egg_base, so add_default's include_pattern call - (which is looking for the absolute cmd.egg_info) will match - them. - """ - if cmd.egg_base == os.curdir: - # egg-info files were already added by something else - return - - discovered = distutils.filelist.findall(cmd.egg_base) - resolved = (os.path.join(cmd.egg_base, path) for path in discovered) - self.filelist.allfiles.extend(resolved) + self.filelist.graft(ei_cmd.egg_info) def prune_file_list(self): build = self.get_finalized_command('build') base_dir = self.distribution.get_fullname() - self.filelist.exclude_pattern(None, prefix=build.build_base) - self.filelist.exclude_pattern(None, prefix=base_dir) + self.filelist.prune(build.build_base) + self.filelist.prune(base_dir) sep = re.escape(os.sep) self.filelist.exclude_pattern(r'(^|' + sep + r')(RCS|CVS|\.svn)' + sep, is_regex=1) @@ -384,6 +599,10 @@ def write_pkg_info(cmd, basename, filename): metadata = cmd.distribution.metadata metadata.version, oldver = cmd.egg_version, metadata.version metadata.name, oldname = cmd.egg_name, metadata.name + metadata.long_description_content_type = getattr( + cmd.distribution, + 'long_description_content_type' + ) try: # write unescaped data to PKG-INFO, so older pkg_resources # can still parse it diff --git a/lib/python3.4/site-packages/setuptools/command/install.py b/lib/python3.4/site-packages/setuptools/command/install.py index d2bca2e..31a5ddb 100644 --- a/lib/python3.4/site-packages/setuptools/command/install.py +++ b/lib/python3.4/site-packages/setuptools/command/install.py @@ -8,7 +8,7 @@ import distutils.command.install as orig import setuptools # Prior to numpy 1.9, NumPy relies on the '_install' name, so provide it for -# now. 
See https://bitbucket.org/pypa/setuptools/issue/199/ +# now. See https://github.com/pypa/setuptools/issues/199/ _install = orig.install diff --git a/lib/python3.4/site-packages/setuptools/command/install_egg_info.py b/lib/python3.4/site-packages/setuptools/command/install_egg_info.py index ae0325d..edc4718 100644 --- a/lib/python3.4/site-packages/setuptools/command/install_egg_info.py +++ b/lib/python3.4/site-packages/setuptools/command/install_egg_info.py @@ -1,14 +1,13 @@ from distutils import log, dir_util -import os, sys - -from setuptools.extern.six.moves import map +import os from setuptools import Command +from setuptools import namespaces from setuptools.archive_util import unpack_archive import pkg_resources -class install_egg_info(Command): +class install_egg_info(namespaces.Installer, Command): """Install an .egg-info directory for the package""" description = "Install an .egg-info directory for the package" @@ -19,31 +18,14 @@ class install_egg_info(Command): def initialize_options(self): self.install_dir = None - self.install_layout = None - self.prefix_option = None def finalize_options(self): self.set_undefined_options('install_lib', ('install_dir', 'install_dir')) - self.set_undefined_options('install',('install_layout','install_layout')) - if sys.hexversion > 0x2060000: - self.set_undefined_options('install',('prefix_option','prefix_option')) ei_cmd = self.get_finalized_command("egg_info") basename = pkg_resources.Distribution( None, None, ei_cmd.egg_name, ei_cmd.egg_version ).egg_name() + '.egg-info' - - if self.install_layout: - if not self.install_layout.lower() in ['deb']: - raise DistutilsOptionError("unknown value for --install-layout") - self.install_layout = self.install_layout.lower() - basename = basename.replace('-py%s' % pkg_resources.PY_MAJOR, '') - elif self.prefix_option or 'real_prefix' in sys.__dict__: - # don't modify for virtualenv - pass - else: - basename = basename.replace('-py%s' % pkg_resources.PY_MAJOR, '') - self.source = ei_cmd.egg_info self.target = os.path.join(self.install_dir, basename) self.outputs = [] @@ -73,66 +55,8 @@ class install_egg_info(Command): for skip in '.svn/', 'CVS/': if src.startswith(skip) or '/' + skip in src: return None - if self.install_layout and self.install_layout in ['deb'] and src.startswith('SOURCES.txt'): - log.info("Skipping SOURCES.txt") - return None self.outputs.append(dst) log.debug("Copying %s to %s", src, dst) return dst unpack_archive(self.source, self.target, skimmer) - - def install_namespaces(self): - nsp = self._get_all_ns_packages() - if not nsp: - return - filename, ext = os.path.splitext(self.target) - filename += '-nspkg.pth' - self.outputs.append(filename) - log.info("Installing %s", filename) - lines = map(self._gen_nspkg_line, nsp) - - if self.dry_run: - # always generate the lines, even in dry run - list(lines) - return - - with open(filename, 'wt') as f: - f.writelines(lines) - - _nspkg_tmpl = ( - "import sys, types, os", - "p = os.path.join(sys._getframe(1).f_locals['sitedir'], *%(pth)r)", - "ie = os.path.exists(os.path.join(p,'__init__.py'))", - "m = not ie and " - "sys.modules.setdefault(%(pkg)r, types.ModuleType(%(pkg)r))", - "mp = (m or []) and m.__dict__.setdefault('__path__',[])", - "(p not in mp) and mp.append(p)", - ) - "lines for the namespace installer" - - _nspkg_tmpl_multi = ( - 'm and setattr(sys.modules[%(parent)r], %(child)r, m)', - ) - "additional line(s) when a parent package is indicated" - - @classmethod - def _gen_nspkg_line(cls, pkg): - # ensure pkg is not a unicode 
string under Python 2.7 - pkg = str(pkg) - pth = tuple(pkg.split('.')) - tmpl_lines = cls._nspkg_tmpl - parent, sep, child = pkg.rpartition('.') - if parent: - tmpl_lines += cls._nspkg_tmpl_multi - return ';'.join(tmpl_lines) % locals() + '\n' - - def _get_all_ns_packages(self): - """Return sorted list of all package namespaces""" - nsp = set() - for pkg in self.distribution.namespace_packages or []: - pkg = pkg.split('.') - while pkg: - nsp.add('.'.join(pkg)) - pkg.pop() - return sorted(nsp) diff --git a/lib/python3.4/site-packages/setuptools/command/install_lib.py b/lib/python3.4/site-packages/setuptools/command/install_lib.py index 696b776..2b31c3e 100644 --- a/lib/python3.4/site-packages/setuptools/command/install_lib.py +++ b/lib/python3.4/site-packages/setuptools/command/install_lib.py @@ -1,24 +1,12 @@ import os -import sys import imp from itertools import product, starmap import distutils.command.install_lib as orig + class install_lib(orig.install_lib): """Don't add compiled flags to filenames of non-Python files""" - def initialize_options(self): - orig.install_lib.initialize_options(self) - self.multiarch = None - self.install_layout = None - - def finalize_options(self): - orig.install_lib.finalize_options(self) - self.set_undefined_options('install',('install_layout','install_layout')) - if self.install_layout == 'deb' and sys.version_info[:2] >= (3, 3): - import sysconfig - self.multiarch = sysconfig.get_config_var('MULTIARCH') - def run(self): self.build() outfiles = self.install() @@ -103,8 +91,6 @@ class install_lib(orig.install_lib): exclude = self.get_exclusions() if not exclude: - import distutils.dir_util - distutils.dir_util._multiarch = self.multiarch return orig.install_lib.copy_tree(self, infile, outfile) # Exclude namespace package __init__.py* files from the output @@ -114,24 +100,12 @@ class install_lib(orig.install_lib): outfiles = [] - if self.multiarch: - import sysconfig - ext_suffix = sysconfig.get_config_var ('EXT_SUFFIX') - if ext_suffix.endswith(self.multiarch + ext_suffix[-3:]): - new_suffix = None - else: - new_suffix = "%s-%s%s" % (ext_suffix[:-3], self.multiarch, ext_suffix[-3:]) - def pf(src, dst): if dst in exclude: log.warn("Skipping installation of %s (namespace package)", dst) return False - if self.multiarch and new_suffix and dst.endswith(ext_suffix) and not dst.endswith(new_suffix): - dst = dst.replace(ext_suffix, new_suffix) - log.info("renaming extension to %s", os.path.basename(dst)) - log.info("copying %s -> %s", src, os.path.dirname(dst)) outfiles.append(dst) return dst diff --git a/lib/python3.4/site-packages/setuptools/command/install_scripts.py b/lib/python3.4/site-packages/setuptools/command/install_scripts.py index be66cb2..1623427 100644 --- a/lib/python3.4/site-packages/setuptools/command/install_scripts.py +++ b/lib/python3.4/site-packages/setuptools/command/install_scripts.py @@ -1,6 +1,7 @@ from distutils import log import distutils.command.install_scripts as orig import os +import sys from pkg_resources import Distribution, PathMetadata, ensure_directory @@ -37,6 +38,10 @@ class install_scripts(orig.install_scripts): if is_wininst: exec_param = "python.exe" writer = ei.WindowsScriptWriter + if exec_param == sys.executable: + # In case the path to the Python executable contains a space, wrap + # it so it's not split up. 
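Aside (illustrative only): the install_scripts change here wraps the interpreter path in a one-element list so that CommandSpec and list2cmdline treat it as a single argument even when the path contains a space. A small self-contained sketch of why that matters; the path shown is hypothetical:

    import subprocess

    exe = r'C:\Program Files\Python34\python.exe'   # hypothetical interpreter path containing a space
    print(subprocess.list2cmdline([exe]))           # one quoted argument: "C:\Program Files\Python34\python.exe"
    print(subprocess.list2cmdline(exe.split()))     # same text, but now two separate arguments
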
+ exec_param = [exec_param] # resolve the writer to the environment writer = writer.best() cmd = writer.command_spec_class.best().from_param(exec_param) diff --git a/lib/python3.4/site-packages/setuptools/command/py36compat.py b/lib/python3.4/site-packages/setuptools/command/py36compat.py new file mode 100644 index 0000000..61063e7 --- /dev/null +++ b/lib/python3.4/site-packages/setuptools/command/py36compat.py @@ -0,0 +1,136 @@ +import os +from glob import glob +from distutils.util import convert_path +from distutils.command import sdist + +from setuptools.extern.six.moves import filter + + +class sdist_add_defaults: + """ + Mix-in providing forward-compatibility for functionality as found in + distutils on Python 3.7. + + Do not edit the code in this class except to update functionality + as implemented in distutils. Instead, override in the subclass. + """ + + def add_defaults(self): + """Add all the default files to self.filelist: + - README or README.txt + - setup.py + - test/test*.py + - all pure Python modules mentioned in setup script + - all files pointed by package_data (build_py) + - all files defined in data_files. + - all files defined as scripts. + - all C sources listed as part of extensions or C libraries + in the setup script (doesn't catch C headers!) + Warns if (README or README.txt) or setup.py are missing; everything + else is optional. + """ + self._add_defaults_standards() + self._add_defaults_optional() + self._add_defaults_python() + self._add_defaults_data_files() + self._add_defaults_ext() + self._add_defaults_c_libs() + self._add_defaults_scripts() + + @staticmethod + def _cs_path_exists(fspath): + """ + Case-sensitive path existence check + + >>> sdist_add_defaults._cs_path_exists(__file__) + True + >>> sdist_add_defaults._cs_path_exists(__file__.upper()) + False + """ + if not os.path.exists(fspath): + return False + # make absolute so we always have a directory + abspath = os.path.abspath(fspath) + directory, filename = os.path.split(abspath) + return filename in os.listdir(directory) + + def _add_defaults_standards(self): + standards = [self.READMES, self.distribution.script_name] + for fn in standards: + if isinstance(fn, tuple): + alts = fn + got_it = False + for fn in alts: + if self._cs_path_exists(fn): + got_it = True + self.filelist.append(fn) + break + + if not got_it: + self.warn("standard file not found: should have one of " + + ', '.join(alts)) + else: + if self._cs_path_exists(fn): + self.filelist.append(fn) + else: + self.warn("standard file '%s' not found" % fn) + + def _add_defaults_optional(self): + optional = ['test/test*.py', 'setup.cfg'] + for pattern in optional: + files = filter(os.path.isfile, glob(pattern)) + self.filelist.extend(files) + + def _add_defaults_python(self): + # build_py is used to get: + # - python modules + # - files defined in package_data + build_py = self.get_finalized_command('build_py') + + # getting python files + if self.distribution.has_pure_modules(): + self.filelist.extend(build_py.get_source_files()) + + # getting package_data files + # (computed in build_py.data_files by build_py.finalize_options) + for pkg, src_dir, build_dir, filenames in build_py.data_files: + for filename in filenames: + self.filelist.append(os.path.join(src_dir, filename)) + + def _add_defaults_data_files(self): + # getting distribution.data_files + if self.distribution.has_data_files(): + for item in self.distribution.data_files: + if isinstance(item, str): + # plain file + item = convert_path(item) + if os.path.isfile(item): + 
self.filelist.append(item) + else: + # a (dirname, filenames) tuple + dirname, filenames = item + for f in filenames: + f = convert_path(f) + if os.path.isfile(f): + self.filelist.append(f) + + def _add_defaults_ext(self): + if self.distribution.has_ext_modules(): + build_ext = self.get_finalized_command('build_ext') + self.filelist.extend(build_ext.get_source_files()) + + def _add_defaults_c_libs(self): + if self.distribution.has_c_libraries(): + build_clib = self.get_finalized_command('build_clib') + self.filelist.extend(build_clib.get_source_files()) + + def _add_defaults_scripts(self): + if self.distribution.has_scripts(): + build_scripts = self.get_finalized_command('build_scripts') + self.filelist.extend(build_scripts.get_source_files()) + + +if hasattr(sdist.sdist, '_add_defaults_standards'): + # disable the functionality already available upstream + class sdist_add_defaults: + pass diff --git a/lib/python3.4/site-packages/setuptools/command/rotate.py b/lib/python3.4/site-packages/setuptools/command/rotate.py index 804f962..b89353f 100644 --- a/lib/python3.4/site-packages/setuptools/command/rotate.py +++ b/lib/python3.4/site-packages/setuptools/command/rotate.py @@ -2,6 +2,7 @@ from distutils.util import convert_path from distutils import log from distutils.errors import DistutilsOptionError import os +import shutil from setuptools.extern import six @@ -59,4 +60,7 @@ class rotate(Command): for (t, f) in files: log.info("Deleting %s", f) if not self.dry_run: - os.unlink(f) + if os.path.isdir(f): + shutil.rmtree(f) + else: + os.unlink(f) diff --git a/lib/python3.4/site-packages/setuptools/command/sdist.py b/lib/python3.4/site-packages/setuptools/command/sdist.py index 6640d4e..508148e 100644 --- a/lib/python3.4/site-packages/setuptools/command/sdist.py +++ b/lib/python3.4/site-packages/setuptools/command/sdist.py @@ -1,20 +1,19 @@ -from glob import glob from distutils import log import distutils.command.sdist as orig import os import sys import io +import contextlib from setuptools.extern import six -from setuptools.utils import cs_path_exists +from .py36compat import sdist_add_defaults import pkg_resources -READMES = 'README', 'README.rst', 'README.txt' - _default_revctrl = list + def walk_revctrl(dirname=''): """Find all files under revision control""" for ep in pkg_resources.iter_entry_points('setuptools.file_finders'): @@ -22,7 +21,7 @@ def walk_revctrl(dirname=''): yield item -class sdist(orig.sdist): +class sdist(sdist_add_defaults, orig.sdist): """Smart sdist that finds anything supported by revision control""" user_options = [ @@ -38,6 +37,9 @@ class sdist(orig.sdist): negative_opt = {} + README_EXTENSIONS = ['', '.rst', '.txt', '.md'] + READMES = tuple('README{0}'.format(ext) for ext in README_EXTENSIONS) + def run(self): self.run_command('egg_info') ei_cmd = self.get_finalized_command('egg_info') @@ -64,6 +66,45 @@ class sdist(orig.sdist): if data not in dist_files: dist_files.append(data) + def initialize_options(self): + orig.sdist.initialize_options(self) + + self._default_to_gztar() + + def _default_to_gztar(self): + # only needed on Python prior to 3.6. 
+ if sys.version_info >= (3, 6, 0, 'beta', 1): + return + self.formats = ['gztar'] + + def make_distribution(self): + """ + Workaround for #516 + """ + with self._remove_os_link(): + orig.sdist.make_distribution(self) + + @staticmethod + @contextlib.contextmanager + def _remove_os_link(): + """ + In a context, remove and restore os.link if it exists + """ + + class NoValue: + pass + + orig_val = getattr(os, 'link', NoValue) + try: + del os.link + except Exception: + pass + try: + yield + finally: + if orig_val is not NoValue: + setattr(os, 'link', orig_val) + def __read_template_hack(self): # This grody hack closes the template file (MANIFEST.in) if an # exception occurs during read_template. @@ -71,7 +112,7 @@ class sdist(orig.sdist): # file. try: orig.sdist.read_template(self) - except: + except Exception: _, _, tb = sys.exc_info() tb.tb_next.tb_frame.f_locals['template'].close() raise @@ -87,35 +128,8 @@ class sdist(orig.sdist): if has_leaky_handle: read_template = __read_template_hack - def add_defaults(self): - standards = [READMES, - self.distribution.script_name] - for fn in standards: - if isinstance(fn, tuple): - alts = fn - got_it = 0 - for fn in alts: - if cs_path_exists(fn): - got_it = 1 - self.filelist.append(fn) - break - - if not got_it: - self.warn("standard file not found: should have one of " + - ', '.join(alts)) - else: - if cs_path_exists(fn): - self.filelist.append(fn) - else: - self.warn("standard file '%s' not found" % fn) - - optional = ['test/test*.py', 'setup.cfg'] - for pattern in optional: - files = list(filter(cs_path_exists, glob(pattern))) - if files: - self.filelist.extend(files) - - # getting python files + def _add_defaults_python(self): + """getting python files""" if self.distribution.has_pure_modules(): build_py = self.get_finalized_command('build_py') self.filelist.extend(build_py.get_source_files()) @@ -128,26 +142,23 @@ class sdist(orig.sdist): self.filelist.extend([os.path.join(src_dir, filename) for filename in filenames]) - if self.distribution.has_ext_modules(): - build_ext = self.get_finalized_command('build_ext') - self.filelist.extend(build_ext.get_source_files()) - - if self.distribution.has_c_libraries(): - build_clib = self.get_finalized_command('build_clib') - self.filelist.extend(build_clib.get_source_files()) - - if self.distribution.has_scripts(): - build_scripts = self.get_finalized_command('build_scripts') - self.filelist.extend(build_scripts.get_source_files()) + def _add_defaults_data_files(self): + try: + if six.PY2: + sdist_add_defaults._add_defaults_data_files(self) + else: + super()._add_defaults_data_files() + except TypeError: + log.warn("data_files contains unexpected objects") def check_readme(self): - for f in READMES: + for f in self.READMES: if os.path.exists(f): return else: self.warn( "standard file not found: should have one of " + - ', '.join(READMES) + ', '.join(self.READMES) ) def make_release_tree(self, base_dir, files): @@ -179,7 +190,7 @@ class sdist(orig.sdist): distribution. """ log.info("reading manifest file '%s'", self.manifest) - manifest = open(self.manifest, 'rbU') + manifest = open(self.manifest, 'rb') for line in manifest: # The manifest must contain UTF-8. See #303. 
if six.PY3: diff --git a/lib/python3.4/site-packages/setuptools/command/setopt.py b/lib/python3.4/site-packages/setuptools/command/setopt.py index 7f332be..7e57cc0 100644 --- a/lib/python3.4/site-packages/setuptools/command/setopt.py +++ b/lib/python3.4/site-packages/setuptools/command/setopt.py @@ -8,7 +8,6 @@ from setuptools.extern.six.moves import configparser from setuptools import Command - __all__ = ['config_file', 'edit_config', 'option_base', 'setopt'] diff --git a/lib/python3.4/site-packages/setuptools/command/test.py b/lib/python3.4/site-packages/setuptools/command/test.py index 371e913..638d0c5 100644 --- a/lib/python3.4/site-packages/setuptools/command/test.py +++ b/lib/python3.4/site-packages/setuptools/command/test.py @@ -1,12 +1,17 @@ -from distutils.errors import DistutilsOptionError -from unittest import TestLoader +import os +import operator import sys +import contextlib +import itertools +from distutils.errors import DistutilsError, DistutilsOptionError +from distutils import log +from unittest import TestLoader from setuptools.extern import six -from setuptools.extern.six.moves import map +from setuptools.extern.six.moves import map, filter from pkg_resources import (resource_listdir, resource_exists, normalize_path, - working_set, _namespace_packages, + working_set, _namespace_packages, evaluate_marker, add_activation_listener, require, EntryPoint) from setuptools import Command from setuptools.py31compat import unittest_main @@ -62,7 +67,7 @@ class test(Command): user_options = [ ('test-module=', 'm', "Run 'test_suite' in specified module"), ('test-suite=', 's', - "Test suite to run (e.g. 'some_module.test_suite')"), + "Run single test, case or suite (e.g. 'module.test_suite')"), ('test-runner=', 'r', "Test runner to use"), ] @@ -102,6 +107,14 @@ class test(Command): yield self.test_suite def with_project_on_sys_path(self, func): + """ + Backward compatibility for project_on_sys_path context. + """ + with self.project_on_sys_path(): + func() + + @contextlib.contextmanager + def project_on_sys_path(self, include_dists=[]): with_2to3 = six.PY3 and getattr(self.distribution, 'use_2to3', False) if with_2to3: @@ -133,30 +146,73 @@ class test(Command): old_modules = sys.modules.copy() try: - sys.path.insert(0, normalize_path(ei_cmd.egg_base)) + project_path = normalize_path(ei_cmd.egg_base) + sys.path.insert(0, project_path) working_set.__init__() add_activation_listener(lambda dist: dist.activate()) require('%s==%s' % (ei_cmd.egg_name, ei_cmd.egg_version)) - func() + with self.paths_on_pythonpath([project_path]): + yield finally: sys.path[:] = old_path sys.modules.clear() sys.modules.update(old_modules) working_set.__init__() + @staticmethod + @contextlib.contextmanager + def paths_on_pythonpath(paths): + """ + Add the indicated paths to the head of the PYTHONPATH environment + variable so that subprocesses will also see the packages at + these paths. + + Do this in a context that restores the value on exit. 
+ """ + nothing = object() + orig_pythonpath = os.environ.get('PYTHONPATH', nothing) + current_pythonpath = os.environ.get('PYTHONPATH', '') + try: + prefix = os.pathsep.join(paths) + to_join = filter(None, [prefix, current_pythonpath]) + new_path = os.pathsep.join(to_join) + if new_path: + os.environ['PYTHONPATH'] = new_path + yield + finally: + if orig_pythonpath is nothing: + os.environ.pop('PYTHONPATH', None) + else: + os.environ['PYTHONPATH'] = orig_pythonpath + + @staticmethod + def install_dists(dist): + """ + Install the requirements indicated by self.distribution and + return an iterable of the dists that were built. + """ + ir_d = dist.fetch_build_eggs(dist.install_requires) + tr_d = dist.fetch_build_eggs(dist.tests_require or []) + er_d = dist.fetch_build_eggs( + v for k, v in dist.extras_require.items() + if k.startswith(':') and evaluate_marker(k[1:]) + ) + return itertools.chain(ir_d, tr_d, er_d) + def run(self): - if self.distribution.install_requires: - self.distribution.fetch_build_eggs( - self.distribution.install_requires) - if self.distribution.tests_require: - self.distribution.fetch_build_eggs(self.distribution.tests_require) + installed_dists = self.install_dists(self.distribution) cmd = ' '.join(self._argv) if self.dry_run: self.announce('skipping "%s" (dry run)' % cmd) - else: - self.announce('running "%s"' % cmd) - self.with_project_on_sys_path(self.run_tests) + return + + self.announce('running "%s"' % cmd) + + paths = map(operator.attrgetter('location'), installed_dists) + with self.paths_on_pythonpath(paths): + with self.project_on_sys_path(): + self.run_tests() def run_tests(self): # Purge modules under test from sys.modules. The test loader will @@ -174,11 +230,17 @@ class test(Command): del_modules.append(name) list(map(sys.modules.__delitem__, del_modules)) - unittest_main( + exit_kwarg = {} if sys.version_info < (2, 7) else {"exit": False} + test = unittest_main( None, None, self._argv, testLoader=self._resolve_as_ep(self.test_loader), testRunner=self._resolve_as_ep(self.test_runner), + **exit_kwarg ) + if not test.result.wasSuccessful(): + msg = 'Test failed: %s' % test.result + self.announce(msg, log.ERROR) + raise DistutilsError(msg) @property def _argv(self): diff --git a/lib/python3.4/site-packages/setuptools/command/upload.py b/lib/python3.4/site-packages/setuptools/command/upload.py index 08c20ba..a44173a 100644 --- a/lib/python3.4/site-packages/setuptools/command/upload.py +++ b/lib/python3.4/site-packages/setuptools/command/upload.py @@ -1,15 +1,26 @@ +import getpass from distutils.command import upload as orig class upload(orig.upload): """ - Override default upload behavior to look up password - in the keyring if available. + Override default upload behavior to obtain password + in a variety of different ways. """ def finalize_options(self): orig.upload.finalize_options(self) - self.password or self._load_password_from_keyring() + self.username = ( + self.username or + getpass.getuser() + ) + # Attempt to obtain password. Short circuit evaluation at the first + # sign of success. 
+ self.password = ( + self.password or + self._load_password_from_keyring() or + self._prompt_for_password() + ) def _load_password_from_keyring(self): """ @@ -17,7 +28,15 @@ class upload(orig.upload): """ try: keyring = __import__('keyring') - self.password = keyring.get_password(self.repository, - self.username) + return keyring.get_password(self.repository, self.username) except Exception: pass + + def _prompt_for_password(self): + """ + Prompt for a password on the tty. Suppress Exceptions. + """ + try: + return getpass.getpass() + except (Exception, KeyboardInterrupt): + pass diff --git a/lib/python3.4/site-packages/setuptools/command/upload_docs.py b/lib/python3.4/site-packages/setuptools/command/upload_docs.py index f887b47..07aa564 100644 --- a/lib/python3.4/site-packages/setuptools/command/upload_docs.py +++ b/lib/python3.4/site-packages/setuptools/command/upload_docs.py @@ -13,6 +13,8 @@ import socket import zipfile import tempfile import shutil +import itertools +import functools from setuptools.extern import six from setuptools.extern.six.moves import http_client, urllib @@ -21,18 +23,16 @@ from pkg_resources import iter_entry_points from .upload import upload -errors = 'surrogateescape' if six.PY3 else 'strict' - - -# This is not just a replacement for byte literals -# but works as a general purpose encoder -def b(s, encoding='utf-8'): - if isinstance(s, six.text_type): - return s.encode(encoding, errors) - return s +def _encode(s): + errors = 'surrogateescape' if six.PY3 else 'strict' + return s.encode('utf-8', errors) class upload_docs(upload): + # override the default repository as upload_docs isn't + # supported by Warehouse (and won't be). + DEFAULT_REPOSITORY = 'https://pypi.python.org/pypi/' + description = 'Upload documentation to PyPI' user_options = [ @@ -68,6 +68,8 @@ class upload_docs(upload): else: self.ensure_dirname('upload_dir') self.target_dir = self.upload_dir + if 'pypi.python.org' in self.repository: + log.warn("Upload_docs command is deprecated. 
Use RTD instead.") self.announce('Using upload directory %s' % self.target_dir) def create_zipfile(self, filename): @@ -76,9 +78,8 @@ class upload_docs(upload): self.mkpath(self.target_dir) # just in case for root, dirs, files in os.walk(self.target_dir): if root == self.target_dir and not files: - raise DistutilsOptionError( - "no files found in upload directory '%s'" - % self.target_dir) + tmpl = "no files found in upload directory '%s'" + raise DistutilsOptionError(tmpl % self.target_dir) for name in files: full = os.path.join(root, name) relative = root[len(self.target_dir):].lstrip(os.path.sep) @@ -101,10 +102,48 @@ class upload_docs(upload): finally: shutil.rmtree(tmp_dir) + @staticmethod + def _build_part(item, sep_boundary): + key, values = item + title = '\nContent-Disposition: form-data; name="%s"' % key + # handle multiple entries for the same name + if not isinstance(values, list): + values = [values] + for value in values: + if isinstance(value, tuple): + title += '; filename="%s"' % value[0] + value = value[1] + else: + value = _encode(value) + yield sep_boundary + yield _encode(title) + yield b"\n\n" + yield value + if value and value[-1:] == b'\r': + yield b'\n' # write an extra newline (lurve Macs) + + @classmethod + def _build_multipart(cls, data): + """ + Build up the MIME payload for the POST data + """ + boundary = b'--------------GHSKFJDLGDS7543FJKLFHRE75642756743254' + sep_boundary = b'\n--' + boundary + end_boundary = sep_boundary + b'--' + end_items = end_boundary, b"\n", + builder = functools.partial( + cls._build_part, + sep_boundary=sep_boundary, + ) + part_groups = map(builder, data.items()) + parts = itertools.chain.from_iterable(part_groups) + body_items = itertools.chain(parts, end_items) + content_type = 'multipart/form-data; boundary=%s' % boundary.decode('ascii') + return b''.join(body_items), content_type + def upload_file(self, filename): - f = open(filename, 'rb') - content = f.read() - f.close() + with open(filename, 'rb') as f: + content = f.read() meta = self.distribution.metadata data = { ':action': 'doc_upload', @@ -112,40 +151,16 @@ class upload_docs(upload): 'content': (os.path.basename(filename), content), } # set up the authentication - credentials = b(self.username + ':' + self.password) + credentials = _encode(self.username + ':' + self.password) credentials = standard_b64encode(credentials) if six.PY3: credentials = credentials.decode('ascii') auth = "Basic " + credentials - # Build up the MIME payload for the POST data - boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254' - sep_boundary = b('\n--') + b(boundary) - end_boundary = sep_boundary + b('--') - body = [] - for key, values in six.iteritems(data): - title = '\nContent-Disposition: form-data; name="%s"' % key - # handle multiple entries for the same name - if not isinstance(values, list): - values = [values] - for value in values: - if type(value) is tuple: - title += '; filename="%s"' % value[0] - value = value[1] - else: - value = b(value) - body.append(sep_boundary) - body.append(b(title)) - body.append(b("\n\n")) - body.append(value) - if value and value[-1:] == b('\r'): - body.append(b('\n')) # write an extra newline (lurve Macs) - body.append(end_boundary) - body.append(b("\n")) - body = b('').join(body) + body, ct = self._build_multipart(data) - self.announce("Submitting documentation to %s" % (self.repository), - log.INFO) + msg = "Submitting documentation to %s" % (self.repository) + self.announce(msg, log.INFO) # build the Request # We can't use urllib2 since 
we need to send the Basic @@ -164,7 +179,7 @@ class upload_docs(upload): try: conn.connect() conn.putrequest("POST", url) - content_type = 'multipart/form-data; boundary=%s' % boundary + content_type = ct conn.putheader('Content-type', content_type) conn.putheader('Content-length', str(len(body))) conn.putheader('Authorization', auth) @@ -176,16 +191,16 @@ class upload_docs(upload): r = conn.getresponse() if r.status == 200: - self.announce('Server response (%s): %s' % (r.status, r.reason), - log.INFO) + msg = 'Server response (%s): %s' % (r.status, r.reason) + self.announce(msg, log.INFO) elif r.status == 301: location = r.getheader('Location') if location is None: location = 'https://pythonhosted.org/%s/' % meta.get_name() - self.announce('Upload successful. Visit %s' % location, - log.INFO) + msg = 'Upload successful. Visit %s' % location + self.announce(msg, log.INFO) else: - self.announce('Upload failed (%s): %s' % (r.status, r.reason), - log.ERROR) + msg = 'Upload failed (%s): %s' % (r.status, r.reason) + self.announce(msg, log.ERROR) if self.show_response: print('-' * 75, r.read(), '-' * 75) diff --git a/lib/python3.4/site-packages/setuptools/config.py b/lib/python3.4/site-packages/setuptools/config.py new file mode 100644 index 0000000..9a62e2e --- /dev/null +++ b/lib/python3.4/site-packages/setuptools/config.py @@ -0,0 +1,554 @@ +from __future__ import absolute_import, unicode_literals +import io +import os +import sys +from collections import defaultdict +from functools import partial + +from distutils.errors import DistutilsOptionError, DistutilsFileError +from setuptools.py26compat import import_module +from setuptools.extern.six import string_types + + +def read_configuration( + filepath, find_others=False, ignore_option_errors=False): + """Read given configuration file and returns options from it as a dict. + + :param str|unicode filepath: Path to configuration file + to get options from. + + :param bool find_others: Whether to search for other configuration files + which could be on in various places. + + :param bool ignore_option_errors: Whether to silently ignore + options, values of which could not be resolved (e.g. due to exceptions + in directives such as file:, attr:, etc.). + If False exceptions are propagated as expected. + + :rtype: dict + """ + from setuptools.dist import Distribution, _Distribution + + filepath = os.path.abspath(filepath) + + if not os.path.isfile(filepath): + raise DistutilsFileError( + 'Configuration file %s does not exist.' % filepath) + + current_directory = os.getcwd() + os.chdir(os.path.dirname(filepath)) + + try: + dist = Distribution() + + filenames = dist.find_config_files() if find_others else [] + if filepath not in filenames: + filenames.append(filepath) + + _Distribution.parse_config_files(dist, filenames=filenames) + + handlers = parse_configuration( + dist, dist.command_options, + ignore_option_errors=ignore_option_errors) + + finally: + os.chdir(current_directory) + + return configuration_to_dict(handlers) + + +def configuration_to_dict(handlers): + """Returns configuration data gathered by given handlers as a dict. 
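# Hedged usage sketch of the read_configuration() helper added above; the
# setup.cfg path is hypothetical.  The returned dict is keyed by each
# handler's section_prefix ('metadata' and 'options'), as assembled by
# configuration_to_dict() below.
from setuptools.config import read_configuration

conf = read_configuration('setup.cfg')
name = conf['metadata'].get('name')          # parsed by ConfigMetadataHandler
packages = conf['options'].get('packages')   # parsed by ConfigOptionsHandler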
+ + :param list[ConfigHandler] handlers: Handlers list, + usually from parse_configuration() + + :rtype: dict + """ + config_dict = defaultdict(dict) + + for handler in handlers: + + obj_alias = handler.section_prefix + target_obj = handler.target_obj + + for option in handler.set_options: + getter = getattr(target_obj, 'get_%s' % option, None) + + if getter is None: + value = getattr(target_obj, option) + + else: + value = getter() + + config_dict[obj_alias][option] = value + + return config_dict + + +def parse_configuration( + distribution, command_options, ignore_option_errors=False): + """Performs additional parsing of configuration options + for a distribution. + + Returns a list of used option handlers. + + :param Distribution distribution: + :param dict command_options: + :param bool ignore_option_errors: Whether to silently ignore + options, values of which could not be resolved (e.g. due to exceptions + in directives such as file:, attr:, etc.). + If False exceptions are propagated as expected. + :rtype: list + """ + meta = ConfigMetadataHandler( + distribution.metadata, command_options, ignore_option_errors) + meta.parse() + + options = ConfigOptionsHandler( + distribution, command_options, ignore_option_errors) + options.parse() + + return [meta, options] + + +class ConfigHandler(object): + """Handles metadata supplied in configuration files.""" + + section_prefix = None + """Prefix for config sections handled by this handler. + Must be provided by class heirs. + + """ + + aliases = {} + """Options aliases. + For compatibility with various packages. E.g.: d2to1 and pbr. + Note: `-` in keys is replaced with `_` by config parser. + + """ + + def __init__(self, target_obj, options, ignore_option_errors=False): + sections = {} + + section_prefix = self.section_prefix + for section_name, section_options in options.items(): + if not section_name.startswith(section_prefix): + continue + + section_name = section_name.replace(section_prefix, '').strip('.') + sections[section_name] = section_options + + self.ignore_option_errors = ignore_option_errors + self.target_obj = target_obj + self.sections = sections + self.set_options = [] + + @property + def parsers(self): + """Metadata item name to parser function mapping.""" + raise NotImplementedError( + '%s must provide .parsers property' % self.__class__.__name__) + + def __setitem__(self, option_name, value): + unknown = tuple() + target_obj = self.target_obj + + # Translate alias into real name. + option_name = self.aliases.get(option_name, option_name) + + current_value = getattr(target_obj, option_name, unknown) + + if current_value is unknown: + raise KeyError(option_name) + + if current_value: + # Already inhabited. Skipping. + return + + skip_option = False + parser = self.parsers.get(option_name) + if parser: + try: + value = parser(value) + + except Exception: + skip_option = True + if not self.ignore_option_errors: + raise + + if skip_option: + return + + setter = getattr(target_obj, 'set_%s' % option_name, None) + if setter is None: + setattr(target_obj, option_name, value) + else: + setter(value) + + self.set_options.append(option_name) + + @classmethod + def _parse_list(cls, value, separator=','): + """Represents value as a list. + + Value is split either by separator (defaults to comma) or by lines. + + :param value: + :param separator: List items separator character. 
+ :rtype: list + """ + if isinstance(value, list): # _get_parser_compound case + return value + + if '\n' in value: + value = value.splitlines() + else: + value = value.split(separator) + + return [chunk.strip() for chunk in value if chunk.strip()] + + @classmethod + def _parse_dict(cls, value): + """Represents value as a dict. + + :param value: + :rtype: dict + """ + separator = '=' + result = {} + for line in cls._parse_list(value): + key, sep, val = line.partition(separator) + if sep != separator: + raise DistutilsOptionError( + 'Unable to parse option value to dict: %s' % value) + result[key.strip()] = val.strip() + + return result + + @classmethod + def _parse_bool(cls, value): + """Represents value as boolean. + + :param value: + :rtype: bool + """ + value = value.lower() + return value in ('1', 'true', 'yes') + + @classmethod + def _parse_file(cls, value): + """Represents value as a string, allowing including text + from nearest files using `file:` directive. + + Directive is sandboxed and won't reach anything outside + directory with setup.py. + + Examples: + file: LICENSE + file: README.rst, CHANGELOG.md, src/file.txt + + :param str value: + :rtype: str + """ + include_directive = 'file:' + + if not isinstance(value, string_types): + return value + + if not value.startswith(include_directive): + return value + + spec = value[len(include_directive):] + filepaths = (os.path.abspath(path.strip()) for path in spec.split(',')) + return '\n'.join( + cls._read_file(path) + for path in filepaths + if (cls._assert_local(path) or True) + and os.path.isfile(path) + ) + + @staticmethod + def _assert_local(filepath): + if not filepath.startswith(os.getcwd()): + raise DistutilsOptionError( + '`file:` directive can not access %s' % filepath) + + @staticmethod + def _read_file(filepath): + with io.open(filepath, encoding='utf-8') as f: + return f.read() + + @classmethod + def _parse_attr(cls, value): + """Represents value as a module attribute. + + Examples: + attr: package.attr + attr: package.module.attr + + :param str value: + :rtype: str + """ + attr_directive = 'attr:' + if not value.startswith(attr_directive): + return value + + attrs_path = value.replace(attr_directive, '').strip().split('.') + attr_name = attrs_path.pop() + + module_name = '.'.join(attrs_path) + module_name = module_name or '__init__' + + sys.path.insert(0, os.getcwd()) + try: + module = import_module(module_name) + value = getattr(module, attr_name) + + finally: + sys.path = sys.path[1:] + + return value + + @classmethod + def _get_parser_compound(cls, *parse_methods): + """Returns parser function to represents value as a list. + + Parses a value applying given methods one after another. + + :param parse_methods: + :rtype: callable + """ + def parse(value): + parsed = value + + for method in parse_methods: + parsed = method(parsed) + + return parsed + + return parse + + @classmethod + def _parse_section_to_dict(cls, section_options, values_parser=None): + """Parses section options into a dictionary. + + Optionally applies a given parser to values. + + :param dict section_options: + :param callable values_parser: + :rtype: dict + """ + value = {} + values_parser = values_parser or (lambda val: val) + for key, (_, val) in section_options.items(): + value[key] = values_parser(val) + return value + + def parse_section(self, section_options): + """Parses configuration file section. 
+ + :param dict section_options: + """ + for (name, (_, value)) in section_options.items(): + try: + self[name] = value + + except KeyError: + pass # Keep silent for a new option may appear anytime. + + def parse(self): + """Parses configuration file items from one + or more related sections. + + """ + for section_name, section_options in self.sections.items(): + + method_postfix = '' + if section_name: # [section.option] variant + method_postfix = '_%s' % section_name + + section_parser_method = getattr( + self, + # Dots in section names are tranlsated into dunderscores. + ('parse_section%s' % method_postfix).replace('.', '__'), + None) + + if section_parser_method is None: + raise DistutilsOptionError( + 'Unsupported distribution option section: [%s.%s]' % ( + self.section_prefix, section_name)) + + section_parser_method(section_options) + + +class ConfigMetadataHandler(ConfigHandler): + + section_prefix = 'metadata' + + aliases = { + 'home_page': 'url', + 'summary': 'description', + 'classifier': 'classifiers', + 'platform': 'platforms', + } + + strict_mode = False + """We need to keep it loose, to be partially compatible with + `pbr` and `d2to1` packages which also uses `metadata` section. + + """ + + @property + def parsers(self): + """Metadata item name to parser function mapping.""" + parse_list = self._parse_list + parse_file = self._parse_file + + return { + 'platforms': parse_list, + 'keywords': parse_list, + 'provides': parse_list, + 'requires': parse_list, + 'obsoletes': parse_list, + 'classifiers': self._get_parser_compound(parse_file, parse_list), + 'license': parse_file, + 'description': parse_file, + 'long_description': parse_file, + 'version': self._parse_version, + } + + def _parse_version(self, value): + """Parses `version` option value. + + :param value: + :rtype: str + + """ + version = self._parse_attr(value) + + if callable(version): + version = version() + + if not isinstance(version, string_types): + if hasattr(version, '__iter__'): + version = '.'.join(map(str, version)) + else: + version = '%s' % version + + return version + + +class ConfigOptionsHandler(ConfigHandler): + + section_prefix = 'options' + + @property + def parsers(self): + """Metadata item name to parser function mapping.""" + parse_list = self._parse_list + parse_list_semicolon = partial(self._parse_list, separator=';') + parse_bool = self._parse_bool + parse_dict = self._parse_dict + + return { + 'zip_safe': parse_bool, + 'use_2to3': parse_bool, + 'include_package_data': parse_bool, + 'package_dir': parse_dict, + 'use_2to3_fixers': parse_list, + 'use_2to3_exclude_fixers': parse_list, + 'convert_2to3_doctests': parse_list, + 'scripts': parse_list, + 'eager_resources': parse_list, + 'dependency_links': parse_list, + 'namespace_packages': parse_list, + 'install_requires': parse_list_semicolon, + 'setup_requires': parse_list_semicolon, + 'tests_require': parse_list_semicolon, + 'packages': self._parse_packages, + 'entry_points': self._parse_file, + 'py_modules': parse_list, + } + + def _parse_packages(self, value): + """Parses `packages` option value. + + :param value: + :rtype: list + """ + find_directive = 'find:' + + if not value.startswith(find_directive): + return self._parse_list(value) + + # Read function arguments from a dedicated section. 
+ find_kwargs = self.parse_section_packages__find( + self.sections.get('packages.find', {})) + + from setuptools import find_packages + + return find_packages(**find_kwargs) + + def parse_section_packages__find(self, section_options): + """Parses `packages.find` configuration file section. + + To be used in conjunction with _parse_packages(). + + :param dict section_options: + """ + section_data = self._parse_section_to_dict( + section_options, self._parse_list) + + valid_keys = ['where', 'include', 'exclude'] + + find_kwargs = dict( + [(k, v) for k, v in section_data.items() if k in valid_keys and v]) + + where = find_kwargs.get('where') + if where is not None: + find_kwargs['where'] = where[0] # cast list to single val + + return find_kwargs + + def parse_section_entry_points(self, section_options): + """Parses `entry_points` configuration file section. + + :param dict section_options: + """ + parsed = self._parse_section_to_dict(section_options, self._parse_list) + self['entry_points'] = parsed + + def _parse_package_data(self, section_options): + parsed = self._parse_section_to_dict(section_options, self._parse_list) + + root = parsed.get('*') + if root: + parsed[''] = root + del parsed['*'] + + return parsed + + def parse_section_package_data(self, section_options): + """Parses `package_data` configuration file section. + + :param dict section_options: + """ + self['package_data'] = self._parse_package_data(section_options) + + def parse_section_exclude_package_data(self, section_options): + """Parses `exclude_package_data` configuration file section. + + :param dict section_options: + """ + self['exclude_package_data'] = self._parse_package_data( + section_options) + + def parse_section_extras_require(self, section_options): + """Parses `extras_require` configuration file section. + + :param dict section_options: + """ + parse_list = partial(self._parse_list, separator=';') + self['extras_require'] = self._parse_section_to_dict( + section_options, parse_list) diff --git a/lib/python3.4/site-packages/setuptools/dep_util.py b/lib/python3.4/site-packages/setuptools/dep_util.py new file mode 100644 index 0000000..2931c13 --- /dev/null +++ b/lib/python3.4/site-packages/setuptools/dep_util.py @@ -0,0 +1,23 @@ +from distutils.dep_util import newer_group + +# yes, this is was almost entirely copy-pasted from +# 'newer_pairwise()', this is just another convenience +# function. +def newer_pairwise_group(sources_groups, targets): + """Walk both arguments in parallel, testing if each source group is newer + than its corresponding target. Returns a pair of lists (sources_groups, + targets) where sources is newer than target, according to the semantics + of 'newer_group()'. 
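# Hedged sketch of newer_pairwise_group(), whose body continues just below in
# the patch; the file names are hypothetical.  Each source group that is newer
# than its corresponding target (per distutils' newer_group()) is returned
# together with that target.
from setuptools.dep_util import newer_pairwise_group

sources_groups = [['a.c', 'a.h'], ['b.c']]
targets = ['a.o', 'b.o']
# e.g. ([['b.c']], ['b.o']) if only b.o is out of date
rebuild_sources, rebuild_targets = newer_pairwise_group(sources_groups, targets)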
+ """ + if len(sources_groups) != len(targets): + raise ValueError("'sources_group' and 'targets' must be the same length") + + # build a pair of lists (sources_groups, targets) where source is newer + n_sources = [] + n_targets = [] + for i in range(len(sources_groups)): + if newer_group(sources_groups[i], targets[i]): + n_sources.append(sources_groups[i]) + n_targets.append(targets[i]) + + return n_sources, n_targets diff --git a/lib/python3.4/site-packages/setuptools/depends.py b/lib/python3.4/site-packages/setuptools/depends.py index 9f7c9a3..45e7052 100644 --- a/lib/python3.4/site-packages/setuptools/depends.py +++ b/lib/python3.4/site-packages/setuptools/depends.py @@ -1,15 +1,17 @@ import sys import imp import marshal -from imp import PKG_DIRECTORY, PY_COMPILED, PY_SOURCE, PY_FROZEN from distutils.version import StrictVersion +from imp import PKG_DIRECTORY, PY_COMPILED, PY_SOURCE, PY_FROZEN + +from .py33compat import Bytecode -from setuptools.extern import six __all__ = [ 'Require', 'find_module', 'get_module_constant', 'extract_constant' ] + class Require: """A prerequisite to building or installing a distribution""" @@ -30,7 +32,7 @@ class Require: def full_name(self): """Return full package/distribution name, w/version""" if self.requested_version is not None: - return '%s-%s' % (self.name,self.requested_version) + return '%s-%s' % (self.name, self.requested_version) return self.name def version_ok(self, version): @@ -39,7 +41,6 @@ class Require: str(version) != "unknown" and version >= self.requested_version def get_version(self, paths=None, default="unknown"): - """Get version number of installed module, 'None', or 'default' Search 'paths' for module. If not found, return 'None'. If found, @@ -52,8 +53,9 @@ class Require: if self.attribute is None: try: - f,p,i = find_module(self.module,paths) - if f: f.close() + f, p, i = find_module(self.module, paths) + if f: + f.close() return default except ImportError: return None @@ -77,40 +79,6 @@ class Require: return self.version_ok(version) -def _iter_code(code): - - """Yield '(op,arg)' pair for each operation in code object 'code'""" - - from array import array - from dis import HAVE_ARGUMENT, EXTENDED_ARG - - bytes = array('b',code.co_code) - eof = len(code.co_code) - - ptr = 0 - extended_arg = 0 - - while ptr=HAVE_ARGUMENT: - - arg = bytes[ptr+1] + bytes[ptr+2]*256 + extended_arg - ptr += 3 - - if op==EXTENDED_ARG: - long_type = six.integer_types[-1] - extended_arg = arg * long_type(65536) - continue - - else: - arg = None - ptr += 1 - - yield op,arg - - def find_module(module, paths=None): """Just like 'imp.find_module()', but with package support""" @@ -118,20 +86,19 @@ def find_module(module, paths=None): while parts: part = parts.pop(0) - f, path, (suffix,mode,kind) = info = imp.find_module(part, paths) + f, path, (suffix, mode, kind) = info = imp.find_module(part, paths) - if kind==PKG_DIRECTORY: + if kind == PKG_DIRECTORY: parts = parts or ['__init__'] paths = [path] elif parts: - raise ImportError("Can't find %r in %s" % (parts,module)) + raise ImportError("Can't find %r in %s" % (parts, module)) return info def get_module_constant(module, symbol, default=-1, paths=None): - """Find 'module' by searching 'paths', and extract 'symbol' Return 'None' if 'module' does not exist on 'paths', or it does not define @@ -145,12 +112,12 @@ def get_module_constant(module, symbol, default=-1, paths=None): return None try: - if kind==PY_COMPILED: - f.read(8) # skip magic & date + if kind == PY_COMPILED: + f.read(8) # skip magic & date 
code = marshal.load(f) - elif kind==PY_FROZEN: + elif kind == PY_FROZEN: code = imp.get_frozen_object(module) - elif kind==PY_SOURCE: + elif kind == PY_SOURCE: code = compile(f.read(), path, 'exec') else: # Not something we can parse; we'll have to import it. :( @@ -177,9 +144,8 @@ def extract_constant(code, symbol, default=-1): only 'STORE_NAME' and 'STORE_GLOBAL' opcodes are checked, and 'symbol' must be present in 'code.co_names'. """ - if symbol not in code.co_names: - # name's not there, can't possibly be an assigment + # name's not there, can't possibly be an assignment return None name_idx = list(code.co_names).index(symbol) @@ -190,11 +156,13 @@ def extract_constant(code, symbol, default=-1): const = default - for op, arg in _iter_code(code): + for byte_code in Bytecode(code): + op = byte_code.opcode + arg = byte_code.arg - if op==LOAD_CONST: + if op == LOAD_CONST: const = code.co_consts[arg] - elif arg==name_idx and (op==STORE_NAME or op==STORE_GLOBAL): + elif arg == name_idx and (op == STORE_NAME or op == STORE_GLOBAL): return const else: const = default @@ -214,4 +182,5 @@ def _update_globals(): del globals()[name] __all__.remove(name) + _update_globals() diff --git a/lib/python3.4/site-packages/setuptools/dist.py b/lib/python3.4/site-packages/setuptools/dist.py index 7785541..a2ca879 100644 --- a/lib/python3.4/site-packages/setuptools/dist.py +++ b/lib/python3.4/site-packages/setuptools/dist.py @@ -2,117 +2,159 @@ __all__ = ['Distribution'] import re import os -import sys import warnings import numbers import distutils.log import distutils.core import distutils.cmd import distutils.dist -from distutils.core import Distribution as _Distribution -from distutils.errors import (DistutilsOptionError, DistutilsPlatformError, - DistutilsSetupError) +import itertools +from collections import defaultdict +from distutils.errors import ( + DistutilsOptionError, DistutilsPlatformError, DistutilsSetupError, +) +from distutils.util import rfc822_escape from setuptools.extern import six -from setuptools.extern.six.moves import map +from setuptools.extern.six.moves import map, filter, filterfalse from pkg_resources.extern import packaging from setuptools.depends import Require from setuptools import windows_support +from setuptools.monkey import get_unpatched +from setuptools.config import parse_configuration import pkg_resources +from .py36compat import Distribution_parse_config_files + +__import__('pkg_resources.extern.packaging.specifiers') +__import__('pkg_resources.extern.packaging.version') def _get_unpatched(cls): - """Protect against re-patching the distutils if reloaded + warnings.warn("Do not call this function", DeprecationWarning) + return get_unpatched(cls) - Also ensures that no other distutils extension monkeypatched the distutils - first. + +# Based on Python 3.5 version +def write_pkg_file(self, file): + """Write the PKG-INFO format data to a file object. 
""" - while cls.__module__.startswith('setuptools'): - cls, = cls.__bases__ - if not cls.__module__.startswith('distutils'): - raise AssertionError( - "distutils has already been patched by %r" % cls - ) - return cls + version = '1.0' + if (self.provides or self.requires or self.obsoletes or + self.classifiers or self.download_url): + version = '1.1' + # Setuptools specific for PEP 345 + if hasattr(self, 'python_requires'): + version = '1.2' -_Distribution = _get_unpatched(_Distribution) + file.write('Metadata-Version: %s\n' % version) + file.write('Name: %s\n' % self.get_name()) + file.write('Version: %s\n' % self.get_version()) + file.write('Summary: %s\n' % self.get_description()) + file.write('Home-page: %s\n' % self.get_url()) + file.write('Author: %s\n' % self.get_contact()) + file.write('Author-email: %s\n' % self.get_contact_email()) + file.write('License: %s\n' % self.get_license()) + if self.download_url: + file.write('Download-URL: %s\n' % self.download_url) -def _patch_distribution_metadata_write_pkg_info(): + long_desc_content_type = getattr( + self, + 'long_description_content_type', + None + ) or 'UNKNOWN' + file.write('Description-Content-Type: %s\n' % long_desc_content_type) + + long_desc = rfc822_escape(self.get_long_description()) + file.write('Description: %s\n' % long_desc) + + keywords = ','.join(self.get_keywords()) + if keywords: + file.write('Keywords: %s\n' % keywords) + + self._write_list(file, 'Platform', self.get_platforms()) + self._write_list(file, 'Classifier', self.get_classifiers()) + + # PEP 314 + self._write_list(file, 'Requires', self.get_requires()) + self._write_list(file, 'Provides', self.get_provides()) + self._write_list(file, 'Obsoletes', self.get_obsoletes()) + + # Setuptools specific for PEP 345 + if hasattr(self, 'python_requires'): + file.write('Requires-Python: %s\n' % self.python_requires) + + +# from Python 3.4 +def write_pkg_info(self, base_dir): + """Write the PKG-INFO file into the release tree. """ - Workaround issue #197 - Python 3 prior to 3.2.2 uses an environment-local - encoding to save the pkg_info. Monkey-patch its write_pkg_info method to - correct this undesirable behavior. - """ - environment_local = (3,) <= sys.version_info[:3] < (3, 2, 2) - if not environment_local: - return + with open(os.path.join(base_dir, 'PKG-INFO'), 'w', + encoding='UTF-8') as pkg_info: + self.write_pkg_file(pkg_info) - # from Python 3.4 - def write_pkg_info(self, base_dir): - """Write the PKG-INFO file into the release tree. 
- """ - with open(os.path.join(base_dir, 'PKG-INFO'), 'w', - encoding='UTF-8') as pkg_info: - self.write_pkg_file(pkg_info) - - distutils.dist.DistributionMetadata.write_pkg_info = write_pkg_info -_patch_distribution_metadata_write_pkg_info() sequence = tuple, list + def check_importable(dist, attr, value): try: - ep = pkg_resources.EntryPoint.parse('x='+value) + ep = pkg_resources.EntryPoint.parse('x=' + value) assert not ep.extras - except (TypeError,ValueError,AttributeError,AssertionError): + except (TypeError, ValueError, AttributeError, AssertionError): raise DistutilsSetupError( "%r must be importable 'module:attrs' string (got %r)" - % (attr,value) + % (attr, value) ) def assert_string_list(dist, attr, value): """Verify that value is a string list or None""" try: - assert ''.join(value)!=value - except (TypeError,ValueError,AttributeError,AssertionError): + assert ''.join(value) != value + except (TypeError, ValueError, AttributeError, AssertionError): raise DistutilsSetupError( - "%r must be a list of strings (got %r)" % (attr,value) + "%r must be a list of strings (got %r)" % (attr, value) ) + + def check_nsp(dist, attr, value): """Verify that namespace packages are valid""" - assert_string_list(dist,attr,value) - for nsp in value: + ns_packages = value + assert_string_list(dist, attr, ns_packages) + for nsp in ns_packages: if not dist.has_contents_for(nsp): raise DistutilsSetupError( "Distribution contains no modules or packages for " + "namespace package %r" % nsp ) - if '.' in nsp: - parent = '.'.join(nsp.split('.')[:-1]) - if parent not in value: - distutils.log.warn( - "WARNING: %r is declared as a package namespace, but %r" - " is not: please correct this in setup.py", nsp, parent - ) + parent, sep, child = nsp.rpartition('.') + if parent and parent not in ns_packages: + distutils.log.warn( + "WARNING: %r is declared as a package namespace, but %r" + " is not: please correct this in setup.py", nsp, parent + ) + def check_extras(dist, attr, value): """Verify that extras_require mapping is valid""" try: - for k,v in value.items(): - if ':' in k: - k,m = k.split(':',1) - if pkg_resources.invalid_marker(m): - raise DistutilsSetupError("Invalid environment marker: "+m) - list(pkg_resources.parse_requirements(v)) - except (TypeError,ValueError,AttributeError): + list(itertools.starmap(_check_extra, value.items())) + except (TypeError, ValueError, AttributeError): raise DistutilsSetupError( "'extras_require' must be a dictionary whose values are " "strings or lists of strings containing valid project/version " "requirement specifiers." 
) + +def _check_extra(extra, reqs): + name, sep, marker = extra.partition(':') + if marker and pkg_resources.invalid_marker(marker): + raise DistutilsSetupError("Invalid environment marker: " + marker) + list(pkg_resources.parse_requirements(reqs)) + + def assert_bool(dist, attr, value): """Verify that value is True, False, 0, or 1""" if bool(value) != value: @@ -131,6 +173,19 @@ def check_requirements(dist, attr, value): ) raise DistutilsSetupError(tmpl.format(attr=attr, error=error)) + +def check_specifier(dist, attr, value): + """Verify that value is a valid version specifier""" + try: + packaging.specifiers.SpecifierSet(value) + except packaging.specifiers.InvalidSpecifier as error: + tmpl = ( + "{attr!r} must be a string " + "containing valid version specifiers; {error}" + ) + raise DistutilsSetupError(tmpl.format(attr=attr, error=error)) + + def check_entry_points(dist, attr, value): """Verify that entry_points map is parseable""" try: @@ -138,25 +193,30 @@ def check_entry_points(dist, attr, value): except ValueError as e: raise DistutilsSetupError(e) + def check_test_suite(dist, attr, value): if not isinstance(value, six.string_types): raise DistutilsSetupError("test_suite must be a string") + def check_package_data(dist, attr, value): """Verify that value is a dictionary of package names to glob lists""" - if isinstance(value,dict): - for k,v in value.items(): - if not isinstance(k,str): break - try: iter(v) + if isinstance(value, dict): + for k, v in value.items(): + if not isinstance(k, str): + break + try: + iter(v) except TypeError: break else: return raise DistutilsSetupError( - attr+" must be a dictionary mapping package names to lists of " + attr + " must be a dictionary mapping package names to lists of " "wildcard patterns" ) + def check_packages(dist, attr, value): for pkgname in value: if not re.match(r'\w+(\.\w+)*', pkgname): @@ -166,7 +226,10 @@ def check_packages(dist, attr, value): ) -class Distribution(_Distribution): +_Distribution = get_unpatched(distutils.core.Distribution) + + +class Distribution(Distribution_parse_config_files, _Distribution): """Distribution with support for features, tests, and package data This is an enhanced version of 'distutils.dist.Distribution' that @@ -261,15 +324,18 @@ class Distribution(_Distribution): self.dist_files = [] self.src_root = attrs and attrs.pop("src_root", None) self.patch_missing_pkg_info(attrs) + self.long_description_content_type = _attrs_dict.get( + 'long_description_content_type' + ) # Make sure we have any eggs needed to interpret 'attrs' if attrs is not None: self.dependency_links = attrs.pop('dependency_links', []) - assert_string_list(self,'dependency_links',self.dependency_links) + assert_string_list(self, 'dependency_links', self.dependency_links) if attrs and 'setup_requires' in attrs: self.fetch_build_eggs(attrs['setup_requires']) for ep in pkg_resources.iter_entry_points('distutils.setup_keywords'): vars(self).setdefault(ep.name, None) - _Distribution.__init__(self,attrs) + _Distribution.__init__(self, attrs) if isinstance(self.metadata.version, numbers.Number): # Some people apparently take "version number" too literally :) self.metadata.version = str(self.metadata.version) @@ -293,6 +359,83 @@ class Distribution(_Distribution): "setuptools, pip, and PyPI. Please see PEP 440 for more " "details." % self.metadata.version ) + self._finalize_requires() + + def _finalize_requires(self): + """ + Set `metadata.python_requires` and fix environment markers + in `install_requires` and `extras_require`. 
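# Hedged illustration of the marker handling described above (the requirement
# strings are hypothetical): the helpers defined just below
# (_convert_extras_requirements / _move_install_requirements_markers) shift a
# requirement with an environment marker out of install_requires and into
# extras_require under a ':<marker>' key.
from setuptools.dist import Distribution

dist = Distribution(dict(
    install_requires=['six', 'mock; python_version < "3.3"'],
))
# roughly: dist.install_requires == ['six']
# roughly: sorted(dist.extras_require) == [':python_version < "3.3"']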
+ """ + if getattr(self, 'python_requires', None): + self.metadata.python_requires = self.python_requires + self._convert_extras_requirements() + self._move_install_requirements_markers() + + def _convert_extras_requirements(self): + """ + Convert requirements in `extras_require` of the form + `"extra": ["barbazquux; {marker}"]` to + `"extra:{marker}": ["barbazquux"]`. + """ + spec_ext_reqs = getattr(self, 'extras_require', None) or {} + self._tmp_extras_require = defaultdict(list) + for section, v in spec_ext_reqs.items(): + # Do not strip empty sections. + self._tmp_extras_require[section] + for r in pkg_resources.parse_requirements(v): + suffix = self._suffix_for(r) + self._tmp_extras_require[section + suffix].append(r) + + @staticmethod + def _suffix_for(req): + """ + For a requirement, return the 'extras_require' suffix for + that requirement. + """ + return ':' + str(req.marker) if req.marker else '' + + def _move_install_requirements_markers(self): + """ + Move requirements in `install_requires` that are using environment + markers `extras_require`. + """ + + # divide the install_requires into two sets, simple ones still + # handled by install_requires and more complex ones handled + # by extras_require. + + def is_simple_req(req): + return not req.marker + + spec_inst_reqs = getattr(self, 'install_requires', None) or () + inst_reqs = list(pkg_resources.parse_requirements(spec_inst_reqs)) + simple_reqs = filter(is_simple_req, inst_reqs) + complex_reqs = filterfalse(is_simple_req, inst_reqs) + self.install_requires = list(map(str, simple_reqs)) + + for r in complex_reqs: + self._tmp_extras_require[':' + str(r.marker)].append(r) + self.extras_require = dict( + (k, [str(r) for r in map(self._clean_req, v)]) + for k, v in self._tmp_extras_require.items() + ) + + def _clean_req(self, req): + """ + Given a Requirement, remove environment markers and return it. + """ + req.marker = None + return req + + def parse_config_files(self, filenames=None): + """Parses configuration files from various levels + and loads configuration. 
+ + """ + _Distribution.parse_config_files(self, filenames=filenames) + + parse_configuration(self, self.command_options) + self._finalize_requires() def parse_command_line(self): """Process features after parsing command line options""" @@ -301,9 +444,9 @@ class Distribution(_Distribution): self._finalize_features() return result - def _feature_attrname(self,name): + def _feature_attrname(self, name): """Convert feature name to corresponding option attribute name""" - return 'with_'+name.replace('-','_') + return 'with_' + name.replace('-', '_') def fetch_build_eggs(self, requires): """Resolve pre-setup requirements""" @@ -314,6 +457,7 @@ class Distribution(_Distribution): ) for dist in resolved_dists: pkg_resources.working_set.add(dist, replace=True) + return resolved_dists def finalize_options(self): _Distribution.finalize_options(self) @@ -321,13 +465,16 @@ class Distribution(_Distribution): self._set_global_opts_from_features() for ep in pkg_resources.iter_entry_points('distutils.setup_keywords'): - value = getattr(self,ep.name,None) + value = getattr(self, ep.name, None) if value is not None: ep.require(installer=self.fetch_build_egg) ep.load()(self, ep.name, value) if getattr(self, 'convert_2to3_doctests', None): # XXX may convert to set here when we can rely on set being builtin - self.convert_2to3_doctests = [os.path.abspath(p) for p in self.convert_2to3_doctests] + self.convert_2to3_doctests = [ + os.path.abspath(p) + for p in self.convert_2to3_doctests + ] else: self.convert_2to3_doctests = [] @@ -348,35 +495,30 @@ class Distribution(_Distribution): def fetch_build_egg(self, req): """Fetch an egg needed for building""" - - try: - cmd = self._egg_fetcher - cmd.package_index.to_scan = [] - except AttributeError: - from setuptools.command.easy_install import easy_install - dist = self.__class__({'script_args':['easy_install']}) - dist.parse_config_files() - opts = dist.get_option_dict('easy_install') - keep = ( - 'find_links', 'site_dirs', 'index_url', 'optimize', - 'site_dirs', 'allow_hosts' - ) - for key in list(opts): - if key not in keep: - del opts[key] # don't use any other settings - if self.dependency_links: - links = self.dependency_links[:] - if 'find_links' in opts: - links = opts['find_links'][1].split() + links - opts['find_links'] = ('setup', links) - install_dir = self.get_egg_cache_dir() - cmd = easy_install( - dist, args=["x"], install_dir=install_dir, exclude_scripts=True, - always_copy=False, build_directory=None, editable=False, - upgrade=False, multi_version=True, no_report=True, user=False - ) - cmd.ensure_finalized() - self._egg_fetcher = cmd + from setuptools.command.easy_install import easy_install + dist = self.__class__({'script_args': ['easy_install']}) + dist.parse_config_files() + opts = dist.get_option_dict('easy_install') + keep = ( + 'find_links', 'site_dirs', 'index_url', 'optimize', + 'site_dirs', 'allow_hosts' + ) + for key in list(opts): + if key not in keep: + del opts[key] # don't use any other settings + if self.dependency_links: + links = self.dependency_links[:] + if 'find_links' in opts: + links = opts['find_links'][1].split() + links + opts['find_links'] = ('setup', links) + install_dir = self.get_egg_cache_dir() + cmd = easy_install( + dist, args=["x"], install_dir=install_dir, + exclude_scripts=True, + always_copy=False, build_directory=None, editable=False, + upgrade=False, multi_version=True, no_report=True, user=False + ) + cmd.ensure_finalized() return cmd.easy_install(req) def _set_global_opts_from_features(self): @@ -385,20 
+527,23 @@ class Distribution(_Distribution): go = [] no = self.negative_opt.copy() - for name,feature in self.features.items(): - self._set_feature(name,None) + for name, feature in self.features.items(): + self._set_feature(name, None) feature.validate(self) if feature.optional: descr = feature.description incdef = ' (default)' - excdef='' + excdef = '' if not feature.include_by_default(): excdef, incdef = incdef, excdef - go.append(('with-'+name, None, 'include '+descr+incdef)) - go.append(('without-'+name, None, 'exclude '+descr+excdef)) - no['without-'+name] = 'with-'+name + new = ( + ('with-' + name, None, 'include ' + descr + incdef), + ('without-' + name, None, 'exclude ' + descr + excdef), + ) + go.extend(new) + no['without-' + name] = 'with-' + name self.global_options = self.feature_options = go + self.global_options self.negative_opt = self.feature_negopt = no @@ -407,25 +552,26 @@ class Distribution(_Distribution): """Add/remove features and resolve dependencies between them""" # First, flag all the enabled items (and thus their dependencies) - for name,feature in self.features.items(): + for name, feature in self.features.items(): enabled = self.feature_is_included(name) if enabled or (enabled is None and feature.include_by_default()): feature.include_in(self) - self._set_feature(name,1) + self._set_feature(name, 1) # Then disable the rest, so that off-by-default features don't # get flagged as errors when they're required by an enabled feature - for name,feature in self.features.items(): + for name, feature in self.features.items(): if not self.feature_is_included(name): feature.exclude_from(self) - self._set_feature(name,0) + self._set_feature(name, 0) def get_command_class(self, command): """Pluggable version of get_command_class()""" if command in self.cmdclass: return self.cmdclass[command] - for ep in pkg_resources.iter_entry_points('distutils.commands',command): + eps = pkg_resources.iter_entry_points('distutils.commands', command) + for ep in eps: ep.require(installer=self.fetch_build_egg) self.cmdclass[command] = cmdclass = ep.load() return cmdclass @@ -448,26 +594,26 @@ class Distribution(_Distribution): self.cmdclass[ep.name] = cmdclass return _Distribution.get_command_list(self) - def _set_feature(self,name,status): + def _set_feature(self, name, status): """Set feature's inclusion status""" - setattr(self,self._feature_attrname(name),status) + setattr(self, self._feature_attrname(name), status) - def feature_is_included(self,name): + def feature_is_included(self, name): """Return 1 if feature is included, 0 if excluded, 'None' if unknown""" - return getattr(self,self._feature_attrname(name)) + return getattr(self, self._feature_attrname(name)) - def include_feature(self,name): + def include_feature(self, name): """Request inclusion of feature named 'name'""" - if self.feature_is_included(name)==0: + if self.feature_is_included(name) == 0: descr = self.features[name].description raise DistutilsOptionError( descr + " is required, but was excluded or is not available" ) self.features[name].include_in(self) - self._set_feature(name,1) + self._set_feature(name, 1) - def include(self,**attrs): + def include(self, **attrs): """Add items to distribution that are named in keyword arguments For example, 'dist.exclude(py_modules=["x"])' would add 'x' to @@ -482,86 +628,87 @@ class Distribution(_Distribution): will try to call 'dist._include_foo({"bar":"baz"})', which can then handle whatever special inclusion logic is needed. 
""" - for k,v in attrs.items(): - include = getattr(self, '_include_'+k, None) + for k, v in attrs.items(): + include = getattr(self, '_include_' + k, None) if include: include(v) else: - self._include_misc(k,v) + self._include_misc(k, v) - def exclude_package(self,package): + def exclude_package(self, package): """Remove packages, modules, and extensions in named package""" - pfx = package+'.' + pfx = package + '.' if self.packages: self.packages = [ p for p in self.packages - if p != package and not p.startswith(pfx) + if p != package and not p.startswith(pfx) ] if self.py_modules: self.py_modules = [ p for p in self.py_modules - if p != package and not p.startswith(pfx) + if p != package and not p.startswith(pfx) ] if self.ext_modules: self.ext_modules = [ p for p in self.ext_modules - if p.name != package and not p.name.startswith(pfx) + if p.name != package and not p.name.startswith(pfx) ] - def has_contents_for(self,package): + def has_contents_for(self, package): """Return true if 'exclude_package(package)' would do something""" - pfx = package+'.' + pfx = package + '.' for p in self.iter_distribution_names(): - if p==package or p.startswith(pfx): + if p == package or p.startswith(pfx): return True - def _exclude_misc(self,name,value): + def _exclude_misc(self, name, value): """Handle 'exclude()' for list/tuple attrs without a special handler""" - if not isinstance(value,sequence): + if not isinstance(value, sequence): raise DistutilsSetupError( "%s: setting must be a list or tuple (%r)" % (name, value) ) try: - old = getattr(self,name) + old = getattr(self, name) except AttributeError: raise DistutilsSetupError( "%s: No such distribution setting" % name ) - if old is not None and not isinstance(old,sequence): + if old is not None and not isinstance(old, sequence): raise DistutilsSetupError( - name+": this setting cannot be changed via include/exclude" + name + ": this setting cannot be changed via include/exclude" ) elif old: - setattr(self,name,[item for item in old if item not in value]) + setattr(self, name, [item for item in old if item not in value]) - def _include_misc(self,name,value): + def _include_misc(self, name, value): """Handle 'include()' for list/tuple attrs without a special handler""" - if not isinstance(value,sequence): + if not isinstance(value, sequence): raise DistutilsSetupError( "%s: setting must be a list (%r)" % (name, value) ) try: - old = getattr(self,name) + old = getattr(self, name) except AttributeError: raise DistutilsSetupError( "%s: No such distribution setting" % name ) if old is None: - setattr(self,name,value) - elif not isinstance(old,sequence): + setattr(self, name, value) + elif not isinstance(old, sequence): raise DistutilsSetupError( - name+": this setting cannot be changed via include/exclude" + name + ": this setting cannot be changed via include/exclude" ) else: - setattr(self,name,old+[item for item in value if item not in old]) + new = [item for item in value if item not in old] + setattr(self, name, old + new) - def exclude(self,**attrs): + def exclude(self, **attrs): """Remove items from distribution that are named in keyword arguments For example, 'dist.exclude(py_modules=["x"])' would remove 'x' from @@ -577,15 +724,15 @@ class Distribution(_Distribution): will try to call 'dist._exclude_foo({"bar":"baz"})', which can then handle whatever special exclusion logic is needed. 
""" - for k,v in attrs.items(): - exclude = getattr(self, '_exclude_'+k, None) + for k, v in attrs.items(): + exclude = getattr(self, '_exclude_' + k, None) if exclude: exclude(v) else: - self._exclude_misc(k,v) + self._exclude_misc(k, v) - def _exclude_packages(self,packages): - if not isinstance(packages,sequence): + def _exclude_packages(self, packages): + if not isinstance(packages, sequence): raise DistutilsSetupError( "packages: setting must be a list or tuple (%r)" % (packages,) ) @@ -600,17 +747,17 @@ class Distribution(_Distribution): command = args[0] aliases = self.get_option_dict('aliases') while command in aliases: - src,alias = aliases[command] - del aliases[command] # ensure each alias can expand only once! + src, alias = aliases[command] + del aliases[command] # ensure each alias can expand only once! import shlex - args[:1] = shlex.split(alias,True) + args[:1] = shlex.split(alias, True) command = args[0] nargs = _Distribution._parse_command_opts(self, parser, args) # Handle commands that want to consume all remaining arguments cmd_class = self.get_command_class(command) - if getattr(cmd_class,'command_consumes_arguments',None): + if getattr(cmd_class, 'command_consumes_arguments', None): self.get_option_dict(command)['args'] = ("command line", nargs) if nargs is not None: return [] @@ -629,31 +776,31 @@ class Distribution(_Distribution): d = {} - for cmd,opts in self.command_options.items(): + for cmd, opts in self.command_options.items(): - for opt,(src,val) in opts.items(): + for opt, (src, val) in opts.items(): if src != "command line": continue - opt = opt.replace('_','-') + opt = opt.replace('_', '-') - if val==0: + if val == 0: cmdobj = self.get_command_obj(cmd) neg_opt = self.negative_opt.copy() - neg_opt.update(getattr(cmdobj,'negative_opt',{})) - for neg,pos in neg_opt.items(): - if pos==opt: - opt=neg - val=None + neg_opt.update(getattr(cmdobj, 'negative_opt', {})) + for neg, pos in neg_opt.items(): + if pos == opt: + opt = neg + val = None break else: raise AssertionError("Shouldn't be able to get here") - elif val==1: + elif val == 1: val = None - d.setdefault(cmd,{})[opt] = val + d.setdefault(cmd, {})[opt] = val return d @@ -667,7 +814,7 @@ class Distribution(_Distribution): yield module for ext in self.ext_modules or (): - if isinstance(ext,tuple): + if isinstance(ext, tuple): name, buildinfo = ext else: name = ext.name @@ -711,16 +858,11 @@ class Distribution(_Distribution): sys.stdout.detach(), encoding, errors, newline, line_buffering) -# Install it throughout the distutils -for module in distutils.dist, distutils.core, distutils.cmd: - module.Distribution = Distribution - - class Feature: """ **deprecated** -- The `Feature` facility was never completely implemented or supported, `has reported issues - `_ and will be removed in + `_ and will be removed in a future version. A subset of the distribution that can be excluded if unneeded/wanted @@ -775,14 +917,14 @@ class Feature: @staticmethod def warn_deprecated(): - warnings.warn( + msg = ( "Features are deprecated and will be removed in a future " - "version. See http://bitbucket.org/pypa/setuptools/65.", - DeprecationWarning, - stacklevel=3, + "version. See https://github.com/pypa/setuptools/issues/65." 
) + warnings.warn(msg, DeprecationWarning, stacklevel=3) - def __init__(self, description, standard=False, available=True, + def __init__( + self, description, standard=False, available=True, optional=True, require_features=(), remove=(), **extras): self.warn_deprecated() @@ -790,32 +932,32 @@ class Feature: self.standard = standard self.available = available self.optional = optional - if isinstance(require_features,(str,Require)): + if isinstance(require_features, (str, Require)): require_features = require_features, self.require_features = [ - r for r in require_features if isinstance(r,str) + r for r in require_features if isinstance(r, str) ] - er = [r for r in require_features if not isinstance(r,str)] - if er: extras['require_features'] = er + er = [r for r in require_features if not isinstance(r, str)] + if er: + extras['require_features'] = er - if isinstance(remove,str): + if isinstance(remove, str): remove = remove, self.remove = remove self.extras = extras if not remove and not require_features and not extras: raise DistutilsSetupError( - "Feature %s: must define 'require_features', 'remove', or at least one" - " of 'packages', 'py_modules', etc." + "Feature %s: must define 'require_features', 'remove', or " + "at least one of 'packages', 'py_modules', etc." ) def include_by_default(self): """Should this feature be included by default?""" return self.available and self.standard - def include_in(self,dist): - + def include_in(self, dist): """Ensure feature and its requirements are included in distribution You may override this in a subclass to perform additional operations on @@ -826,7 +968,7 @@ class Feature: if not self.available: raise DistutilsPlatformError( - self.description+" is required, " + self.description + " is required, " "but is not available on this platform" ) @@ -835,8 +977,7 @@ class Feature: for f in self.require_features: dist.include_feature(f) - def exclude_from(self,dist): - + def exclude_from(self, dist): """Ensure feature is excluded from distribution You may override this in a subclass to perform additional operations on @@ -851,8 +992,7 @@ class Feature: for item in self.remove: dist.exclude_package(item) - def validate(self,dist): - + def validate(self, dist): """Verify that feature makes sense in context of distribution This method is called by the distribution just before it parses its diff --git a/lib/python3.4/site-packages/setuptools/extension.py b/lib/python3.4/site-packages/setuptools/extension.py index d10609b..2946889 100644 --- a/lib/python3.4/site-packages/setuptools/extension.py +++ b/lib/python3.4/site-packages/setuptools/extension.py @@ -1,4 +1,3 @@ -import sys import re import functools import distutils.core @@ -7,18 +6,14 @@ import distutils.extension from setuptools.extern.six.moves import map -from .dist import _get_unpatched -from . import msvc9_support +from .monkey import get_unpatched -_Extension = _get_unpatched(distutils.core.Extension) - -msvc9_support.patch_for_specialized_compiler() def _have_cython(): """ Return True if Cython can be imported. 
""" - cython_impl = 'Cython.Distutils.build_ext', + cython_impl = 'Cython.Distutils.build_ext' try: # from (cython_impl) import build_ext __import__(cython_impl, fromlist=['build_ext']).build_ext @@ -27,13 +22,22 @@ def _have_cython(): pass return False + # for compatibility have_pyrex = _have_cython +_Extension = get_unpatched(distutils.core.Extension) + class Extension(_Extension): """Extension that uses '.c' files in place of '.pyx' files""" + def __init__(self, name, sources, *args, **kw): + # The *args is needed for compatibility as calls may use positional + # arguments. py_limited_api may be set only via keyword. + self.py_limited_api = kw.pop("py_limited_api", False) + _Extension.__init__(self, name, sources, *args, **kw) + def _convert_pyx_sources_to_lang(self): """ Replace sources with .pyx extensions to sources with the target @@ -48,10 +52,6 @@ class Extension(_Extension): sub = functools.partial(re.sub, '.pyx$', target_ext) self.sources = list(map(sub, self.sources)) + class Library(Extension): """Just like a regular Extension, but built as a library instead""" - -distutils.core.Extension = Extension -distutils.extension.Extension = Extension -if 'distutils.command.build_ext' in sys.modules: - sys.modules['distutils.command.build_ext'].Extension = Extension diff --git a/lib/python3.4/site-packages/setuptools/extern/__init__.py b/lib/python3.4/site-packages/setuptools/extern/__init__.py index 6859aa5..2cd08b7 100644 --- a/lib/python3.4/site-packages/setuptools/extern/__init__.py +++ b/lib/python3.4/site-packages/setuptools/extern/__init__.py @@ -1,5 +1,4 @@ from pkg_resources.extern import VendorImporter - names = 'six', VendorImporter(__name__, names, 'pkg_resources._vendor').install() diff --git a/lib/python3.4/site-packages/setuptools/glob.py b/lib/python3.4/site-packages/setuptools/glob.py new file mode 100644 index 0000000..6c781de --- /dev/null +++ b/lib/python3.4/site-packages/setuptools/glob.py @@ -0,0 +1,176 @@ +""" +Filename globbing utility. Mostly a copy of `glob` from Python 3.5. + +Changes include: + * `yield from` and PEP3102 `*` removed. + * `bytes` changed to `six.binary_type`. + * Hidden files are not ignored. +""" + +import os +import re +import fnmatch +from setuptools.extern.six import binary_type + +__all__ = ["glob", "iglob", "escape"] + + +def glob(pathname, recursive=False): + """Return a list of paths matching a pathname pattern. + + The pattern may contain simple shell-style wildcards a la + fnmatch. However, unlike fnmatch, filenames starting with a + dot are special cases that are not matched by '*' and '?' + patterns. + + If recursive is true, the pattern '**' will match any files and + zero or more directories and subdirectories. + """ + return list(iglob(pathname, recursive=recursive)) + + +def iglob(pathname, recursive=False): + """Return an iterator which yields the paths matching a pathname pattern. + + The pattern may contain simple shell-style wildcards a la + fnmatch. However, unlike fnmatch, filenames starting with a + dot are special cases that are not matched by '*' and '?' + patterns. + + If recursive is true, the pattern '**' will match any files and + zero or more directories and subdirectories. 
+ """ + it = _iglob(pathname, recursive) + if recursive and _isrecursive(pathname): + s = next(it) # skip empty string + assert not s + return it + + +def _iglob(pathname, recursive): + dirname, basename = os.path.split(pathname) + if not has_magic(pathname): + if basename: + if os.path.lexists(pathname): + yield pathname + else: + # Patterns ending with a slash should match only directories + if os.path.isdir(dirname): + yield pathname + return + if not dirname: + if recursive and _isrecursive(basename): + for x in glob2(dirname, basename): + yield x + else: + for x in glob1(dirname, basename): + yield x + return + # `os.path.split()` returns the argument itself as a dirname if it is a + # drive or UNC path. Prevent an infinite recursion if a drive or UNC path + # contains magic characters (i.e. r'\\?\C:'). + if dirname != pathname and has_magic(dirname): + dirs = _iglob(dirname, recursive) + else: + dirs = [dirname] + if has_magic(basename): + if recursive and _isrecursive(basename): + glob_in_dir = glob2 + else: + glob_in_dir = glob1 + else: + glob_in_dir = glob0 + for dirname in dirs: + for name in glob_in_dir(dirname, basename): + yield os.path.join(dirname, name) + + +# These 2 helper functions non-recursively glob inside a literal directory. +# They return a list of basenames. `glob1` accepts a pattern while `glob0` +# takes a literal basename (so it only has to check for its existence). + + +def glob1(dirname, pattern): + if not dirname: + if isinstance(pattern, binary_type): + dirname = os.curdir.encode('ASCII') + else: + dirname = os.curdir + try: + names = os.listdir(dirname) + except OSError: + return [] + return fnmatch.filter(names, pattern) + + +def glob0(dirname, basename): + if not basename: + # `os.path.split()` returns an empty basename for paths ending with a + # directory separator. 'q*x/' should match only directories. + if os.path.isdir(dirname): + return [basename] + else: + if os.path.lexists(os.path.join(dirname, basename)): + return [basename] + return [] + + +# This helper function recursively yields relative pathnames inside a literal +# directory. + + +def glob2(dirname, pattern): + assert _isrecursive(pattern) + yield pattern[:0] + for x in _rlistdir(dirname): + yield x + + +# Recursively yields relative pathnames inside a literal directory. +def _rlistdir(dirname): + if not dirname: + if isinstance(dirname, binary_type): + dirname = binary_type(os.curdir, 'ASCII') + else: + dirname = os.curdir + try: + names = os.listdir(dirname) + except os.error: + return + for x in names: + yield x + path = os.path.join(dirname, x) if dirname else x + for y in _rlistdir(path): + yield os.path.join(x, y) + + +magic_check = re.compile('([*?[])') +magic_check_bytes = re.compile(b'([*?[])') + + +def has_magic(s): + if isinstance(s, binary_type): + match = magic_check_bytes.search(s) + else: + match = magic_check.search(s) + return match is not None + + +def _isrecursive(pattern): + if isinstance(pattern, binary_type): + return pattern == b'**' + else: + return pattern == '**' + + +def escape(pathname): + """Escape all special characters. + """ + # Escaping is done by wrapping any of "*?[" between square brackets. + # Metacharacters do not work in the drive part and shouldn't be escaped. 
+ drive, pathname = os.path.splitdrive(pathname) + if isinstance(pathname, binary_type): + pathname = magic_check_bytes.sub(br'[\1]', pathname) + else: + pathname = magic_check.sub(r'[\1]', pathname) + return drive + pathname diff --git a/lib/python3.4/site-packages/setuptools/gui-arm-32.exe b/lib/python3.4/site-packages/setuptools/gui-arm-32.exe deleted file mode 100644 index 537aff3..0000000 Binary files a/lib/python3.4/site-packages/setuptools/gui-arm-32.exe and /dev/null differ diff --git a/lib/python3.4/site-packages/setuptools/launch.py b/lib/python3.4/site-packages/setuptools/launch.py index 06e15e1..308283e 100644 --- a/lib/python3.4/site-packages/setuptools/launch.py +++ b/lib/python3.4/site-packages/setuptools/launch.py @@ -11,25 +11,25 @@ import sys def run(): - """ - Run the script in sys.argv[1] as if it had - been invoked naturally. - """ - __builtins__ - script_name = sys.argv[1] - namespace = dict( - __file__ = script_name, - __name__ = '__main__', - __doc__ = None, - ) - sys.argv[:] = sys.argv[1:] + """ + Run the script in sys.argv[1] as if it had + been invoked naturally. + """ + __builtins__ + script_name = sys.argv[1] + namespace = dict( + __file__=script_name, + __name__='__main__', + __doc__=None, + ) + sys.argv[:] = sys.argv[1:] - open_ = getattr(tokenize, 'open', open) - script = open_(script_name).read() - norm_script = script.replace('\\r\\n', '\\n') - code = compile(norm_script, script_name, 'exec') - exec(code, namespace) + open_ = getattr(tokenize, 'open', open) + script = open_(script_name).read() + norm_script = script.replace('\\r\\n', '\\n') + code = compile(norm_script, script_name, 'exec') + exec(code, namespace) if __name__ == '__main__': - run() + run() diff --git a/lib/python3.4/site-packages/setuptools/lib2to3_ex.py b/lib/python3.4/site-packages/setuptools/lib2to3_ex.py index feef591..4b1a73f 100644 --- a/lib/python3.4/site-packages/setuptools/lib2to3_ex.py +++ b/lib/python3.4/site-packages/setuptools/lib2to3_ex.py @@ -10,8 +10,10 @@ This module raises an ImportError on Python 2. from distutils.util import Mixin2to3 as _Mixin2to3 from distutils import log from lib2to3.refactor import RefactoringTool, get_fixers_from_package + import setuptools + class DistutilsRefactoringTool(RefactoringTool): def log_error(self, msg, *args, **kw): log.error(msg, *args) @@ -22,15 +24,16 @@ class DistutilsRefactoringTool(RefactoringTool): def log_debug(self, msg, *args): log.debug(msg, *args) + class Mixin2to3(_Mixin2to3): - def run_2to3(self, files, doctests = False): + def run_2to3(self, files, doctests=False): # See of the distribution option has been set, otherwise check the # setuptools default. if self.distribution.use_2to3 is not True: return if not files: return - log.info("Fixing "+" ".join(files)) + log.info("Fixing " + " ".join(files)) self.__build_fixer_names() self.__exclude_fixers() if doctests: @@ -41,7 +44,8 @@ class Mixin2to3(_Mixin2to3): _Mixin2to3.run_2to3(self, files) def __build_fixer_names(self): - if self.fixer_names: return + if self.fixer_names: + return self.fixer_names = [] for p in setuptools.lib2to3_fixer_packages: self.fixer_names.extend(get_fixers_from_package(p)) diff --git a/lib/python3.4/site-packages/setuptools/monkey.py b/lib/python3.4/site-packages/setuptools/monkey.py new file mode 100644 index 0000000..6d3711e --- /dev/null +++ b/lib/python3.4/site-packages/setuptools/monkey.py @@ -0,0 +1,197 @@ +""" +Monkey patching of distutils. 
+""" + +import sys +import distutils.filelist +import platform +import types +import functools +import inspect + +from .py26compat import import_module +from setuptools.extern import six + +import setuptools + +__all__ = [] +""" +Everything is private. Contact the project team +if you think you need this functionality. +""" + + +def _get_mro(cls): + """ + Returns the bases classes for cls sorted by the MRO. + + Works around an issue on Jython where inspect.getmro will not return all + base classes if multiple classes share the same name. Instead, this + function will return a tuple containing the class itself, and the contents + of cls.__bases__. See https://github.com/pypa/setuptools/issues/1024. + """ + if platform.python_implementation() == "Jython": + return (cls,) + cls.__bases__ + return inspect.getmro(cls) + + +def get_unpatched(item): + lookup = ( + get_unpatched_class if isinstance(item, six.class_types) else + get_unpatched_function if isinstance(item, types.FunctionType) else + lambda item: None + ) + return lookup(item) + + +def get_unpatched_class(cls): + """Protect against re-patching the distutils if reloaded + + Also ensures that no other distutils extension monkeypatched the distutils + first. + """ + external_bases = ( + cls + for cls in _get_mro(cls) + if not cls.__module__.startswith('setuptools') + ) + base = next(external_bases) + if not base.__module__.startswith('distutils'): + msg = "distutils has already been patched by %r" % cls + raise AssertionError(msg) + return base + + +def patch_all(): + # we can't patch distutils.cmd, alas + distutils.core.Command = setuptools.Command + + has_issue_12885 = sys.version_info <= (3, 5, 3) + + if has_issue_12885: + # fix findall bug in distutils (http://bugs.python.org/issue12885) + distutils.filelist.findall = setuptools.findall + + needs_warehouse = ( + sys.version_info < (2, 7, 13) + or + (3, 0) < sys.version_info < (3, 3, 7) + or + (3, 4) < sys.version_info < (3, 4, 6) + or + (3, 5) < sys.version_info <= (3, 5, 3) + ) + + if needs_warehouse: + warehouse = 'https://upload.pypi.org/legacy/' + distutils.config.PyPIRCCommand.DEFAULT_REPOSITORY = warehouse + + _patch_distribution_metadata_write_pkg_file() + _patch_distribution_metadata_write_pkg_info() + + # Install Distribution throughout the distutils + for module in distutils.dist, distutils.core, distutils.cmd: + module.Distribution = setuptools.dist.Distribution + + # Install the patched Extension + distutils.core.Extension = setuptools.extension.Extension + distutils.extension.Extension = setuptools.extension.Extension + if 'distutils.command.build_ext' in sys.modules: + sys.modules['distutils.command.build_ext'].Extension = ( + setuptools.extension.Extension + ) + + patch_for_msvc_specialized_compiler() + + +def _patch_distribution_metadata_write_pkg_file(): + """Patch write_pkg_file to also write Requires-Python/Requires-External""" + distutils.dist.DistributionMetadata.write_pkg_file = ( + setuptools.dist.write_pkg_file + ) + + +def _patch_distribution_metadata_write_pkg_info(): + """ + Workaround issue #197 - Python 3 prior to 3.2.2 uses an environment-local + encoding to save the pkg_info. Monkey-patch its write_pkg_info method to + correct this undesirable behavior. 
+ """ + environment_local = (3,) <= sys.version_info[:3] < (3, 2, 2) + if not environment_local: + return + + distutils.dist.DistributionMetadata.write_pkg_info = ( + setuptools.dist.write_pkg_info + ) + + +def patch_func(replacement, target_mod, func_name): + """ + Patch func_name in target_mod with replacement + + Important - original must be resolved by name to avoid + patching an already patched function. + """ + original = getattr(target_mod, func_name) + + # set the 'unpatched' attribute on the replacement to + # point to the original. + vars(replacement).setdefault('unpatched', original) + + # replace the function in the original module + setattr(target_mod, func_name, replacement) + + +def get_unpatched_function(candidate): + return getattr(candidate, 'unpatched') + + +def patch_for_msvc_specialized_compiler(): + """ + Patch functions in distutils to use standalone Microsoft Visual C++ + compilers. + """ + # import late to avoid circular imports on Python < 3.5 + msvc = import_module('setuptools.msvc') + + if platform.system() != 'Windows': + # Compilers only availables on Microsoft Windows + return + + def patch_params(mod_name, func_name): + """ + Prepare the parameters for patch_func to patch indicated function. + """ + repl_prefix = 'msvc9_' if 'msvc9' in mod_name else 'msvc14_' + repl_name = repl_prefix + func_name.lstrip('_') + repl = getattr(msvc, repl_name) + mod = import_module(mod_name) + if not hasattr(mod, func_name): + raise ImportError(func_name) + return repl, mod, func_name + + # Python 2.7 to 3.4 + msvc9 = functools.partial(patch_params, 'distutils.msvc9compiler') + + # Python 3.5+ + msvc14 = functools.partial(patch_params, 'distutils._msvccompiler') + + try: + # Patch distutils.msvc9compiler + patch_func(*msvc9('find_vcvarsall')) + patch_func(*msvc9('query_vcvarsall')) + except ImportError: + pass + + try: + # Patch distutils._msvccompiler._get_vc_env + patch_func(*msvc14('_get_vc_env')) + except ImportError: + pass + + try: + # Patch distutils._msvccompiler.gen_lib_options for Numpy + patch_func(*msvc14('gen_lib_options')) + except ImportError: + pass diff --git a/lib/python3.4/site-packages/setuptools/msvc.py b/lib/python3.4/site-packages/setuptools/msvc.py new file mode 100644 index 0000000..8e3b638 --- /dev/null +++ b/lib/python3.4/site-packages/setuptools/msvc.py @@ -0,0 +1,1302 @@ +""" +Improved support for Microsoft Visual C++ compilers. + +Known supported compilers: +-------------------------- +Microsoft Visual C++ 9.0: + Microsoft Visual C++ Compiler for Python 2.7 (x86, amd64) + Microsoft Windows SDK 6.1 (x86, x64, ia64) + Microsoft Windows SDK 7.0 (x86, x64, ia64) + +Microsoft Visual C++ 10.0: + Microsoft Windows SDK 7.1 (x86, x64, ia64) + +Microsoft Visual C++ 14.0: + Microsoft Visual C++ Build Tools 2015 (x86, x64, arm) + Microsoft Visual Studio 2017 (x86, x64, arm, arm64) + Microsoft Visual Studio Build Tools 2017 (x86, x64, arm, arm64) +""" + +import os +import sys +import platform +import itertools +import distutils.errors +from pkg_resources.extern.packaging.version import LegacyVersion + +from setuptools.extern.six.moves import filterfalse + +from .monkey import get_unpatched + +if platform.system() == 'Windows': + from setuptools.extern.six.moves import winreg + safe_env = os.environ +else: + """ + Mock winreg and environ so the module can be imported + on this platform. 
+ """ + + class winreg: + HKEY_USERS = None + HKEY_CURRENT_USER = None + HKEY_LOCAL_MACHINE = None + HKEY_CLASSES_ROOT = None + + safe_env = dict() + +_msvc9_suppress_errors = ( + # msvc9compiler isn't available on some platforms + ImportError, + + # msvc9compiler raises DistutilsPlatformError in some + # environments. See #1118. + distutils.errors.DistutilsPlatformError, +) + +try: + from distutils.msvc9compiler import Reg +except _msvc9_suppress_errors: + pass + + +def msvc9_find_vcvarsall(version): + """ + Patched "distutils.msvc9compiler.find_vcvarsall" to use the standalone + compiler build for Python (VCForPython). Fall back to original behavior + when the standalone compiler is not available. + + Redirect the path of "vcvarsall.bat". + + Known supported compilers + ------------------------- + Microsoft Visual C++ 9.0: + Microsoft Visual C++ Compiler for Python 2.7 (x86, amd64) + + Parameters + ---------- + version: float + Required Microsoft Visual C++ version. + + Return + ------ + vcvarsall.bat path: str + """ + VC_BASE = r'Software\%sMicrosoft\DevDiv\VCForPython\%0.1f' + key = VC_BASE % ('', version) + try: + # Per-user installs register the compiler path here + productdir = Reg.get_value(key, "installdir") + except KeyError: + try: + # All-user installs on a 64-bit system register here + key = VC_BASE % ('Wow6432Node\\', version) + productdir = Reg.get_value(key, "installdir") + except KeyError: + productdir = None + + if productdir: + vcvarsall = os.path.os.path.join(productdir, "vcvarsall.bat") + if os.path.isfile(vcvarsall): + return vcvarsall + + return get_unpatched(msvc9_find_vcvarsall)(version) + + +def msvc9_query_vcvarsall(ver, arch='x86', *args, **kwargs): + """ + Patched "distutils.msvc9compiler.query_vcvarsall" for support extra + compilers. + + Set environment without use of "vcvarsall.bat". + + Known supported compilers + ------------------------- + Microsoft Visual C++ 9.0: + Microsoft Visual C++ Compiler for Python 2.7 (x86, amd64) + Microsoft Windows SDK 6.1 (x86, x64, ia64) + Microsoft Windows SDK 7.0 (x86, x64, ia64) + + Microsoft Visual C++ 10.0: + Microsoft Windows SDK 7.1 (x86, x64, ia64) + + Parameters + ---------- + ver: float + Required Microsoft Visual C++ version. + arch: str + Target architecture. + + Return + ------ + environment: dict + """ + # Try to get environement from vcvarsall.bat (Classical way) + try: + orig = get_unpatched(msvc9_query_vcvarsall) + return orig(ver, arch, *args, **kwargs) + except distutils.errors.DistutilsPlatformError: + # Pass error if Vcvarsall.bat is missing + pass + except ValueError: + # Pass error if environment not set after executing vcvarsall.bat + pass + + # If error, try to set environment directly + try: + return EnvironmentInfo(arch, ver).return_env() + except distutils.errors.DistutilsPlatformError as exc: + _augment_exception(exc, ver, arch) + raise + + +def msvc14_get_vc_env(plat_spec): + """ + Patched "distutils._msvccompiler._get_vc_env" for support extra + compilers. + + Set environment without use of "vcvarsall.bat". + + Known supported compilers + ------------------------- + Microsoft Visual C++ 14.0: + Microsoft Visual C++ Build Tools 2015 (x86, x64, arm) + Microsoft Visual Studio 2017 (x86, x64, arm, arm64) + Microsoft Visual Studio Build Tools 2017 (x86, x64, arm, arm64) + + Parameters + ---------- + plat_spec: str + Target architecture. 
+ + Return + ------ + environment: dict + """ + # Try to get environment from vcvarsall.bat (Classical way) + try: + return get_unpatched(msvc14_get_vc_env)(plat_spec) + except distutils.errors.DistutilsPlatformError: + # Pass error Vcvarsall.bat is missing + pass + + # If error, try to set environment directly + try: + return EnvironmentInfo(plat_spec, vc_min_ver=14.0).return_env() + except distutils.errors.DistutilsPlatformError as exc: + _augment_exception(exc, 14.0) + raise + + +def msvc14_gen_lib_options(*args, **kwargs): + """ + Patched "distutils._msvccompiler.gen_lib_options" for fix + compatibility between "numpy.distutils" and "distutils._msvccompiler" + (for Numpy < 1.11.2) + """ + if "numpy.distutils" in sys.modules: + import numpy as np + if LegacyVersion(np.__version__) < LegacyVersion('1.11.2'): + return np.distutils.ccompiler.gen_lib_options(*args, **kwargs) + return get_unpatched(msvc14_gen_lib_options)(*args, **kwargs) + + +def _augment_exception(exc, version, arch=''): + """ + Add details to the exception message to help guide the user + as to what action will resolve it. + """ + # Error if MSVC++ directory not found or environment not set + message = exc.args[0] + + if "vcvarsall" in message.lower() or "visual c" in message.lower(): + # Special error message if MSVC++ not installed + tmpl = 'Microsoft Visual C++ {version:0.1f} is required.' + message = tmpl.format(**locals()) + msdownload = 'www.microsoft.com/download/details.aspx?id=%d' + if version == 9.0: + if arch.lower().find('ia64') > -1: + # For VC++ 9.0, if IA64 support is needed, redirect user + # to Windows SDK 7.0 + message += ' Get it with "Microsoft Windows SDK 7.0": ' + message += msdownload % 3138 + else: + # For VC++ 9.0 redirect user to Vc++ for Python 2.7 : + # This redirection link is maintained by Microsoft. + # Contact vspython@microsoft.com if it needs updating. + message += ' Get it from http://aka.ms/vcpython27' + elif version == 10.0: + # For VC++ 10.0 Redirect user to Windows SDK 7.1 + message += ' Get it with "Microsoft Windows SDK 7.1": ' + message += msdownload % 8279 + elif version >= 14.0: + # For VC++ 14.0 Redirect user to Visual C++ Build Tools + message += (' Get it with "Microsoft Visual C++ Build Tools": ' + r'http://landinghub.visualstudio.com/' + 'visual-cpp-build-tools') + + exc.args = (message, ) + + +class PlatformInfo: + """ + Current and Target Architectures informations. + + Parameters + ---------- + arch: str + Target architecture. + """ + current_cpu = safe_env.get('processor_architecture', '').lower() + + def __init__(self, arch): + self.arch = arch.lower().replace('x64', 'amd64') + + @property + def target_cpu(self): + return self.arch[self.arch.find('_') + 1:] + + def target_is_x86(self): + return self.target_cpu == 'x86' + + def current_is_x86(self): + return self.current_cpu == 'x86' + + def current_dir(self, hidex86=False, x64=False): + """ + Current platform specific subfolder. + + Parameters + ---------- + hidex86: bool + return '' and not '\x86' if architecture is x86. + x64: bool + return '\x64' and not '\amd64' if architecture is amd64. + + Return + ------ + subfolder: str + '\target', or '' (see hidex86 parameter) + """ + return ( + '' if (self.current_cpu == 'x86' and hidex86) else + r'\x64' if (self.current_cpu == 'amd64' and x64) else + r'\%s' % self.current_cpu + ) + + def target_dir(self, hidex86=False, x64=False): + r""" + Target platform specific subfolder. + + Parameters + ---------- + hidex86: bool + return '' and not '\x86' if architecture is x86. 
+ x64: bool + return '\x64' and not '\amd64' if architecture is amd64. + + Return + ------ + subfolder: str + '\current', or '' (see hidex86 parameter) + """ + return ( + '' if (self.target_cpu == 'x86' and hidex86) else + r'\x64' if (self.target_cpu == 'amd64' and x64) else + r'\%s' % self.target_cpu + ) + + def cross_dir(self, forcex86=False): + r""" + Cross platform specific subfolder. + + Parameters + ---------- + forcex86: bool + Use 'x86' as current architecture even if current acritecture is + not x86. + + Return + ------ + subfolder: str + '' if target architecture is current architecture, + '\current_target' if not. + """ + current = 'x86' if forcex86 else self.current_cpu + return ( + '' if self.target_cpu == current else + self.target_dir().replace('\\', '\\%s_' % current) + ) + + +class RegistryInfo: + """ + Microsoft Visual Studio related registry informations. + + Parameters + ---------- + platform_info: PlatformInfo + "PlatformInfo" instance. + """ + HKEYS = (winreg.HKEY_USERS, + winreg.HKEY_CURRENT_USER, + winreg.HKEY_LOCAL_MACHINE, + winreg.HKEY_CLASSES_ROOT) + + def __init__(self, platform_info): + self.pi = platform_info + + @property + def visualstudio(self): + """ + Microsoft Visual Studio root registry key. + """ + return 'VisualStudio' + + @property + def sxs(self): + """ + Microsoft Visual Studio SxS registry key. + """ + return os.path.join(self.visualstudio, 'SxS') + + @property + def vc(self): + """ + Microsoft Visual C++ VC7 registry key. + """ + return os.path.join(self.sxs, 'VC7') + + @property + def vs(self): + """ + Microsoft Visual Studio VS7 registry key. + """ + return os.path.join(self.sxs, 'VS7') + + @property + def vc_for_python(self): + """ + Microsoft Visual C++ for Python registry key. + """ + return r'DevDiv\VCForPython' + + @property + def microsoft_sdk(self): + """ + Microsoft SDK registry key. + """ + return 'Microsoft SDKs' + + @property + def windows_sdk(self): + """ + Microsoft Windows/Platform SDK registry key. + """ + return os.path.join(self.microsoft_sdk, 'Windows') + + @property + def netfx_sdk(self): + """ + Microsoft .NET Framework SDK registry key. + """ + return os.path.join(self.microsoft_sdk, 'NETFXSDK') + + @property + def windows_kits_roots(self): + """ + Microsoft Windows Kits Roots registry key. + """ + return r'Windows Kits\Installed Roots' + + def microsoft(self, key, x86=False): + """ + Return key in Microsoft software registry. + + Parameters + ---------- + key: str + Registry key path where look. + x86: str + Force x86 software registry. + + Return + ------ + str: value + """ + node64 = '' if self.pi.current_is_x86() or x86 else 'Wow6432Node' + return os.path.join('Software', node64, 'Microsoft', key) + + def lookup(self, key, name): + """ + Look for values in registry in Microsoft software registry. + + Parameters + ---------- + key: str + Registry key path where look. + name: str + Value name to find. + + Return + ------ + str: value + """ + KEY_READ = winreg.KEY_READ + openkey = winreg.OpenKey + ms = self.microsoft + for hkey in self.HKEYS: + try: + bkey = openkey(hkey, ms(key), 0, KEY_READ) + except (OSError, IOError): + if not self.pi.current_is_x86(): + try: + bkey = openkey(hkey, ms(key, True), 0, KEY_READ) + except (OSError, IOError): + continue + else: + continue + try: + return winreg.QueryValueEx(bkey, name)[0] + except (OSError, IOError): + pass + + +class SystemInfo: + """ + Microsoft Windows and Visual Studio related system inormations. 
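A Windows-only sketch of how RegistryInfo.lookup() is meant to be driven; the 'x86' arch string and the '14.0' value name are illustrative. lookup() probes HKEY_USERS, HKEY_CURRENT_USER, HKEY_LOCAL_MACHINE and HKEY_CLASSES_ROOT, on a 64-bit interpreter trying the Wow6432Node view first and then the plain Software\Microsoft path, and returns None when nothing matches.

import platform

if platform.system() == 'Windows':
    from setuptools.msvc import PlatformInfo, RegistryInfo
    ri = RegistryInfo(PlatformInfo('x86'))
    print(ri.lookup(ri.vs, '14.0'))   # Visual Studio 14.0 install dir, or None if not registered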
+ + Parameters + ---------- + registry_info: RegistryInfo + "RegistryInfo" instance. + vc_ver: float + Required Microsoft Visual C++ version. + """ + + # Variables and properties in this class use originals CamelCase variables + # names from Microsoft source files for more easy comparaison. + WinDir = safe_env.get('WinDir', '') + ProgramFiles = safe_env.get('ProgramFiles', '') + ProgramFilesx86 = safe_env.get('ProgramFiles(x86)', ProgramFiles) + + def __init__(self, registry_info, vc_ver=None): + self.ri = registry_info + self.pi = self.ri.pi + self.vc_ver = vc_ver or self._find_latest_available_vc_ver() + + def _find_latest_available_vc_ver(self): + try: + return self.find_available_vc_vers()[-1] + except IndexError: + err = 'No Microsoft Visual C++ version found' + raise distutils.errors.DistutilsPlatformError(err) + + def find_available_vc_vers(self): + """ + Find all available Microsoft Visual C++ versions. + """ + ms = self.ri.microsoft + vckeys = (self.ri.vc, self.ri.vc_for_python, self.ri.vs) + vc_vers = [] + for hkey in self.ri.HKEYS: + for key in vckeys: + try: + bkey = winreg.OpenKey(hkey, ms(key), 0, winreg.KEY_READ) + except (OSError, IOError): + continue + subkeys, values, _ = winreg.QueryInfoKey(bkey) + for i in range(values): + try: + ver = float(winreg.EnumValue(bkey, i)[0]) + if ver not in vc_vers: + vc_vers.append(ver) + except ValueError: + pass + for i in range(subkeys): + try: + ver = float(winreg.EnumKey(bkey, i)) + if ver not in vc_vers: + vc_vers.append(ver) + except ValueError: + pass + return sorted(vc_vers) + + @property + def VSInstallDir(self): + """ + Microsoft Visual Studio directory. + """ + # Default path + name = 'Microsoft Visual Studio %0.1f' % self.vc_ver + default = os.path.join(self.ProgramFilesx86, name) + + # Try to get path from registry, if fail use default path + return self.ri.lookup(self.ri.vs, '%0.1f' % self.vc_ver) or default + + @property + def VCInstallDir(self): + """ + Microsoft Visual C++ directory. + """ + self.VSInstallDir + + guess_vc = self._guess_vc() or self._guess_vc_legacy() + + # Try to get "VC++ for Python" path from registry as default path + reg_path = os.path.join(self.ri.vc_for_python, '%0.1f' % self.vc_ver) + python_vc = self.ri.lookup(reg_path, 'installdir') + default_vc = os.path.join(python_vc, 'VC') if python_vc else guess_vc + + # Try to get path from registry, if fail use default path + path = self.ri.lookup(self.ri.vc, '%0.1f' % self.vc_ver) or default_vc + + if not os.path.isdir(path): + msg = 'Microsoft Visual C++ directory not found' + raise distutils.errors.DistutilsPlatformError(msg) + + return path + + def _guess_vc(self): + """ + Locate Visual C for 2017 + """ + if self.vc_ver <= 14.0: + return + + default = r'VC\Tools\MSVC' + guess_vc = os.path.join(self.VSInstallDir, default) + # Subdir with VC exact version as name + try: + vc_exact_ver = os.listdir(guess_vc)[-1] + return os.path.join(guess_vc, vc_exact_ver) + except (OSError, IOError, IndexError): + pass + + def _guess_vc_legacy(self): + """ + Locate Visual C for versions prior to 2017 + """ + default = r'Microsoft Visual Studio %0.1f\VC' % self.vc_ver + return os.path.join(self.ProgramFilesx86, default) + + @property + def WindowsSdkVersion(self): + """ + Microsoft Windows SDK versions for specified MSVC++ version. 
+ """ + if self.vc_ver <= 9.0: + return ('7.0', '6.1', '6.0a') + elif self.vc_ver == 10.0: + return ('7.1', '7.0a') + elif self.vc_ver == 11.0: + return ('8.0', '8.0a') + elif self.vc_ver == 12.0: + return ('8.1', '8.1a') + elif self.vc_ver >= 14.0: + return ('10.0', '8.1') + + @property + def WindowsSdkLastVersion(self): + """ + Microsoft Windows SDK last version + """ + return self._use_last_dir_name(os.path.join( + self.WindowsSdkDir, 'lib')) + + @property + def WindowsSdkDir(self): + """ + Microsoft Windows SDK directory. + """ + sdkdir = '' + for ver in self.WindowsSdkVersion: + # Try to get it from registry + loc = os.path.join(self.ri.windows_sdk, 'v%s' % ver) + sdkdir = self.ri.lookup(loc, 'installationfolder') + if sdkdir: + break + if not sdkdir or not os.path.isdir(sdkdir): + # Try to get "VC++ for Python" version from registry + path = os.path.join(self.ri.vc_for_python, '%0.1f' % self.vc_ver) + install_base = self.ri.lookup(path, 'installdir') + if install_base: + sdkdir = os.path.join(install_base, 'WinSDK') + if not sdkdir or not os.path.isdir(sdkdir): + # If fail, use default new path + for ver in self.WindowsSdkVersion: + intver = ver[:ver.rfind('.')] + path = r'Microsoft SDKs\Windows Kits\%s' % (intver) + d = os.path.join(self.ProgramFiles, path) + if os.path.isdir(d): + sdkdir = d + if not sdkdir or not os.path.isdir(sdkdir): + # If fail, use default old path + for ver in self.WindowsSdkVersion: + path = r'Microsoft SDKs\Windows\v%s' % ver + d = os.path.join(self.ProgramFiles, path) + if os.path.isdir(d): + sdkdir = d + if not sdkdir: + # If fail, use Platform SDK + sdkdir = os.path.join(self.VCInstallDir, 'PlatformSDK') + return sdkdir + + @property + def WindowsSDKExecutablePath(self): + """ + Microsoft Windows SDK executable directory. + """ + # Find WinSDK NetFx Tools registry dir name + if self.vc_ver <= 11.0: + netfxver = 35 + arch = '' + else: + netfxver = 40 + hidex86 = True if self.vc_ver <= 12.0 else False + arch = self.pi.current_dir(x64=True, hidex86=hidex86) + fx = 'WinSDK-NetFx%dTools%s' % (netfxver, arch.replace('\\', '-')) + + # liste all possibles registry paths + regpaths = [] + if self.vc_ver >= 14.0: + for ver in self.NetFxSdkVersion: + regpaths += [os.path.join(self.ri.netfx_sdk, ver, fx)] + + for ver in self.WindowsSdkVersion: + regpaths += [os.path.join(self.ri.windows_sdk, 'v%sA' % ver, fx)] + + # Return installation folder from the more recent path + for path in regpaths: + execpath = self.ri.lookup(path, 'installationfolder') + if execpath: + break + return execpath + + @property + def FSharpInstallDir(self): + """ + Microsoft Visual F# directory. + """ + path = r'%0.1f\Setup\F#' % self.vc_ver + path = os.path.join(self.ri.visualstudio, path) + return self.ri.lookup(path, 'productdir') or '' + + @property + def UniversalCRTSdkDir(self): + """ + Microsoft Universal CRT SDK directory. + """ + # Set Kit Roots versions for specified MSVC++ version + if self.vc_ver >= 14.0: + vers = ('10', '81') + else: + vers = () + + # Find path of the more recent Kit + for ver in vers: + sdkdir = self.ri.lookup(self.ri.windows_kits_roots, + 'kitsroot%s' % ver) + if sdkdir: + break + return sdkdir or '' + + @property + def UniversalCRTSdkLastVersion(self): + """ + Microsoft Universal C Runtime SDK last version + """ + return self._use_last_dir_name(os.path.join( + self.UniversalCRTSdkDir, 'lib')) + + @property + def NetFxSdkVersion(self): + """ + Microsoft .NET Framework SDK versions. 
+ """ + # Set FxSdk versions for specified MSVC++ version + if self.vc_ver >= 14.0: + return ('4.6.1', '4.6') + else: + return () + + @property + def NetFxSdkDir(self): + """ + Microsoft .NET Framework SDK directory. + """ + for ver in self.NetFxSdkVersion: + loc = os.path.join(self.ri.netfx_sdk, ver) + sdkdir = self.ri.lookup(loc, 'kitsinstallationfolder') + if sdkdir: + break + return sdkdir or '' + + @property + def FrameworkDir32(self): + """ + Microsoft .NET Framework 32bit directory. + """ + # Default path + guess_fw = os.path.join(self.WinDir, r'Microsoft.NET\Framework') + + # Try to get path from registry, if fail use default path + return self.ri.lookup(self.ri.vc, 'frameworkdir32') or guess_fw + + @property + def FrameworkDir64(self): + """ + Microsoft .NET Framework 64bit directory. + """ + # Default path + guess_fw = os.path.join(self.WinDir, r'Microsoft.NET\Framework64') + + # Try to get path from registry, if fail use default path + return self.ri.lookup(self.ri.vc, 'frameworkdir64') or guess_fw + + @property + def FrameworkVersion32(self): + """ + Microsoft .NET Framework 32bit versions. + """ + return self._find_dot_net_versions(32) + + @property + def FrameworkVersion64(self): + """ + Microsoft .NET Framework 64bit versions. + """ + return self._find_dot_net_versions(64) + + def _find_dot_net_versions(self, bits): + """ + Find Microsoft .NET Framework versions. + + Parameters + ---------- + bits: int + Platform number of bits: 32 or 64. + """ + # Find actual .NET version in registry + reg_ver = self.ri.lookup(self.ri.vc, 'frameworkver%d' % bits) + dot_net_dir = getattr(self, 'FrameworkDir%d' % bits) + ver = reg_ver or self._use_last_dir_name(dot_net_dir, 'v') or '' + + # Set .NET versions for specified MSVC++ version + if self.vc_ver >= 12.0: + frameworkver = (ver, 'v4.0') + elif self.vc_ver >= 10.0: + frameworkver = ('v4.0.30319' if ver.lower()[:2] != 'v4' else ver, + 'v3.5') + elif self.vc_ver == 9.0: + frameworkver = ('v3.5', 'v2.0.50727') + if self.vc_ver == 8.0: + frameworkver = ('v3.0', 'v2.0.50727') + return frameworkver + + def _use_last_dir_name(self, path, prefix=''): + """ + Return name of the last dir in path or '' if no dir found. + + Parameters + ---------- + path: str + Use dirs in this path + prefix: str + Use only dirs startings by this prefix + """ + matching_dirs = ( + dir_name + for dir_name in reversed(os.listdir(path)) + if os.path.isdir(os.path.join(path, dir_name)) and + dir_name.startswith(prefix) + ) + return next(matching_dirs, None) or '' + + +class EnvironmentInfo: + """ + Return environment variables for specified Microsoft Visual C++ version + and platform : Lib, Include, Path and libpath. + + This function is compatible with Microsoft Visual C++ 9.0 to 14.0. + + Script created by analysing Microsoft environment configuration files like + "vcvars[...].bat", "SetEnv.Cmd", "vcbuildtools.bat", ... + + Parameters + ---------- + arch: str + Target architecture. + vc_ver: float + Required Microsoft Visual C++ version. If not set, autodetect the last + version. + vc_min_ver: float + Minimum Microsoft Visual C++ version. + """ + + # Variables and properties in this class use originals CamelCase variables + # names from Microsoft source files for more easy comparaison. 
+ + def __init__(self, arch, vc_ver=None, vc_min_ver=0): + self.pi = PlatformInfo(arch) + self.ri = RegistryInfo(self.pi) + self.si = SystemInfo(self.ri, vc_ver) + + if self.vc_ver < vc_min_ver: + err = 'No suitable Microsoft Visual C++ version found' + raise distutils.errors.DistutilsPlatformError(err) + + @property + def vc_ver(self): + """ + Microsoft Visual C++ version. + """ + return self.si.vc_ver + + @property + def VSTools(self): + """ + Microsoft Visual Studio Tools + """ + paths = [r'Common7\IDE', r'Common7\Tools'] + + if self.vc_ver >= 14.0: + arch_subdir = self.pi.current_dir(hidex86=True, x64=True) + paths += [r'Common7\IDE\CommonExtensions\Microsoft\TestWindow'] + paths += [r'Team Tools\Performance Tools'] + paths += [r'Team Tools\Performance Tools%s' % arch_subdir] + + return [os.path.join(self.si.VSInstallDir, path) for path in paths] + + @property + def VCIncludes(self): + """ + Microsoft Visual C++ & Microsoft Foundation Class Includes + """ + return [os.path.join(self.si.VCInstallDir, 'Include'), + os.path.join(self.si.VCInstallDir, r'ATLMFC\Include')] + + @property + def VCLibraries(self): + """ + Microsoft Visual C++ & Microsoft Foundation Class Libraries + """ + if self.vc_ver >= 15.0: + arch_subdir = self.pi.target_dir(x64=True) + else: + arch_subdir = self.pi.target_dir(hidex86=True) + paths = ['Lib%s' % arch_subdir, r'ATLMFC\Lib%s' % arch_subdir] + + if self.vc_ver >= 14.0: + paths += [r'Lib\store%s' % arch_subdir] + + return [os.path.join(self.si.VCInstallDir, path) for path in paths] + + @property + def VCStoreRefs(self): + """ + Microsoft Visual C++ store references Libraries + """ + if self.vc_ver < 14.0: + return [] + return [os.path.join(self.si.VCInstallDir, r'Lib\store\references')] + + @property + def VCTools(self): + """ + Microsoft Visual C++ Tools + """ + si = self.si + tools = [os.path.join(si.VCInstallDir, 'VCPackages')] + + forcex86 = True if self.vc_ver <= 10.0 else False + arch_subdir = self.pi.cross_dir(forcex86) + if arch_subdir: + tools += [os.path.join(si.VCInstallDir, 'Bin%s' % arch_subdir)] + + if self.vc_ver == 14.0: + path = 'Bin%s' % self.pi.current_dir(hidex86=True) + tools += [os.path.join(si.VCInstallDir, path)] + + elif self.vc_ver >= 15.0: + host_dir = (r'bin\HostX86%s' if self.pi.current_is_x86() else + r'bin\HostX64%s') + tools += [os.path.join( + si.VCInstallDir, host_dir % self.pi.target_dir(x64=True))] + + if self.pi.current_cpu != self.pi.target_cpu: + tools += [os.path.join( + si.VCInstallDir, host_dir % self.pi.current_dir(x64=True))] + + else: + tools += [os.path.join(si.VCInstallDir, 'Bin')] + + return tools + + @property + def OSLibraries(self): + """ + Microsoft Windows SDK Libraries + """ + if self.vc_ver <= 10.0: + arch_subdir = self.pi.target_dir(hidex86=True, x64=True) + return [os.path.join(self.si.WindowsSdkDir, 'Lib%s' % arch_subdir)] + + else: + arch_subdir = self.pi.target_dir(x64=True) + lib = os.path.join(self.si.WindowsSdkDir, 'lib') + libver = self._sdk_subdir + return [os.path.join(lib, '%sum%s' % (libver , arch_subdir))] + + @property + def OSIncludes(self): + """ + Microsoft Windows SDK Include + """ + include = os.path.join(self.si.WindowsSdkDir, 'include') + + if self.vc_ver <= 10.0: + return [include, os.path.join(include, 'gl')] + + else: + if self.vc_ver >= 14.0: + sdkver = self._sdk_subdir + else: + sdkver = '' + return [os.path.join(include, '%sshared' % sdkver), + os.path.join(include, '%sum' % sdkver), + os.path.join(include, '%swinrt' % sdkver)] + + @property + def OSLibpath(self): + """ + 
Microsoft Windows SDK Libraries Paths + """ + ref = os.path.join(self.si.WindowsSdkDir, 'References') + libpath = [] + + if self.vc_ver <= 9.0: + libpath += self.OSLibraries + + if self.vc_ver >= 11.0: + libpath += [os.path.join(ref, r'CommonConfiguration\Neutral')] + + if self.vc_ver >= 14.0: + libpath += [ + ref, + os.path.join(self.si.WindowsSdkDir, 'UnionMetadata'), + os.path.join( + ref, + 'Windows.Foundation.UniversalApiContract', + '1.0.0.0', + ), + os.path.join( + ref, + 'Windows.Foundation.FoundationContract', + '1.0.0.0', + ), + os.path.join( + ref, + 'Windows.Networking.Connectivity.WwanContract', + '1.0.0.0', + ), + os.path.join( + self.si.WindowsSdkDir, + 'ExtensionSDKs', + 'Microsoft.VCLibs', + '%0.1f' % self.vc_ver, + 'References', + 'CommonConfiguration', + 'neutral', + ), + ] + return libpath + + @property + def SdkTools(self): + """ + Microsoft Windows SDK Tools + """ + return list(self._sdk_tools()) + + def _sdk_tools(self): + """ + Microsoft Windows SDK Tools paths generator + """ + if self.vc_ver < 15.0: + bin_dir = 'Bin' if self.vc_ver <= 11.0 else r'Bin\x86' + yield os.path.join(self.si.WindowsSdkDir, bin_dir) + + if not self.pi.current_is_x86(): + arch_subdir = self.pi.current_dir(x64=True) + path = 'Bin%s' % arch_subdir + yield os.path.join(self.si.WindowsSdkDir, path) + + if self.vc_ver == 10.0 or self.vc_ver == 11.0: + if self.pi.target_is_x86(): + arch_subdir = '' + else: + arch_subdir = self.pi.current_dir(hidex86=True, x64=True) + path = r'Bin\NETFX 4.0 Tools%s' % arch_subdir + yield os.path.join(self.si.WindowsSdkDir, path) + + elif self.vc_ver >= 15.0: + path = os.path.join(self.si.WindowsSdkDir, 'Bin') + arch_subdir = self.pi.current_dir(x64=True) + sdkver = self.si.WindowsSdkLastVersion + yield os.path.join(path, '%s%s' % (sdkver, arch_subdir)) + + if self.si.WindowsSDKExecutablePath: + yield self.si.WindowsSDKExecutablePath + + @property + def _sdk_subdir(self): + """ + Microsoft Windows SDK version subdir + """ + ucrtver = self.si.WindowsSdkLastVersion + return ('%s\\' % ucrtver) if ucrtver else '' + + @property + def SdkSetup(self): + """ + Microsoft Windows SDK Setup + """ + if self.vc_ver > 9.0: + return [] + + return [os.path.join(self.si.WindowsSdkDir, 'Setup')] + + @property + def FxTools(self): + """ + Microsoft .NET Framework Tools + """ + pi = self.pi + si = self.si + + if self.vc_ver <= 10.0: + include32 = True + include64 = not pi.target_is_x86() and not pi.current_is_x86() + else: + include32 = pi.target_is_x86() or pi.current_is_x86() + include64 = pi.current_cpu == 'amd64' or pi.target_cpu == 'amd64' + + tools = [] + if include32: + tools += [os.path.join(si.FrameworkDir32, ver) + for ver in si.FrameworkVersion32] + if include64: + tools += [os.path.join(si.FrameworkDir64, ver) + for ver in si.FrameworkVersion64] + return tools + + @property + def NetFxSDKLibraries(self): + """ + Microsoft .Net Framework SDK Libraries + """ + if self.vc_ver < 14.0 or not self.si.NetFxSdkDir: + return [] + + arch_subdir = self.pi.target_dir(x64=True) + return [os.path.join(self.si.NetFxSdkDir, r'lib\um%s' % arch_subdir)] + + @property + def NetFxSDKIncludes(self): + """ + Microsoft .Net Framework SDK Includes + """ + if self.vc_ver < 14.0 or not self.si.NetFxSdkDir: + return [] + + return [os.path.join(self.si.NetFxSdkDir, r'include\um')] + + @property + def VsTDb(self): + """ + Microsoft Visual Studio Team System Database + """ + return [os.path.join(self.si.VSInstallDir, r'VSTSDB\Deploy')] + + @property + def MSBuild(self): + """ + Microsoft Build Engine + 
""" + if self.vc_ver < 12.0: + return [] + elif self.vc_ver < 15.0: + base_path = self.si.ProgramFilesx86 + arch_subdir = self.pi.current_dir(hidex86=True) + else: + base_path = self.si.VSInstallDir + arch_subdir = '' + + path = r'MSBuild\%0.1f\bin%s' % (self.vc_ver, arch_subdir) + build = [os.path.join(base_path, path)] + + if self.vc_ver >= 15.0: + # Add Roslyn C# & Visual Basic Compiler + build += [os.path.join(base_path, path, 'Roslyn')] + + return build + + @property + def HTMLHelpWorkshop(self): + """ + Microsoft HTML Help Workshop + """ + if self.vc_ver < 11.0: + return [] + + return [os.path.join(self.si.ProgramFilesx86, 'HTML Help Workshop')] + + @property + def UCRTLibraries(self): + """ + Microsoft Universal C Runtime SDK Libraries + """ + if self.vc_ver < 14.0: + return [] + + arch_subdir = self.pi.target_dir(x64=True) + lib = os.path.join(self.si.UniversalCRTSdkDir, 'lib') + ucrtver = self._ucrt_subdir + return [os.path.join(lib, '%sucrt%s' % (ucrtver, arch_subdir))] + + @property + def UCRTIncludes(self): + """ + Microsoft Universal C Runtime SDK Include + """ + if self.vc_ver < 14.0: + return [] + + include = os.path.join(self.si.UniversalCRTSdkDir, 'include') + return [os.path.join(include, '%sucrt' % self._ucrt_subdir)] + + @property + def _ucrt_subdir(self): + """ + Microsoft Universal C Runtime SDK version subdir + """ + ucrtver = self.si.UniversalCRTSdkLastVersion + return ('%s\\' % ucrtver) if ucrtver else '' + + @property + def FSharp(self): + """ + Microsoft Visual F# + """ + if self.vc_ver < 11.0 and self.vc_ver > 12.0: + return [] + + return self.si.FSharpInstallDir + + @property + def VCRuntimeRedist(self): + """ + Microsoft Visual C++ runtime redistribuable dll + """ + arch_subdir = self.pi.target_dir(x64=True) + if self.vc_ver < 15: + redist_path = self.si.VCInstallDir + vcruntime = 'redist%s\\Microsoft.VC%d0.CRT\\vcruntime%d0.dll' + else: + redist_path = self.si.VCInstallDir.replace('\\Tools', '\\Redist') + vcruntime = 'onecore%s\\Microsoft.VC%d0.CRT\\vcruntime%d0.dll' + + # Visual Studio 2017 is still Visual C++ 14.0 + dll_ver = 14.0 if self.vc_ver == 15 else self.vc_ver + + vcruntime = vcruntime % (arch_subdir, self.vc_ver, dll_ver) + return os.path.join(redist_path, vcruntime) + + def return_env(self, exists=True): + """ + Return environment dict. + + Parameters + ---------- + exists: bool + It True, only return existing paths. + """ + env = dict( + include=self._build_paths('include', + [self.VCIncludes, + self.OSIncludes, + self.UCRTIncludes, + self.NetFxSDKIncludes], + exists), + lib=self._build_paths('lib', + [self.VCLibraries, + self.OSLibraries, + self.FxTools, + self.UCRTLibraries, + self.NetFxSDKLibraries], + exists), + libpath=self._build_paths('libpath', + [self.VCLibraries, + self.FxTools, + self.VCStoreRefs, + self.OSLibpath], + exists), + path=self._build_paths('path', + [self.VCTools, + self.VSTools, + self.VsTDb, + self.SdkTools, + self.SdkSetup, + self.FxTools, + self.MSBuild, + self.HTMLHelpWorkshop, + self.FSharp], + exists), + ) + if self.vc_ver >= 14 and os.path.isfile(self.VCRuntimeRedist): + env['py_vcruntime_redist'] = self.VCRuntimeRedist + return env + + def _build_paths(self, name, spec_path_lists, exists): + """ + Given an environment variable name and specified paths, + return a pathsep-separated string of paths containing + unique, extant, directories from those paths and from + the environment variable. Raise an error if no paths + are resolved. 
+ """ + # flatten spec_path_lists + spec_paths = itertools.chain.from_iterable(spec_path_lists) + env_paths = safe_env.get(name, '').split(os.pathsep) + paths = itertools.chain(spec_paths, env_paths) + extant_paths = list(filter(os.path.isdir, paths)) if exists else paths + if not extant_paths: + msg = "%s environment variable is empty" % name.upper() + raise distutils.errors.DistutilsPlatformError(msg) + unique_paths = self._unique_everseen(extant_paths) + return os.pathsep.join(unique_paths) + + # from Python docs + def _unique_everseen(self, iterable, key=None): + """ + List unique elements, preserving order. + Remember all elements ever seen. + + _unique_everseen('AAAABBBCCDAABBB') --> A B C D + + _unique_everseen('ABBCcAD', str.lower) --> A B C D + """ + seen = set() + seen_add = seen.add + if key is None: + for element in filterfalse(seen.__contains__, iterable): + seen_add(element) + yield element + else: + for element in iterable: + k = key(element) + if k not in seen: + seen_add(k) + yield element diff --git a/lib/python3.4/site-packages/setuptools/msvc9_support.py b/lib/python3.4/site-packages/setuptools/msvc9_support.py deleted file mode 100644 index a69c747..0000000 --- a/lib/python3.4/site-packages/setuptools/msvc9_support.py +++ /dev/null @@ -1,63 +0,0 @@ -try: - import distutils.msvc9compiler -except ImportError: - pass - -unpatched = dict() - -def patch_for_specialized_compiler(): - """ - Patch functions in distutils.msvc9compiler to use the standalone compiler - build for Python (Windows only). Fall back to original behavior when the - standalone compiler is not available. - """ - if 'distutils' not in globals(): - # The module isn't available to be patched - return - - if unpatched: - # Already patched - return - - unpatched.update(vars(distutils.msvc9compiler)) - - distutils.msvc9compiler.find_vcvarsall = find_vcvarsall - distutils.msvc9compiler.query_vcvarsall = query_vcvarsall - -def find_vcvarsall(version): - Reg = distutils.msvc9compiler.Reg - VC_BASE = r'Software\%sMicrosoft\DevDiv\VCForPython\%0.1f' - key = VC_BASE % ('', version) - try: - # Per-user installs register the compiler path here - productdir = Reg.get_value(key, "installdir") - except KeyError: - try: - # All-user installs on a 64-bit system register here - key = VC_BASE % ('Wow6432Node\\', version) - productdir = Reg.get_value(key, "installdir") - except KeyError: - productdir = None - - if productdir: - import os - vcvarsall = os.path.join(productdir, "vcvarsall.bat") - if os.path.isfile(vcvarsall): - return vcvarsall - - return unpatched['find_vcvarsall'](version) - -def query_vcvarsall(version, *args, **kwargs): - try: - return unpatched['query_vcvarsall'](version, *args, **kwargs) - except distutils.errors.DistutilsPlatformError as exc: - if exc and "vcvarsall.bat" in exc.args[0]: - message = 'Microsoft Visual C++ %0.1f is required (%s).' % (version, exc.args[0]) - if int(version) == 9: - # This redirection link is maintained by Microsoft. - # Contact vspython@microsoft.com if it needs updating. 
- raise distutils.errors.DistutilsPlatformError( - message + ' Get it from http://aka.ms/vcpython27' - ) - raise distutils.errors.DistutilsPlatformError(message) - raise diff --git a/lib/python3.4/site-packages/setuptools/namespaces.py b/lib/python3.4/site-packages/setuptools/namespaces.py new file mode 100644 index 0000000..dc16106 --- /dev/null +++ b/lib/python3.4/site-packages/setuptools/namespaces.py @@ -0,0 +1,107 @@ +import os +from distutils import log +import itertools + +from setuptools.extern.six.moves import map + + +flatten = itertools.chain.from_iterable + + +class Installer: + + nspkg_ext = '-nspkg.pth' + + def install_namespaces(self): + nsp = self._get_all_ns_packages() + if not nsp: + return + filename, ext = os.path.splitext(self._get_target()) + filename += self.nspkg_ext + self.outputs.append(filename) + log.info("Installing %s", filename) + lines = map(self._gen_nspkg_line, nsp) + + if self.dry_run: + # always generate the lines, even in dry run + list(lines) + return + + with open(filename, 'wt') as f: + f.writelines(lines) + + def uninstall_namespaces(self): + filename, ext = os.path.splitext(self._get_target()) + filename += self.nspkg_ext + if not os.path.exists(filename): + return + log.info("Removing %s", filename) + os.remove(filename) + + def _get_target(self): + return self.target + + _nspkg_tmpl = ( + "import sys, types, os", + "has_mfs = sys.version_info > (3, 5)", + "p = os.path.join(%(root)s, *%(pth)r)", + "importlib = has_mfs and __import__('importlib.util')", + "has_mfs and __import__('importlib.machinery')", + "m = has_mfs and " + "sys.modules.setdefault(%(pkg)r, " + "importlib.util.module_from_spec(" + "importlib.machinery.PathFinder.find_spec(%(pkg)r, " + "[os.path.dirname(p)])))", + "m = m or " + "sys.modules.setdefault(%(pkg)r, types.ModuleType(%(pkg)r))", + "mp = (m or []) and m.__dict__.setdefault('__path__',[])", + "(p not in mp) and mp.append(p)", + ) + "lines for the namespace installer" + + _nspkg_tmpl_multi = ( + 'm and setattr(sys.modules[%(parent)r], %(child)r, m)', + ) + "additional line(s) when a parent package is indicated" + + def _get_root(self): + return "sys._getframe(1).f_locals['sitedir']" + + def _gen_nspkg_line(self, pkg): + # ensure pkg is not a unicode string under Python 2.7 + pkg = str(pkg) + pth = tuple(pkg.split('.')) + root = self._get_root() + tmpl_lines = self._nspkg_tmpl + parent, sep, child = pkg.rpartition('.') + if parent: + tmpl_lines += self._nspkg_tmpl_multi + return ';'.join(tmpl_lines) % locals() + '\n' + + def _get_all_ns_packages(self): + """Return sorted list of all package namespaces""" + pkgs = self.distribution.namespace_packages or [] + return sorted(flatten(map(self._pkg_names, pkgs))) + + @staticmethod + def _pkg_names(pkg): + """ + Given a namespace package, yield the components of that + package. 
+ + >>> names = Installer._pkg_names('a.b.c') + >>> set(names) == set(['a', 'a.b', 'a.b.c']) + True + """ + parts = pkg.split('.') + while parts: + yield '.'.join(parts) + parts.pop() + + +class DevelopInstaller(Installer): + def _get_root(self): + return repr(str(self.egg_path)) + + def _get_target(self): + return self.egg_link diff --git a/lib/python3.4/site-packages/setuptools/package_index.py b/lib/python3.4/site-packages/setuptools/package_index.py index c53343e..a6363b1 100644 --- a/lib/python3.4/site-packages/setuptools/package_index.py +++ b/lib/python3.4/site-packages/setuptools/package_index.py @@ -17,9 +17,10 @@ except ImportError: from setuptools.extern import six from setuptools.extern.six.moves import urllib, http_client, configparser, map +import setuptools from pkg_resources import ( CHECKOUT_DIST, Distribution, BINARY_DIST, normalize_path, SOURCE_DIST, - require, Environment, find_distributions, safe_name, safe_version, + Environment, find_distributions, safe_name, safe_version, to_filename, Requirement, DEVELOP_DIST, ) from setuptools import ssl_support @@ -29,14 +30,14 @@ from fnmatch import translate from setuptools.py26compat import strip_fragment from setuptools.py27compat import get_all_headers -EGG_FRAGMENT = re.compile(r'^egg=([-A-Za-z0-9_.]+)$') +EGG_FRAGMENT = re.compile(r'^egg=([-A-Za-z0-9_.+!]+)$') HREF = re.compile("""href\\s*=\\s*['"]?([^'"> ]+)""", re.I) # this is here to fix emacs' cruddy broken syntax highlighting PYPI_MD5 = re.compile( - '([^<]+)\n\s+\\(md5\\)' + '([^<]+)\n\\s+\\(md5\\)' ) -URL_SCHEME = re.compile('([-+.a-z0-9]{2,}):',re.I).match +URL_SCHEME = re.compile('([-+.a-z0-9]{2,}):', re.I).match EXTENSIONS = ".tar.gz .tar.bz2 .tar .zip .tgz".split() __all__ = [ @@ -46,6 +47,19 @@ __all__ = [ _SOCKET_TIMEOUT = 15 +_tmpl = "setuptools/{setuptools.__version__} Python-urllib/{py_major}" +user_agent = _tmpl.format(py_major=sys.version[:3], setuptools=setuptools) + + +def parse_requirement_arg(spec): + try: + return Requirement.parse(spec) + except ValueError: + raise DistutilsError( + "Not a URL, existing file, or requirement spec: %r" % (spec,) + ) + + def parse_bdist_wininst(name): """Return (base,pyversion) or (None,None) for possible .exe name""" @@ -56,45 +70,49 @@ def parse_bdist_wininst(name): if lower.endswith('.win32.exe'): base = name[:-10] plat = 'win32' - elif lower.startswith('.win32-py',-16): + elif lower.startswith('.win32-py', -16): py_ver = name[-7:-4] base = name[:-16] plat = 'win32' elif lower.endswith('.win-amd64.exe'): base = name[:-14] plat = 'win-amd64' - elif lower.startswith('.win-amd64-py',-20): + elif lower.startswith('.win-amd64-py', -20): py_ver = name[-7:-4] base = name[:-20] plat = 'win-amd64' - return base,py_ver,plat + return base, py_ver, plat def egg_info_for_url(url): parts = urllib.parse.urlparse(url) scheme, server, path, parameters, query, fragment = parts base = urllib.parse.unquote(path.split('/')[-1]) - if server=='sourceforge.net' and base=='download': # XXX Yuck + if server == 'sourceforge.net' and base == 'download': # XXX Yuck base = urllib.parse.unquote(path.split('/')[-2]) - if '#' in base: base, fragment = base.split('#',1) - return base,fragment + if '#' in base: + base, fragment = base.split('#', 1) + return base, fragment + def distros_for_url(url, metadata=None): """Yield egg or source distribution objects that might be found at a URL""" base, fragment = egg_info_for_url(url) - for dist in distros_for_location(url, base, metadata): yield dist + for dist in distros_for_location(url, base, metadata): 
+ yield dist if fragment: match = EGG_FRAGMENT.match(fragment) if match: for dist in interpret_distro_name( - url, match.group(1), metadata, precedence = CHECKOUT_DIST + url, match.group(1), metadata, precedence=CHECKOUT_DIST ): yield dist + def distros_for_location(location, basename, metadata=None): """Yield egg or source distribution objects based on basename""" if basename.endswith('.egg.zip'): - basename = basename[:-4] # strip the .zip + basename = basename[:-4] # strip the .zip if basename.endswith('.egg') and '-' in basename: # only one, unambiguous interpretation return [Distribution.from_location(location, basename, metadata)] @@ -112,6 +130,7 @@ def distros_for_location(location, basename, metadata=None): return interpret_distro_name(location, basename, metadata) return [] # no extension matched + def distros_for_filename(filename, metadata=None): """Yield possible egg or source distribution objects based on a filename""" return distros_for_location( @@ -142,17 +161,18 @@ def interpret_distro_name( # versions in distribution archive names (sdist and bdist). parts = basename.split('-') - if not py_version and any(re.match('py\d\.\d$', p) for p in parts[2:]): + if not py_version and any(re.match(r'py\d\.\d$', p) for p in parts[2:]): # it is a bdist_dumb, not an sdist -- bail out return - for p in range(1,len(parts)+1): + for p in range(1, len(parts) + 1): yield Distribution( location, metadata, '-'.join(parts[:p]), '-'.join(parts[p:]), - py_version=py_version, precedence = precedence, - platform = platform + py_version=py_version, precedence=precedence, + platform=platform ) + # From Python 2.7 docs def unique_everseen(iterable, key=None): "List unique elements, preserving order. Remember all elements ever seen." @@ -171,19 +191,24 @@ def unique_everseen(iterable, key=None): seen_add(k) yield element + def unique_values(func): """ Wrap a function returning an iterable such that the resulting iterable only ever yields unique items. """ + @wraps(func) def wrapper(*args, **kwargs): return unique_everseen(func(*args, **kwargs)) + return wrapper -REL = re.compile("""<([^>]*\srel\s*=\s*['"]?([^'">]+)[^>]*)>""", re.I) + +REL = re.compile(r"""<([^>]*\srel\s*=\s*['"]?([^'">]+)[^>]*)>""", re.I) # this line is here to fix emacs' cruddy broken syntax highlighting + @unique_values def find_external_links(url, page): """Find rel="homepage" and rel="download" links in `page`, yielding URLs""" @@ -197,19 +222,17 @@ def find_external_links(url, page): for tag in ("Home Page", "Download URL"): pos = page.find(tag) - if pos!=-1: - match = HREF.search(page,pos) + if pos != -1: + match = HREF.search(page, pos) if match: yield urllib.parse.urljoin(url, htmldecode(match.group(1))) -user_agent = "Python-urllib/%s setuptools/%s" % ( - sys.version[:3], require('setuptools')[0].version -) class ContentChecker(object): """ A null content checker that defines the interface for checking content """ + def feed(self, block): """ Feed a block of data to the hash. 
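The widened EGG_FRAGMENT pattern above lets '#egg=' fragments carry '+' and '!', as used by PEP 440 local versions and epochs. A small sketch of the two helpers that consume such URLs; the URL itself is hypothetical.

from setuptools.package_index import egg_info_for_url, distros_for_url

url = 'https://example.com/dl/Foo-1.0+local.tar.gz#egg=Foo-1.0+local'
print(egg_info_for_url(url))        # ('Foo-1.0+local.tar.gz', 'egg=Foo-1.0+local')
for dist in distros_for_url(url):   # candidate Distributions from the filename and the fragment
    print(dist.project_name)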
@@ -229,6 +252,7 @@ class ContentChecker(object): """ return + class HashChecker(ContentChecker): pattern = re.compile( r'(?Psha1|sha224|sha384|sha256|sha512|md5)=' @@ -269,16 +293,22 @@ class PackageIndex(Environment): self, index_url="https://pypi.python.org/simple", hosts=('*',), ca_bundle=None, verify_ssl=True, *args, **kw ): - Environment.__init__(self,*args,**kw) - self.index_url = index_url + "/"[:not index_url.endswith('/')] + Environment.__init__(self, *args, **kw) + self.index_url = index_url + "/" [:not index_url.endswith('/')] self.scanned_urls = {} self.fetched_urls = {} self.package_pages = {} - self.allows = re.compile('|'.join(map(translate,hosts))).match + self.allows = re.compile('|'.join(map(translate, hosts))).match self.to_scan = [] - if verify_ssl and ssl_support.is_available and (ca_bundle or ssl_support.find_ca_bundle()): + use_ssl = ( + verify_ssl + and ssl_support.is_available + and (ca_bundle or ssl_support.find_ca_bundle()) + ) + if use_ssl: self.opener = ssl_support.opener_for(ca_bundle) - else: self.opener = urllib.request.urlopen + else: + self.opener = urllib.request.urlopen def process_url(self, url, retrieve=False): """Evaluate a URL as a possible download, and maybe retrieve it""" @@ -304,17 +334,19 @@ class PackageIndex(Environment): return self.info("Reading %s", url) - self.fetched_urls[url] = True # prevent multiple fetch attempts - f = self.open_url(url, "Download error on %s: %%s -- Some packages may not be found!" % url) - if f is None: return + self.fetched_urls[url] = True # prevent multiple fetch attempts + tmpl = "Download error on %s: %%s -- Some packages may not be found!" + f = self.open_url(url, tmpl % url) + if f is None: + return self.fetched_urls[f.url] = True if 'html' not in f.headers.get('content-type', '').lower(): - f.close() # not html, we can't process it + f.close() # not html, we can't process it return - base = f.url # handle redirects + base = f.url # handle redirects page = f.read() - if not isinstance(page, str): # We are in Python 3 and got bytes. We want str. + if not isinstance(page, str): # We are in Python 3 and got bytes. We want str. 
if isinstance(f, urllib.error.HTTPError): # Errors have no charset, assume latin1: charset = 'latin-1' @@ -325,7 +357,7 @@ class PackageIndex(Environment): for match in HREF.finditer(page): link = urllib.parse.urljoin(base, htmldecode(match.group(1))) self.process_url(link) - if url.startswith(self.index_url) and getattr(f,'code',None)!=404: + if url.startswith(self.index_url) and getattr(f, 'code', None) != 404: page = self.process_index(url, page) def process_filename(self, fn, nested=False): @@ -337,7 +369,7 @@ class PackageIndex(Environment): if os.path.isdir(fn) and not nested: path = os.path.realpath(fn) for item in os.listdir(path): - self.process_filename(os.path.join(path,item), True) + self.process_filename(os.path.join(path, item), True) dists = distros_for_filename(fn) if dists: @@ -346,7 +378,8 @@ class PackageIndex(Environment): def url_ok(self, url, fatal=False): s = URL_SCHEME(url) - if (s and s.group(1).lower()=='file') or self.allows(urllib.parse.urlparse(url)[1]): + is_file = s and s.group(1).lower() == 'file' + if is_file or self.allows(urllib.parse.urlparse(url)[1]): return True msg = ("\nNote: Bypassing %s (disallowed host; see " "http://bit.ly/1dg9ijs for details).\n") @@ -381,19 +414,20 @@ class PackageIndex(Environment): dist.precedence = SOURCE_DIST self.add(dist) - def process_index(self,url,page): + def process_index(self, url, page): """Process the contents of a PyPI page""" + def scan(link): # Process a URL to see if it's for a package page if link.startswith(self.index_url): parts = list(map( urllib.parse.unquote, link[len(self.index_url):].split('/') )) - if len(parts)==2 and '#' not in parts[1]: + if len(parts) == 2 and '#' not in parts[1]: # it's a package page, sanitize and index it pkg = safe_name(parts[0]) ver = safe_version(parts[1]) - self.package_pages.setdefault(pkg.lower(),{})[link] = True + self.package_pages.setdefault(pkg.lower(), {})[link] = True return to_filename(pkg), to_filename(ver) return None, None @@ -404,7 +438,7 @@ class PackageIndex(Environment): except ValueError: pass - pkg, ver = scan(url) # ensure this page is in the page index + pkg, ver = scan(url) # ensure this page is in the page index if pkg: # process individual package page for new_url in find_external_links(url, page): @@ -412,16 +446,16 @@ class PackageIndex(Environment): base, frag = egg_info_for_url(new_url) if base.endswith('.py') and not frag: if ver: - new_url+='#egg=%s-%s' % (pkg,ver) + new_url += '#egg=%s-%s' % (pkg, ver) else: self.need_version_info(url) self.scan_url(new_url) return PYPI_MD5.sub( - lambda m: '%s' % m.group(1,3,2), page + lambda m: '%s' % m.group(1, 3, 2), page ) else: - return "" # no sense double-scanning non-package pages + return "" # no sense double-scanning non-package pages def need_version_info(self, url): self.scan_all( @@ -431,24 +465,25 @@ class PackageIndex(Environment): def scan_all(self, msg=None, *args): if self.index_url not in self.fetched_urls: - if msg: self.warn(msg,*args) + if msg: + self.warn(msg, *args) self.info( "Scanning index of all packages (this may take a while)" ) self.scan_url(self.index_url) def find_packages(self, requirement): - self.scan_url(self.index_url + requirement.unsafe_name+'/') + self.scan_url(self.index_url + requirement.unsafe_name + '/') if not self.package_pages.get(requirement.key): # Fall back to safe version of the name - self.scan_url(self.index_url + requirement.project_name+'/') + self.scan_url(self.index_url + requirement.project_name + '/') if not self.package_pages.get(requirement.key): # 
We couldn't find the target package, so search the index page too self.not_found_in_index(requirement) - for url in list(self.package_pages.get(requirement.key,())): + for url in list(self.package_pages.get(requirement.key, ())): # scan each page that might be related to the desired package self.scan_url(url) @@ -459,7 +494,7 @@ class PackageIndex(Environment): if dist in requirement: return dist self.debug("%s does not match %s", requirement, dist) - return super(PackageIndex, self).obtain(requirement,installer) + return super(PackageIndex, self).obtain(requirement, installer) def check_hash(self, checker, filename, tfp): """ @@ -480,10 +515,10 @@ class PackageIndex(Environment): """Add `urls` to the list that will be prescanned for searches""" for url in urls: if ( - self.to_scan is None # if we have already "gone online" - or not URL_SCHEME(url) # or it's a local file/directory + self.to_scan is None # if we have already "gone online" + or not URL_SCHEME(url) # or it's a local file/directory or url.startswith('file:') - or list(distros_for_url(url)) # or a direct package link + or list(distros_for_url(url)) # or a direct package link ): # then go ahead and process it now self.scan_url(url) @@ -495,12 +530,12 @@ class PackageIndex(Environment): """Scan urls scheduled for prescanning (e.g. --find-links)""" if self.to_scan: list(map(self.scan_url, self.to_scan)) - self.to_scan = None # from now on, go ahead and process immediately + self.to_scan = None # from now on, go ahead and process immediately def not_found_in_index(self, requirement): - if self[requirement.key]: # we've seen at least one distro + if self[requirement.key]: # we've seen at least one distro meth, msg = self.info, "Couldn't retrieve index page for %r" - else: # no distros seen for this name, might be misspelled + else: # no distros seen for this name, might be misspelled meth, msg = (self.warn, "Couldn't find index page for %r (maybe misspelled?)") meth(msg, requirement.unsafe_name) @@ -524,27 +559,21 @@ class PackageIndex(Environment): of `tmpdir`, and the local filename is returned. Various errors may be raised if a problem occurs during downloading. 
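download() is the high-level entry point: URLs are fetched into tmpdir, existing local paths are returned as-is, and anything else goes through parse_requirement_arg() and fetch_distribution(), which after this patch checks that the downloaded file really exists before returning a match. A sketch with a hypothetical requirement string; it performs network access against the configured index.

import tempfile
from setuptools.package_index import PackageIndex

index = PackageIndex()   # uses the default index_url defined in the class
path = index.download('example_pkg==1.0', tempfile.mkdtemp())
print(path)              # local archive path, or None if nothing suitable was found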
""" - if not isinstance(spec,Requirement): + if not isinstance(spec, Requirement): scheme = URL_SCHEME(spec) if scheme: # It's a url, download it to tmpdir found = self._download_url(scheme.group(1), spec, tmpdir) base, fragment = egg_info_for_url(spec) if base.endswith('.py'): - found = self.gen_setup(found,fragment,tmpdir) + found = self.gen_setup(found, fragment, tmpdir) return found elif os.path.exists(spec): # Existing file or directory, just return it return spec else: - try: - spec = Requirement.parse(spec) - except ValueError: - raise DistutilsError( - "Not a URL, existing file, or requirement spec: %r" % - (spec,) - ) - return getattr(self.fetch_distribution(spec, tmpdir),'location',None) + spec = parse_requirement_arg(spec) + return getattr(self.fetch_distribution(spec, tmpdir), 'location', None) def fetch_distribution( self, requirement, tmpdir, force_scan=False, source=False, @@ -578,22 +607,24 @@ class PackageIndex(Environment): for dist in env[req.key]: - if dist.precedence==DEVELOP_DIST and not develop_ok: + if dist.precedence == DEVELOP_DIST and not develop_ok: if dist not in skipped: - self.warn("Skipping development or system egg: %s",dist) + self.warn("Skipping development or system egg: %s", dist) skipped[dist] = 1 continue - if dist in req and (dist.precedence<=SOURCE_DIST or not source): - return dist + if dist in req and (dist.precedence <= SOURCE_DIST or not source): + dist.download_location = self.download(dist.location, tmpdir) + if os.path.exists(dist.download_location): + return dist if force_scan: self.prescan() self.find_packages(requirement) dist = find(requirement) - if local_index is not None: - dist = dist or find(requirement, local_index) + if not dist and local_index is not None: + dist = find(requirement, local_index) if dist is None: if self.to_scan is not None: @@ -606,13 +637,13 @@ class PackageIndex(Environment): if dist is None: self.warn( - "No local packages or download links found for %s%s", + "No local packages or working download links found for %s%s", (source and "a source distribution of " or ""), requirement, ) else: self.info("Best match: %s", dist) - return dist.clone(location=self.download(dist.location, tmpdir)) + return dist.clone(location=dist.download_location) def fetch(self, requirement, tmpdir, force_scan=False, source=False): """Obtain a file suitable for fulfilling `requirement` @@ -622,7 +653,7 @@ class PackageIndex(Environment): ``location`` of the downloaded distribution instead of a distribution object. """ - dist = self.fetch_distribution(requirement,tmpdir,force_scan,source) + dist = self.fetch_distribution(requirement, tmpdir, force_scan, source) if dist is not None: return dist.location return None @@ -634,7 +665,7 @@ class PackageIndex(Environment): interpret_distro_name(filename, match.group(1), None) if d.version ] or [] - if len(dists)==1: # unambiguous ``#egg`` fragment + if len(dists) == 1: # unambiguous ``#egg`` fragment basename = os.path.basename(filename) # Make sure the file has been downloaded to the temp dir. @@ -643,7 +674,7 @@ class PackageIndex(Environment): from setuptools.command.easy_install import samefile if not samefile(filename, dst): shutil.copy2(filename, dst) - filename=dst + filename = dst with open(os.path.join(tmpdir, 'setup.py'), 'w') as file: file.write( @@ -660,7 +691,7 @@ class PackageIndex(Environment): raise DistutilsError( "Can't unambiguously interpret project/version identifier %r; " "any dashes in the name or version should be escaped using " - "underscores. 
%r" % (fragment,dists) + "underscores. %r" % (fragment, dists) ) else: raise DistutilsError( @@ -669,6 +700,7 @@ class PackageIndex(Environment): ) dl_blocksize = 8192 + def _download_to(self, url, filename): self.info("Downloading %s", url) # Download the file @@ -678,7 +710,7 @@ class PackageIndex(Environment): fp = self.open_url(strip_fragment(url)) if isinstance(fp, urllib.error.HTTPError): raise DistutilsError( - "Can't download %s: %s %s" % (url, fp.code,fp.msg) + "Can't download %s: %s %s" % (url, fp.code, fp.msg) ) headers = fp.info() blocknum = 0 @@ -689,7 +721,7 @@ class PackageIndex(Environment): sizes = get_all_headers(headers, 'Content-Length') size = max(map(int, sizes)) self.reporthook(url, filename, blocknum, bs, size) - with open(filename,'wb') as tfp: + with open(filename, 'wb') as tfp: while True: block = fp.read(bs) if block: @@ -702,10 +734,11 @@ class PackageIndex(Environment): self.check_hash(checker, filename, tfp) return headers finally: - if fp: fp.close() + if fp: + fp.close() def reporthook(self, url, filename, blocknum, blksize, size): - pass # no-op + pass # no-op def open_url(self, url, warning=None): if url.startswith('file:'): @@ -735,7 +768,7 @@ class PackageIndex(Environment): 'down, %s' % (url, v.line) ) - except http_client.HTTPException as v: + except (http_client.HTTPException, socket.error) as v: if warning: self.warn(warning, v) else: @@ -748,27 +781,27 @@ class PackageIndex(Environment): name, fragment = egg_info_for_url(url) if name: while '..' in name: - name = name.replace('..','.').replace('\\','_') + name = name.replace('..', '.').replace('\\', '_') else: - name = "__downloaded__" # default if URL has no path contents + name = "__downloaded__" # default if URL has no path contents if name.endswith('.egg.zip'): - name = name[:-4] # strip the extra .zip before download + name = name[:-4] # strip the extra .zip before download - filename = os.path.join(tmpdir,name) + filename = os.path.join(tmpdir, name) # Download the file # - if scheme=='svn' or scheme.startswith('svn+'): + if scheme == 'svn' or scheme.startswith('svn+'): return self._download_svn(url, filename) - elif scheme=='git' or scheme.startswith('git+'): + elif scheme == 'git' or scheme.startswith('git+'): return self._download_git(url, filename) elif scheme.startswith('hg+'): return self._download_hg(url, filename) - elif scheme=='file': + elif scheme == 'file': return urllib.request.url2pathname(urllib.parse.urlparse(url)[2]) else: - self.url_ok(url, True) # raises error if not allowed + self.url_ok(url, True) # raises error if not allowed return self._attempt_download(url, filename) def scan_url(self, url): @@ -776,7 +809,7 @@ class PackageIndex(Environment): def _attempt_download(self, url, filename): headers = self._download_to(url, filename) - if 'html' in headers.get('content-type','').lower(): + if 'html' in headers.get('content-type', '').lower(): return self._download_html(url, headers, filename) else: return filename @@ -791,25 +824,25 @@ class PackageIndex(Environment): file.close() os.unlink(filename) return self._download_svn(url, filename) - break # not an index page + break # not an index page file.close() os.unlink(filename) - raise DistutilsError("Unexpected HTML page found at "+url) + raise DistutilsError("Unexpected HTML page found at " + url) def _download_svn(self, url, filename): - url = url.split('#',1)[0] # remove any fragment for svn's sake + url = url.split('#', 1)[0] # remove any fragment for svn's sake creds = '' if url.lower().startswith('svn:') and '@' in 
url: scheme, netloc, path, p, q, f = urllib.parse.urlparse(url) if not netloc and path.startswith('//') and '/' in path[2:]: - netloc, path = path[2:].split('/',1) + netloc, path = path[2:].split('/', 1) auth, host = splituser(netloc) if auth: if ':' in auth: - user, pw = auth.split(':',1) + user, pw = auth.split(':', 1) creds = " --username=%s --password=%s" % (user, pw) else: - creds = " --username="+auth + creds = " --username=" + auth netloc = host parts = scheme, netloc, url, p, q, f url = urllib.parse.urlunparse(parts) @@ -824,7 +857,7 @@ class PackageIndex(Environment): scheme = scheme.split('+', 1)[-1] # Some fragment identification fails - path = path.split('#',1)[0] + path = path.split('#', 1)[0] rev = None if '@' in path: @@ -836,7 +869,7 @@ class PackageIndex(Environment): return url, rev def _download_git(self, url, filename): - filename = filename.split('#',1)[0] + filename = filename.split('#', 1)[0] url, rev = self._vcs_split_rev_from_url(url, pop_prefix=True) self.info("Doing git clone from %s to %s", url, filename) @@ -852,7 +885,7 @@ class PackageIndex(Environment): return filename def _download_hg(self, url, filename): - filename = filename.split('#',1)[0] + filename = filename.split('#', 1)[0] url, rev = self._vcs_split_rev_from_url(url, pop_prefix=True) self.info("Doing hg clone from %s to %s", url, filename) @@ -860,7 +893,7 @@ class PackageIndex(Environment): if rev is not None: self.info("Updating to %s", rev) - os.system("(cd %s && hg up -C -r %s >&-)" % ( + os.system("(cd %s && hg up -C -r %s -q)" % ( filename, rev, )) @@ -876,16 +909,20 @@ class PackageIndex(Environment): def warn(self, msg, *args): log.warn(msg, *args) + # This pattern matches a character entity reference (a decimal numeric # references, a hexadecimal numeric reference, or a named reference). entity_sub = re.compile(r'&(#(\d+|x[\da-fA-F]+)|[\w.:-]+);?').sub + def uchr(c): if not isinstance(c, int): return c - if c>255: return six.unichr(c) + if c > 255: + return six.unichr(c) return chr(c) + def decode_entity(match): what = match.group(1) if what.startswith('#x'): @@ -896,10 +933,12 @@ def decode_entity(match): what = six.moves.html_entities.name2codepoint.get(what, match.group(0)) return uchr(what) + def htmldecode(text): """Decode HTML entities in the given text.""" return entity_sub(decode_entity, text) + def socket_timeout(timeout=15): def _socket_timeout(func): def _socket_timeout(*args, **kwargs): @@ -909,9 +948,12 @@ def socket_timeout(timeout=15): return func(*args, **kwargs) finally: socket.setdefaulttimeout(old_timeout) + return _socket_timeout + return _socket_timeout + def _encode_auth(auth): """ A function compatible with Python 2.3-3.3 that will encode @@ -932,12 +974,14 @@ def _encode_auth(auth): # convert back to a string encoded = encoded_bytes.decode() # strip the trailing carriage return - return encoded.replace('\n','') + return encoded.replace('\n', '') + class Credential(object): """ A username/password pair. Use like a namedtuple. 
""" + def __init__(self, username, password): self.username = username self.password = password @@ -949,8 +993,8 @@ class Credential(object): def __str__(self): return '%(username)s:%(password)s' % vars(self) -class PyPIConfig(configparser.RawConfigParser): +class PyPIConfig(configparser.RawConfigParser): def __init__(self): """ Load from ~/.pypirc @@ -1008,7 +1052,7 @@ def open_with_auth(url, opener=urllib.request.urlopen): if cred: auth = str(cred) info = cred.username, url - log.info('Authenticating as %s for %s (from .pypirc)' % info) + log.info('Authenticating as %s for %s (from .pypirc)', *info) if auth: auth = "Basic " + _encode_auth(auth) @@ -1026,18 +1070,20 @@ def open_with_auth(url, opener=urllib.request.urlopen): # Put authentication info back into request URL if same host, # so that links found on the page will work s2, h2, path2, param2, query2, frag2 = urllib.parse.urlparse(fp.url) - if s2==scheme and h2==host: + if s2 == scheme and h2 == host: parts = s2, netloc, path2, param2, query2, frag2 fp.url = urllib.parse.urlunparse(parts) return fp + # adding a timeout to avoid freezing package_index open_with_auth = socket_timeout(_SOCKET_TIMEOUT)(open_with_auth) def fix_sf_url(url): - return url # backward compatibility + return url # backward compatibility + def local_open(url): """Read a local path, with special support for directories""" diff --git a/lib/python3.4/site-packages/setuptools/py26compat.py b/lib/python3.4/site-packages/setuptools/py26compat.py index e52bd85..4d3add8 100644 --- a/lib/python3.4/site-packages/setuptools/py26compat.py +++ b/lib/python3.4/site-packages/setuptools/py26compat.py @@ -5,18 +5,27 @@ Compatibility Support for Python 2.6 and earlier import sys try: - from urllib.parse import splittag + from urllib.parse import splittag except ImportError: - from urllib import splittag + from urllib import splittag + def strip_fragment(url): - """ - In `Python 8280 `_, Python 2.7 and - later was patched to disregard the fragment when making URL requests. - Do the same for Python 2.6 and earlier. - """ - url, fragment = splittag(url) - return url + """ + In `Python 8280 `_, Python 2.7 and + later was patched to disregard the fragment when making URL requests. + Do the same for Python 2.6 and earlier. + """ + url, fragment = splittag(url) + return url -if sys.version_info >= (2,7): - strip_fragment = lambda x: x + +if sys.version_info >= (2, 7): + strip_fragment = lambda x: x + +try: + from importlib import import_module +except ImportError: + + def import_module(module_name): + return __import__(module_name, fromlist=['__name__']) diff --git a/lib/python3.4/site-packages/setuptools/py27compat.py b/lib/python3.4/site-packages/setuptools/py27compat.py index 9d2886d..2985011 100644 --- a/lib/python3.4/site-packages/setuptools/py27compat.py +++ b/lib/python3.4/site-packages/setuptools/py27compat.py @@ -2,14 +2,27 @@ Compatibility Support for Python 2.7 and earlier """ -import sys +import platform + +from setuptools.extern import six + def get_all_headers(message, key): - """ - Given an HTTPMessage, return all headers matching a given key. - """ - return message.get_all(key) + """ + Given an HTTPMessage, return all headers matching a given key. 
+ """ + return message.get_all(key) -if sys.version_info < (3,): - def get_all_headers(message, key): - return message.getheaders(key) + +if six.PY2: + def get_all_headers(message, key): + return message.getheaders(key) + + +linux_py2_ascii = ( + platform.system() == 'Linux' and + six.PY2 +) + +rmtree_safe = str if linux_py2_ascii else lambda x: x +"""Workaround for http://bugs.python.org/issue24672""" diff --git a/lib/python3.4/site-packages/setuptools/py31compat.py b/lib/python3.4/site-packages/setuptools/py31compat.py index 8fe6dd9..44b025d 100644 --- a/lib/python3.4/site-packages/setuptools/py31compat.py +++ b/lib/python3.4/site-packages/setuptools/py31compat.py @@ -8,10 +8,12 @@ try: from sysconfig import get_config_vars, get_path except ImportError: from distutils.sysconfig import get_config_vars, get_python_lib + def get_path(name): if name not in ('platlib', 'purelib'): raise ValueError("Name must be purelib or platlib") - return get_python_lib(name=='platlib') + return get_python_lib(name == 'platlib') + try: # Python >=3.2 @@ -19,14 +21,16 @@ try: except ImportError: import shutil import tempfile + class TemporaryDirectory(object): """ Very simple temporary directory context manager. Will try to delete afterward, but will also ignore OS and similar errors on deletion. """ + def __init__(self): - self.name = None # Handle mkdtemp raising an exception + self.name = None # Handle mkdtemp raising an exception self.name = tempfile.mkdtemp() def __enter__(self): @@ -35,7 +39,7 @@ except ImportError: def __exit__(self, exctype, excvalue, exctrace): try: shutil.rmtree(self.name, True) - except OSError: #removal errors are not the only possible + except OSError: # removal errors are not the only possible pass self.name = None diff --git a/lib/python3.4/site-packages/setuptools/py33compat.py b/lib/python3.4/site-packages/setuptools/py33compat.py new file mode 100644 index 0000000..af64d5d --- /dev/null +++ b/lib/python3.4/site-packages/setuptools/py33compat.py @@ -0,0 +1,45 @@ +import dis +import array +import collections + +from setuptools.extern import six + + +OpArg = collections.namedtuple('OpArg', 'opcode arg') + + +class Bytecode_compat(object): + def __init__(self, code): + self.code = code + + def __iter__(self): + """Yield '(op,arg)' pair for each operation in code object 'code'""" + + bytes = array.array('b', self.code.co_code) + eof = len(self.code.co_code) + + ptr = 0 + extended_arg = 0 + + while ptr < eof: + + op = bytes[ptr] + + if op >= dis.HAVE_ARGUMENT: + + arg = bytes[ptr + 1] + bytes[ptr + 2] * 256 + extended_arg + ptr += 3 + + if op == dis.EXTENDED_ARG: + long_type = six.integer_types[-1] + extended_arg = arg * long_type(65536) + continue + + else: + arg = None + ptr += 1 + + yield OpArg(op, arg) + + +Bytecode = getattr(dis, 'Bytecode', Bytecode_compat) diff --git a/lib/python3.4/site-packages/setuptools/py36compat.py b/lib/python3.4/site-packages/setuptools/py36compat.py new file mode 100644 index 0000000..f527969 --- /dev/null +++ b/lib/python3.4/site-packages/setuptools/py36compat.py @@ -0,0 +1,82 @@ +import sys +from distutils.errors import DistutilsOptionError +from distutils.util import strtobool +from distutils.debug import DEBUG + + +class Distribution_parse_config_files: + """ + Mix-in providing forward-compatibility for functionality to be + included by default on Python 3.7. + + Do not edit the code in this class except to update functionality + as implemented in distutils. 
+ """ + def parse_config_files(self, filenames=None): + from configparser import ConfigParser + + # Ignore install directory options if we have a venv + if sys.prefix != sys.base_prefix: + ignore_options = [ + 'install-base', 'install-platbase', 'install-lib', + 'install-platlib', 'install-purelib', 'install-headers', + 'install-scripts', 'install-data', 'prefix', 'exec-prefix', + 'home', 'user', 'root'] + else: + ignore_options = [] + + ignore_options = frozenset(ignore_options) + + if filenames is None: + filenames = self.find_config_files() + + if DEBUG: + self.announce("Distribution.parse_config_files():") + + parser = ConfigParser(interpolation=None) + for filename in filenames: + if DEBUG: + self.announce(" reading %s" % filename) + parser.read(filename) + for section in parser.sections(): + options = parser.options(section) + opt_dict = self.get_option_dict(section) + + for opt in options: + if opt != '__name__' and opt not in ignore_options: + val = parser.get(section,opt) + opt = opt.replace('-', '_') + opt_dict[opt] = (filename, val) + + # Make the ConfigParser forget everything (so we retain + # the original filenames that options come from) + parser.__init__() + + # If there was a "global" section in the config file, use it + # to set Distribution options. + + if 'global' in self.command_options: + for (opt, (src, val)) in self.command_options['global'].items(): + alias = self.negative_opt.get(opt) + try: + if alias: + setattr(self, alias, not strtobool(val)) + elif opt in ('verbose', 'dry_run'): # ugh! + setattr(self, opt, strtobool(val)) + else: + setattr(self, opt, val) + except ValueError as msg: + raise DistutilsOptionError(msg) + + +if sys.version_info < (3,): + # Python 2 behavior is sufficient + class Distribution_parse_config_files: + pass + + +if False: + # When updated behavior is available upstream, + # disable override here. + class Distribution_parse_config_files: + pass diff --git a/lib/python3.4/site-packages/setuptools/sandbox.py b/lib/python3.4/site-packages/setuptools/sandbox.py index 23e296b..1d981f4 100644 --- a/lib/python3.4/site-packages/setuptools/sandbox.py +++ b/lib/python3.4/site-packages/setuptools/sandbox.py @@ -7,11 +7,12 @@ import itertools import re import contextlib import pickle +import textwrap from setuptools.extern import six from setuptools.extern.six.moves import builtins, map -import pkg_resources +import pkg_resources.py31compat if sys.platform.startswith('java'): import org.python.modules.posix.PosixModule as _os @@ -25,10 +26,12 @@ _open = open from distutils.errors import DistutilsError from pkg_resources import working_set + __all__ = [ "AbstractSandbox", "DirectorySandbox", "SandboxViolation", "run_setup", ] + def _execfile(filename, globals, locals=None): """ Python 3 implementation of execfile. @@ -71,8 +74,7 @@ def override_temp(replacement): """ Monkey-patch tempfile.tempdir with replacement, ensuring it exists """ - if not os.path.isdir(replacement): - os.makedirs(replacement) + pkg_resources.py31compat.makedirs(replacement, exist_ok=True) saved = tempfile.tempdir @@ -98,6 +100,7 @@ class UnpickleableException(Exception): """ An exception representing another Exception that could not be pickled. """ + @staticmethod def dump(type, exc): """ @@ -117,6 +120,7 @@ class ExceptionSaver: A Context Manager that will save an exception, serialized, and restore it later. 
""" + def __enter__(self): return self @@ -212,7 +216,7 @@ def _needs_hiding(mod_name): >>> _needs_hiding('Cython') True """ - pattern = re.compile('(setuptools|pkg_resources|distutils|Cython)(\.|$)') + pattern = re.compile(r'(setuptools|pkg_resources|distutils|Cython)(\.|$)') return bool(pattern.match(mod_name)) @@ -232,15 +236,22 @@ def run_setup(setup_script, args): setup_dir = os.path.abspath(os.path.dirname(setup_script)) with setup_context(setup_dir): try: - sys.argv[:] = [setup_script]+list(args) + sys.argv[:] = [setup_script] + list(args) sys.path.insert(0, setup_dir) # reset to include setup dir, w/clean callback list working_set.__init__() - working_set.callbacks.append(lambda dist:dist.activate()) - def runner(): - ns = dict(__file__=setup_script, __name__='__main__') + working_set.callbacks.append(lambda dist: dist.activate()) + + # __file__ should be a byte string on Python 2 (#712) + dunder_file = ( + setup_script + if isinstance(setup_script, str) else + setup_script.encode(sys.getfilesystemencoding()) + ) + + with DirectorySandbox(setup_dir): + ns = dict(__file__=dunder_file, __name__='__main__') _execfile(setup_script, ns) - DirectorySandbox(setup_dir).run(runner) except SystemExit as v: if v.args and v.args[0]: raise @@ -255,46 +266,54 @@ class AbstractSandbox: def __init__(self): self._attrs = [ name for name in dir(_os) - if not name.startswith('_') and hasattr(self,name) + if not name.startswith('_') and hasattr(self, name) ] def _copy(self, source): for name in self._attrs: - setattr(os, name, getattr(source,name)) + setattr(os, name, getattr(source, name)) + + def __enter__(self): + self._copy(self) + if _file: + builtins.file = self._file + builtins.open = self._open + self._active = True + + def __exit__(self, exc_type, exc_value, traceback): + self._active = False + if _file: + builtins.file = _file + builtins.open = _open + self._copy(_os) def run(self, func): """Run 'func' under os sandboxing""" - try: - self._copy(self) - if _file: - builtins.file = self._file - builtins.open = self._open - self._active = True + with self: return func() - finally: - self._active = False - if _file: - builtins.file = _file - builtins.open = _open - self._copy(_os) def _mk_dual_path_wrapper(name): - original = getattr(_os,name) - def wrap(self,src,dst,*args,**kw): + original = getattr(_os, name) + + def wrap(self, src, dst, *args, **kw): if self._active: - src,dst = self._remap_pair(name,src,dst,*args,**kw) - return original(src,dst,*args,**kw) + src, dst = self._remap_pair(name, src, dst, *args, **kw) + return original(src, dst, *args, **kw) + return wrap for name in ["rename", "link", "symlink"]: - if hasattr(_os,name): locals()[name] = _mk_dual_path_wrapper(name) + if hasattr(_os, name): + locals()[name] = _mk_dual_path_wrapper(name) def _mk_single_path_wrapper(name, original=None): - original = original or getattr(_os,name) - def wrap(self,path,*args,**kw): + original = original or getattr(_os, name) + + def wrap(self, path, *args, **kw): if self._active: - path = self._remap_input(name,path,*args,**kw) - return original(path,*args,**kw) + path = self._remap_input(name, path, *args, **kw) + return original(path, *args, **kw) + return wrap if _file: @@ -305,49 +324,56 @@ class AbstractSandbox: "remove", "unlink", "rmdir", "utime", "lchown", "chroot", "lstat", "startfile", "mkfifo", "mknod", "pathconf", "access" ]: - if hasattr(_os,name): locals()[name] = _mk_single_path_wrapper(name) + if hasattr(_os, name): + locals()[name] = _mk_single_path_wrapper(name) def 
_mk_single_with_return(name): - original = getattr(_os,name) - def wrap(self,path,*args,**kw): + original = getattr(_os, name) + + def wrap(self, path, *args, **kw): if self._active: - path = self._remap_input(name,path,*args,**kw) - return self._remap_output(name, original(path,*args,**kw)) - return original(path,*args,**kw) + path = self._remap_input(name, path, *args, **kw) + return self._remap_output(name, original(path, *args, **kw)) + return original(path, *args, **kw) + return wrap for name in ['readlink', 'tempnam']: - if hasattr(_os,name): locals()[name] = _mk_single_with_return(name) + if hasattr(_os, name): + locals()[name] = _mk_single_with_return(name) def _mk_query(name): - original = getattr(_os,name) - def wrap(self,*args,**kw): - retval = original(*args,**kw) + original = getattr(_os, name) + + def wrap(self, *args, **kw): + retval = original(*args, **kw) if self._active: return self._remap_output(name, retval) return retval + return wrap for name in ['getcwd', 'tmpnam']: - if hasattr(_os,name): locals()[name] = _mk_query(name) + if hasattr(_os, name): + locals()[name] = _mk_query(name) - def _validate_path(self,path): + def _validate_path(self, path): """Called to remap or validate any path, whether input or output""" return path - def _remap_input(self,operation,path,*args,**kw): + def _remap_input(self, operation, path, *args, **kw): """Called for path inputs""" return self._validate_path(path) - def _remap_output(self,operation,path): + def _remap_output(self, operation, path): """Called for path outputs""" return self._validate_path(path) - def _remap_pair(self,operation,src,dst,*args,**kw): + def _remap_pair(self, operation, src, dst, *args, **kw): """Called for path pairs like rename, link, and symlink operations""" return ( - self._remap_input(operation+'-from',src,*args,**kw), - self._remap_input(operation+'-to',dst,*args,**kw) + self._remap_input(operation + '-from', src, *args, **kw), + self._remap_input(operation + '-to', dst, *args, **kw) ) @@ -356,13 +382,6 @@ if hasattr(os, 'devnull'): else: _EXCEPTIONS = [] -try: - from win32com.client.gencache import GetGeneratePath - _EXCEPTIONS.append(GetGeneratePath()) - del GetGeneratePath -except ImportError: - # it appears pywin32 is not installed, so no need to exclude. 
- pass class DirectorySandbox(AbstractSandbox): """Restrict operations to a single subdirectory - pseudo-chroot""" @@ -374,13 +393,13 @@ class DirectorySandbox(AbstractSandbox): _exception_patterns = [ # Allow lib2to3 to attempt to save a pickled grammar object (#121) - '.*lib2to3.*\.pickle$', + r'.*lib2to3.*\.pickle$', ] "exempt writing to paths that match the pattern" def __init__(self, sandbox, exceptions=_EXCEPTIONS): self._sandbox = os.path.normcase(os.path.realpath(sandbox)) - self._prefix = os.path.join(self._sandbox,'') + self._prefix = os.path.join(self._sandbox, '') self._exceptions = [ os.path.normcase(os.path.realpath(path)) for path in exceptions @@ -392,15 +411,16 @@ class DirectorySandbox(AbstractSandbox): raise SandboxViolation(operation, args, kw) if _file: + def _file(self, path, mode='r', *args, **kw): if mode not in ('r', 'rt', 'rb', 'rU', 'U') and not self._ok(path): self._violation("file", path, mode, *args, **kw) - return _file(path,mode,*args,**kw) + return _file(path, mode, *args, **kw) def _open(self, path, mode='r', *args, **kw): if mode not in ('r', 'rt', 'rb', 'rU', 'U') and not self._ok(path): self._violation("open", path, mode, *args, **kw) - return _open(path,mode,*args,**kw) + return _open(path, mode, *args, **kw) def tmpnam(self): self._violation("tmpnam") @@ -440,57 +460,36 @@ class DirectorySandbox(AbstractSandbox): """Called for path pairs like rename, link, and symlink operations""" if not self._ok(src) or not self._ok(dst): self._violation(operation, src, dst, *args, **kw) - return (src,dst) + return (src, dst) def open(self, file, flags, mode=0o777, *args, **kw): """Called for low-level os.open()""" if flags & WRITE_FLAGS and not self._ok(file): self._violation("os.open", file, flags, mode, *args, **kw) - return _os.open(file,flags,mode, *args, **kw) + return _os.open(file, flags, mode, *args, **kw) + WRITE_FLAGS = functools.reduce( operator.or_, [getattr(_os, a, 0) for a in "O_WRONLY O_RDWR O_APPEND O_CREAT O_TRUNC O_TEMPORARY".split()] ) + class SandboxViolation(DistutilsError): """A setup script attempted to modify the filesystem outside the sandbox""" + tmpl = textwrap.dedent(""" + SandboxViolation: {cmd}{args!r} {kwargs} + + The package setup script has attempted to modify files on your system + that are not within the EasyInstall build area, and has been aborted. + + This package cannot be safely installed by EasyInstall, and may not + support alternate installation locations even if you run its setup + script by hand. Please inform the package's author and the EasyInstall + maintainers to find out if a fix or workaround is available. + """).lstrip() + def __str__(self): - return """SandboxViolation: %s%r %s - -The package setup script has attempted to modify files on your system -that are not within the EasyInstall build area, and has been aborted. - -This package cannot be safely installed by EasyInstall, and may not -support alternate installation locations even if you run its setup -script by hand. 
Please inform the package's author and the EasyInstall -maintainers to find out if a fix or workaround is available.""" % self.args - - - - - - - - - - - - - - - - - - - - - - - - - - - -# + cmd, args, kwargs = self.args + return self.tmpl.format(**locals()) diff --git a/lib/python3.4/site-packages/setuptools/site-patch.py b/lib/python3.4/site-packages/setuptools/site-patch.py index c216801..0d2d2ff 100644 --- a/lib/python3.4/site-packages/setuptools/site-patch.py +++ b/lib/python3.4/site-packages/setuptools/site-patch.py @@ -2,19 +2,18 @@ def __boot(): import sys import os PYTHONPATH = os.environ.get('PYTHONPATH') - if PYTHONPATH is None or (sys.platform=='win32' and not PYTHONPATH): + if PYTHONPATH is None or (sys.platform == 'win32' and not PYTHONPATH): PYTHONPATH = [] else: PYTHONPATH = PYTHONPATH.split(os.pathsep) - pic = getattr(sys,'path_importer_cache',{}) + pic = getattr(sys, 'path_importer_cache', {}) stdpath = sys.path[len(PYTHONPATH):] mydir = os.path.dirname(__file__) - #print "searching",stdpath,sys.path for item in stdpath: - if item==mydir or not item: - continue # skip if current dir. on Windows, or my own directory + if item == mydir or not item: + continue # skip if current dir. on Windows, or my own directory importer = pic.get(item) if importer is not None: loader = importer.find_module('site') @@ -24,32 +23,30 @@ def __boot(): break else: try: - import imp # Avoid import loop in Python >= 3.3 - stream, path, descr = imp.find_module('site',[item]) + import imp # Avoid import loop in Python >= 3.3 + stream, path, descr = imp.find_module('site', [item]) except ImportError: continue if stream is None: continue try: # This should actually reload the current module - imp.load_module('site',stream,path,descr) + imp.load_module('site', stream, path, descr) finally: stream.close() break else: raise ImportError("Couldn't find the real 'site' module") - #print "loaded", __file__ + known_paths = dict([(makepath(item)[1], 1) for item in sys.path]) # 2.2 comp - known_paths = dict([(makepath(item)[1],1) for item in sys.path]) # 2.2 comp - - oldpos = getattr(sys,'__egginsert',0) # save old insertion position - sys.__egginsert = 0 # and reset the current one + oldpos = getattr(sys, '__egginsert', 0) # save old insertion position + sys.__egginsert = 0 # and reset the current one for item in PYTHONPATH: addsitedir(item) - sys.__egginsert += oldpos # restore effective old position + sys.__egginsert += oldpos # restore effective old position d, nd = makepath(stdpath[0]) insert_at = None @@ -58,7 +55,7 @@ def __boot(): for item in sys.path: p, np = makepath(item) - if np==nd and insert_at is None: + if np == nd and insert_at is None: # We've hit the first 'system' path entry, so added entries go here insert_at = len(new_path) @@ -71,6 +68,7 @@ def __boot(): sys.path[:] = new_path -if __name__=='site': + +if __name__ == 'site': __boot() del __boot diff --git a/lib/python3.4/site-packages/setuptools/ssl_support.py b/lib/python3.4/site-packages/setuptools/ssl_support.py index 657197c..72b18ef 100644 --- a/lib/python3.4/site-packages/setuptools/ssl_support.py +++ b/lib/python3.4/site-packages/setuptools/ssl_support.py @@ -2,10 +2,10 @@ import os import socket import atexit import re +import functools -from setuptools.extern.six.moves import urllib, http_client, map +from setuptools.extern.six.moves import urllib, http_client, map, filter -import pkg_resources from pkg_resources import ResolutionError, ExtractionError try: @@ -26,9 +26,9 @@ cert_paths = """ /etc/ssl/cert.pem 
/System/Library/OpenSSL/certs/cert.pem /usr/local/share/certs/ca-root-nss.crt +/etc/ssl/ca-bundle.pem """.strip().split() - try: HTTPSHandler = urllib.request.HTTPSHandler HTTPSConnection = http_client.HTTPSConnection @@ -49,10 +49,13 @@ except ImportError: match_hostname = None if not CertificateError: + class CertificateError(ValueError): pass + if not match_hostname: + def _dnsname_match(dn, hostname, max_wildcards=1): """Matching according to RFC 6125, section 6.4.3 @@ -161,6 +164,7 @@ class VerifyingHTTPSHandler(HTTPSHandler): class VerifyingHTTPSConn(HTTPSConnection): """Simple verifying connection: no auth, subclasses, timeouts, etc.""" + def __init__(self, host, ca_bundle, **kw): HTTPSConnection.__init__(self, host, **kw) self.ca_bundle = ca_bundle @@ -192,6 +196,7 @@ class VerifyingHTTPSConn(HTTPSConnection): self.sock.close() raise + def opener_for(ca_bundle=None): """Get a urlopen() replacement that uses ca_bundle for verification""" return urllib.request.build_opener( @@ -199,45 +204,52 @@ def opener_for(ca_bundle=None): ).open -_wincerts = None +# from jaraco.functools +def once(func): + @functools.wraps(func) + def wrapper(*args, **kwargs): + if not hasattr(func, 'always_returns'): + func.always_returns = func(*args, **kwargs) + return func.always_returns + return wrapper + +@once def get_win_certfile(): - global _wincerts - if _wincerts is not None: - return _wincerts.name - try: - from wincertstore import CertFile + import wincertstore except ImportError: return None - class MyCertFile(CertFile): - def __init__(self, stores=(), certs=()): - CertFile.__init__(self) - for store in stores: - self.addstore(store) - self.addcerts(certs) + class CertFile(wincertstore.CertFile): + def __init__(self): + super(CertFile, self).__init__() atexit.register(self.close) def close(self): try: - super(MyCertFile, self).close() + super(CertFile, self).close() except OSError: pass - _wincerts = MyCertFile(stores=['CA', 'ROOT']) + _wincerts = CertFile() + _wincerts.addstore('CA') + _wincerts.addstore('ROOT') return _wincerts.name def find_ca_bundle(): """Return an existing CA bundle path, or None""" - if os.name=='nt': - return get_win_certfile() - else: - for cert_path in cert_paths: - if os.path.isfile(cert_path): - return cert_path + extant_cert_paths = filter(os.path.isfile, cert_paths) + return ( + get_win_certfile() + or next(extant_cert_paths, None) + or _certifi_where() + ) + + +def _certifi_where(): try: - return pkg_resources.resource_filename('certifi', 'cacert.pem') + return __import__('certifi').where() except (ImportError, ResolutionError, ExtractionError): - return None + pass diff --git a/lib/python3.4/site-packages/setuptools/unicode_utils.py b/lib/python3.4/site-packages/setuptools/unicode_utils.py index ffab3e2..7c63efd 100644 --- a/lib/python3.4/site-packages/setuptools/unicode_utils.py +++ b/lib/python3.4/site-packages/setuptools/unicode_utils.py @@ -3,6 +3,7 @@ import sys from setuptools.extern import six + # HFS Plus uses decomposed UTF-8 def decompose(path): if isinstance(path, six.text_type): diff --git a/lib/python3.4/site-packages/setuptools/utils.py b/lib/python3.4/site-packages/setuptools/utils.py deleted file mode 100644 index 91e4b87..0000000 --- a/lib/python3.4/site-packages/setuptools/utils.py +++ /dev/null @@ -1,11 +0,0 @@ -import os -import os.path - - -def cs_path_exists(fspath): - if not os.path.exists(fspath): - return False - # make absolute so we always have a directory - abspath = os.path.abspath(fspath) - directory, filename = os.path.split(abspath) - 
return filename in os.listdir(directory) \ No newline at end of file diff --git a/lib/python3.4/site-packages/setuptools/version.py b/lib/python3.4/site-packages/setuptools/version.py index 4494728..95e1869 100644 --- a/lib/python3.4/site-packages/setuptools/version.py +++ b/lib/python3.4/site-packages/setuptools/version.py @@ -1 +1,6 @@ -__version__ = '20.1.1' +import pkg_resources + +try: + __version__ = pkg_resources.get_distribution('setuptools').version +except Exception: + __version__ = 'unknown' diff --git a/lib/python3.4/site-packages/six-1.10.0.dist-info/RECORD b/lib/python3.4/site-packages/six-1.10.0.dist-info/RECORD deleted file mode 100644 index f4ec904..0000000 --- a/lib/python3.4/site-packages/six-1.10.0.dist-info/RECORD +++ /dev/null @@ -1,9 +0,0 @@ -six.py,sha256=A6hdJZVjI3t_geebZ9BzUvwRrIXo0lfwzQlM2LcKyas,30098 -six-1.10.0.dist-info/DESCRIPTION.rst,sha256=QWBtSTT2zzabwJv1NQbTfClSX13m-Qc6tqU4TRL1RLs,774 -six-1.10.0.dist-info/METADATA,sha256=5HceJsUnHof2IRamlCKO2MwNjve1eSP4rLzVQDfwpCQ,1283 -six-1.10.0.dist-info/RECORD,, -six-1.10.0.dist-info/WHEEL,sha256=GrqQvamwgBV4nLoJe0vhYRSWzWsx7xjlt74FT0SWYfE,110 -six-1.10.0.dist-info/metadata.json,sha256=jtOeeTBubYDChl_5Ql5ZPlKoHgg6rdqRIjOz1e5Ek2U,658 -six-1.10.0.dist-info/top_level.txt,sha256=_iVH_iYEtEXnD8nYGQYpYFUvkUW9sEO1GYbkeKSAais,4 -six-1.10.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -__pycache__/six.cpython-34.pyc,, diff --git a/lib/python3.4/site-packages/six-1.10.0.dist-info/metadata.json b/lib/python3.4/site-packages/six-1.10.0.dist-info/metadata.json deleted file mode 100644 index 21f9f6c..0000000 --- a/lib/python3.4/site-packages/six-1.10.0.dist-info/metadata.json +++ /dev/null @@ -1 +0,0 @@ -{"generator": "bdist_wheel (0.26.0)", "summary": "Python 2 and 3 compatibility utilities", "classifiers": ["Programming Language :: Python :: 2", "Programming Language :: Python :: 3", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Topic :: Software Development :: Libraries", "Topic :: Utilities"], "extensions": {"python.details": {"project_urls": {"Home": "http://pypi.python.org/pypi/six/"}, "contacts": [{"email": "benjamin@python.org", "name": "Benjamin Peterson", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}}}, "license": "MIT", "metadata_version": "2.0", "name": "six", "version": "1.10.0"} \ No newline at end of file diff --git a/lib/python3.4/site-packages/six-1.10.0.dist-info/DESCRIPTION.rst b/lib/python3.4/site-packages/six-1.11.0.dist-info/DESCRIPTION.rst similarity index 58% rename from lib/python3.4/site-packages/six-1.10.0.dist-info/DESCRIPTION.rst rename to lib/python3.4/site-packages/six-1.11.0.dist-info/DESCRIPTION.rst index 2e2607d..09c2c99 100644 --- a/lib/python3.4/site-packages/six-1.10.0.dist-info/DESCRIPTION.rst +++ b/lib/python3.4/site-packages/six-1.11.0.dist-info/DESCRIPTION.rst @@ -1,3 +1,12 @@ +.. image:: http://img.shields.io/pypi/v/six.svg + :target: https://pypi.python.org/pypi/six + +.. image:: https://travis-ci.org/benjaminp/six.svg?branch=master + :target: https://travis-ci.org/benjaminp/six + +.. image:: http://img.shields.io/badge/license-MIT-green.svg + :target: https://github.com/benjaminp/six/blob/master/LICENSE + Six is a Python 2 and 3 compatibility library. It provides utility functions for smoothing over the differences between the Python versions with the goal of writing Python code that is compatible on both Python versions. See the @@ -7,9 +16,9 @@ Six supports every Python version since 2.6. 
It is contained in only one Python file, so it can be easily copied into your project. (The copyright and license notice must be retained.) -Online documentation is at https://pythonhosted.org/six/. +Online documentation is at http://six.rtfd.org. -Bugs can be reported to https://bitbucket.org/gutworth/six. The code can also +Bugs can be reported to https://github.com/benjaminp/six. The code can also be found there. For questions about six or porting in general, email the python-porting mailing diff --git a/lib/python3.4/site-packages/setuptools-20.3.1.dist-info/INSTALLER b/lib/python3.4/site-packages/six-1.11.0.dist-info/INSTALLER similarity index 100% rename from lib/python3.4/site-packages/setuptools-20.3.1.dist-info/INSTALLER rename to lib/python3.4/site-packages/six-1.11.0.dist-info/INSTALLER diff --git a/lib/python3.4/site-packages/six-1.10.0.dist-info/METADATA b/lib/python3.4/site-packages/six-1.11.0.dist-info/METADATA similarity index 70% rename from lib/python3.4/site-packages/six-1.10.0.dist-info/METADATA rename to lib/python3.4/site-packages/six-1.11.0.dist-info/METADATA index 4fc3d07..04e93dc 100644 --- a/lib/python3.4/site-packages/six-1.10.0.dist-info/METADATA +++ b/lib/python3.4/site-packages/six-1.11.0.dist-info/METADATA @@ -1,6 +1,6 @@ Metadata-Version: 2.0 Name: six -Version: 1.10.0 +Version: 1.11.0 Summary: Python 2 and 3 compatibility utilities Home-page: http://pypi.python.org/pypi/six/ Author: Benjamin Peterson @@ -14,6 +14,15 @@ Classifier: License :: OSI Approved :: MIT License Classifier: Topic :: Software Development :: Libraries Classifier: Topic :: Utilities +.. image:: http://img.shields.io/pypi/v/six.svg + :target: https://pypi.python.org/pypi/six + +.. image:: https://travis-ci.org/benjaminp/six.svg?branch=master + :target: https://travis-ci.org/benjaminp/six + +.. image:: http://img.shields.io/badge/license-MIT-green.svg + :target: https://github.com/benjaminp/six/blob/master/LICENSE + Six is a Python 2 and 3 compatibility library. It provides utility functions for smoothing over the differences between the Python versions with the goal of writing Python code that is compatible on both Python versions. See the @@ -23,9 +32,9 @@ Six supports every Python version since 2.6. It is contained in only one Python file, so it can be easily copied into your project. (The copyright and license notice must be retained.) -Online documentation is at https://pythonhosted.org/six/. +Online documentation is at http://six.rtfd.org. -Bugs can be reported to https://bitbucket.org/gutworth/six. The code can also +Bugs can be reported to https://github.com/benjaminp/six. The code can also be found there. 
For questions about six or porting in general, email the python-porting mailing diff --git a/lib/python3.4/site-packages/six-1.11.0.dist-info/RECORD b/lib/python3.4/site-packages/six-1.11.0.dist-info/RECORD new file mode 100644 index 0000000..99350b6 --- /dev/null +++ b/lib/python3.4/site-packages/six-1.11.0.dist-info/RECORD @@ -0,0 +1,9 @@ +six.py,sha256=A08MPb-Gi9FfInI3IW7HimXFmEH2T2IPzHgDvdhZPRA,30888 +six-1.11.0.dist-info/DESCRIPTION.rst,sha256=gPBoq1Ruc1QDWyLeXPlieL3F-XZz1_WXB-5gctCfg-A,1098 +six-1.11.0.dist-info/METADATA,sha256=06nZXaDYN3vnC-pmUjhkECYFH_a--ywvcPIpUdNeH1o,1607 +six-1.11.0.dist-info/RECORD,, +six-1.11.0.dist-info/WHEEL,sha256=o2k-Qa-RMNIJmUdIc7KU6VWR_ErNRbWNlxDIpl7lm34,110 +six-1.11.0.dist-info/metadata.json,sha256=ac3f4f7MpSHSnZ1SqhHCwsL7FGWMG0gBEb0hhS2eSSM,703 +six-1.11.0.dist-info/top_level.txt,sha256=_iVH_iYEtEXnD8nYGQYpYFUvkUW9sEO1GYbkeKSAais,4 +six-1.11.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +__pycache__/six.cpython-34.pyc,, diff --git a/lib/python3.4/site-packages/pip-8.1.1.dist-info/WHEEL b/lib/python3.4/site-packages/six-1.11.0.dist-info/WHEEL similarity index 100% rename from lib/python3.4/site-packages/pip-8.1.1.dist-info/WHEEL rename to lib/python3.4/site-packages/six-1.11.0.dist-info/WHEEL diff --git a/lib/python3.4/site-packages/six-1.11.0.dist-info/metadata.json b/lib/python3.4/site-packages/six-1.11.0.dist-info/metadata.json new file mode 100644 index 0000000..2c7fcea --- /dev/null +++ b/lib/python3.4/site-packages/six-1.11.0.dist-info/metadata.json @@ -0,0 +1 @@ +{"classifiers": ["Programming Language :: Python :: 2", "Programming Language :: Python :: 3", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Topic :: Software Development :: Libraries", "Topic :: Utilities"], "extensions": {"python.details": {"contacts": [{"email": "benjamin@python.org", "name": "Benjamin Peterson", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "http://pypi.python.org/pypi/six/"}}}, "generator": "bdist_wheel (0.29.0)", "license": "MIT", "metadata_version": "2.0", "name": "six", "summary": "Python 2 and 3 compatibility utilities", "test_requires": [{"requires": ["pytest"]}], "version": "1.11.0"} \ No newline at end of file diff --git a/lib/python3.4/site-packages/six-1.10.0.dist-info/top_level.txt b/lib/python3.4/site-packages/six-1.11.0.dist-info/top_level.txt similarity index 100% rename from lib/python3.4/site-packages/six-1.10.0.dist-info/top_level.txt rename to lib/python3.4/site-packages/six-1.11.0.dist-info/top_level.txt diff --git a/lib/python3.4/site-packages/six.py b/lib/python3.4/site-packages/six.py index 190c023..6bf4fd3 100644 --- a/lib/python3.4/site-packages/six.py +++ b/lib/python3.4/site-packages/six.py @@ -1,6 +1,4 @@ -"""Utilities for writing code that runs on Python 2 and 3""" - -# Copyright (c) 2010-2015 Benjamin Peterson +# Copyright (c) 2010-2017 Benjamin Peterson # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal @@ -20,6 +18,8 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. +"""Utilities for writing code that runs on Python 2 and 3""" + from __future__ import absolute_import import functools @@ -29,7 +29,7 @@ import sys import types __author__ = "Benjamin Peterson " -__version__ = "1.10.0" +__version__ = "1.11.0" # Useful for very coarse version differentiation. 
@@ -241,6 +241,7 @@ _moved_attributes = [ MovedAttribute("map", "itertools", "builtins", "imap", "map"), MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"), MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"), + MovedAttribute("getoutput", "commands", "subprocess"), MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"), MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"), MovedAttribute("reduce", "__builtin__", "functools"), @@ -262,10 +263,11 @@ _moved_attributes = [ MovedModule("html_entities", "htmlentitydefs", "html.entities"), MovedModule("html_parser", "HTMLParser", "html.parser"), MovedModule("http_client", "httplib", "http.client"), + MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"), + MovedModule("email_mime_image", "email.MIMEImage", "email.mime.image"), MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"), MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"), MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"), - MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"), MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"), MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"), MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"), @@ -337,10 +339,12 @@ _urllib_parse_moved_attributes = [ MovedAttribute("quote_plus", "urllib", "urllib.parse"), MovedAttribute("unquote", "urllib", "urllib.parse"), MovedAttribute("unquote_plus", "urllib", "urllib.parse"), + MovedAttribute("unquote_to_bytes", "urllib", "urllib.parse", "unquote", "unquote_to_bytes"), MovedAttribute("urlencode", "urllib", "urllib.parse"), MovedAttribute("splitquery", "urllib", "urllib.parse"), MovedAttribute("splittag", "urllib", "urllib.parse"), MovedAttribute("splituser", "urllib", "urllib.parse"), + MovedAttribute("splitvalue", "urllib", "urllib.parse"), MovedAttribute("uses_fragment", "urlparse", "urllib.parse"), MovedAttribute("uses_netloc", "urlparse", "urllib.parse"), MovedAttribute("uses_params", "urlparse", "urllib.parse"), @@ -416,6 +420,8 @@ _urllib_request_moved_attributes = [ MovedAttribute("URLopener", "urllib", "urllib.request"), MovedAttribute("FancyURLopener", "urllib", "urllib.request"), MovedAttribute("proxy_bypass", "urllib", "urllib.request"), + MovedAttribute("parse_http_list", "urllib2", "urllib.request"), + MovedAttribute("parse_keqv_list", "urllib2", "urllib.request"), ] for attr in _urllib_request_moved_attributes: setattr(Module_six_moves_urllib_request, attr.name, attr) @@ -679,11 +685,15 @@ if PY3: exec_ = getattr(moves.builtins, "exec") def reraise(tp, value, tb=None): - if value is None: - value = tp() - if value.__traceback__ is not tb: - raise value.with_traceback(tb) - raise value + try: + if value is None: + value = tp() + if value.__traceback__ is not tb: + raise value.with_traceback(tb) + raise value + finally: + value = None + tb = None else: def exec_(_code_, _globs_=None, _locs_=None): @@ -699,19 +709,28 @@ else: exec("""exec _code_ in _globs_, _locs_""") exec_("""def reraise(tp, value, tb=None): - raise tp, value, tb + try: + raise tp, value, tb + finally: + tb = None """) if sys.version_info[:2] == (3, 2): exec_("""def raise_from(value, from_value): - if from_value is None: - raise value - raise value from from_value + try: + if from_value is None: + raise value + raise value from from_value + finally: + value = None """) elif sys.version_info[:2] > (3, 2): exec_("""def 
raise_from(value, from_value): - raise value from from_value + try: + raise value from from_value + finally: + value = None """) else: def raise_from(value, from_value): @@ -802,10 +821,14 @@ def with_metaclass(meta, *bases): # This requires a bit of explanation: the basic idea is to make a dummy # metaclass for one level of class instantiation that replaces itself with # the actual metaclass. - class metaclass(meta): + class metaclass(type): def __new__(cls, name, this_bases, d): return meta(name, bases, d) + + @classmethod + def __prepare__(cls, name, this_bases): + return meta.__prepare__(name, bases) return type.__new__(metaclass, 'temporary_class', (), {}) diff --git a/lib/python3.4/site-packages/sqlalchemy/cprocessors.cpython-34m.so b/lib/python3.4/site-packages/sqlalchemy/cprocessors.cpython-34m.so index 414727a..7b1c78e 100755 Binary files a/lib/python3.4/site-packages/sqlalchemy/cprocessors.cpython-34m.so and b/lib/python3.4/site-packages/sqlalchemy/cprocessors.cpython-34m.so differ diff --git a/lib/python3.4/site-packages/sqlalchemy/cprocessors.cpython-35m-x86_64-linux-gnu.so b/lib/python3.4/site-packages/sqlalchemy/cprocessors.cpython-35m-x86_64-linux-gnu.so index edcd4f3..8d5baf4 100755 Binary files a/lib/python3.4/site-packages/sqlalchemy/cprocessors.cpython-35m-x86_64-linux-gnu.so and b/lib/python3.4/site-packages/sqlalchemy/cprocessors.cpython-35m-x86_64-linux-gnu.so differ diff --git a/lib/python3.4/site-packages/sqlalchemy/cprocessors.cpython-36m-x86_64-linux-gnu.so b/lib/python3.4/site-packages/sqlalchemy/cprocessors.cpython-36m-x86_64-linux-gnu.so new file mode 100755 index 0000000..19f3794 Binary files /dev/null and b/lib/python3.4/site-packages/sqlalchemy/cprocessors.cpython-36m-x86_64-linux-gnu.so differ diff --git a/lib/python3.4/site-packages/sqlalchemy/cresultproxy.cpython-34m.so b/lib/python3.4/site-packages/sqlalchemy/cresultproxy.cpython-34m.so index dabde30..27c0c4f 100755 Binary files a/lib/python3.4/site-packages/sqlalchemy/cresultproxy.cpython-34m.so and b/lib/python3.4/site-packages/sqlalchemy/cresultproxy.cpython-34m.so differ diff --git a/lib/python3.4/site-packages/sqlalchemy/cresultproxy.cpython-35m-x86_64-linux-gnu.so b/lib/python3.4/site-packages/sqlalchemy/cresultproxy.cpython-35m-x86_64-linux-gnu.so index 5b32295..526596f 100755 Binary files a/lib/python3.4/site-packages/sqlalchemy/cresultproxy.cpython-35m-x86_64-linux-gnu.so and b/lib/python3.4/site-packages/sqlalchemy/cresultproxy.cpython-35m-x86_64-linux-gnu.so differ diff --git a/lib/python3.4/site-packages/sqlalchemy/cresultproxy.cpython-36m-x86_64-linux-gnu.so b/lib/python3.4/site-packages/sqlalchemy/cresultproxy.cpython-36m-x86_64-linux-gnu.so new file mode 100755 index 0000000..20aacaa Binary files /dev/null and b/lib/python3.4/site-packages/sqlalchemy/cresultproxy.cpython-36m-x86_64-linux-gnu.so differ diff --git a/lib/python3.4/site-packages/sqlalchemy/cutils.cpython-34m.so b/lib/python3.4/site-packages/sqlalchemy/cutils.cpython-34m.so index 429e17c..b5b447d 100755 Binary files a/lib/python3.4/site-packages/sqlalchemy/cutils.cpython-34m.so and b/lib/python3.4/site-packages/sqlalchemy/cutils.cpython-34m.so differ diff --git a/lib/python3.4/site-packages/sqlalchemy/cutils.cpython-35m-x86_64-linux-gnu.so b/lib/python3.4/site-packages/sqlalchemy/cutils.cpython-35m-x86_64-linux-gnu.so index 362e2b4..15aaff6 100755 Binary files a/lib/python3.4/site-packages/sqlalchemy/cutils.cpython-35m-x86_64-linux-gnu.so and b/lib/python3.4/site-packages/sqlalchemy/cutils.cpython-35m-x86_64-linux-gnu.so differ 
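For context on the six.with_metaclass rewrite a few hunks above (the temporary metaclass now subclasses type and forwards __prepare__ to the real metaclass): a minimal usage sketch assuming nothing beyond the public six API; the Meta and Widget names are illustrative and not part of this diff:

    import six


    class Meta(type):
        """Example metaclass; real code would add behaviour here."""


    class Widget(six.with_metaclass(Meta, object)):
        """Declared once, gets Meta as its metaclass on Python 2 and 3."""


    # The temporary class created by with_metaclass is replaced by the real one.
    assert type(Widget) is Meta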
diff --git a/lib/python3.4/site-packages/sqlalchemy/cutils.cpython-36m-x86_64-linux-gnu.so b/lib/python3.4/site-packages/sqlalchemy/cutils.cpython-36m-x86_64-linux-gnu.so new file mode 100755 index 0000000..8cfcf0d Binary files /dev/null and b/lib/python3.4/site-packages/sqlalchemy/cutils.cpython-36m-x86_64-linux-gnu.so differ diff --git a/lib/python3.4/site-packages/wheel-0.29.0.dist-info/INSTALLER b/lib/python3.4/site-packages/wheel-0.29.0.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/lib/python3.4/site-packages/wheel-0.29.0.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/lib/python3.4/site-packages/wheel-0.29.0.dist-info/RECORD b/lib/python3.4/site-packages/wheel-0.29.0.dist-info/RECORD deleted file mode 100644 index 40615fe..0000000 --- a/lib/python3.4/site-packages/wheel-0.29.0.dist-info/RECORD +++ /dev/null @@ -1,81 +0,0 @@ -wheel/__init__.py,sha256=YumT_ajakW9VAgnV3umrUYypy6VzpbLKE-OPbVnWm8M,96 -wheel/__main__.py,sha256=LbDDGId08qTxuhLYqX5mXO1MUs9D-Hhxb5NVc53ej0k,416 -wheel/archive.py,sha256=eGyE04hV52JjU3KulISGPqQB340uDwIVwBcJkghkxx4,2286 -wheel/bdist_wheel.py,sha256=4QOgk1c66zu045XjET9Enz4ODW89LlHzJXukbIft_yY,17441 -wheel/decorator.py,sha256=U2K77ZZ8x3x5vSIGCcEeh8GAxB6rABB7AlDwRukaoCk,541 -wheel/egg2wheel.py,sha256=_JNueL6ZcWOxiPdL1r71fB9Mwuzmln4cZOIf_gA0Nc4,2633 -wheel/eggnames.txt,sha256=X6LYsOjMd8llrzLo3SB3FwJ-uN9IskJqYKJDq2zIcbs,2490 -wheel/install.py,sha256=yzPyjCAUmIgPTk_be-tbXFOJ3m08kYt3T3Tf1R8Lmh0,18070 -wheel/metadata.py,sha256=ttwI-jwjN5YnmDFbfLR4mFKod4HSnd1tje8lK4rQNqc,11050 -wheel/paths.py,sha256=6AmG-MKx-NeJOC9zUJoSSZjYhZYGmX1UHG_N0IbkplI,1130 -wheel/pep425tags.py,sha256=vbazM-mj7u-8s-YauwSykBhXM_YreCb8mG-eZyz2vl4,5341 -wheel/pkginfo.py,sha256=-gLOTuQrkRf4geOD04qm0IUkdYAbjg81j-5zNtvWA9A,1225 -wheel/util.py,sha256=Pe2JZ9grNjmAcRRJay20FlDVJUbePWaR5ltySo3c6zQ,4890 -wheel/wininst2wheel.py,sha256=_cTbf8bcAt481G6tYRQsOUT9ZGeg3artyrS0tBU9Tzs,6961 -wheel/signatures/__init__.py,sha256=kZpKNsmxBDZhuXf85_uyy5qBH40AxVT_0utbAiTp_yg,3779 -wheel/signatures/djbec.py,sha256=53HTnlNlfqOHFDf8OY_6KL8sxR4swiLsWhUjtw3W3nI,6755 -wheel/signatures/ed25519py.py,sha256=SeTxiMZ7kmoMdIurBSaKb8Ku-amGnf6ZTRGElLzV8iI,1695 -wheel/signatures/keys.py,sha256=x3g4sAcs7KbIHM-5V8KWdMc24_VK7VeD-pjCyktNnYo,3320 -wheel/test/__init__.py,sha256=M0NZuQ7-112l8K2h1eayVvSmvQrufrOcD5AYKgIf_Is,1 -wheel/test/pydist-schema.json,sha256=ynEvNvThC1zRa7FioMsW3k-9nl98ytEoo1_3xbOP2eo,11483 -wheel/test/test-1.0-py2.py3-none-win32.whl,sha256=tCbefJJ7RpQJReRQaSRiwnTDM-YDlBpbcX9Rjcv9bf4,5224 -wheel/test/test_basic.py,sha256=2DIvjApcshiLpXVsEhXvN3l62ZrwS0jJcWK8SyASoNU,6405 -wheel/test/test_install.py,sha256=c0EECXPkVIGhCD9V5ad2qsBPRPYb1ehhaS0k6Gv5JQc,1866 -wheel/test/test_keys.py,sha256=5mBc9tf2TwC3TCpx1ySTYsCe5yvd6kMK64AlUUCcKEY,2575 -wheel/test/test_paths.py,sha256=-QOVUDFJIpF9OPNRzCJ-Xf4nBibKlUe_g4aaZ6sm3wE,172 -wheel/test/test_ranking.py,sha256=FSAQX4oHZ476jLddqVMlGmQFtbEel9a8SzOngJ03TJw,1496 -wheel/test/test_signatures.py,sha256=Z4REXj62p28gbSDB2D_OxopA-TTXICTZ5e2yZ3ejEVc,1120 -wheel/test/test_tagopt.py,sha256=t7A-iRbe3bH2Iz6NKdSEOpFFIFSF9I4ATmlrRBXoCcQ,5927 -wheel/test/test_tool.py,sha256=yt5dAr8mp51WoDzt0MmlwPk0xf7FvXAedy-YlNZXv1I,656 -wheel/test/test_wheelfile.py,sha256=x4exzQYuQB48YHqhU2NZyN76k-BSK6784-7rye0q6Ss,4585 -wheel/test/complex-dist/setup.py,sha256=4i1_AJoJxo4i6ik-mvydo23AF8BHzjmAEQYa5J4YPK4,855 -wheel/test/complex-dist/complexdist/__init__.py,sha256=PGDJWQTxjLXqnNrbqmTKK_yk6DVQBNeRp-YpP7w1rVk,23 
-wheel/test/headers.dist/header.h,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -wheel/test/headers.dist/headersdist.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -wheel/test/headers.dist/setup.py,sha256=p3d9TGy7NLX6TnkBHnNHzedqYoOkdGQvwheyDQjf-JQ,324 -wheel/test/simple.dist/setup.py,sha256=8zWen71Um-iN_A5thot6VFogrkWs_RGVO-jr_MxkFog,383 -wheel/test/simple.dist/simpledist/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -wheel/tool/__init__.py,sha256=anweXjmIg4EIHPkH0kOgcTx-gyOmzE4ieRe2yk-aHDA,13229 -wheel-0.29.0.dist-info/DESCRIPTION.rst,sha256=JH6mogUIatQVQewIh4GB1ywCxuWbm7G4TjI_63dURp8,9813 -wheel-0.29.0.dist-info/METADATA,sha256=SA310hLnZJJFgp1TRwFLCIiXurVwKpIq2w3KWhMdgdo,11019 -wheel-0.29.0.dist-info/RECORD,, -wheel-0.29.0.dist-info/WHEEL,sha256=o2k-Qa-RMNIJmUdIc7KU6VWR_ErNRbWNlxDIpl7lm34,110 -wheel-0.29.0.dist-info/entry_points.txt,sha256=2LFQDKAUKNMG-2zNtbLscfirPr9BEqBuwc-JALCv-D0,107 -wheel-0.29.0.dist-info/metadata.json,sha256=dxlCIm4231kQk4VlVdiN5ABz3l0nWY3gQ9HPoDammlU,1510 -wheel-0.29.0.dist-info/top_level.txt,sha256=HxSBIbgEstMPe4eFawhA66Mq-QYHMopXVoAncfjb_1c,6 -../../../bin/wheel,sha256=9AId6sAKuD-WAQ9mDTPhEG_pesWmjL2UuvKmOR8tOTo,278 -wheel-0.29.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -wheel/test/simple.dist/simpledist/__pycache__/__init__.cpython-34.pyc,, -wheel/__pycache__/wininst2wheel.cpython-34.pyc,, -wheel/test/headers.dist/__pycache__/headersdist.cpython-34.pyc,, -wheel/__pycache__/__main__.cpython-34.pyc,, -wheel/test/__pycache__/test_tool.cpython-34.pyc,, -wheel/__pycache__/metadata.cpython-34.pyc,, -wheel/test/complex-dist/__pycache__/setup.cpython-34.pyc,, -wheel/signatures/__pycache__/djbec.cpython-34.pyc,, -wheel/test/__pycache__/__init__.cpython-34.pyc,, -wheel/signatures/__pycache__/keys.cpython-34.pyc,, -wheel/test/__pycache__/test_signatures.cpython-34.pyc,, -wheel/tool/__pycache__/__init__.cpython-34.pyc,, -wheel/test/simple.dist/__pycache__/setup.cpython-34.pyc,, -wheel/test/__pycache__/test_keys.cpython-34.pyc,, -wheel/__pycache__/archive.cpython-34.pyc,, -wheel/test/headers.dist/__pycache__/setup.cpython-34.pyc,, -wheel/test/complex-dist/complexdist/__pycache__/__init__.cpython-34.pyc,, -wheel/__pycache__/pkginfo.cpython-34.pyc,, -wheel/__pycache__/__init__.cpython-34.pyc,, -wheel/__pycache__/bdist_wheel.cpython-34.pyc,, -wheel/signatures/__pycache__/__init__.cpython-34.pyc,, -wheel/__pycache__/util.cpython-34.pyc,, -wheel/test/__pycache__/test_ranking.cpython-34.pyc,, -wheel/test/__pycache__/test_basic.cpython-34.pyc,, -wheel/__pycache__/install.cpython-34.pyc,, -wheel/__pycache__/decorator.cpython-34.pyc,, -wheel/test/__pycache__/test_install.cpython-34.pyc,, -wheel/signatures/__pycache__/ed25519py.cpython-34.pyc,, -wheel/test/__pycache__/test_tagopt.cpython-34.pyc,, -wheel/test/__pycache__/test_wheelfile.cpython-34.pyc,, -wheel/__pycache__/pep425tags.cpython-34.pyc,, -wheel/test/__pycache__/test_paths.cpython-34.pyc,, -wheel/__pycache__/paths.cpython-34.pyc,, -wheel/__pycache__/egg2wheel.cpython-34.pyc,, diff --git a/lib/python3.4/site-packages/wheel-0.29.0.dist-info/WHEEL b/lib/python3.4/site-packages/wheel-0.29.0.dist-info/WHEEL deleted file mode 100644 index 8b6dd1b..0000000 --- a/lib/python3.4/site-packages/wheel-0.29.0.dist-info/WHEEL +++ /dev/null @@ -1,6 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.29.0) -Root-Is-Purelib: true -Tag: py2-none-any -Tag: py3-none-any - diff --git a/lib/python3.4/site-packages/wheel-0.29.0.dist-info/metadata.json 
b/lib/python3.4/site-packages/wheel-0.29.0.dist-info/metadata.json deleted file mode 100644 index 1a42ef9..0000000 --- a/lib/python3.4/site-packages/wheel-0.29.0.dist-info/metadata.json +++ /dev/null @@ -1 +0,0 @@ -{"classifiers": ["Development Status :: 4 - Beta", "Intended Audience :: Developers", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.2", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4"], "extensions": {"python.commands": {"wrap_console": {"wheel": "wheel.tool:main"}}, "python.details": {"contacts": [{"email": "dholth@fastmail.fm", "name": "Daniel Holth", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "https://bitbucket.org/pypa/wheel/"}}, "python.exports": {"console_scripts": {"wheel": "wheel.tool:main"}, "distutils.commands": {"bdist_wheel": "wheel.bdist_wheel:bdist_wheel"}}}, "extras": ["faster-signatures", "signatures", "tool"], "generator": "bdist_wheel (0.29.0)", "keywords": ["wheel", "packaging"], "license": "MIT", "metadata_version": "2.0", "name": "wheel", "run_requires": [{"extra": "faster-signatures", "requires": ["ed25519ll"]}, {"extra": "signatures", "requires": ["keyring", "keyrings.alt"]}, {"environment": "python_version==\"2.6\"", "requires": ["argparse"]}, {"environment": "python_version==\"2.6\"", "extra": "signatures", "requires": ["importlib"]}, {"environment": "sys_platform!=\"win32\"", "extra": "signatures", "requires": ["pyxdg"]}], "summary": "A built-package format for Python.", "version": "0.29.0"} \ No newline at end of file diff --git a/lib/python3.4/site-packages/wheel-0.29.0.dist-info/DESCRIPTION.rst b/lib/python3.4/site-packages/wheel-0.30.0.dist-info/DESCRIPTION.rst similarity index 92% rename from lib/python3.4/site-packages/wheel-0.29.0.dist-info/DESCRIPTION.rst rename to lib/python3.4/site-packages/wheel-0.30.0.dist-info/DESCRIPTION.rst index 07a9baf..9f37cad 100644 --- a/lib/python3.4/site-packages/wheel-0.29.0.dist-info/DESCRIPTION.rst +++ b/lib/python3.4/site-packages/wheel-0.30.0.dist-info/DESCRIPTION.rst @@ -19,7 +19,7 @@ line utility. The wheel documentation is at http://wheel.rtfd.org/. The file format is documented in PEP 427 (http://www.python.org/dev/peps/pep-0427/). -The reference implementation is at https://bitbucket.org/pypa/wheel +The reference implementation is at https://github.com/pypa/wheel Why not egg? ------------ @@ -49,6 +49,21 @@ rooms, and mailing lists is expected to follow the `PyPA Code of Conduct`_. .. _PyPA Code of Conduct: https://www.pypa.io/en/latest/code-of-conduct/ +0.30.0 +====== +- Added py-limited-api {cp32|cp33|cp34|...} flag to produce cpNN.abi3.{arch} + tags on CPython 3. +- Documented the ``license_file`` metadata key +- Improved Python, abi tagging for `wheel convert`. Thanks Ales Erjavec. +- Fixed `>` being prepended to lines starting with "From" in the long description +- Added support for specifying a build number (as per PEP 427). + Thanks Ian Cordasco. +- Made the order of files in generated ZIP files deterministic. + Thanks Matthias Bach. +- Made the order of requirements in metadata deterministic. Thanks Chris Lamb. +- Fixed `wheel install` clobbering existing files +- Improved the error message when trying to verify an unsigned wheel file +- Removed support for Python 2.6, 3.2 and 3.3. 
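For a concrete picture of how the build-number and py-limited-api additions listed above fit into PEP 427 naming, the following rough Python sketch assembles a wheel filename by hand; the helper name, package name, version and platform are invented for illustration and are not taken from wheel itself.

def wheel_filename(name, version, pyver, abi, plat, build=None):
    # PEP 427: {name}-{version}(-{build})?-{python tag}-{abi tag}-{platform tag}.whl
    parts = [name, version]
    if build is not None:
        parts.append(build)          # optional build tag, e.g. '1'
    parts += [pyver, abi, plat]
    return '-'.join(parts) + '.whl'

# --py-limited-api=cp34 leads to a cp34/abi3 pair usable on CPython >= 3.4:
print(wheel_filename('examplepkg', '1.0', 'cp34', 'abi3', 'linux_x86_64', build='1'))
# examplepkg-1.0-1-cp34-abi3-linux_x86_64.whl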
0.29.0 ====== diff --git a/lib/python3.4/site-packages/six-1.10.0.dist-info/INSTALLER b/lib/python3.4/site-packages/wheel-0.30.0.dist-info/INSTALLER similarity index 100% rename from lib/python3.4/site-packages/six-1.10.0.dist-info/INSTALLER rename to lib/python3.4/site-packages/wheel-0.30.0.dist-info/INSTALLER diff --git a/lib/python3.4/site-packages/wheel-0.30.0.dist-info/LICENSE.txt b/lib/python3.4/site-packages/wheel-0.30.0.dist-info/LICENSE.txt new file mode 100644 index 0000000..c3441e6 --- /dev/null +++ b/lib/python3.4/site-packages/wheel-0.30.0.dist-info/LICENSE.txt @@ -0,0 +1,22 @@ +"wheel" copyright (c) 2012-2014 Daniel Holth and +contributors. + +The MIT License + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included +in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR +OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, +ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. diff --git a/lib/python3.4/site-packages/wheel-0.29.0.dist-info/METADATA b/lib/python3.4/site-packages/wheel-0.30.0.dist-info/METADATA similarity index 88% rename from lib/python3.4/site-packages/wheel-0.29.0.dist-info/METADATA rename to lib/python3.4/site-packages/wheel-0.30.0.dist-info/METADATA index 8962475..52b6411 100644 --- a/lib/python3.4/site-packages/wheel-0.29.0.dist-info/METADATA +++ b/lib/python3.4/site-packages/wheel-0.30.0.dist-info/METADATA @@ -1,33 +1,35 @@ Metadata-Version: 2.0 Name: wheel -Version: 0.29.0 +Version: 0.30.0 Summary: A built-package format for Python. 
-Home-page: https://bitbucket.org/pypa/wheel/ -Author: Daniel Holth -Author-email: dholth@fastmail.fm +Home-page: https://github.com/pypa/wheel +Author: Alex Grönholm +Author-email: alex.gronholm@nextday.fi License: MIT +Description-Content-Type: UNKNOWN Keywords: wheel,packaging Platform: UNKNOWN -Classifier: Development Status :: 4 - Beta +Classifier: Development Status :: 5 - Production/Stable Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License Classifier: Programming Language :: Python Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.6 Classifier: Programming Language :: Python :: 2.7 Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.2 -Classifier: Programming Language :: Python :: 3.3 Classifier: Programming Language :: Python :: 3.4 -Requires-Dist: argparse; python_version=="2.6" +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 Provides-Extra: faster-signatures Requires-Dist: ed25519ll; extra == 'faster-signatures' Provides-Extra: signatures Requires-Dist: keyring; extra == 'signatures' Requires-Dist: keyrings.alt; extra == 'signatures' Provides-Extra: signatures -Requires-Dist: importlib; python_version=="2.6" and extra == 'signatures' -Provides-Extra: signatures Requires-Dist: pyxdg; sys_platform!="win32" and extra == 'signatures' +Provides-Extra: test +Requires-Dist: jsonschema; extra == 'test' +Requires-Dist: pytest (>=3.0.0); extra == 'test' +Requires-Dist: pytest-cov; extra == 'test' Provides-Extra: tool Wheel @@ -51,7 +53,7 @@ line utility. The wheel documentation is at http://wheel.rtfd.org/. The file format is documented in PEP 427 (http://www.python.org/dev/peps/pep-0427/). -The reference implementation is at https://bitbucket.org/pypa/wheel +The reference implementation is at https://github.com/pypa/wheel Why not egg? ------------ @@ -81,6 +83,21 @@ rooms, and mailing lists is expected to follow the `PyPA Code of Conduct`_. .. _PyPA Code of Conduct: https://www.pypa.io/en/latest/code-of-conduct/ +0.30.0 +====== +- Added py-limited-api {cp32|cp33|cp34|...} flag to produce cpNN.abi3.{arch} + tags on CPython 3. +- Documented the ``license_file`` metadata key +- Improved Python, abi tagging for `wheel convert`. Thanks Ales Erjavec. +- Fixed `>` being prepended to lines starting with "From" in the long description +- Added support for specifying a build number (as per PEP 427). + Thanks Ian Cordasco. +- Made the order of files in generated ZIP files deterministic. + Thanks Matthias Bach. +- Made the order of requirements in metadata deterministic. Thanks Chris Lamb. +- Fixed `wheel install` clobbering existing files +- Improved the error message when trying to verify an unsigned wheel file +- Removed support for Python 2.6, 3.2 and 3.3. 
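The option checks behind the build-number and py-limited-api entries above appear in the bdist_wheel changes further down in this patch; as a standalone approximation (the function name check_wheel_options is invented here), they amount to roughly:

import re

PY_LIMITED_API_PATTERN = r'cp3\d'   # pattern used by the patched bdist_wheel

def check_wheel_options(py_limited_api=None, build_number=None):
    # The limited-API tag must look like cp32/cp33/..., and a PEP 427 build
    # tag must start with a digit.
    if py_limited_api and not re.match(PY_LIMITED_API_PATTERN, py_limited_api):
        raise ValueError("py-limited-api must match '%s'" % PY_LIMITED_API_PATTERN)
    if build_number is not None and not build_number[:1].isdigit():
        raise ValueError("Build tag (build-number) must start with a digit.")

check_wheel_options(py_limited_api='cp34', build_number='1')   # accepted
# check_wheel_options(py_limited_api='cp2') would raise ValueError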
0.29.0 ====== diff --git a/lib/python3.4/site-packages/wheel-0.30.0.dist-info/RECORD b/lib/python3.4/site-packages/wheel-0.30.0.dist-info/RECORD new file mode 100644 index 0000000..291a54b --- /dev/null +++ b/lib/python3.4/site-packages/wheel-0.30.0.dist-info/RECORD @@ -0,0 +1,46 @@ +wheel/__init__.py,sha256=ja92NKda3sstt4uKroYgFATu736whcI33p3GJNdslLQ,96 +wheel/__main__.py,sha256=K--m7mq-27NO0fm-a8KlthkucCe0w_-0hVxL3uDujkU,419 +wheel/archive.py,sha256=oEv42UnpxkoFMKcLXQ9RD8a8oic4X3oe2_H5FAgJ7_M,2376 +wheel/bdist_wheel.py,sha256=qKWdyvpkdmuLB4_GGIZsjmlcMLZuZDd8tRvaQI0w_eo,18852 +wheel/decorator.py,sha256=U2K77ZZ8x3x5vSIGCcEeh8GAxB6rABB7AlDwRukaoCk,541 +wheel/egg2wheel.py,sha256=me4Iaz4idCvS-xjfAzfb2dXXlXx_w6AgLjH6hi1Bt1A,3043 +wheel/install.py,sha256=zYQ-A8uQi-R2PwMvOh64YMlQDplqYpcBVM0EmbxZu8Y,18417 +wheel/metadata.py,sha256=SzI1MtzITZJuAJuvUVzEWi60VhgDbXSV_hapyiX0rlw,11561 +wheel/paths.py,sha256=OAtaJgCivlKvJKw1qC3YbJypvp2d38Eka8GQWdBWNZw,1129 +wheel/pep425tags.py,sha256=Lk9zYm1rrHG1X3RKlf9plcwpsoSZT8UR7fG3jhaoZrQ,5760 +wheel/pkginfo.py,sha256=GR76kupQzn1x9sKDaXuE6B6FsZ4OkfRtG7pndlXPvQ4,1257 +wheel/util.py,sha256=eJB-mrhMAaCGcoKhTLDYdpCf5N8BMLtX4usW_7qeZBg,4732 +wheel/wininst2wheel.py,sha256=afPAHWwa7FY0IkpG-BuuuY-dlB93VmFPrXff511NkBk,7772 +wheel/signatures/__init__.py,sha256=O7kZICZvXxN5YRkCYrPmAEr1LpGaZKJh5sLPWIRIoYE,3766 +wheel/signatures/djbec.py,sha256=jnfWxdS7dwLjiO6n0hy-4jLa_71SPrKWL0-7ocDrSHc,7035 +wheel/signatures/ed25519py.py,sha256=nFKDMq4LW2iJKk4IZKMxY46GyZNYPKxuWha9xYHk9lE,1669 +wheel/signatures/keys.py,sha256=k4j4yGZL31Dt2pa5TneIEeq6qkVIXEPExmFxiZxpE1Y,3299 +wheel/tool/__init__.py,sha256=rOy5VFvj-gTKgMwi_u2_iNu_Pq6aqw4rEfaciDTbmwg,13421 +wheel-0.30.0.dist-info/DESCRIPTION.rst,sha256=Alb3Ol--LhPgmWuBBPfzu54xzQ8J2skWNV34XCjhe0k,10549 +wheel-0.30.0.dist-info/LICENSE.txt,sha256=zKniDGrx_Pv2lAjzd3aShsvuvN7TNhAMm0o_NfvmNeQ,1125 +wheel-0.30.0.dist-info/METADATA,sha256=fYLxr6baQD-wDn4Yu8t-8fF7PJuiBTcThsl2UKBE7kg,11815 +wheel-0.30.0.dist-info/RECORD,, +wheel-0.30.0.dist-info/WHEEL,sha256=kdsN-5OJAZIiHN-iO4Rhl82KyS0bDWf4uBwMbkNafr8,110 +wheel-0.30.0.dist-info/entry_points.txt,sha256=pTyeGVsucyfr_BXe5OQKuA1Bp5YKaIAWy5pejkq4Qx0,109 +wheel-0.30.0.dist-info/metadata.json,sha256=neXQocJnVqPTjr4zpuOVdxBGCmjrTsOs76AvP8ngyJY,1522 +wheel-0.30.0.dist-info/top_level.txt,sha256=HxSBIbgEstMPe4eFawhA66Mq-QYHMopXVoAncfjb_1c,6 +../../../bin/wheel,sha256=sjtPVJ0ZS5WdGK7UXcnQrN6MG_czYyrsndkMrC0qluw,279 +wheel-0.30.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +wheel/signatures/__pycache__/__init__.cpython-34.pyc,, +wheel/__pycache__/decorator.cpython-34.pyc,, +wheel/__pycache__/__main__.cpython-34.pyc,, +wheel/signatures/__pycache__/ed25519py.cpython-34.pyc,, +wheel/__pycache__/util.cpython-34.pyc,, +wheel/__pycache__/wininst2wheel.cpython-34.pyc,, +wheel/__pycache__/pkginfo.cpython-34.pyc,, +wheel/__pycache__/__init__.cpython-34.pyc,, +wheel/signatures/__pycache__/djbec.cpython-34.pyc,, +wheel/__pycache__/metadata.cpython-34.pyc,, +wheel/__pycache__/egg2wheel.cpython-34.pyc,, +wheel/signatures/__pycache__/keys.cpython-34.pyc,, +wheel/__pycache__/archive.cpython-34.pyc,, +wheel/__pycache__/bdist_wheel.cpython-34.pyc,, +wheel/tool/__pycache__/__init__.cpython-34.pyc,, +wheel/__pycache__/install.cpython-34.pyc,, +wheel/__pycache__/pep425tags.cpython-34.pyc,, +wheel/__pycache__/paths.cpython-34.pyc,, diff --git a/lib/python3.4/site-packages/six-1.10.0.dist-info/WHEEL b/lib/python3.4/site-packages/wheel-0.30.0.dist-info/WHEEL similarity index 70% rename from 
lib/python3.4/site-packages/six-1.10.0.dist-info/WHEEL rename to lib/python3.4/site-packages/wheel-0.30.0.dist-info/WHEEL index 0de529b..7332a41 100644 --- a/lib/python3.4/site-packages/six-1.10.0.dist-info/WHEEL +++ b/lib/python3.4/site-packages/wheel-0.30.0.dist-info/WHEEL @@ -1,5 +1,5 @@ Wheel-Version: 1.0 -Generator: bdist_wheel (0.26.0) +Generator: bdist_wheel (0.30.0) Root-Is-Purelib: true Tag: py2-none-any Tag: py3-none-any diff --git a/lib/python3.4/site-packages/wheel-0.29.0.dist-info/entry_points.txt b/lib/python3.4/site-packages/wheel-0.30.0.dist-info/entry_points.txt similarity index 58% rename from lib/python3.4/site-packages/wheel-0.29.0.dist-info/entry_points.txt rename to lib/python3.4/site-packages/wheel-0.30.0.dist-info/entry_points.txt index f57b8c0..4ad253e 100644 --- a/lib/python3.4/site-packages/wheel-0.29.0.dist-info/entry_points.txt +++ b/lib/python3.4/site-packages/wheel-0.30.0.dist-info/entry_points.txt @@ -2,4 +2,5 @@ wheel = wheel.tool:main [distutils.commands] -bdist_wheel = wheel.bdist_wheel:bdist_wheel \ No newline at end of file +bdist_wheel = wheel.bdist_wheel:bdist_wheel + diff --git a/lib/python3.4/site-packages/wheel-0.30.0.dist-info/metadata.json b/lib/python3.4/site-packages/wheel-0.30.0.dist-info/metadata.json new file mode 100644 index 0000000..709ccac --- /dev/null +++ b/lib/python3.4/site-packages/wheel-0.30.0.dist-info/metadata.json @@ -0,0 +1 @@ +{"classifiers": ["Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6"], "description_content_type": "UNKNOWN", "extensions": {"python.commands": {"wrap_console": {"wheel": "wheel.tool:main"}}, "python.details": {"contacts": [{"email": "alex.gronholm@nextday.fi", "name": "Alex Gr\u00f6nholm", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst", "license": "LICENSE.txt"}, "project_urls": {"Home": "https://github.com/pypa/wheel"}}, "python.exports": {"console_scripts": {"wheel": "wheel.tool:main"}, "distutils.commands": {"bdist_wheel": "wheel.bdist_wheel:bdist_wheel"}}}, "extras": ["faster-signatures", "signatures", "test", "tool"], "generator": "bdist_wheel (0.30.0)", "keywords": ["wheel", "packaging"], "license": "MIT", "metadata_version": "2.0", "name": "wheel", "run_requires": [{"extra": "faster-signatures", "requires": ["ed25519ll"]}, {"extra": "test", "requires": ["jsonschema", "pytest (>=3.0.0)", "pytest-cov"]}, {"extra": "signatures", "requires": ["keyring", "keyrings.alt"]}, {"environment": "sys_platform!=\"win32\"", "extra": "signatures", "requires": ["pyxdg"]}], "summary": "A built-package format for Python.", "version": "0.30.0"} \ No newline at end of file diff --git a/lib/python3.4/site-packages/wheel-0.29.0.dist-info/top_level.txt b/lib/python3.4/site-packages/wheel-0.30.0.dist-info/top_level.txt similarity index 100% rename from lib/python3.4/site-packages/wheel-0.29.0.dist-info/top_level.txt rename to lib/python3.4/site-packages/wheel-0.30.0.dist-info/top_level.txt diff --git a/lib/python3.4/site-packages/wheel/__init__.py b/lib/python3.4/site-packages/wheel/__init__.py index be2453a..64cd668 100644 --- a/lib/python3.4/site-packages/wheel/__init__.py +++ b/lib/python3.4/site-packages/wheel/__init__.py @@ -1,2 +1,2 @@ # 
__variables__ with double-quoted values will be available in setup.py: -__version__ = "0.29.0" +__version__ = "0.30.0" diff --git a/lib/python3.4/site-packages/wheel/__main__.py b/lib/python3.4/site-packages/wheel/__main__.py index 889359c..8f0c4fe 100644 --- a/lib/python3.4/site-packages/wheel/__main__.py +++ b/lib/python3.4/site-packages/wheel/__main__.py @@ -4,7 +4,8 @@ Wheel command line tool (enable python -m wheel syntax) import sys -def main(): # needed for console script + +def main(): # needed for console script if __package__ == '': # To be able to run 'python wheel-0.9.whl/wheel': import os.path @@ -13,5 +14,6 @@ def main(): # needed for console script import wheel.tool sys.exit(wheel.tool.main()) + if __name__ == "__main__": sys.exit(main()) diff --git a/lib/python3.4/site-packages/wheel/archive.py b/lib/python3.4/site-packages/wheel/archive.py index f928e6a..5b1647a 100644 --- a/lib/python3.4/site-packages/wheel/archive.py +++ b/lib/python3.4/site-packages/wheel/archive.py @@ -3,18 +3,16 @@ Archive tools for wheel. """ import os -import time -import logging import os.path +import time import zipfile - -log = logging.getLogger("wheel") +from distutils import log def archive_wheelfile(base_name, base_dir): - '''Archive all files under `base_dir` in a whl file and name it like + """Archive all files under `base_dir` in a whl file and name it like `base_name`. - ''' + """ olddir = os.path.abspath(os.curdir) base_name = os.path.abspath(base_name) try: @@ -43,8 +41,7 @@ def make_wheelfile_inner(base_name, base_dir='.'): date_time = time.gmtime(int(timestamp))[0:6] # XXX support bz2, xz when available - zip = zipfile.ZipFile(open(zip_filename, "wb+"), "w", - compression=zipfile.ZIP_DEFLATED) + zip = zipfile.ZipFile(zip_filename, "w", compression=zipfile.ZIP_DEFLATED) score = {'WHEEL': 1, 'METADATA': 2, 'RECORD': 3} deferred = [] @@ -62,7 +59,10 @@ def make_wheelfile_inner(base_name, base_dir='.'): log.info("adding '%s'" % path) for dirpath, dirnames, filenames in os.walk(base_dir): - for name in filenames: + # Sort the directory names so that `os.walk` will walk them in a + # defined order on the next iteration. 
+ dirnames.sort() + for name in sorted(filenames): path = os.path.normpath(os.path.join(dirpath, name)) if os.path.isfile(path): diff --git a/lib/python3.4/site-packages/wheel/bdist_wheel.py b/lib/python3.4/site-packages/wheel/bdist_wheel.py index 90db748..7fbeb4b 100644 --- a/lib/python3.4/site-packages/wheel/bdist_wheel.py +++ b/lib/python3.4/site-packages/wheel/bdist_wheel.py @@ -11,41 +11,39 @@ import subprocess import warnings import shutil import json -import wheel - -try: - import sysconfig -except ImportError: # pragma nocover - # Python < 2.7 - import distutils.sysconfig as sysconfig +import sys +import re +from email.generator import Generator +from distutils.core import Command +from distutils.sysconfig import get_python_version +from distutils import log as logger +from shutil import rmtree import pkg_resources -safe_name = pkg_resources.safe_name -safe_version = pkg_resources.safe_version - -from shutil import rmtree -from email.generator import Generator - -from distutils.util import get_platform -from distutils.core import Command -from distutils.sysconfig import get_python_version - -from distutils import log as logger - -from .pep425tags import get_abbr_impl, get_impl_ver, get_abi_tag +from .pep425tags import get_abbr_impl, get_impl_ver, get_abi_tag, get_platform from .util import native, open_for_csv from .archive import archive_wheelfile from .pkginfo import read_pkg_info, write_pkg_info from .metadata import pkginfo_to_dict from . import pep425tags, metadata +from . import __version__ as wheel_version + + +safe_name = pkg_resources.safe_name +safe_version = pkg_resources.safe_version + +PY_LIMITED_API_PATTERN = r'cp3\d' + def safer_name(name): return safe_name(name).replace('-', '_') + def safer_version(version): return safe_version(version).replace('-', '_') + class bdist_wheel(Command): description = 'create a wheel distribution' @@ -77,6 +75,13 @@ class bdist_wheel(Command): ('python-tag=', None, "Python implementation compatibility tag" " (default: py%s)" % get_impl_ver()[0]), + ('build-number=', None, + "Build number for this particular version. " + "As specified in PEP-0427, this must start with a digit. 
" + "[default: None]"), + ('py-limited-api=', None, + "Python tag (cp32|cp33|cpNN) for abi3 wheel tag" + " (default: false)"), ] boolean_options = ['keep-temp', 'skip-build', 'relative', 'universal'] @@ -98,6 +103,8 @@ class bdist_wheel(Command): self.group = None self.universal = False self.python_tag = 'py' + get_impl_ver()[0] + self.build_number = None + self.py_limited_api = False self.plat_name_supplied = False def finalize_options(self): @@ -116,6 +123,9 @@ class bdist_wheel(Command): self.root_is_pure = not (self.distribution.has_ext_modules() or self.distribution.has_c_libraries()) + if self.py_limited_api and not re.match(PY_LIMITED_API_PATTERN, self.py_limited_api): + raise ValueError("py-limited-api must match '%s'" % PY_LIMITED_API_PATTERN) + # Support legacy [wheel] section for setting universal wheel = self.distribution.get_option_dict('wheel') if 'universal' in wheel: @@ -124,11 +134,17 @@ class bdist_wheel(Command): if val.lower() in ('1', 'true', 'yes'): self.universal = True + if self.build_number is not None and not self.build_number[:1].isdigit(): + raise ValueError("Build tag (build-number) must start with a digit.") + @property def wheel_dist_name(self): """Return distribution full name with - replaced with _""" - return '-'.join((safer_name(self.distribution.get_name()), - safer_version(self.distribution.get_version()))) + components = (safer_name(self.distribution.get_name()), + safer_version(self.distribution.get_version())) + if self.build_number: + components += (self.build_number,) + return '-'.join(components) def get_tag(self): # bdist sets self.plat_name if unset, we should only use it for purepy @@ -139,6 +155,8 @@ class bdist_wheel(Command): plat_name = 'any' else: plat_name = self.plat_name or get_platform() + if plat_name in ('linux-x86_64', 'linux_x86_64') and sys.maxsize == 2147483647: + plat_name = 'linux_i686' plat_name = plat_name.replace('-', '_').replace('.', '_') if self.root_is_pure: @@ -150,13 +168,20 @@ class bdist_wheel(Command): else: impl_name = get_abbr_impl() impl_ver = get_impl_ver() - # PEP 3149 - abi_tag = str(get_abi_tag()).lower() - tag = (impl_name + impl_ver, abi_tag, plat_name) + impl = impl_name + impl_ver + # We don't work on CPython 3.1, 3.0. 
+ if self.py_limited_api and (impl_name + impl_ver).startswith('cp3'): + impl = self.py_limited_api + abi_tag = 'abi3' + else: + abi_tag = str(get_abi_tag()).lower() + tag = (impl, abi_tag, plat_name) supported_tags = pep425tags.get_supported( supplied_platform=plat_name if self.plat_name_supplied else None) # XXX switch to this alternate implementation for non-pure: - assert tag == supported_tags[0] + if not self.py_limited_api: + assert tag == supported_tags[0], "%s != %s" % (tag, supported_tags[0]) + assert tag in supported_tags, "would build wheel with unsupported tag {}".format(tag) return tag def get_archive_basename(self): @@ -254,12 +279,14 @@ class bdist_wheel(Command): else: rmtree(self.bdist_dir) - def write_wheelfile(self, wheelfile_base, generator='bdist_wheel (' + wheel.__version__ + ')'): + def write_wheelfile(self, wheelfile_base, generator='bdist_wheel (' + wheel_version + ')'): from email.message import Message msg = Message() msg['Wheel-Version'] = '1.0' # of the spec msg['Generator'] = generator msg['Root-Is-Purelib'] = str(self.root_is_pure).lower() + if self.build_number is not None: + msg['Build'] = self.build_number # Doesn't work for bdist_wininst impl_tag, abi_tag, plat_tag = self.get_tag() @@ -286,7 +313,7 @@ class bdist_wheel(Command): def license_file(self): """Return license filename from a license-file key in setup.cfg, or None.""" metadata = self.distribution.get_option_dict('metadata') - if not 'license_file' in metadata: + if 'license_file' not in metadata: return None return metadata['license_file'][1] @@ -315,7 +342,7 @@ class bdist_wheel(Command): # our .ini parser folds - to _ in key names: for key, title in (('provides_extra', 'Provides-Extra'), ('requires_dist', 'Requires-Dist')): - if not key in metadata: + if key not in metadata: continue field = metadata[key] for line in field[1].splitlines(): @@ -327,7 +354,9 @@ class bdist_wheel(Command): def add_requirements(self, metadata_path): """Add additional requirements from setup.cfg to file metadata_path""" additional = list(self.setupcfg_requirements()) - if not additional: return + if not additional: + return + pkg_info = read_pkg_info(metadata_path) if 'Provides-Extra' in pkg_info or 'Requires-Dist' in pkg_info: warnings.warn('setup.cfg requirements overwrite values from setup.py') @@ -375,10 +404,9 @@ class bdist_wheel(Command): # ignore common egg metadata that is useless to wheel shutil.copytree(egginfo_path, distinfo_path, - ignore=lambda x, y: set(('PKG-INFO', - 'requires.txt', - 'SOURCES.txt', - 'not-zip-safe',))) + ignore=lambda x, y: {'PKG-INFO', 'requires.txt', 'SOURCES.txt', + 'not-zip-safe'} + ) # delete dependency_links if it is only whitespace dependency_links_path = os.path.join(distinfo_path, 'dependency_links.txt') @@ -405,7 +433,8 @@ class bdist_wheel(Command): description_filename) with open(description_path, "wb") as description_file: description_file.write(description_text.encode('utf-8')) - pymeta['extensions']['python.details']['document_names']['description'] = description_filename + pymeta['extensions']['python.details']['document_names']['description'] = \ + description_filename # XXX heuristically copy any LICENSE/LICENSE.txt? 
license = self.license_file() @@ -420,7 +449,7 @@ adios(egginfo_path) def write_record(self, bdist_dir, distinfo_dir): - from wheel.util import urlsafe_b64encode + from .util import urlsafe_b64encode record_path = os.path.join(distinfo_dir, 'RECORD') record_relpath = os.path.relpath(record_path, bdist_dir) diff --git a/lib/python3.4/site-packages/wheel/egg2wheel.py b/lib/python3.4/site-packages/wheel/egg2wheel.py index bf919c4..3799909 100644 --- a/lib/python3.4/site-packages/wheel/egg2wheel.py +++ b/lib/python3.4/site-packages/wheel/egg2wheel.py @@ -1,19 +1,22 @@ #!/usr/bin/env python +import distutils.dist import os.path import re +import shutil import sys import tempfile import zipfile -import wheel.bdist_wheel -import shutil -import distutils.dist -from distutils.archive_util import make_archive from argparse import ArgumentParser +from distutils.archive_util import make_archive from glob import iglob +import wheel.bdist_wheel +from wheel.wininst2wheel import _bdist_wheel_tag + egg_info_re = re.compile(r'''(?P<name>.+?)-(?P<ver>.+?) (-(?P<pyver>.+?))?(-(?P<arch>.+?))?.egg''', re.VERBOSE) + def egg2wheel(egg_path, dest_dir): egg_info = egg_info_re.match(os.path.basename(egg_path)).groupdict() dir = tempfile.mkdtemp(suffix="_e2w") @@ -43,8 +46,20 @@ def egg2wheel(egg_path, dest_dir): abi, arch )) - bw = wheel.bdist_wheel.bdist_wheel(distutils.dist.Distribution()) - bw.root_is_purelib = egg_info['arch'] is None + root_is_purelib = egg_info['arch'] is None + if root_is_purelib: + bw = wheel.bdist_wheel.bdist_wheel(distutils.dist.Distribution()) + else: + bw = _bdist_wheel_tag(distutils.dist.Distribution()) + + bw.root_is_pure = root_is_purelib + bw.python_tag = pyver + bw.plat_name_supplied = True + bw.plat_name = egg_info['arch'] or 'any' + if not root_is_purelib: + bw.full_tag_supplied = True + bw.full_tag = (pyver, abi, arch) + dist_info_dir = os.path.join(dir, '%s.dist-info' % dist_info) bw.egg2dist(os.path.join(dir, 'EGG-INFO'), dist_info_dir) @@ -54,11 +69,12 @@ def egg2wheel(egg_path, dest_dir): os.rename(filename, filename[:-3] + 'whl') shutil.rmtree(dir) + def main(): parser = ArgumentParser() parser.add_argument('eggs', nargs='*', help="Eggs to convert") parser.add_argument('--dest-dir', '-d', default=os.path.curdir, - help="Directory to store wheels (default %(default)s)") + help="Directory to store wheels (default %(default)s)") parser.add_argument('--verbose', '-v', action='store_true') args = parser.parse_args() for pat in args.eggs: @@ -69,5 +85,6 @@ def main(): if args.verbose: sys.stdout.write("OK\n") + if __name__ == "__main__": main() diff --git a/lib/python3.4/site-packages/wheel/eggnames.txt b/lib/python3.4/site-packages/wheel/eggnames.txt deleted file mode 100644 index d422120..0000000 --- a/lib/python3.4/site-packages/wheel/eggnames.txt +++ /dev/null @@ -1,87 +0,0 @@ -vcard-0.7.8-py2.7.egg -qtalchemy-0.7.1-py2.7.egg -AMQPDeliver-0.1-py2.7.egg -infi.registry-0.1.1-py2.7.egg -infi.instruct-0.5.5-py2.7.egg -infi.devicemanager-0.1.2-py2.7.egg -TracTixSummary-1.0-py2.7.egg -ToscaWidgets-0.9.12-py2.7.egg -archipel_agent_iphone_notification-0.5.0beta-py2.7.egg -archipel_agent_action_scheduler-0.5.0beta-py2.7.egg -ao.social-1.0.2-py2.7.egg -apgl-0.7-py2.7.egg -satchmo_payment_payworld-0.1.1-py2.7.egg -snmpsim-0.1.3-py2.7.egg -sshim-0.2-py2.7.egg -shove-0.3.4-py2.7.egg -simpleavro-0.3.0-py2.7.egg -wkhtmltopdf-0.2-py2.7.egg -wokkel-0.7.0-py2.7.egg -jmbo_social-0.0.6-py2.7.egg -jmbo_post-0.0.6-py2.7.egg -jcrack-0.0.2-py2.7.egg -riak-1.4.0-py2.7.egg
-restclient-0.10.2-py2.7.egg -Sutekh-0.8.1-py2.7.egg -trayify-0.0.1-py2.7.egg -tweepy-1.9-py2.7.egg -topzootools-0.2.1-py2.7.egg -haystack-0.16-py2.7.egg -zope.interface-4.0.1-py2.7-win32.egg -neuroshare-0.8.5-py2.7-macosx-10.7-intel.egg -ndg_httpsclient-0.2.0-py2.7.egg -libtele-0.3-py2.7.egg -litex.cxpool-1.0.2-py2.7.egg -obspy.iris-0.5.1-py2.7.egg -obspy.mseed-0.6.1-py2.7-win32.egg -obspy.core-0.6.2-py2.7.egg -CorePost-0.0.3-py2.7.egg -fnordstalk-0.0.3-py2.7.egg -Persistence-2.13.2-py2.7-win32.egg -Pydap-3.1.RC1-py2.7.egg -PyExecJS-1.0.4-py2.7.egg -Wally-0.7.2-py2.7.egg -ExtensionClass-4.0a1-py2.7-win32.egg -Feedjack-0.9.16-py2.7.egg -Mars24-0.3.9-py2.7.egg -HalWeb-0.6.0-py2.7.egg -DARE-0.7.140-py2.7.egg -macholib-1.3-py2.7.egg -marrow.wsgi.egress.compression-1.1-py2.7.egg -mcs-0.3.7-py2.7.egg -Kook-0.6.0-py2.7.egg -er-0.1-py2.7.egg -evasion_director-1.1.4-py2.7.egg -djquery-0.1a-py2.7.egg -django_factory-0.7-py2.7.egg -django_gizmo-0.0.3-py2.7.egg -django_category-0.1-py2.7.egg -dbwrap-0.3.2-py2.7.egg -django_supergeneric-1.0-py2.7.egg -django_dynamo-0.25-py2.7.egg -django_acollabauth-0.1-py2.7.egg -django_qrlink-0.1.0-py2.7.egg -django_addons-0.6.6-py2.7.egg -cover_grabber-1.1.2-py2.7.egg -chem-1.1-py2.7.egg -crud-0.1-py2.7.egg -bongo-0.1-py2.7.egg -bytecodehacks-April2000-py2.7.egg -greenlet-0.3.4-py2.7-win32.egg -ginvoke-0.3.1-py2.7.egg -pyobjc_framework_ScriptingBridge-2.3-py2.7.egg -pecan-0.2.0a-py2.7.egg -pyress-0.2.0-py2.7.egg -pyobjc_framework_PubSub-2.3-py2.7.egg -pyobjc_framework_ExceptionHandling-2.3-py2.7.egg -pywps-trunk-py2.7.egg -pyobjc_framework_CFNetwork-2.3-py2.7-macosx-10.6-fat.egg -py.saunter-0.40-py2.7.egg -pyfnordmetric-0.0.1-py2.7.egg -pyws-1.1.1-py2.7.egg -prestapyt-0.4.0-py2.7.egg -passlib-1.5.3-py2.7.egg -pyga-2.1-py2.7.egg -pygithub3-0.3-py2.7.egg -pyobjc_framework_OpenDirectory-2.3-py2.7.egg -yaposib-0.2.75-py2.7-linux-x86_64.egg diff --git a/lib/python3.4/site-packages/wheel/install.py b/lib/python3.4/site-packages/wheel/install.py index 3af6d0c..5a88a75 100644 --- a/lib/python3.4/site-packages/wheel/install.py +++ b/lib/python3.4/site-packages/wheel/install.py @@ -1,33 +1,31 @@ """ -Operations on existing wheel files, including basic installation. +Operations on existing wheel files, including basic installation. """ # XXX see patched pip to install -import sys -import warnings +import csv +import hashlib import os.path import re -import zipfile -import hashlib -import csv - import shutil +import sys +import warnings +import zipfile + +from . 
import signatures +from .decorator import reify +from .paths import get_install_paths +from .pep425tags import get_supported +from .pkginfo import read_pkg_info_bytes +from .util import ( + urlsafe_b64encode, from_json, urlsafe_b64decode, native, binary, HashingFile, + open_for_csv) try: _big_number = sys.maxsize except NameError: _big_number = sys.maxint -from wheel.decorator import reify -from wheel.util import (urlsafe_b64encode, from_json, urlsafe_b64decode, - native, binary, HashingFile) -from wheel import signatures -from wheel.pkginfo import read_pkg_info_bytes -from wheel.util import open_for_csv - -from .pep425tags import get_supported -from .paths import get_install_paths - # The next major version after this version of the 'wheel' tool: VERSION_TOO_HIGH = (1, 0) @@ -39,6 +37,7 @@ WHEEL_INFO_RE = re.compile( \.whl|\.dist-info)$""", re.VERBOSE).match + def parse_version(version): """Use parse_version from pkg_resources or distutils as available.""" global parse_version @@ -48,6 +47,7 @@ def parse_version(version): from distutils.version import LooseVersion as parse_version return parse_version(version) + class BadWheelFile(ValueError): pass @@ -55,7 +55,7 @@ class BadWheelFile(ValueError): class WheelFile(object): """Parse wheel-specific attributes from a wheel (.whl) file and offer basic installation and verification support. - + WheelFile can be used to simply parse a wheel filename by avoiding the methods that require the actual file contents.""" @@ -201,11 +201,11 @@ class WheelFile(object): raise TypeError("{0}.context != {1}.context".format(self, other)) sc = self.rank oc = other.rank - if sc != None and oc != None and sc != oc: + if sc is not None and oc is not None and sc != oc: # Smaller compatibility ranks are "better" than larger ones, # so we have to reverse the sense of the comparison here! return sc > oc - elif sc == None and oc != None: + elif sc is None and oc is not None: return False return self.filename < other.filename @@ -247,10 +247,10 @@ class WheelFile(object): """ Consult distutils to get the install paths for our dist. A dict with ('purelib', 'platlib', 'headers', 'scripts', 'data'). - + We use the name from our filename as the dist name, which means headers could be installed in the wrong place if the filesystem-escaped name - is different than the Name. Who cares? + is different than the Name. Who cares? """ name = self.parsed_filename.group('name') return get_install_paths(name) @@ -323,7 +323,9 @@ class WheelFile(object): k = info.filename key, target, filename, dest = v if os.path.exists(dest): - raise ValueError("Wheel file {0} would overwrite {1}. Use force if this is intended".format(k, dest)) + raise ValueError( + "Wheel file {0} would overwrite {1}. Use force if this is intended".format( + k, dest)) # Get the name of our executable, for use when replacing script # wrapper hashbang lines. @@ -342,13 +344,24 @@ class WheelFile(object): ddir = os.path.dirname(dest) if not os.path.isdir(ddir): os.makedirs(ddir) - destination = HashingFile(open(dest, 'wb')) - if key == 'scripts': - hashbang = source.readline() - if hashbang.startswith(b'#!python'): - hashbang = b'#!' + exename + binary(os.linesep) - destination.write(hashbang) - shutil.copyfileobj(source, destination) + + temp_filename = dest + '.part' + try: + with HashingFile(temp_filename, 'wb') as destination: + if key == 'scripts': + hashbang = source.readline() + if hashbang.startswith(b'#!python'): + hashbang = b'#!' 
+ exename + binary(os.linesep) + destination.write(hashbang) + + shutil.copyfileobj(source, destination) + except: + if os.path.exists(temp_filename): + os.unlink(temp_filename) + + raise + + os.rename(temp_filename, dest) reldest = os.path.relpath(dest, root) reldest.replace(os.sep, '/') record_data.append((reldest, destination.digest(), destination.length)) @@ -360,15 +373,16 @@ class WheelFile(object): os.chmod(dest, info.external_attr >> 16) record_name = os.path.join(root, self.record_name) - writer = csv.writer(open_for_csv(record_name, 'w+')) - for reldest, digest, length in sorted(record_data): - writer.writerow((reldest, digest, length)) - writer.writerow((self.record_name, '', '')) + with open_for_csv(record_name, 'w+') as record_file: + writer = csv.writer(record_file) + for reldest, digest, length in sorted(record_data): + writer.writerow((reldest, digest, length)) + writer.writerow((self.record_name, '', '')) def verify(self, zipfile=None): - """Configure the VerifyingZipFile `zipfile` by verifying its signature + """Configure the VerifyingZipFile `zipfile` by verifying its signature and setting expected hashes for every hash in RECORD. - Caller must complete the verification process by completely reading + Caller must complete the verification process by completely reading every file in the archive (e.g. with extractall).""" sig = None if zipfile is None: @@ -412,7 +426,7 @@ class WheelFile(object): class VerifyingZipFile(zipfile.ZipFile): """ZipFile that can assert that each of its extracted contents matches - an expected sha256 hash. Note that each file must be completly read in + an expected sha256 hash. Note that each file must be completly read in order for its hash to be checked.""" def __init__(self, file, mode="r", @@ -439,8 +453,8 @@ class VerifyingZipFile(zipfile.ZipFile): name = name_or_info.filename else: name = name_or_info - if (name in self._expected_hashes - and self._expected_hashes[name] != None): + + if name in self._expected_hashes and self._expected_hashes[name] is not None: expected_hash = self._expected_hashes[name] try: _update_crc_orig = ef._update_crc diff --git a/lib/python3.4/site-packages/wheel/metadata.py b/lib/python3.4/site-packages/wheel/metadata.py index b3cc65c..29638e7 100644 --- a/lib/python3.4/site-packages/wheel/metadata.py +++ b/lib/python3.4/site-packages/wheel/metadata.py @@ -2,37 +2,33 @@ Tools for converting old- to new-style metadata. """ -from collections import namedtuple +import email.parser +import os.path +import re +import textwrap +from collections import namedtuple, OrderedDict + +import pkg_resources + +from . 
import __version__ as wheel_version from .pkginfo import read_pkg_info from .util import OrderedDefaultDict -try: - from collections import OrderedDict -except ImportError: - OrderedDict = dict - -import re -import os.path -import textwrap -import pkg_resources -import email.parser -import wheel METADATA_VERSION = "2.0" -PLURAL_FIELDS = { "classifier" : "classifiers", - "provides_dist" : "provides", - "provides_extra" : "extras" } +PLURAL_FIELDS = {"classifier": "classifiers", + "provides_dist": "provides", + "provides_extra": "extras"} SKIP_FIELDS = set() -CONTACT_FIELDS = (({"email":"author_email", "name": "author"}, - "author"), - ({"email":"maintainer_email", "name": "maintainer"}, - "maintainer")) +CONTACT_FIELDS = (({"email": "author_email", "name": "author"}, + "author"), + ({"email": "maintainer_email", "name": "maintainer"}, + "maintainer")) # commonly filled out as "UNKNOWN" by distutils: -UNKNOWN_FIELDS = set(("author", "author_email", "platform", "home_page", - "license")) +UNKNOWN_FIELDS = {"author", "author_email", "platform", "home_page", "license"} # Wheel itself is probably the only program that uses non-extras markers # in METADATA/PKG-INFO. Support its syntax with the extra at the end only. @@ -41,13 +37,14 @@ KEYWORDS_RE = re.compile("[\0-,]+") MayRequiresKey = namedtuple('MayRequiresKey', ('condition', 'extra')) + def unique(iterable): """ Yield unique values in iterable, preserving order. """ seen = set() for value in iterable: - if not value in seen: + if value not in seen: seen.add(value) yield value @@ -74,6 +71,7 @@ def handle_requires(metadata, pkg_info, key): if may_requires: metadata['run_requires'] = [] + def sort_key(item): # Both condition and extra could be None, which can't be compared # against strings in Python 3. 
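The requirement strings handled here end up in PEP 345 Requires-Dist form, as seen in the wheel-0.30.0 metadata earlier in this patch (e.g. pytest (>=3.0.0)). A standalone approximation of what convert_requirements() and requires_to_requires_dist() in this module produce (the helper name to_requires_dist is invented for illustration) looks roughly like this:

import pkg_resources

def to_requires_dist(req_string):
    # Rough sketch: 'pytest>=3.0.0' -> 'pytest (>=3.0.0)', 'keyrings.alt' -> 'keyrings.alt'
    req = next(iter(pkg_resources.parse_requirements(req_string)))
    extras = "[%s]" % ','.join(sorted(req.extras)) if req.extras else ""
    spec = ','.join(sorted(op + ver for op, ver in req.specs))
    return req.project_name + extras + (" (%s)" % spec if spec else "")

print(to_requires_dist("pytest>=3.0.0"))   # pytest (>=3.0.0)
print(to_requires_dist("keyrings.alt"))    # keyrings.alt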
@@ -81,6 +79,7 @@ def handle_requires(metadata, pkg_info, key): if key.condition is None: return '' return key.condition + for key, value in sorted(may_requires.items(), key=sort_key): may_requirement = OrderedDict((('requires', value),)) if key.extra: @@ -89,7 +88,7 @@ def handle_requires(metadata, pkg_info, key): may_requirement['environment'] = key.condition metadata['run_requires'].append(may_requirement) - if not 'extras' in metadata: + if 'extras' not in metadata: metadata['extras'] = [] metadata['extras'].extend([key.extra for key in may_requires.keys() if key.extra]) @@ -105,13 +104,15 @@ def pkginfo_to_dict(path, distribution=None): distribution: optional distutils Distribution() """ - metadata = OrderedDefaultDict(lambda: OrderedDefaultDict(lambda: OrderedDefaultDict(OrderedDict))) - metadata["generator"] = "bdist_wheel (" + wheel.__version__ + ")" + metadata = OrderedDefaultDict( + lambda: OrderedDefaultDict(lambda: OrderedDefaultDict(OrderedDict))) + metadata["generator"] = "bdist_wheel (" + wheel_version + ")" try: unicode pkg_info = read_pkg_info(path) except NameError: - pkg_info = email.parser.Parser().parsestr(open(path, 'rb').read().decode('utf-8')) + with open(path, 'rb') as pkg_info_file: + pkg_info = email.parser.Parser().parsestr(pkg_info_file.read().decode('utf-8')) description = None if pkg_info['Summary']: @@ -149,12 +150,12 @@ def pkginfo_to_dict(path, distribution=None): handle_requires(metadata, pkg_info, key) elif low_key == 'provides_extra': - if not 'extras' in metadata: + if 'extras' not in metadata: metadata['extras'] = [] metadata['extras'].extend(pkg_info.get_all(key)) elif low_key == 'home_page': - metadata['extensions']['python.details']['project_urls'] = {'Home':pkg_info[key]} + metadata['extensions']['python.details']['project_urls'] = {'Home': pkg_info[key]} elif low_key == 'keywords': metadata['keywords'] = KEYWORDS_RE.split(pkg_info[key]) @@ -174,7 +175,7 @@ def pkginfo_to_dict(path, distribution=None): requirements = getattr(distribution, attr) if isinstance(requirements, list): new_requirements = sorted(convert_requirements(requirements)) - metadata[requires] = [{'requires':new_requirements}] + metadata[requires] = [{'requires': new_requirements}] except AttributeError: pass @@ -216,6 +217,7 @@ def pkginfo_to_dict(path, distribution=None): return metadata + def requires_to_requires_dist(requirement): """Compose the version predicates for requirement in PEP 345 fashion.""" requires_dist = [] @@ -223,7 +225,8 @@ def requires_to_requires_dist(requirement): requires_dist.append(op + ver) if not requires_dist: return '' - return " (%s)" % ','.join(requires_dist) + return " (%s)" % ','.join(sorted(requires_dist)) + def convert_requirements(requirements): """Yield Requires-Dist: strings for parsed requirements strings.""" @@ -235,6 +238,31 @@ def convert_requirements(requirements): extras = "[%s]" % extras yield (parsed_requirement.project_name + extras + spec) + +def generate_requirements(extras_require): + """ + Convert requirements from a setup()-style dictionary to ('Requires-Dist', 'requirement') + and ('Provides-Extra', 'extra') tuples. + + extras_require is a dictionary of {extra: [requirements]} as passed to setup(), + using the empty extra {'': [requirements]} to hold install_requires. 
+ """ + for extra, depends in extras_require.items(): + condition = '' + if extra and ':' in extra: # setuptools extra:condition syntax + extra, condition = extra.split(':', 1) + extra = pkg_resources.safe_extra(extra) + if extra: + yield ('Provides-Extra', extra) + if condition: + condition += " and " + condition += "extra == '%s'" % extra + if condition: + condition = '; ' + condition + for new_req in convert_requirements(depends): + yield ('Requires-Dist', new_req + condition) + + def pkginfo_to_metadata(egg_info_path, pkginfo_path): """ Convert .egg-info directory with PKG-INFO to the Metadata 1.3 aka @@ -244,21 +272,12 @@ def pkginfo_to_metadata(egg_info_path, pkginfo_path): pkg_info.replace_header('Metadata-Version', '2.0') requires_path = os.path.join(egg_info_path, 'requires.txt') if os.path.exists(requires_path): - requires = open(requires_path).read() + with open(requires_path) as requires_file: + requires = requires_file.read() for extra, reqs in sorted(pkg_resources.split_sections(requires), key=lambda x: x[0] or ''): - condition = '' - if extra and ':' in extra: # setuptools extra:condition syntax - extra, condition = extra.split(':', 1) - if extra: - pkg_info['Provides-Extra'] = extra - if condition: - condition += " and " - condition += 'extra == %s' % repr(extra) - if condition: - condition = '; ' + condition - for new_req in sorted(convert_requirements(reqs)): - pkg_info['Requires-Dist'] = new_req + condition + for item in generate_requirements({extra: reqs}): + pkg_info[item[0]] = item[1] description = pkg_info['Description'] if description: @@ -277,8 +296,8 @@ def pkginfo_unicode(pkg_info, field): return str(text) for item in pkg_info.raw_items(): if item[0].lower() == field: - text = item[1].encode('ascii', 'surrogateescape')\ - .decode('utf-8') + text = item[1].encode('ascii', 'surrogateescape') \ + .decode('utf-8') break return text @@ -298,20 +317,22 @@ def dedent_description(pkg_info): description_lines = description.splitlines() description_dedent = '\n'.join( - # if the first line of long_description is blank, - # the first line here will be indented. - (description_lines[0].lstrip(), - textwrap.dedent('\n'.join(description_lines[1:])), - '\n')) + # if the first line of long_description is blank, + # the first line here will be indented. + (description_lines[0].lstrip(), + textwrap.dedent('\n'.join(description_lines[1:])), + '\n')) if surrogates: - description_dedent = description_dedent\ - .encode("utf8")\ - .decode("ascii", "surrogateescape") + description_dedent = description_dedent \ + .encode("utf8") \ + .decode("ascii", "surrogateescape") return description_dedent if __name__ == "__main__": - import sys, pprint + import sys + import pprint + pprint.pprint(pkginfo_to_dict(sys.argv[1])) diff --git a/lib/python3.4/site-packages/wheel/paths.py b/lib/python3.4/site-packages/wheel/paths.py index fe3dfd6..afb3cae 100644 --- a/lib/python3.4/site-packages/wheel/paths.py +++ b/lib/python3.4/site-packages/wheel/paths.py @@ -4,22 +4,24 @@ Installation paths. Map the .data/ subdirectory names to install paths. 
""" +import distutils.command.install as install +import distutils.dist as dist import os.path import sys -import distutils.dist as dist -import distutils.command.install as install + def get_install_command(name): # late binding due to potential monkeypatching - d = dist.Distribution({'name':name}) + d = dist.Distribution({'name': name}) i = install.install(d) i.finalize_options() return i + def get_install_paths(name): """ Return the (distutils) install paths for the named dist. - + A dict with ('purelib', 'platlib', 'headers', 'scripts', 'data') keys. """ paths = {} diff --git a/lib/python3.4/site-packages/wheel/pep425tags.py b/lib/python3.4/site-packages/wheel/pep425tags.py index 106c879..29afdc3 100644 --- a/lib/python3.4/site-packages/wheel/pep425tags.py +++ b/lib/python3.4/site-packages/wheel/pep425tags.py @@ -1,14 +1,10 @@ """Generate and work with PEP 425 Compatibility Tags.""" -import sys -import warnings - -try: - import sysconfig -except ImportError: # pragma nocover - # Python < 2.7 - import distutils.sysconfig as sysconfig import distutils.util +import platform +import sys +import sysconfig +import warnings def get_config_var(var): @@ -21,15 +17,17 @@ def get_config_var(var): def get_abbr_impl(): """Return abbreviated implementation name.""" - if hasattr(sys, 'pypy_version_info'): - pyimpl = 'pp' - elif sys.platform.startswith('java'): - pyimpl = 'jy' - elif sys.platform == 'cli': - pyimpl = 'ip' - else: - pyimpl = 'cp' - return pyimpl + impl = platform.python_implementation() + if impl == 'PyPy': + return 'pp' + elif impl == 'Jython': + return 'jy' + elif impl == 'IronPython': + return 'ip' + elif impl == 'CPython': + return 'cp' + + raise LookupError('Unknown Python implementation: ' + impl) def get_impl_ver(): @@ -100,18 +98,22 @@ def get_abi_tag(): def get_platform(): """Return our platform name 'win32', 'linux_x86_64'""" # XXX remove distutils dependency - return distutils.util.get_platform().replace('.', '_').replace('-', '_') + result = distutils.util.get_platform().replace('.', '_').replace('-', '_') + if result == "linux_x86_64" and sys.maxsize == 2147483647: + # pip pull request #3497 + result = "linux_i686" + return result def get_supported(versions=None, supplied_platform=None): """Return a list of supported tags for each version specified in `versions`. - :param versions: a list of string versions, of the form ["33", "32"], + :param versions: a list of string versions, of the form ["33", "32"], or None. The first version will be assumed to support our ABI. """ supported = [] - + # Versions must be given with respect to the preference if versions is None: versions = [] @@ -120,15 +122,15 @@ def get_supported(versions=None, supplied_platform=None): # Support all previous minor Python versions. 
for minor in range(version_info[-1], -1, -1): versions.append(''.join(map(str, major + (minor,)))) - + impl = get_abbr_impl() - + abis = [] abi = get_abi_tag() if abi: abis[0:0] = [abi] - + abi3s = set() import imp for suffix in imp.get_suffixes(): @@ -143,20 +145,29 @@ def get_supported(versions=None, supplied_platform=None): if supplied_platform: platforms.append(supplied_platform) platforms.append(get_platform()) - + # Current version, current API (built specifically for our Python): for abi in abis: for arch in platforms: supported.append(('%s%s' % (impl, versions[0]), abi, arch)) - + + # abi3 modules compatible with older version of Python + for version in versions[1:]: + # abi3 was introduced in Python 3.2 + if version in ('31', '30'): + break + for abi in abi3s: # empty set if not Python 3 + for arch in platforms: + supported.append(("%s%s" % (impl, version), abi, arch)) + # No abi / arch, but requires our implementation: for i, version in enumerate(versions): supported.append(('%s%s' % (impl, version), 'none', 'any')) if i == 0: - # Tagged specifically as being cross-version compatible + # Tagged specifically as being cross-version compatible # (with just the major version specified) - supported.append(('%s%s' % (impl, versions[0][0]), 'none', 'any')) - + supported.append(('%s%s' % (impl, versions[0][0]), 'none', 'any')) + # Major Python version + platform; e.g. binaries not using the Python API supported.append(('py%s' % (versions[0][0]), 'none', arch)) @@ -165,5 +176,5 @@ def get_supported(versions=None, supplied_platform=None): supported.append(('py%s' % (version,), 'none', 'any')) if i == 0: supported.append(('py%s' % (version[0]), 'none', 'any')) - + return supported diff --git a/lib/python3.4/site-packages/wheel/pkginfo.py b/lib/python3.4/site-packages/wheel/pkginfo.py index 8a4aca3..115be45 100644 --- a/lib/python3.4/site-packages/wheel/pkginfo.py +++ b/lib/python3.4/site-packages/wheel/pkginfo.py @@ -11,7 +11,7 @@ except NameError: if not _PY3: from email.generator import Generator - + def read_pkg_info_bytes(bytestr): return Parser().parsestr(bytestr) @@ -22,23 +22,22 @@ if not _PY3: def write_pkg_info(path, message): with open(path, 'w') as metadata: - Generator(metadata, maxheaderlen=0).flatten(message) - + Generator(metadata, mangle_from_=False, maxheaderlen=0).flatten(message) else: from email.generator import BytesGenerator + def read_pkg_info_bytes(bytestr): headers = bytestr.decode(encoding="ascii", errors="surrogateescape") message = Parser().parsestr(headers) return message def read_pkg_info(path): - with open(path, "r", - encoding="ascii", + with open(path, "r", + encoding="ascii", errors="surrogateescape") as headers: message = Parser().parse(headers) return message def write_pkg_info(path, message): with open(path, "wb") as out: - BytesGenerator(out, maxheaderlen=0).flatten(message) - + BytesGenerator(out, mangle_from_=False, maxheaderlen=0).flatten(message) diff --git a/lib/python3.4/site-packages/wheel/signatures/__init__.py b/lib/python3.4/site-packages/wheel/signatures/__init__.py index 3f21b50..e7a5331 100644 --- a/lib/python3.4/site-packages/wheel/signatures/__init__.py +++ b/lib/python3.4/site-packages/wheel/signatures/__init__.py @@ -2,63 +2,67 @@ Create and verify jws-js format Ed25519 signatures. 
""" -__all__ = [ 'sign', 'verify' ] - import json from ..util import urlsafe_b64decode, urlsafe_b64encode, native, binary +__all__ = ['sign', 'verify'] + ed25519ll = None ALG = "Ed25519" + def get_ed25519ll(): """Lazy import-and-test of ed25519 module""" global ed25519ll - + if not ed25519ll: try: - import ed25519ll # fast (thousands / s) - except (ImportError, OSError): # pragma nocover - from . import ed25519py as ed25519ll # pure Python (hundreds / s) + import ed25519ll # fast (thousands / s) + except (ImportError, OSError): # pragma nocover + from . import ed25519py as ed25519ll # pure Python (hundreds / s) test() - + return ed25519ll + def sign(payload, keypair): - """Return a JWS-JS format signature given a JSON-serializable payload and + """Return a JWS-JS format signature given a JSON-serializable payload and an Ed25519 keypair.""" get_ed25519ll() # header = { "alg": ALG, "jwk": { - "kty": ALG, # alg -> kty in jwk-08. + "kty": ALG, # alg -> kty in jwk-08. "vk": native(urlsafe_b64encode(keypair.vk)) } } - + encoded_header = urlsafe_b64encode(binary(json.dumps(header, sort_keys=True))) encoded_payload = urlsafe_b64encode(binary(json.dumps(payload, sort_keys=True))) secured_input = b".".join((encoded_header, encoded_payload)) sig_msg = ed25519ll.crypto_sign(secured_input, keypair.sk) signature = sig_msg[:ed25519ll.SIGNATUREBYTES] encoded_signature = urlsafe_b64encode(signature) - - return {"recipients": - [{"header":native(encoded_header), - "signature":native(encoded_signature)}], + + return {"recipients": + [{"header": native(encoded_header), + "signature": native(encoded_signature)}], "payload": native(encoded_payload)} + def assertTrue(condition, message=""): if not condition: raise ValueError(message) - + + def verify(jwsjs): """Return (decoded headers, payload) if all signatures in jwsjs are consistent, else raise ValueError. 
- + Caller must decide whether the keys are actually trusted.""" - get_ed25519ll() + get_ed25519ll() # XXX forbid duplicate keys in JSON input using object_pairs_hook (2.7+) recipients = jwsjs["recipients"] encoded_payload = binary(jwsjs["payload"]) @@ -68,12 +72,12 @@ def verify(jwsjs): h = binary(recipient["header"]) s = binary(recipient["signature"]) header = json.loads(native(urlsafe_b64decode(h))) - assertTrue(header["alg"] == ALG, - "Unexpected algorithm {0}".format(header["alg"])) - if "alg" in header["jwk"] and not "kty" in header["jwk"]: - header["jwk"]["kty"] = header["jwk"]["alg"] # b/w for JWK < -08 - assertTrue(header["jwk"]["kty"] == ALG, # true for Ed25519 - "Unexpected key type {0}".format(header["jwk"]["kty"])) + assertTrue(header["alg"] == ALG, + "Unexpected algorithm {0}".format(header["alg"])) + if "alg" in header["jwk"] and "kty" not in header["jwk"]: + header["jwk"]["kty"] = header["jwk"]["alg"] # b/w for JWK < -08 + assertTrue(header["jwk"]["kty"] == ALG, # true for Ed25519 + "Unexpected key type {0}".format(header["jwk"]["kty"])) vk = urlsafe_b64decode(binary(header["jwk"]["vk"])) secured_input = b".".join((h, encoded_payload)) sig = urlsafe_b64decode(s) @@ -91,6 +95,7 @@ def verify(jwsjs): return headers, payload + def test(): kp = ed25519ll.crypto_sign_keypair() payload = {'test': 'onstartup'} @@ -101,6 +106,5 @@ def test(): verify(jwsjs) except ValueError: pass - else: # pragma no cover + else: # pragma no cover raise RuntimeError("No error from bad wheel.signatures payload.") - diff --git a/lib/python3.4/site-packages/wheel/signatures/djbec.py b/lib/python3.4/site-packages/wheel/signatures/djbec.py index 56efe44..87f72d4 100644 --- a/lib/python3.4/site-packages/wheel/signatures/djbec.py +++ b/lib/python3.4/site-packages/wheel/signatures/djbec.py @@ -6,61 +6,80 @@ # http://www.hyperelliptic.org/EFD/g1p/auto-twisted-extended-1.html # Specifically add-2008-hwcd-4 and dbl-2008-hwcd -try: # pragma nocover +import hashlib +import random + +try: # pragma nocover unicode PY3 = False + def asbytes(b): """Convert array of integers to byte string""" return ''.join(chr(x) for x in b) + def joinbytes(b): """Convert array of bytes to byte string""" return ''.join(b) + def bit(h, i): """Return i'th bit of bytestring h""" - return (ord(h[i//8]) >> (i%8)) & 1 - -except NameError: # pragma nocover + return (ord(h[i // 8]) >> (i % 8)) & 1 +except NameError: # pragma nocover PY3 = True asbytes = bytes joinbytes = bytes - def bit(h, i): - return (h[i//8] >> (i%8)) & 1 -import hashlib + def bit(h, i): + return (h[i // 8] >> (i % 8)) & 1 b = 256 -q = 2**255 - 19 -l = 2**252 + 27742317777372353535851937790883648493 +q = 2 ** 255 - 19 +l = 2 ** 252 + 27742317777372353535851937790883648493 + def H(m): return hashlib.sha512(m).digest() + def expmod(b, e, m): - if e == 0: return 1 + if e == 0: + return 1 + t = expmod(b, e // 2, m) ** 2 % m - if e & 1: t = (t * b) % m + if e & 1: + t = (t * b) % m + return t + # Can probably get some extra speedup here by replacing this with # an extended-euclidean, but performance seems OK without that def inv(x): - return expmod(x, q-2, q) + return expmod(x, q - 2, q) + d = -121665 * inv(121666) -I = expmod(2,(q-1)//4,q) +I = expmod(2, (q - 1) // 4, q) + def xrecover(y): - xx = (y*y-1) * inv(d*y*y+1) - x = expmod(xx,(q+3)//8,q) - if (x*x - xx) % q != 0: x = (x*I) % q - if x % 2 != 0: x = q-x + xx = (y * y - 1) * inv(d * y * y + 1) + x = expmod(xx, (q + 3) // 8, q) + if (x * x - xx) % q != 0: + x = (x * I) % q + + if x % 2 != 0: + x = q - x + return x + By 
= 4 * inv(5) Bx = xrecover(By) -B = [Bx % q,By % q] +B = [Bx % q, By % q] -#def edwards(P,Q): + +# def edwards(P,Q): # x1 = P[0] # y1 = P[1] # x2 = Q[0] @@ -69,7 +88,7 @@ B = [Bx % q,By % q] # y3 = (y1*y2+x1*x2) * inv(1-d*x1*x2*y1*y2) # return (x3 % q,y3 % q) -#def scalarmult(P,e): +# def scalarmult(P,e): # if e == 0: return [0,1] # Q = scalarmult(P,e/2) # Q = edwards(Q,Q) @@ -82,113 +101,137 @@ B = [Bx % q,By % q] def xpt_add(pt1, pt2): (X1, Y1, Z1, T1) = pt1 (X2, Y2, Z2, T2) = pt2 - A = ((Y1-X1)*(Y2+X2)) % q - B = ((Y1+X1)*(Y2-X2)) % q - C = (Z1*2*T2) % q - D = (T1*2*Z2) % q - E = (D+C) % q - F = (B-A) % q - G = (B+A) % q - H = (D-C) % q - X3 = (E*F) % q - Y3 = (G*H) % q - Z3 = (F*G) % q - T3 = (E*H) % q + A = ((Y1 - X1) * (Y2 + X2)) % q + B = ((Y1 + X1) * (Y2 - X2)) % q + C = (Z1 * 2 * T2) % q + D = (T1 * 2 * Z2) % q + E = (D + C) % q + F = (B - A) % q + G = (B + A) % q + H = (D - C) % q + X3 = (E * F) % q + Y3 = (G * H) % q + Z3 = (F * G) % q + T3 = (E * H) % q return (X3, Y3, Z3, T3) -def xpt_double (pt): + +def xpt_double(pt): (X1, Y1, Z1, _) = pt - A = (X1*X1) - B = (Y1*Y1) - C = (2*Z1*Z1) + A = (X1 * X1) + B = (Y1 * Y1) + C = (2 * Z1 * Z1) D = (-A) % q - J = (X1+Y1) % q - E = (J*J-A-B) % q - G = (D+B) % q - F = (G-C) % q - H = (D-B) % q - X3 = (E*F) % q - Y3 = (G*H) % q - Z3 = (F*G) % q - T3 = (E*H) % q - return (X3, Y3, Z3, T3) + J = (X1 + Y1) % q + E = (J * J - A - B) % q + G = (D + B) % q + F = (G - C) % q + H = (D - B) % q + X3 = (E * F) % q + Y3 = (G * H) % q + Z3 = (F * G) % q + T3 = (E * H) % q + return X3, Y3, Z3, T3 -def pt_xform (pt): + +def pt_xform(pt): (x, y) = pt - return (x, y, 1, (x*y)%q) + return x, y, 1, (x * y) % q -def pt_unxform (pt): + +def pt_unxform(pt): (x, y, z, _) = pt - return ((x*inv(z))%q, (y*inv(z))%q) + return (x * inv(z)) % q, (y * inv(z)) % q + + +def xpt_mult(pt, n): + if n == 0: + return pt_xform((0, 1)) + + _ = xpt_double(xpt_mult(pt, n >> 1)) + return xpt_add(_, pt) if n & 1 else _ -def xpt_mult (pt, n): - if n==0: return pt_xform((0,1)) - _ = xpt_double(xpt_mult(pt, n>>1)) - return xpt_add(_, pt) if n&1 else _ def scalarmult(pt, e): return pt_unxform(xpt_mult(pt_xform(pt), e)) + def encodeint(y): bits = [(y >> i) & 1 for i in range(b)] e = [(sum([bits[i * 8 + j] << j for j in range(8)])) - for i in range(b//8)] + for i in range(b // 8)] return asbytes(e) + def encodepoint(P): x = P[0] y = P[1] bits = [(y >> i) & 1 for i in range(b - 1)] + [x & 1] e = [(sum([bits[i * 8 + j] << j for j in range(8)])) - for i in range(b//8)] + for i in range(b // 8)] return asbytes(e) - + + def publickey(sk): h = H(sk) - a = 2**(b-2) + sum(2**i * bit(h,i) for i in range(3,b-2)) - A = scalarmult(B,a) + a = 2 ** (b - 2) + sum(2 ** i * bit(h, i) for i in range(3, b - 2)) + A = scalarmult(B, a) return encodepoint(A) + def Hint(m): h = H(m) - return sum(2**i * bit(h,i) for i in range(2*b)) + return sum(2 ** i * bit(h, i) for i in range(2 * b)) -def signature(m,sk,pk): + +def signature(m, sk, pk): h = H(sk) - a = 2**(b-2) + sum(2**i * bit(h,i) for i in range(3,b-2)) - inter = joinbytes([h[i] for i in range(b//8,b//4)]) + a = 2 ** (b - 2) + sum(2 ** i * bit(h, i) for i in range(3, b - 2)) + inter = joinbytes([h[i] for i in range(b // 8, b // 4)]) r = Hint(inter + m) - R = scalarmult(B,r) + R = scalarmult(B, r) S = (r + Hint(encodepoint(R) + pk + m) * a) % l return encodepoint(R) + encodeint(S) + def isoncurve(P): x = P[0] y = P[1] - return (-x*x + y*y - 1 - d*x*x*y*y) % q == 0 + return (-x * x + y * y - 1 - d * x * x * y * y) % q == 0 + def decodeint(s): - return 
sum(2**i * bit(s,i) for i in range(0,b)) + return sum(2 ** i * bit(s, i) for i in range(0, b)) + def decodepoint(s): - y = sum(2**i * bit(s,i) for i in range(0,b-1)) + y = sum(2 ** i * bit(s, i) for i in range(0, b - 1)) x = xrecover(y) - if x & 1 != bit(s,b-1): x = q-x - P = [x,y] - if not isoncurve(P): raise Exception("decoding point that is not on curve") + if x & 1 != bit(s, b - 1): + x = q - x + + P = [x, y] + if not isoncurve(P): + raise Exception("decoding point that is not on curve") + return P + def checkvalid(s, m, pk): - if len(s) != b//4: raise Exception("signature length is wrong") - if len(pk) != b//8: raise Exception("public-key length is wrong") - R = decodepoint(s[0:b//8]) + if len(s) != b // 4: + raise Exception("signature length is wrong") + if len(pk) != b // 8: + raise Exception("public-key length is wrong") + + R = decodepoint(s[0:b // 8]) A = decodepoint(pk) - S = decodeint(s[b//8:b//4]) + S = decodeint(s[b // 8:b // 4]) h = Hint(encodepoint(R) + pk + m) - v1 = scalarmult(B,S) -# v2 = edwards(R,scalarmult(A,h)) + v1 = scalarmult(B, S) + # v2 = edwards(R,scalarmult(A,h)) v2 = pt_unxform(xpt_add(pt_xform(R), pt_xform(scalarmult(A, h)))) - return v1==v2 + return v1 == v2 + ########################################################## # @@ -199,7 +242,8 @@ def checkvalid(s, m, pk): P = q A = 486662 -#def expmod(b, e, m): + +# def expmod(b, e, m): # if e == 0: return 1 # t = expmod(b, e / 2, m) ** 2 % m # if e & 1: t = (t * b) % m @@ -207,64 +251,73 @@ A = 486662 # def inv(x): return expmod(x, P - 2, P) + def add(n, m, d): (xn, zn) = n - (xm, zm) = m + (xm, zm) = m (xd, zd) = d x = 4 * (xm * xn - zm * zn) ** 2 * zd z = 4 * (xm * zn - zm * xn) ** 2 * xd return (x % P, z % P) + def double(n): (xn, zn) = n x = (xn ** 2 - zn ** 2) ** 2 z = 4 * xn * zn * (xn ** 2 + A * xn * zn + zn ** 2) return (x % P, z % P) + def curve25519(n, base=9): - one = (base,1) + one = (base, 1) two = double(one) + # f(m) evaluates to a tuple # containing the mth multiple and the # (m+1)th multiple of base. def f(m): - if m == 1: return (one, two) + if m == 1: + return (one, two) + (pm, pm1) = f(m // 2) - if (m & 1): + if m & 1: return (add(pm, pm1, one), double(pm1)) + return (double(pm), add(pm, pm1, one)) - ((x,z), _) = f(n) + + ((x, z), _) = f(n) return (x * inv(z)) % P -import random def genkey(n=0): - n = n or random.randint(0,P) + n = n or random.randint(0, P) n &= ~7 n &= ~(128 << 8 * 31) n |= 64 << 8 * 31 return n -#def str2int(s): + +# def str2int(s): # return int(hexlify(s), 16) # # return sum(ord(s[i]) << (8 * i) for i in range(32)) # -#def int2str(n): +# def int2str(n): # return unhexlify("%x" % n) # # return ''.join([chr((n >> (8 * i)) & 255) for i in range(32)]) ################################################# + def dsa_test(): import os - msg = str(random.randint(q,q+q)).encode('utf-8') + msg = str(random.randint(q, q + q)).encode('utf-8') sk = os.urandom(32) pk = publickey(sk) sig = signature(msg, sk, pk) return checkvalid(sig, msg, pk) + def dh_test(): sk1 = genkey() sk2 = genkey() return curve25519(sk1, curve25519(sk2)) == curve25519(sk2, curve25519(sk1)) - diff --git a/lib/python3.4/site-packages/wheel/signatures/ed25519py.py b/lib/python3.4/site-packages/wheel/signatures/ed25519py.py index 55eba2e..0c4ab8f 100644 --- a/lib/python3.4/site-packages/wheel/signatures/ed25519py.py +++ b/lib/python3.4/site-packages/wheel/signatures/ed25519py.py @@ -1,22 +1,21 @@ -# -*- coding: utf-8 -*- - -import warnings import os - +import warnings from collections import namedtuple + from . 
import djbec __all__ = ['crypto_sign', 'crypto_sign_open', 'crypto_sign_keypair', 'Keypair', 'PUBLICKEYBYTES', 'SECRETKEYBYTES', 'SIGNATUREBYTES'] -PUBLICKEYBYTES=32 -SECRETKEYBYTES=64 -SIGNATUREBYTES=64 +PUBLICKEYBYTES = 32 +SECRETKEYBYTES = 64 +SIGNATUREBYTES = 64 + +Keypair = namedtuple('Keypair', ('vk', 'sk')) # verifying key, secret key -Keypair = namedtuple('Keypair', ('vk', 'sk')) # verifying key, secret key def crypto_sign_keypair(seed=None): - """Return (verifying, secret) key from a given seed, or os.urandom(32)""" + """Return (verifying, secret) key from a given seed, or os.urandom(32)""" if seed is None: seed = os.urandom(PUBLICKEYBYTES) else: @@ -47,6 +46,5 @@ def crypto_sign_open(signed, vk): raise ValueError("Bad verifying key length %d" % len(vk)) rc = djbec.checkvalid(signed[:SIGNATUREBYTES], signed[SIGNATUREBYTES:], vk) if not rc: - raise ValueError("rc != True", rc) + raise ValueError("rc != True", rc) return signed[SIGNATUREBYTES:] - diff --git a/lib/python3.4/site-packages/wheel/signatures/keys.py b/lib/python3.4/site-packages/wheel/signatures/keys.py index 1dde4bf..eb5d4ac 100644 --- a/lib/python3.4/site-packages/wheel/signatures/keys.py +++ b/lib/python3.4/site-packages/wheel/signatures/keys.py @@ -1,7 +1,7 @@ """Store and retrieve wheel signing / verifying keys. -Given a scope (a package name, + meaning "all packages", or - meaning -"no packages"), return a list of verifying keys that are trusted for that +Given a scope (a package name, + meaning "all packages", or - meaning +"no packages"), return a list of verifying keys that are trusted for that scope. Given a package name, return a list of (scope, key) suggested keys to sign @@ -33,15 +33,17 @@ wheel export key import json import os.path -from wheel.util import native, load_config_paths, save_config_path + +from ..util import native, load_config_paths, save_config_path + class WheelKeys(object): SCHEMA = 1 CONFIG_NAME = 'wheel.json' - + def __init__(self): - self.data = {'signers':[], 'verifiers':[]} - + self.data = {'signers': [], 'verifiers': []} + def load(self): # XXX JSON is not a great database for path in load_config_paths('wheel'): @@ -50,7 +52,7 @@ class WheelKeys(object): with open(conf, 'r') as infile: self.data = json.load(infile) for x in ('signers', 'verifiers'): - if not x in self.data: + if x not in self.data: self.data[x] = [] if 'schema' not in self.data: self.data['schema'] = self.SCHEMA @@ -62,38 +64,38 @@ class WheelKeys(object): return self def save(self): - # Try not to call this a very long time after load() + # Try not to call this a very long time after load() path = save_config_path('wheel') conf = os.path.join(native(path), self.CONFIG_NAME) with open(conf, 'w+') as out: json.dump(self.data, out, indent=2) return self - + def trust(self, scope, vk): """Start trusting a particular key for given scope.""" - self.data['verifiers'].append({'scope':scope, 'vk':vk}) + self.data['verifiers'].append({'scope': scope, 'vk': vk}) return self - + def untrust(self, scope, vk): """Stop trusting a particular key for given scope.""" - self.data['verifiers'].remove({'scope':scope, 'vk':vk}) + self.data['verifiers'].remove({'scope': scope, 'vk': vk}) return self - + def trusted(self, scope=None): """Return list of [(scope, trusted key), ...] 
for given scope.""" - trust = [(x['scope'], x['vk']) for x in self.data['verifiers'] if x['scope'] in (scope, '+')] + trust = [(x['scope'], x['vk']) for x in self.data['verifiers'] + if x['scope'] in (scope, '+')] trust.sort(key=lambda x: x[0]) trust.reverse() return trust - + def signers(self, scope): """Return list of signing key(s).""" sign = [(x['scope'], x['vk']) for x in self.data['signers'] if x['scope'] in (scope, '+')] sign.sort(key=lambda x: x[0]) sign.reverse() return sign - + def add_signer(self, scope, vk): """Remember verifying key vk as being valid for signing in scope.""" - self.data['signers'].append({'scope':scope, 'vk':vk}) - + self.data['signers'].append({'scope': scope, 'vk': vk}) diff --git a/lib/python3.4/site-packages/wheel/test/__init__.py b/lib/python3.4/site-packages/wheel/test/__init__.py deleted file mode 100644 index 4287ca8..0000000 --- a/lib/python3.4/site-packages/wheel/test/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# \ No newline at end of file diff --git a/lib/python3.4/site-packages/wheel/test/complex-dist/complexdist/__init__.py b/lib/python3.4/site-packages/wheel/test/complex-dist/complexdist/__init__.py deleted file mode 100644 index 559fbb7..0000000 --- a/lib/python3.4/site-packages/wheel/test/complex-dist/complexdist/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -def main(): - return diff --git a/lib/python3.4/site-packages/wheel/test/complex-dist/setup.py b/lib/python3.4/site-packages/wheel/test/complex-dist/setup.py deleted file mode 100644 index 615d5dc..0000000 --- a/lib/python3.4/site-packages/wheel/test/complex-dist/setup.py +++ /dev/null @@ -1,30 +0,0 @@ -from setuptools import setup - -try: - unicode - def u8(s): - return s.decode('unicode-escape') -except NameError: - def u8(s): - return s - -setup(name='complex-dist', - version='0.1', - description=u8('Another testing distribution \N{SNOWMAN}'), - long_description=u8('Another testing distribution \N{SNOWMAN}'), - author="Illustrious Author", - author_email="illustrious@example.org", - url="http://example.org/exemplary", - packages=['complexdist'], - setup_requires=["wheel", "setuptools"], - install_requires=["quux", "splort"], - extras_require={'simple':['simple.dist']}, - tests_require=["foo", "bar>=10.0.0"], - entry_points={ - 'console_scripts': [ - 'complex-dist=complexdist:main', - 'complex-dist2=complexdist:main', - ], - }, - ) - diff --git a/lib/python3.4/site-packages/wheel/test/headers.dist/header.h b/lib/python3.4/site-packages/wheel/test/headers.dist/header.h deleted file mode 100644 index e69de29..0000000 diff --git a/lib/python3.4/site-packages/wheel/test/headers.dist/headersdist.py b/lib/python3.4/site-packages/wheel/test/headers.dist/headersdist.py deleted file mode 100644 index e69de29..0000000 diff --git a/lib/python3.4/site-packages/wheel/test/headers.dist/setup.py b/lib/python3.4/site-packages/wheel/test/headers.dist/setup.py deleted file mode 100644 index 2704f01..0000000 --- a/lib/python3.4/site-packages/wheel/test/headers.dist/setup.py +++ /dev/null @@ -1,16 +0,0 @@ -from setuptools import setup - -try: - unicode - def u8(s): - return s.decode('unicode-escape').encode('utf-8') -except NameError: - def u8(s): - return s.encode('utf-8') - -setup(name='headers.dist', - version='0.1', - description=u8('A distribution with headers'), - headers=['header.h'] - ) - diff --git a/lib/python3.4/site-packages/wheel/test/pydist-schema.json b/lib/python3.4/site-packages/wheel/test/pydist-schema.json deleted file mode 100644 index 566f3a4..0000000 --- 
a/lib/python3.4/site-packages/wheel/test/pydist-schema.json +++ /dev/null @@ -1,362 +0,0 @@ -{ - "id": "http://www.python.org/dev/peps/pep-0426/", - "$schema": "http://json-schema.org/draft-04/schema#", - "title": "Metadata for Python Software Packages 2.0", - "type": "object", - "properties": { - "metadata_version": { - "description": "Version of the file format", - "type": "string", - "pattern": "^(\\d+(\\.\\d+)*)$" - }, - "generator": { - "description": "Name and version of the program that produced this file.", - "type": "string", - "pattern": "^[0-9A-Za-z]([0-9A-Za-z_.-]*[0-9A-Za-z])( \\(.*\\))?$" - }, - "name": { - "description": "The name of the distribution.", - "type": "string", - "$ref": "#/definitions/distribution_name" - }, - "version": { - "description": "The distribution's public version identifier", - "type": "string", - "pattern": "^(\\d+(\\.\\d+)*)((a|b|c|rc)(\\d+))?(\\.(post)(\\d+))?(\\.(dev)(\\d+))?$" - }, - "source_label": { - "description": "A constrained identifying text string", - "type": "string", - "pattern": "^[0-9a-z_.-+]+$" - }, - "source_url": { - "description": "A string containing a full URL where the source for this specific version of the distribution can be downloaded.", - "type": "string", - "format": "uri" - }, - "summary": { - "description": "A one-line summary of what the distribution does.", - "type": "string" - }, - "extras": { - "description": "A list of optional sets of dependencies that may be used to define conditional dependencies in \"may_require\" and similar fields.", - "type": "array", - "items": { - "type": "string", - "$ref": "#/definitions/extra_name" - } - }, - "meta_requires": { - "description": "A list of subdistributions made available through this metadistribution.", - "type": "array", - "$ref": "#/definitions/dependencies" - }, - "run_requires": { - "description": "A list of other distributions needed to run this distribution.", - "type": "array", - "$ref": "#/definitions/dependencies" - }, - "test_requires": { - "description": "A list of other distributions needed when this distribution is tested.", - "type": "array", - "$ref": "#/definitions/dependencies" - }, - "build_requires": { - "description": "A list of other distributions needed when this distribution is built.", - "type": "array", - "$ref": "#/definitions/dependencies" - }, - "dev_requires": { - "description": "A list of other distributions needed when this distribution is developed.", - "type": "array", - "$ref": "#/definitions/dependencies" - }, - "provides": { - "description": "A list of strings naming additional dependency requirements that are satisfied by installing this distribution. These strings must be of the form Name or Name (Version)", - "type": "array", - "items": { - "type": "string", - "$ref": "#/definitions/provides_declaration" - } - }, - "modules": { - "description": "A list of modules and/or packages available for import after installing this distribution.", - "type": "array", - "items": { - "type": "string", - "$ref": "#/definitions/qualified_name" - } - }, - "namespaces": { - "description": "A list of namespace packages this distribution contributes to", - "type": "array", - "items": { - "type": "string", - "$ref": "#/definitions/qualified_name" - } - }, - "obsoleted_by": { - "description": "A string that indicates that this project is no longer being developed. 
The named project provides a substitute or replacement.", - "type": "string", - "$ref": "#/definitions/requirement" - }, - "supports_environments": { - "description": "A list of strings specifying the environments that the distribution explicitly supports.", - "type": "array", - "items": { - "type": "string", - "$ref": "#/definitions/environment_marker" - } - }, - "install_hooks": { - "description": "The install_hooks field is used to define various operations that may be invoked on a distribution in a platform independent manner.", - "type": "object", - "properties": { - "postinstall": { - "type": "string", - "$ref": "#/definitions/export_specifier" - }, - "preuninstall": { - "type": "string", - "$ref": "#/definitions/export_specifier" - } - } - }, - "extensions": { - "description": "Extensions to the metadata may be present in a mapping under the 'extensions' key.", - "type": "object", - "$ref": "#/definitions/extensions" - } - }, - - "required": ["metadata_version", "name", "version", "summary"], - "additionalProperties": false, - - "definitions": { - "contact": { - "type": "object", - "properties": { - "name": { - "type": "string" - }, - "email": { - "type": "string" - }, - "url": { - "type": "string" - }, - "role": { - "type": "string" - } - }, - "required": ["name"], - "additionalProperties": false - }, - "dependencies": { - "type": "array", - "items": { - "type": "object", - "$ref": "#/definitions/dependency" - } - }, - "dependency": { - "type": "object", - "properties": { - "extra": { - "type": "string", - "$ref": "#/definitions/extra_name" - }, - "environment": { - "type": "string", - "$ref": "#/definitions/environment_marker" - }, - "requires": { - "type": "array", - "items": { - "type": "string", - "$ref": "#/definitions/requirement" - } - } - }, - "required": ["requires"], - "additionalProperties": false - }, - "extensions": { - "type": "object", - "patternProperties": { - "^[A-Za-z][0-9A-Za-z_]*([.][0-9A-Za-z_]*)*$": {} - }, - "properties": { - "python.details" : { - "description": "More information regarding the distribution.", - "type": "object", - "properties": { - "document_names": { - "description": "Names of supporting metadata documents", - "type": "object", - "properties": { - "description": { - "type": "string", - "$ref": "#/definitions/document_name" - }, - "changelog": { - "type": "string", - "$ref": "#/definitions/document_name" - }, - "license": { - "type": "string", - "$ref": "#/definitions/document_name" - } - }, - "additionalProperties": false - }, - "keywords": { - "description": "A list of additional keywords to be used to assist searching for the distribution in a larger catalog.", - "type": "array", - "items": { - "type": "string" - } - }, - "license": { - "description": "A string indicating the license covering the distribution.", - "type": "string" - }, - "classifiers": { - "description": "A list of strings, with each giving a single classification value for the distribution.", - "type": "array", - "items": { - "type": "string" - } - } - } - }, - "python.project" : { - "description": "More information regarding the creation and maintenance of the distribution.", - "$ref": "#/definitions/project_or_integrator" - }, - "python.integrator" : { - "description": "More information regarding the downstream redistributor of the distribution.", - "$ref": "#/definitions/project_or_integrator" - }, - "python.commands" : { - "description": "Command line interfaces provided by this distribution", - "type": "object", - "$ref": "#/definitions/commands" - }, - 
"python.exports" : { - "description": "Other exported interfaces provided by this distribution", - "type": "object", - "$ref": "#/definitions/exports" - } - }, - "additionalProperties": false - }, - "commands": { - "type": "object", - "properties": { - "wrap_console": { - "type": "object", - "$ref": "#/definitions/command_map" - }, - "wrap_gui": { - "type": "object", - "$ref": "#/definitions/command_map" - }, - "prebuilt": { - "type": "array", - "items": { - "type": "string", - "$ref": "#/definitions/relative_path" - } - } - }, - "additionalProperties": false - }, - "exports": { - "type": "object", - "patternProperties": { - "^[A-Za-z][0-9A-Za-z_]*([.][0-9A-Za-z_]*)*$": { - "type": "object", - "patternProperties": { - ".": { - "type": "string", - "$ref": "#/definitions/export_specifier" - } - }, - "additionalProperties": false - } - }, - "additionalProperties": false - }, - "command_map": { - "type": "object", - "patternProperties": { - "^[0-9A-Za-z]([0-9A-Za-z_.-]*[0-9A-Za-z])?$": { - "type": "string", - "$ref": "#/definitions/export_specifier" - } - }, - "additionalProperties": false - }, - "project_or_integrator" : { - "type": "object", - "properties" : { - "contacts": { - "description": "A list of contributor entries giving the recommended contact points for getting more information about the project.", - "type": "array", - "items": { - "type": "object", - "$ref": "#/definitions/contact" - } - }, - "contributors": { - "description": "A list of contributor entries for other contributors not already listed as current project points of contact.", - "type": "array", - "items": { - "type": "object", - "$ref": "#/definitions/contact" - } - }, - "project_urls": { - "description": "A mapping of arbitrary text labels to additional URLs relevant to the project.", - "type": "object" - } - } - }, - "distribution_name": { - "type": "string", - "pattern": "^[0-9A-Za-z]([0-9A-Za-z_.-]*[0-9A-Za-z])?$" - }, - "requirement": { - "type": "string" - }, - "provides_declaration": { - "type": "string" - }, - "environment_marker": { - "type": "string" - }, - "document_name": { - "type": "string" - }, - "extra_name" : { - "type": "string", - "pattern": "^[0-9A-Za-z]([0-9A-Za-z_.-]*[0-9A-Za-z])?$" - }, - "relative_path" : { - "type": "string" - }, - "export_specifier": { - "type": "string", - "pattern": "^([A-Za-z_][A-Za-z_0-9]*([.][A-Za-z_][A-Za-z_0-9]*)*)(:[A-Za-z_][A-Za-z_0-9]*([.][A-Za-z_][A-Za-z_0-9]*)*)?(\\[[0-9A-Za-z]([0-9A-Za-z_.-]*[0-9A-Za-z])?\\])?$" - }, - "qualified_name" : { - "type": "string", - "pattern": "^[A-Za-z_][A-Za-z_0-9]*([.][A-Za-z_][A-Za-z_0-9]*)*$" - }, - "prefixed_name" : { - "type": "string", - "pattern": "^[A-Za-z_][A-Za-z_0-9]*([.][A-Za-z_0-9]*)*$" - } - } -} diff --git a/lib/python3.4/site-packages/wheel/test/simple.dist/setup.py b/lib/python3.4/site-packages/wheel/test/simple.dist/setup.py deleted file mode 100644 index 50c909f..0000000 --- a/lib/python3.4/site-packages/wheel/test/simple.dist/setup.py +++ /dev/null @@ -1,17 +0,0 @@ -from setuptools import setup - -try: - unicode - def u8(s): - return s.decode('unicode-escape').encode('utf-8') -except NameError: - def u8(s): - return s.encode('utf-8') - -setup(name='simple.dist', - version='0.1', - description=u8('A testing distribution \N{SNOWMAN}'), - packages=['simpledist'], - extras_require={'voting': ['beaglevote']}, - ) - diff --git a/lib/python3.4/site-packages/wheel/test/simple.dist/simpledist/__init__.py b/lib/python3.4/site-packages/wheel/test/simple.dist/simpledist/__init__.py deleted file mode 100644 index 
e69de29..0000000 diff --git a/lib/python3.4/site-packages/wheel/test/test-1.0-py2.py3-none-win32.whl b/lib/python3.4/site-packages/wheel/test/test-1.0-py2.py3-none-win32.whl deleted file mode 100644 index 095583e..0000000 Binary files a/lib/python3.4/site-packages/wheel/test/test-1.0-py2.py3-none-win32.whl and /dev/null differ diff --git a/lib/python3.4/site-packages/wheel/test/test_basic.py b/lib/python3.4/site-packages/wheel/test/test_basic.py deleted file mode 100644 index e69fef9..0000000 --- a/lib/python3.4/site-packages/wheel/test/test_basic.py +++ /dev/null @@ -1,176 +0,0 @@ -""" -Basic wheel tests. -""" - -import os -import pkg_resources -import json -import sys - -from pkg_resources import resource_filename - -import wheel.util -import wheel.tool - -from wheel import egg2wheel -from wheel.install import WheelFile -from zipfile import ZipFile -from shutil import rmtree - -test_distributions = ("complex-dist", "simple.dist", "headers.dist") - -def teardown_module(): - """Delete eggs/wheels created by tests.""" - base = pkg_resources.resource_filename('wheel.test', '') - for dist in test_distributions: - for subdir in ('build', 'dist'): - try: - rmtree(os.path.join(base, dist, subdir)) - except OSError: - pass - -def setup_module(): - build_wheel() - build_egg() - -def build_wheel(): - """Build wheels from test distributions.""" - for dist in test_distributions: - pwd = os.path.abspath(os.curdir) - distdir = pkg_resources.resource_filename('wheel.test', dist) - os.chdir(distdir) - try: - sys.argv = ['', 'bdist_wheel'] - exec(compile(open('setup.py').read(), 'setup.py', 'exec')) - finally: - os.chdir(pwd) - -def build_egg(): - """Build eggs from test distributions.""" - for dist in test_distributions: - pwd = os.path.abspath(os.curdir) - distdir = pkg_resources.resource_filename('wheel.test', dist) - os.chdir(distdir) - try: - sys.argv = ['', 'bdist_egg'] - exec(compile(open('setup.py').read(), 'setup.py', 'exec')) - finally: - os.chdir(pwd) - -def test_findable(): - """Make sure pkg_resources can find us.""" - assert pkg_resources.working_set.by_key['wheel'].version - -def test_egg_re(): - """Make sure egg_info_re matches.""" - egg_names = open(pkg_resources.resource_filename('wheel', 'eggnames.txt')) - for line in egg_names: - line = line.strip() - if not line: - continue - assert egg2wheel.egg_info_re.match(line), line - -def test_compatibility_tags(): - """Test compatibilty tags are working.""" - wf = WheelFile("package-1.0.0-cp32.cp33-noabi-noarch.whl") - assert (list(wf.compatibility_tags) == - [('cp32', 'noabi', 'noarch'), ('cp33', 'noabi', 'noarch')]) - assert (wf.arity == 2) - - wf2 = WheelFile("package-1.0.0-1st-cp33-noabi-noarch.whl") - wf2_info = wf2.parsed_filename.groupdict() - assert wf2_info['build'] == '1st', wf2_info - -def test_convert_egg(): - base = pkg_resources.resource_filename('wheel.test', '') - for dist in test_distributions: - distdir = os.path.join(base, dist, 'dist') - eggs = [e for e in os.listdir(distdir) if e.endswith('.egg')] - wheel.tool.convert(eggs, distdir, verbose=False) - -def test_unpack(): - """ - Make sure 'wheel unpack' works. - This also verifies the integrity of our testing wheel files. 
- """ - for dist in test_distributions: - distdir = pkg_resources.resource_filename('wheel.test', - os.path.join(dist, 'dist')) - for wheelfile in (w for w in os.listdir(distdir) if w.endswith('.whl')): - wheel.tool.unpack(os.path.join(distdir, wheelfile), distdir) - -def test_no_scripts(): - """Make sure entry point scripts are not generated.""" - dist = "complex-dist" - basedir = pkg_resources.resource_filename('wheel.test', dist) - for (dirname, subdirs, filenames) in os.walk(basedir): - for filename in filenames: - if filename.endswith('.whl'): - whl = ZipFile(os.path.join(dirname, filename)) - for entry in whl.infolist(): - assert not '.data/scripts/' in entry.filename - -def test_pydist(): - """Make sure pydist.json exists and validates against our schema.""" - # XXX this test may need manual cleanup of older wheels - - import jsonschema - - def open_json(filename): - return json.loads(open(filename, 'rb').read().decode('utf-8')) - - pymeta_schema = open_json(resource_filename('wheel.test', - 'pydist-schema.json')) - valid = 0 - for dist in ("simple.dist", "complex-dist"): - basedir = pkg_resources.resource_filename('wheel.test', dist) - for (dirname, subdirs, filenames) in os.walk(basedir): - for filename in filenames: - if filename.endswith('.whl'): - whl = ZipFile(os.path.join(dirname, filename)) - for entry in whl.infolist(): - if entry.filename.endswith('/metadata.json'): - pymeta = json.loads(whl.read(entry).decode('utf-8')) - jsonschema.validate(pymeta, pymeta_schema) - valid += 1 - assert valid > 0, "No metadata.json found" - -def test_util(): - """Test functions in util.py.""" - for i in range(10): - before = b'*' * i - encoded = wheel.util.urlsafe_b64encode(before) - assert not encoded.endswith(b'=') - after = wheel.util.urlsafe_b64decode(encoded) - assert before == after - - -def test_pick_best(): - """Test the wheel ranking algorithm.""" - def get_tags(res): - info = res[-1].parsed_filename.groupdict() - return info['pyver'], info['abi'], info['plat'] - - cand_tags = [('py27', 'noabi', 'noarch'), ('py26', 'noabi', 'noarch'), - ('cp27', 'noabi', 'linux_i686'), - ('cp26', 'noabi', 'linux_i686'), - ('cp27', 'noabi', 'linux_x86_64'), - ('cp26', 'noabi', 'linux_x86_64')] - cand_wheels = [WheelFile('testpkg-1.0-%s-%s-%s.whl' % t) - for t in cand_tags] - - supported = [('cp27', 'noabi', 'linux_i686'), ('py27', 'noabi', 'noarch')] - supported2 = [('cp27', 'noabi', 'linux_i686'), ('py27', 'noabi', 'noarch'), - ('cp26', 'noabi', 'linux_i686'), ('py26', 'noabi', 'noarch')] - supported3 = [('cp26', 'noabi', 'linux_i686'), ('py26', 'noabi', 'noarch'), - ('cp27', 'noabi', 'linux_i686'), ('py27', 'noabi', 'noarch')] - - for supp in (supported, supported2, supported3): - context = lambda: list(supp) - for wheel in cand_wheels: - wheel.context = context - best = max(cand_wheels) - assert list(best.tags)[0] == supp[0] - - # assert_equal( - # list(map(get_tags, pick_best(cand_wheels, supp, top=False))), supp) diff --git a/lib/python3.4/site-packages/wheel/test/test_install.py b/lib/python3.4/site-packages/wheel/test/test_install.py deleted file mode 100644 index ddcddf5..0000000 --- a/lib/python3.4/site-packages/wheel/test/test_install.py +++ /dev/null @@ -1,55 +0,0 @@ -# Test wheel. 
-# The file has the following contents: -# hello.pyd -# hello/hello.py -# hello/__init__.py -# test-1.0.data/data/hello.dat -# test-1.0.data/headers/hello.dat -# test-1.0.data/scripts/hello.sh -# test-1.0.dist-info/WHEEL -# test-1.0.dist-info/METADATA -# test-1.0.dist-info/RECORD -# The root is PLATLIB -# So, some in PLATLIB, and one in each of DATA, HEADERS and SCRIPTS. - -import wheel.tool -import wheel.pep425tags -from wheel.install import WheelFile -from tempfile import mkdtemp -import shutil -import os - -THISDIR = os.path.dirname(__file__) -TESTWHEEL = os.path.join(THISDIR, 'test-1.0-py2.py3-none-win32.whl') - -def check(*path): - return os.path.exists(os.path.join(*path)) - -def test_install(): - tempdir = mkdtemp() - def get_supported(): - return list(wheel.pep425tags.get_supported()) + [('py3', 'none', 'win32')] - whl = WheelFile(TESTWHEEL, context=get_supported) - assert whl.supports_current_python(get_supported) - try: - locs = {} - for key in ('purelib', 'platlib', 'scripts', 'headers', 'data'): - locs[key] = os.path.join(tempdir, key) - os.mkdir(locs[key]) - whl.install(overrides=locs) - assert len(os.listdir(locs['purelib'])) == 0 - assert check(locs['platlib'], 'hello.pyd') - assert check(locs['platlib'], 'hello', 'hello.py') - assert check(locs['platlib'], 'hello', '__init__.py') - assert check(locs['data'], 'hello.dat') - assert check(locs['headers'], 'hello.dat') - assert check(locs['scripts'], 'hello.sh') - assert check(locs['platlib'], 'test-1.0.dist-info', 'RECORD') - finally: - shutil.rmtree(tempdir) - -def test_install_tool(): - """Slightly improve coverage of wheel.install""" - wheel.tool.install([TESTWHEEL], force=True, dry_run=True) - - \ No newline at end of file diff --git a/lib/python3.4/site-packages/wheel/test/test_keys.py b/lib/python3.4/site-packages/wheel/test/test_keys.py deleted file mode 100644 index f96166b..0000000 --- a/lib/python3.4/site-packages/wheel/test/test_keys.py +++ /dev/null @@ -1,98 +0,0 @@ -import tempfile -import os.path -import unittest -import json - -from wheel.signatures import keys - -wheel_json = """ -{ - "verifiers": [ - { - "scope": "+", - "vk": "bp-bjK2fFgtA-8DhKKAAPm9-eAZcX_u03oBv2RlKOBc" - }, - { - "scope": "+", - "vk": "KAHZBfyqFW3OcFDbLSG4nPCjXxUPy72phP9I4Rn9MAo" - }, - { - "scope": "+", - "vk": "tmAYCrSfj8gtJ10v3VkvW7jOndKmQIYE12hgnFu3cvk" - } - ], - "signers": [ - { - "scope": "+", - "vk": "tmAYCrSfj8gtJ10v3VkvW7jOndKmQIYE12hgnFu3cvk" - }, - { - "scope": "+", - "vk": "KAHZBfyqFW3OcFDbLSG4nPCjXxUPy72phP9I4Rn9MAo" - } - ], - "schema": 1 -} -""" - -class TestWheelKeys(unittest.TestCase): - def setUp(self): - self.config = tempfile.NamedTemporaryFile(suffix='.json') - self.config.close() - - self.config_path, self.config_filename = os.path.split(self.config.name) - def load(*args): - return [self.config_path] - def save(*args): - return self.config_path - keys.load_config_paths = load - keys.save_config_path = save - self.wk = keys.WheelKeys() - self.wk.CONFIG_NAME = self.config_filename - - def tearDown(self): - os.unlink(self.config.name) - - def test_load_save(self): - self.wk.data = json.loads(wheel_json) - - self.wk.add_signer('+', '67890') - self.wk.add_signer('scope', 'abcdefg') - - self.wk.trust('epocs', 'gfedcba') - self.wk.trust('+', '12345') - - self.wk.save() - - del self.wk.data - self.wk.load() - - signers = self.wk.signers('scope') - self.assertTrue(signers[0] == ('scope', 'abcdefg'), self.wk.data['signers']) - self.assertTrue(signers[1][0] == '+', self.wk.data['signers']) - - trusted = self.wk.trusted('epocs') 
- self.assertTrue(trusted[0] == ('epocs', 'gfedcba')) - self.assertTrue(trusted[1][0] == '+') - - self.wk.untrust('epocs', 'gfedcba') - trusted = self.wk.trusted('epocs') - self.assertTrue(('epocs', 'gfedcba') not in trusted) - - def test_load_save_incomplete(self): - self.wk.data = json.loads(wheel_json) - del self.wk.data['signers'] - self.wk.data['schema'] = self.wk.SCHEMA+1 - self.wk.save() - try: - self.wk.load() - except ValueError: - pass - else: - raise Exception("Expected ValueError") - - del self.wk.data['schema'] - self.wk.save() - self.wk.load() - - diff --git a/lib/python3.4/site-packages/wheel/test/test_paths.py b/lib/python3.4/site-packages/wheel/test/test_paths.py deleted file mode 100644 index a23d506..0000000 --- a/lib/python3.4/site-packages/wheel/test/test_paths.py +++ /dev/null @@ -1,6 +0,0 @@ -import wheel.paths -from distutils.command.install import SCHEME_KEYS - -def test_path(): - d = wheel.paths.get_install_paths('wheel') - assert len(d) == len(SCHEME_KEYS) diff --git a/lib/python3.4/site-packages/wheel/test/test_ranking.py b/lib/python3.4/site-packages/wheel/test/test_ranking.py deleted file mode 100644 index 1632a13..0000000 --- a/lib/python3.4/site-packages/wheel/test/test_ranking.py +++ /dev/null @@ -1,43 +0,0 @@ -import unittest - -from wheel.pep425tags import get_supported -from wheel.install import WheelFile - -WHEELPAT = "%(name)s-%(ver)s-%(pyver)s-%(abi)s-%(arch)s.whl" -def make_wheel(name, ver, pyver, abi, arch): - name = WHEELPAT % dict(name=name, ver=ver, pyver=pyver, abi=abi, - arch=arch) - return WheelFile(name) - -# This relies on the fact that generate_supported will always return the -# exact pyver, abi, and architecture for its first (best) match. -sup = get_supported() -pyver, abi, arch = sup[0] -genver = 'py' + pyver[2:] -majver = genver[:3] - -COMBINATIONS = ( - ('bar', '0.9', 'py2.py3', 'none', 'any'), - ('bar', '0.9', majver, 'none', 'any'), - ('bar', '0.9', genver, 'none', 'any'), - ('bar', '0.9', pyver, abi, arch), - ('bar', '1.3.2', majver, 'none', 'any'), - ('bar', '3.1', genver, 'none', 'any'), - ('bar', '3.1', pyver, abi, arch), - ('foo', '1.0', majver, 'none', 'any'), - ('foo', '1.1', pyver, abi, arch), - ('foo', '2.1', majver + '0', 'none', 'any'), - # This will not be compatible for Python x.0. Beware when we hit Python - # 4.0, and don't test with 3.0!!! 
- ('foo', '2.1', majver + '1', 'none', 'any'), - ('foo', '2.1', pyver , 'none', 'any'), - ('foo', '2.1', pyver , abi, arch), -) - -WHEELS = [ make_wheel(*args) for args in COMBINATIONS ] - -class TestRanking(unittest.TestCase): - def test_comparison(self): - for i in range(len(WHEELS)-1): - for j in range(i): - self.assertTrue(WHEELS[j]') - setup_py = SETUP_PY.format(ext_modules=EXT_MODULES) - else: - setup_py = SETUP_PY.format(ext_modules='') - temppath.join('setup.py').write(setup_py) - return temppath - -@pytest.fixture -def temp_ext_pkg(request): - return temp_pkg(request, ext=True) - -def test_default_tag(temp_pkg): - subprocess.check_call([sys.executable, 'setup.py', 'bdist_wheel'], - cwd=str(temp_pkg)) - dist_dir = temp_pkg.join('dist') - assert dist_dir.check(dir=1) - wheels = dist_dir.listdir() - assert len(wheels) == 1 - assert wheels[0].basename == 'Test-1.0-py%s-none-any.whl' % (sys.version[0],) - assert wheels[0].ext == '.whl' - -def test_explicit_tag(temp_pkg): - subprocess.check_call( - [sys.executable, 'setup.py', 'bdist_wheel', '--python-tag=py32'], - cwd=str(temp_pkg)) - dist_dir = temp_pkg.join('dist') - assert dist_dir.check(dir=1) - wheels = dist_dir.listdir() - assert len(wheels) == 1 - assert wheels[0].basename.startswith('Test-1.0-py32-') - assert wheels[0].ext == '.whl' - -def test_universal_tag(temp_pkg): - subprocess.check_call( - [sys.executable, 'setup.py', 'bdist_wheel', '--universal'], - cwd=str(temp_pkg)) - dist_dir = temp_pkg.join('dist') - assert dist_dir.check(dir=1) - wheels = dist_dir.listdir() - assert len(wheels) == 1 - assert wheels[0].basename.startswith('Test-1.0-py2.py3-') - assert wheels[0].ext == '.whl' - -def test_universal_beats_explicit_tag(temp_pkg): - subprocess.check_call( - [sys.executable, 'setup.py', 'bdist_wheel', '--universal', '--python-tag=py32'], - cwd=str(temp_pkg)) - dist_dir = temp_pkg.join('dist') - assert dist_dir.check(dir=1) - wheels = dist_dir.listdir() - assert len(wheels) == 1 - assert wheels[0].basename.startswith('Test-1.0-py2.py3-') - assert wheels[0].ext == '.whl' - -def test_universal_in_setup_cfg(temp_pkg): - temp_pkg.join('setup.cfg').write('[bdist_wheel]\nuniversal=1') - subprocess.check_call( - [sys.executable, 'setup.py', 'bdist_wheel'], - cwd=str(temp_pkg)) - dist_dir = temp_pkg.join('dist') - assert dist_dir.check(dir=1) - wheels = dist_dir.listdir() - assert len(wheels) == 1 - assert wheels[0].basename.startswith('Test-1.0-py2.py3-') - assert wheels[0].ext == '.whl' - -def test_pythontag_in_setup_cfg(temp_pkg): - temp_pkg.join('setup.cfg').write('[bdist_wheel]\npython_tag=py32') - subprocess.check_call( - [sys.executable, 'setup.py', 'bdist_wheel'], - cwd=str(temp_pkg)) - dist_dir = temp_pkg.join('dist') - assert dist_dir.check(dir=1) - wheels = dist_dir.listdir() - assert len(wheels) == 1 - assert wheels[0].basename.startswith('Test-1.0-py32-') - assert wheels[0].ext == '.whl' - -def test_legacy_wheel_section_in_setup_cfg(temp_pkg): - temp_pkg.join('setup.cfg').write('[wheel]\nuniversal=1') - subprocess.check_call( - [sys.executable, 'setup.py', 'bdist_wheel'], - cwd=str(temp_pkg)) - dist_dir = temp_pkg.join('dist') - assert dist_dir.check(dir=1) - wheels = dist_dir.listdir() - assert len(wheels) == 1 - assert wheels[0].basename.startswith('Test-1.0-py2.py3-') - assert wheels[0].ext == '.whl' - -def test_plat_name_purepy(temp_pkg): - subprocess.check_call( - [sys.executable, 'setup.py', 'bdist_wheel', '--plat-name=testplat.pure'], - cwd=str(temp_pkg)) - dist_dir = temp_pkg.join('dist') - assert 
dist_dir.check(dir=1) - wheels = dist_dir.listdir() - assert len(wheels) == 1 - assert wheels[0].basename.endswith('-testplat_pure.whl') - assert wheels[0].ext == '.whl' - -def test_plat_name_ext(temp_ext_pkg): - try: - subprocess.check_call( - [sys.executable, 'setup.py', 'bdist_wheel', '--plat-name=testplat.arch'], - cwd=str(temp_ext_pkg)) - except subprocess.CalledProcessError: - pytest.skip("Cannot compile C Extensions") - dist_dir = temp_ext_pkg.join('dist') - assert dist_dir.check(dir=1) - wheels = dist_dir.listdir() - assert len(wheels) == 1 - assert wheels[0].basename.endswith('-testplat_arch.whl') - assert wheels[0].ext == '.whl' - -def test_plat_name_purepy_in_setupcfg(temp_pkg): - temp_pkg.join('setup.cfg').write('[bdist_wheel]\nplat_name=testplat.pure') - subprocess.check_call( - [sys.executable, 'setup.py', 'bdist_wheel'], - cwd=str(temp_pkg)) - dist_dir = temp_pkg.join('dist') - assert dist_dir.check(dir=1) - wheels = dist_dir.listdir() - assert len(wheels) == 1 - assert wheels[0].basename.endswith('-testplat_pure.whl') - assert wheels[0].ext == '.whl' - -def test_plat_name_ext_in_setupcfg(temp_ext_pkg): - temp_ext_pkg.join('setup.cfg').write('[bdist_wheel]\nplat_name=testplat.arch') - try: - subprocess.check_call( - [sys.executable, 'setup.py', 'bdist_wheel'], - cwd=str(temp_ext_pkg)) - except subprocess.CalledProcessError: - pytest.skip("Cannot compile C Extensions") - dist_dir = temp_ext_pkg.join('dist') - assert dist_dir.check(dir=1) - wheels = dist_dir.listdir() - assert len(wheels) == 1 - assert wheels[0].basename.endswith('-testplat_arch.whl') - assert wheels[0].ext == '.whl' diff --git a/lib/python3.4/site-packages/wheel/test/test_tool.py b/lib/python3.4/site-packages/wheel/test/test_tool.py deleted file mode 100644 index 078f1ed..0000000 --- a/lib/python3.4/site-packages/wheel/test/test_tool.py +++ /dev/null @@ -1,25 +0,0 @@ -from .. import tool - -def test_keygen(): - def get_keyring(): - WheelKeys, keyring = tool.get_keyring() - - class WheelKeysTest(WheelKeys): - def save(self): - pass - - class keyringTest: - @classmethod - def get_keyring(cls): - class keyringTest2: - pw = None - def set_password(self, a, b, c): - self.pw = c - def get_password(self, a, b): - return self.pw - - return keyringTest2() - - return WheelKeysTest, keyringTest - - tool.keygen(get_keyring=get_keyring) diff --git a/lib/python3.4/site-packages/wheel/test/test_wheelfile.py b/lib/python3.4/site-packages/wheel/test/test_wheelfile.py deleted file mode 100644 index 181668f..0000000 --- a/lib/python3.4/site-packages/wheel/test/test_wheelfile.py +++ /dev/null @@ -1,142 +0,0 @@ -import os -import wheel.install -import wheel.archive -import hashlib -try: - from StringIO import StringIO -except ImportError: - from io import BytesIO as StringIO -import codecs -import zipfile -import pytest -import shutil -import tempfile -from contextlib import contextmanager - -@contextmanager -def environ(key, value): - old_value = os.environ.get(key) - try: - os.environ[key] = value - yield - finally: - if old_value is None: - del os.environ[key] - else: - os.environ[key] = old_value - -@contextmanager -def temporary_directory(): - # tempfile.TemporaryDirectory doesn't exist in Python 2. - tempdir = tempfile.mkdtemp() - try: - yield tempdir - finally: - shutil.rmtree(tempdir) - -@contextmanager -def readable_zipfile(path): - # zipfile.ZipFile() isn't a context manager under Python 2. 
- zf = zipfile.ZipFile(path, 'r') - try: - yield zf - finally: - zf.close() - - -def test_verifying_zipfile(): - if not hasattr(zipfile.ZipExtFile, '_update_crc'): - pytest.skip('No ZIP verification. Missing ZipExtFile._update_crc.') - - sio = StringIO() - zf = zipfile.ZipFile(sio, 'w') - zf.writestr("one", b"first file") - zf.writestr("two", b"second file") - zf.writestr("three", b"third file") - zf.close() - - # In default mode, VerifyingZipFile checks the hash of any read file - # mentioned with set_expected_hash(). Files not mentioned with - # set_expected_hash() are not checked. - vzf = wheel.install.VerifyingZipFile(sio, 'r') - vzf.set_expected_hash("one", hashlib.sha256(b"first file").digest()) - vzf.set_expected_hash("three", "blurble") - vzf.open("one").read() - vzf.open("two").read() - try: - vzf.open("three").read() - except wheel.install.BadWheelFile: - pass - else: - raise Exception("expected exception 'BadWheelFile()'") - - # In strict mode, VerifyingZipFile requires every read file to be - # mentioned with set_expected_hash(). - vzf.strict = True - try: - vzf.open("two").read() - except wheel.install.BadWheelFile: - pass - else: - raise Exception("expected exception 'BadWheelFile()'") - - vzf.set_expected_hash("two", None) - vzf.open("two").read() - -def test_pop_zipfile(): - sio = StringIO() - zf = wheel.install.VerifyingZipFile(sio, 'w') - zf.writestr("one", b"first file") - zf.writestr("two", b"second file") - zf.close() - - try: - zf.pop() - except RuntimeError: - pass # already closed - else: - raise Exception("expected RuntimeError") - - zf = wheel.install.VerifyingZipFile(sio, 'a') - zf.pop() - zf.close() - - zf = wheel.install.VerifyingZipFile(sio, 'r') - assert len(zf.infolist()) == 1 - -def test_zipfile_timestamp(): - # An environment variable can be used to influence the timestamp on - # TarInfo objects inside the zip. See issue #143. TemporaryDirectory is - # not a context manager under Python 3. - with temporary_directory() as tempdir: - for filename in ('one', 'two', 'three'): - path = os.path.join(tempdir, filename) - with codecs.open(path, 'w', encoding='utf-8') as fp: - fp.write(filename + '\n') - zip_base_name = os.path.join(tempdir, 'dummy') - # The earliest date representable in TarInfos, 1980-01-01 - with environ('SOURCE_DATE_EPOCH', '315576060'): - zip_filename = wheel.archive.make_wheelfile_inner( - zip_base_name, tempdir) - with readable_zipfile(zip_filename) as zf: - for info in zf.infolist(): - assert info.date_time[:3] == (1980, 1, 1) - -def test_zipfile_attributes(): - # With the change from ZipFile.write() to .writestr(), we need to manually - # set member attributes. 
- with temporary_directory() as tempdir: - files = (('foo', 0o644), ('bar', 0o755)) - for filename, mode in files: - path = os.path.join(tempdir, filename) - with codecs.open(path, 'w', encoding='utf-8') as fp: - fp.write(filename + '\n') - os.chmod(path, mode) - zip_base_name = os.path.join(tempdir, 'dummy') - zip_filename = wheel.archive.make_wheelfile_inner( - zip_base_name, tempdir) - with readable_zipfile(zip_filename) as zf: - for filename, mode in files: - info = zf.getinfo(os.path.join(tempdir, filename)) - assert info.external_attr == (mode | 0o100000) << 16 - assert info.compress_type == zipfile.ZIP_DEFLATED diff --git a/lib/python3.4/site-packages/wheel/tool/__init__.py b/lib/python3.4/site-packages/wheel/tool/__init__.py index 95f0a9b..d6b9893 100644 --- a/lib/python3.4/site-packages/wheel/tool/__init__.py +++ b/lib/python3.4/site-packages/wheel/tool/__init__.py @@ -2,27 +2,29 @@ Wheel command-line utility. """ -import os +import argparse import hashlib -import sys import json -import wheel.paths - +import os +import sys from glob import iglob + from .. import signatures -from ..util import (urlsafe_b64decode, urlsafe_b64encode, native, binary, - matches_requirement) -from ..install import WheelFile +from ..install import WheelFile, VerifyingZipFile +from ..paths import get_install_command +from ..util import urlsafe_b64decode, urlsafe_b64encode, native, binary, matches_requirement + def require_pkgresources(name): try: - import pkg_resources + import pkg_resources # noqa: F401 except ImportError: raise RuntimeError("'{0}' needs pkg_resources (part of setuptools).".format(name)) -import argparse -class WheelError(Exception): pass +class WheelError(Exception): + pass + # For testability def get_keyring(): @@ -31,9 +33,12 @@ def get_keyring(): import keyring assert keyring.get_keyring().priority except (ImportError, AssertionError): - raise WheelError("Install wheel[signatures] (requires keyring, keyrings.alt, pyxdg) for signatures.") + raise WheelError( + "Install wheel[signatures] (requires keyring, keyrings.alt, pyxdg) for signatures.") + return keys.WheelKeys, keyring + def keygen(get_keyring=get_keyring): """Generate a public/private key pair.""" WheelKeys, keyring = get_keyring() @@ -59,6 +64,7 @@ def keygen(get_keyring=get_keyring): wk.trust('+', vk) wk.save() + def sign(wheelfile, replace=False, get_keyring=get_keyring): """Sign a wheel""" WheelKeys, keyring = get_keyring() @@ -78,17 +84,17 @@ def sign(wheelfile, replace=False, get_keyring=get_keyring): keypair = ed25519ll.Keypair(urlsafe_b64decode(binary(vk)), urlsafe_b64decode(binary(sk))) - record_name = wf.distinfo_name + '/RECORD' sig_name = wf.distinfo_name + '/RECORD.jws' if sig_name in wf.zipfile.namelist(): raise WheelError("Wheel is already signed.") record_data = wf.zipfile.read(record_name) - payload = {"hash":"sha256=" + native(urlsafe_b64encode(hashlib.sha256(record_data).digest()))} + payload = {"hash": "sha256=" + native(urlsafe_b64encode(hashlib.sha256(record_data).digest()))} sig = signatures.sign(payload, keypair) wf.zipfile.writestr(sig_name, json.dumps(sig, sort_keys=True)) wf.zipfile.close() + def unsign(wheelfile): """ Remove RECORD.jws from a wheel by truncating the zip file. @@ -97,14 +103,14 @@ def unsign(wheelfile): ordinary archive, with the compressed files and the directory in the same order, and without any non-zip content after the truncation point. 
""" - import wheel.install - vzf = wheel.install.VerifyingZipFile(wheelfile, "a") + vzf = VerifyingZipFile(wheelfile, "a") info = vzf.infolist() if not (len(info) and info[-1].filename.endswith('/RECORD.jws')): - raise WheelError("RECORD.jws not found at end of archive.") + raise WheelError('The wheel is not signed (RECORD.jws not found at end of the archive).') vzf.pop() vzf.close() + def verify(wheelfile): """Verify a wheel. @@ -114,12 +120,17 @@ def verify(wheelfile): """ wf = WheelFile(wheelfile) sig_name = wf.distinfo_name + '/RECORD.jws' - sig = json.loads(native(wf.zipfile.open(sig_name).read())) + try: + sig = json.loads(native(wf.zipfile.open(sig_name).read())) + except KeyError: + raise WheelError('The wheel is not signed (RECORD.jws not found at end of the archive).') + verified = signatures.verify(sig) sys.stderr.write("Signatures are internally consistent.\n") sys.stdout.write(json.dumps(verified, indent=2)) sys.stdout.write('\n') + def unpack(wheelfile, dest='.'): """Unpack a wheel. @@ -136,6 +147,7 @@ def unpack(wheelfile, dest='.'): wf.zipfile.extractall(destination) wf.zipfile.close() + def install(requirements, requirements_file=None, wheel_dirs=None, force=False, list_files=False, dry_run=False): @@ -156,7 +168,7 @@ def install(requirements, requirements_file=None, if wheelpath: wheel_dirs = wheelpath.split(os.pathsep) else: - wheel_dirs = [ os.path.curdir ] + wheel_dirs = [os.path.curdir] # Get a list of all valid wheels in wheel_dirs all_wheels = [] @@ -221,6 +233,7 @@ def install(requirements, requirements_file=None, wf.install(force=force) wf.zipfile.close() + def install_scripts(distributions): """ Regenerate the entry_points console_scripts for the named distribution. @@ -233,12 +246,13 @@ def install_scripts(distributions): for dist in distributions: pkg_resources_dist = pkg_resources.get_distribution(dist) - install = wheel.paths.get_install_command(dist) + install = get_install_command(dist) command = easy_install.easy_install(install.distribution) - command.args = ['wheel'] # dummy argument + command.args = ['wheel'] # dummy argument command.finalize_options() command.install_egg_scripts(pkg_resources_dist) + def convert(installers, dest_dir, verbose): require_pkgresources('wheel convert') @@ -259,6 +273,7 @@ def convert(installers, dest_dir, verbose): if verbose: sys.stdout.write("OK\n") + def parser(): p = argparse.ArgumentParser() s = p.add_subparsers(help="commands") @@ -328,7 +343,7 @@ def parser(): convert_parser = s.add_parser('convert', help='Convert egg or wininst to wheel') convert_parser.add_argument('installers', nargs='*', help='Installers to convert') convert_parser.add_argument('--dest-dir', '-d', default=os.path.curdir, - help="Directory to store wheels (default %(default)s)") + help="Directory to store wheels (default %(default)s)") convert_parser.add_argument('--verbose', '-v', action='store_true') convert_parser.set_defaults(func=convert_f) @@ -345,6 +360,7 @@ def parser(): return p + def main(): p = parser() args = p.parse_args() diff --git a/lib/python3.4/site-packages/wheel/util.py b/lib/python3.4/site-packages/wheel/util.py index 5268813..c58d108 100644 --- a/lib/python3.4/site-packages/wheel/util.py +++ b/lib/python3.4/site-packages/wheel/util.py @@ -1,18 +1,32 @@ """Utility functions.""" -import sys -import os import base64 -import json import hashlib -try: - from collections import OrderedDict -except ImportError: - OrderedDict = dict +import json +import os +import sys +from collections import OrderedDict __all__ = 
['urlsafe_b64encode', 'urlsafe_b64decode', 'utf8', 'to_json', 'from_json', 'matches_requirement'] + +# For encoding ascii back and forth between bytestrings, as is repeatedly +# necessary in JSON-based crypto under Python 3 +if sys.version_info[0] < 3: + text_type = unicode # noqa: F821 + + def native(s): + return s +else: + text_type = str + + def native(s): + if isinstance(s, bytes): + return s.decode('ascii') + return s + + def urlsafe_b64encode(data): """urlsafe_b64encode without padding""" return base64.urlsafe_b64encode(data).rstrip(binary('=')) @@ -25,76 +39,67 @@ def urlsafe_b64decode(data): def to_json(o): - '''Convert given data to JSON.''' + """Convert given data to JSON.""" return json.dumps(o, sort_keys=True) def from_json(j): - '''Decode a JSON payload.''' + """Decode a JSON payload.""" return json.loads(j) + def open_for_csv(name, mode): if sys.version_info[0] < 3: nl = {} bin = 'b' else: - nl = { 'newline': '' } + nl = {'newline': ''} bin = '' + return open(name, mode + bin, **nl) -try: - unicode - def utf8(data): - '''Utf-8 encode data.''' - if isinstance(data, unicode): - return data.encode('utf-8') - return data -except NameError: - def utf8(data): - '''Utf-8 encode data.''' - if isinstance(data, str): - return data.encode('utf-8') - return data +def utf8(data): + """Utf-8 encode data.""" + if isinstance(data, text_type): + return data.encode('utf-8') + return data -try: - # For encoding ascii back and forth between bytestrings, as is repeatedly - # necessary in JSON-based crypto under Python 3 - unicode - def native(s): - return s - def binary(s): - if isinstance(s, unicode): - return s.encode('ascii') - return s -except NameError: - def native(s): - if isinstance(s, bytes): - return s.decode('ascii') - return s - def binary(s): - if isinstance(s, str): - return s.encode('ascii') +def binary(s): + if isinstance(s, text_type): + return s.encode('ascii') + return s + class HashingFile(object): - def __init__(self, fd, hashtype='sha256'): - self.fd = fd + def __init__(self, path, mode, hashtype='sha256'): + self.fd = open(path, mode) self.hashtype = hashtype self.hash = hashlib.new(hashtype) self.length = 0 + def write(self, data): self.hash.update(data) self.length += len(data) self.fd.write(data) + def close(self): self.fd.close() + def digest(self): if self.hashtype == 'md5': return self.hash.hexdigest() digest = self.hash.digest() return self.hashtype + '=' + native(urlsafe_b64encode(digest)) + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + self.fd.close() + + class OrderedDefaultDict(OrderedDict): def __init__(self, *args, **kwargs): if not args: @@ -106,18 +111,19 @@ class OrderedDefaultDict(OrderedDict): args = args[1:] super(OrderedDefaultDict, self).__init__(*args, **kwargs) - def __missing__ (self, key): + def __missing__(self, key): if self.default_factory is None: raise KeyError(key) self[key] = default = self.default_factory() return default + if sys.platform == 'win32': import ctypes.wintypes # CSIDL_APPDATA for reference - not used here for compatibility with # dirspec, which uses LOCAL_APPDATA and COMMON_APPDATA in that order - csidl = dict(CSIDL_APPDATA=26, CSIDL_LOCAL_APPDATA=28, - CSIDL_COMMON_APPDATA=35) + csidl = dict(CSIDL_APPDATA=26, CSIDL_LOCAL_APPDATA=28, CSIDL_COMMON_APPDATA=35) + def get_path(name): SHGFP_TYPE_CURRENT = 0 buf = ctypes.create_unicode_buffer(ctypes.wintypes.MAX_PATH) @@ -130,6 +136,7 @@ if sys.platform == 'win32': if not os.path.isdir(path): os.makedirs(path) return path + def 
load_config_paths(*resource): ids = ["CSIDL_LOCAL_APPDATA", "CSIDL_COMMON_APPDATA"] for id in ids: @@ -141,10 +148,12 @@ else: def save_config_path(*resource): import xdg.BaseDirectory return xdg.BaseDirectory.save_config_path(*resource) + def load_config_paths(*resource): import xdg.BaseDirectory return xdg.BaseDirectory.load_config_paths(*resource) + def matches_requirement(req, wheels): """List of wheels matching a requirement. diff --git a/lib/python3.4/site-packages/wheel/wininst2wheel.py b/lib/python3.4/site-packages/wheel/wininst2wheel.py index 297f8d1..b8a3469 100644 --- a/lib/python3.4/site-packages/wheel/wininst2wheel.py +++ b/lib/python3.4/site-packages/wheel/wininst2wheel.py @@ -1,23 +1,24 @@ #!/usr/bin/env python +import distutils.dist import os.path import re import sys import tempfile import zipfile -import wheel.bdist_wheel -import distutils.dist -from distutils.archive_util import make_archive -from shutil import rmtree -from wheel.archive import archive_wheelfile from argparse import ArgumentParser from glob import iglob +from shutil import rmtree + +import wheel.bdist_wheel +from wheel.archive import archive_wheelfile egg_info_re = re.compile(r'''(^|/)(?P[^/]+?)-(?P.+?) (-(?P.+?))?(-(?P.+?))?.egg-info(/|$)''', re.VERBOSE) + def parse_info(wininfo_name, egginfo_name): """Extract metadata from filenames. - + Extracts the 4 metadataitems needed (name, version, pyversion, arch) from the installer filename and the name of the egg-info directory embedded in the zipfile (if any). @@ -52,15 +53,14 @@ def parse_info(wininfo_name, egginfo_name): if egginfo_name: egginfo = egg_info_re.search(egginfo_name) if not egginfo: - raise ValueError("Egg info filename %s is not valid" % - (egginfo_name,)) + raise ValueError("Egg info filename %s is not valid" % (egginfo_name,)) # Parse the wininst filename # 1. Distribution name (up to the first '-') w_name, sep, rest = wininfo_name.partition('-') if not sep: - raise ValueError("Installer filename %s is not valid" % - (wininfo_name,)) + raise ValueError("Installer filename %s is not valid" % (wininfo_name,)) + # Strip '.exe' rest = rest[:-4] # 2. Python version (from the last '-', must start with 'py') @@ -78,8 +78,7 @@ def parse_info(wininfo_name, egginfo_name): # 3. 
Version and architecture w_ver, sep, w_arch = rest.rpartition('.') if not sep: - raise ValueError("Installer filename %s is not valid" % - (wininfo_name,)) + raise ValueError("Installer filename %s is not valid" % (wininfo_name,)) if egginfo: w_name = egginfo.group('name') @@ -87,6 +86,7 @@ def parse_info(wininfo_name, egginfo_name): return dict(name=w_name, ver=w_ver, arch=w_arch, pyver=w_pyver) + def bdist_wininst2wheel(path, dest_dir=os.path.curdir): bdw = zipfile.ZipFile(path) @@ -158,21 +158,50 @@ def bdist_wininst2wheel(path, dest_dir=os.path.curdir): abi, arch )) - bw = wheel.bdist_wheel.bdist_wheel(distutils.dist.Distribution()) - bw.root_is_purelib = root_is_purelib + if root_is_purelib: + bw = wheel.bdist_wheel.bdist_wheel(distutils.dist.Distribution()) + else: + bw = _bdist_wheel_tag(distutils.dist.Distribution()) + + bw.root_is_pure = root_is_purelib + bw.python_tag = pyver + bw.plat_name_supplied = True + bw.plat_name = info['arch'] or 'any' + + if not root_is_purelib: + bw.full_tag_supplied = True + bw.full_tag = (pyver, abi, arch) + dist_info_dir = os.path.join(dir, '%s.dist-info' % dist_info) bw.egg2dist(os.path.join(dir, egginfo_name), dist_info_dir) bw.write_wheelfile(dist_info_dir, generator='wininst2wheel') bw.write_record(dir, dist_info_dir) - + archive_wheelfile(os.path.join(dest_dir, wheel_name), dir) rmtree(dir) + +class _bdist_wheel_tag(wheel.bdist_wheel.bdist_wheel): + # allow the client to override the default generated wheel tag + # The default bdist_wheel implementation uses python and abi tags + # of the running python process. This is not suitable for + # generating/repackaging prebuild binaries. + + full_tag_supplied = False + full_tag = None # None or a (pytag, soabitag, plattag) triple + + def get_tag(self): + if self.full_tag_supplied and self.full_tag is not None: + return self.full_tag + else: + return super(_bdist_wheel_tag, self).get_tag() + + def main(): parser = ArgumentParser() parser.add_argument('installers', nargs='*', help="Installers to convert") parser.add_argument('--dest-dir', '-d', default=os.path.curdir, - help="Directory to store wheels (default %(default)s)") + help="Directory to store wheels (default %(default)s)") parser.add_argument('--verbose', '-v', action='store_true') args = parser.parse_args() for pat in args.installers: @@ -183,5 +212,6 @@ def main(): if args.verbose: sys.stdout.write("OK\n") + if __name__ == "__main__": main() diff --git a/update.sh b/update.sh index f9bd843..7602de6 100755 --- a/update.sh +++ b/update.sh @@ -15,7 +15,7 @@ mkdir old test -e lib && mv lib old/ test -e bin && mv bin old/ -rm -rf p34 p35 +rm -rf p34 p35 p36 virtualenv -p /usr/bin/python3.4 p34 p34/bin/pip3 install -r requirements.txt @@ -34,4 +34,15 @@ do cp p35/lib/python3.5/$f lib/python3.4/$f done -rm -r p34 p35 old requirements.txt +virtualenv -p /usr/bin/python3.6 p36 +p36/bin/pip3 install -r requirements.txt +for f in site-packages/ed25519/_ed25519.cpython-36m-x86_64-linux-gnu.so \ + site-packages/sqlalchemy/cprocessors.cpython-36m-x86_64-linux-gnu.so \ + site-packages/sqlalchemy/cresultproxy.cpython-36m-x86_64-linux-gnu.so \ + site-packages/sqlalchemy/cutils.cpython-36m-x86_64-linux-gnu.so \ + site-packages/netifaces.cpython-36m-x86_64-linux-gnu.so +do + cp p36/lib/python3.6/$f lib/python3.4/$f +done + +rm -r p34 p35 p36 old requirements.txt
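
After update.sh copies the Python 3.6 extension modules into lib/python3.4/site-packages, a quick import smoke test confirms the copied .so files actually load. This is a suggested check, not part of the patch; the module names are taken from the copy loop above, and check_imports is a hypothetical helper used only for illustration.

    import importlib

    # Compiled modules copied by the update.sh loop above; ed25519._ed25519
    # corresponds to site-packages/ed25519/_ed25519.cpython-36m-x86_64-linux-gnu.so.
    MODULES = ("ed25519._ed25519", "sqlalchemy.cprocessors",
               "sqlalchemy.cresultproxy", "sqlalchemy.cutils", "netifaces")

    def check_imports(modules=MODULES):
        """Import each compiled module, returning {name: error} for any failures."""
        failures = {}
        for name in modules:
            try:
                importlib.import_module(name)
            except Exception as exc:  # any failure to load a copied .so is worth reporting
                failures[name] = repr(exc)
        return failures

    if __name__ == "__main__":
        bad = check_imports()
        for name, err in sorted(bad.items()):
            print("FAIL {0}: {1}".format(name, err))
        raise SystemExit(1 if bad else 0)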
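
For reference, the signing helpers reformatted earlier in this patch can be exercised with a short round trip. This is a minimal sketch that mirrors the test() helper in wheel/signatures/__init__.py and assumes the vendored ed25519py fallback is used in place of a system ed25519ll; it is illustration only, not part of the patch.

    from wheel.signatures import sign, verify
    from wheel.signatures.ed25519py import crypto_sign_keypair

    # Generate a (vk, sk) Keypair with the pure-Python fallback and sign a
    # small payload, as wheel/signatures/__init__.py's test() helper does.
    keypair = crypto_sign_keypair()
    payload = {'test': 'onstartup'}
    jws = sign(payload, keypair)

    # verify() returns the recovered headers and payload, and raises
    # ValueError if the signature or key material does not check out.
    headers, payload_back = verify(jws)
    assert payload_back == payload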