diff --git a/lib/python3.4/site-packages/SQLAlchemy-0.9.7.egg-info/PKG-INFO b/lib/python3.4/site-packages/SQLAlchemy-0.9.7.egg-info/PKG-INFO
deleted file mode 100644
index 0a46d7a..0000000
--- a/lib/python3.4/site-packages/SQLAlchemy-0.9.7.egg-info/PKG-INFO
+++ /dev/null
@@ -1,155 +0,0 @@
-Metadata-Version: 1.1
-Name: SQLAlchemy
-Version: 0.9.7
-Summary: Database Abstraction Library
-Home-page: http://www.sqlalchemy.org
-Author: Mike Bayer
-Author-email: mike_mp@zzzcomputing.com
-License: MIT License
-Description: SQLAlchemy
- ==========
-
- The Python SQL Toolkit and Object Relational Mapper
-
- Introduction
- -------------
-
- SQLAlchemy is the Python SQL toolkit and Object Relational Mapper
- that gives application developers the full power and
- flexibility of SQL. SQLAlchemy provides a full suite
- of well known enterprise-level persistence patterns,
- designed for efficient and high-performing database
- access, adapted into a simple and Pythonic domain
- language.
-
- Major SQLAlchemy features include:
-
- * An industrial strength ORM, built
- from the core on the identity map, unit of work,
- and data mapper patterns. These patterns
- allow transparent persistence of objects
- using a declarative configuration system.
- Domain models
- can be constructed and manipulated naturally,
- and changes are synchronized with the
- current transaction automatically.
- * A relationally-oriented query system, exposing
- the full range of SQL's capabilities
- explicitly, including joins, subqueries,
- correlation, and most everything else,
- in terms of the object model.
- Writing queries with the ORM uses the same
- techniques of relational composition you use
- when writing SQL. While you can drop into
- literal SQL at any time, it's virtually never
- needed.
- * A comprehensive and flexible system
- of eager loading for related collections and objects.
- Collections are cached within a session,
- and can be loaded on individual access, all
- at once using joins, or by query per collection
- across the full result set.
- * A Core SQL construction system and DBAPI
- interaction layer. The SQLAlchemy Core is
- separate from the ORM and is a full database
- abstraction layer in its own right, and includes
- an extensible Python-based SQL expression
- language, schema metadata, connection pooling,
- type coercion, and custom types.
- * All primary and foreign key constraints are
- assumed to be composite and natural. Surrogate
- integer primary keys are of course still the
- norm, but SQLAlchemy never assumes or hardcodes
- to this model.
- * Database introspection and generation. Database
- schemas can be "reflected" in one step into
- Python structures representing database metadata;
- those same structures can then generate
- CREATE statements right back out - all within
- the Core, independent of the ORM.
-
- SQLAlchemy's philosophy:
-
- * SQL databases behave less and less like object
- collections the more size and performance start to
- matter; object collections behave less and less like
- tables and rows the more abstraction starts to matter.
- SQLAlchemy aims to accommodate both of these
- principles.
- * An ORM doesn't need to hide the "R". A relational
- database provides rich, set-based functionality
- that should be fully exposed. SQLAlchemy's
- ORM provides an open-ended set of patterns
- that allow a developer to construct a custom
- mediation layer between a domain model and
- a relational schema, turning the so-called
- "object relational impedance" issue into
- a distant memory.
- * The developer, in all cases, makes all decisions
- regarding the design, structure, and naming conventions
- of both the object model as well as the relational
- schema. SQLAlchemy only provides the means
- to automate the execution of these decisions.
- * With SQLAlchemy, there's no such thing as
- "the ORM generated a bad query" - you
- retain full control over the structure of
- queries, including how joins are organized,
- how subqueries and correlation is used, what
- columns are requested. Everything SQLAlchemy
- does is ultimately the result of a developer-
- initiated decision.
- * Don't use an ORM if the problem doesn't need one.
- SQLAlchemy consists of a Core and separate ORM
- component. The Core offers a full SQL expression
- language that allows Pythonic construction
- of SQL constructs that render directly to SQL
- strings for a target database, returning
- result sets that are essentially enhanced DBAPI
- cursors.
- * Transactions should be the norm. With SQLAlchemy's
- ORM, nothing goes to permanent storage until
- commit() is called. SQLAlchemy encourages applications
- to create a consistent means of delineating
- the start and end of a series of operations.
- * Never render a literal value in a SQL statement.
- Bound parameters are used to the greatest degree
- possible, allowing query optimizers to cache
- query plans effectively and making SQL injection
- attacks a non-issue.
-
- Documentation
- -------------
-
- Latest documentation is at:
-
- http://www.sqlalchemy.org/docs/
-
- Installation / Requirements
- ---------------------------
-
- Full documentation for installation is at
- `Installation `_.
-
- Getting Help / Development / Bug reporting
- ------------------------------------------
-
- Please refer to the `SQLAlchemy Community Guide `_.
-
- License
- -------
-
- SQLAlchemy is distributed under the `MIT license
- `_.
-
-
-Platform: UNKNOWN
-Classifier: Development Status :: 5 - Production/Stable
-Classifier: Intended Audience :: Developers
-Classifier: License :: OSI Approved :: MIT License
-Classifier: Programming Language :: Python
-Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: Implementation :: CPython
-Classifier: Programming Language :: Python :: Implementation :: Jython
-Classifier: Programming Language :: Python :: Implementation :: PyPy
-Classifier: Topic :: Database :: Front-Ends
-Classifier: Operating System :: OS Independent
diff --git a/lib/python3.4/site-packages/SQLAlchemy-0.9.7.egg-info/SOURCES.txt b/lib/python3.4/site-packages/SQLAlchemy-0.9.7.egg-info/SOURCES.txt
deleted file mode 100644
index 50d65c8..0000000
--- a/lib/python3.4/site-packages/SQLAlchemy-0.9.7.egg-info/SOURCES.txt
+++ /dev/null
@@ -1,682 +0,0 @@
-AUTHORS
-CHANGES
-LICENSE
-MANIFEST.in
-README.dialects.rst
-README.rst
-README.unittests.rst
-setup.cfg
-setup.py
-sqla_nose.py
-doc/contents.html
-doc/copyright.html
-doc/faq.html
-doc/genindex.html
-doc/glossary.html
-doc/index.html
-doc/intro.html
-doc/search.html
-doc/searchindex.js
-doc/_images/sqla_arch_small.png
-doc/_images/sqla_engine_arch.png
-doc/_modules/index.html
-doc/_modules/examples/adjacency_list/adjacency_list.html
-doc/_modules/examples/association/basic_association.html
-doc/_modules/examples/association/dict_of_sets_with_default.html
-doc/_modules/examples/association/proxied_association.html
-doc/_modules/examples/custom_attributes/custom_management.html
-doc/_modules/examples/custom_attributes/listen_for_events.html
-doc/_modules/examples/dogpile_caching/advanced.html
-doc/_modules/examples/dogpile_caching/caching_query.html
-doc/_modules/examples/dogpile_caching/environment.html
-doc/_modules/examples/dogpile_caching/fixture_data.html
-doc/_modules/examples/dogpile_caching/helloworld.html
-doc/_modules/examples/dogpile_caching/local_session_caching.html
-doc/_modules/examples/dogpile_caching/model.html
-doc/_modules/examples/dogpile_caching/relationship_caching.html
-doc/_modules/examples/dynamic_dict/dynamic_dict.html
-doc/_modules/examples/elementtree/adjacency_list.html
-doc/_modules/examples/elementtree/optimized_al.html
-doc/_modules/examples/elementtree/pickle.html
-doc/_modules/examples/generic_associations/discriminator_on_association.html
-doc/_modules/examples/generic_associations/generic_fk.html
-doc/_modules/examples/generic_associations/table_per_association.html
-doc/_modules/examples/generic_associations/table_per_related.html
-doc/_modules/examples/graphs/directed_graph.html
-doc/_modules/examples/inheritance/concrete.html
-doc/_modules/examples/inheritance/joined.html
-doc/_modules/examples/inheritance/single.html
-doc/_modules/examples/join_conditions/cast.html
-doc/_modules/examples/join_conditions/threeway.html
-doc/_modules/examples/large_collection/large_collection.html
-doc/_modules/examples/materialized_paths/materialized_paths.html
-doc/_modules/examples/nested_sets/nested_sets.html
-doc/_modules/examples/postgis/postgis.html
-doc/_modules/examples/sharding/attribute_shard.html
-doc/_modules/examples/versioned_history/history_meta.html
-doc/_modules/examples/versioned_history/test_versioning.html
-doc/_modules/examples/versioned_rows/versioned_map.html
-doc/_modules/examples/versioned_rows/versioned_rows.html
-doc/_modules/examples/vertical/dictlike-polymorphic.html
-doc/_modules/examples/vertical/dictlike.html
-doc/_static/basic.css
-doc/_static/changelog.css
-doc/_static/comment-bright.png
-doc/_static/comment-close.png
-doc/_static/comment.png
-doc/_static/default.css
-doc/_static/detectmobile.js
-doc/_static/docs.css
-doc/_static/doctools.js
-doc/_static/down-pressed.png
-doc/_static/down.png
-doc/_static/file.png
-doc/_static/init.js
-doc/_static/jquery.js
-doc/_static/minus.png
-doc/_static/plus.png
-doc/_static/pygments.css
-doc/_static/searchtools.js
-doc/_static/sidebar.js
-doc/_static/sphinx_paramlinks.css
-doc/_static/underscore.js
-doc/_static/up-pressed.png
-doc/_static/up.png
-doc/_static/websupport.js
-doc/build/Makefile
-doc/build/conf.py
-doc/build/contents.rst
-doc/build/copyright.rst
-doc/build/faq.rst
-doc/build/glossary.rst
-doc/build/index.rst
-doc/build/intro.rst
-doc/build/requirements.txt
-doc/build/sqla_arch_small.png
-doc/build/testdocs.py
-doc/build/builder/__init__.py
-doc/build/builder/autodoc_mods.py
-doc/build/builder/dialect_info.py
-doc/build/builder/mako.py
-doc/build/builder/sqlformatter.py
-doc/build/builder/util.py
-doc/build/builder/viewsource.py
-doc/build/changelog/changelog_01.rst
-doc/build/changelog/changelog_02.rst
-doc/build/changelog/changelog_03.rst
-doc/build/changelog/changelog_04.rst
-doc/build/changelog/changelog_05.rst
-doc/build/changelog/changelog_06.rst
-doc/build/changelog/changelog_07.rst
-doc/build/changelog/changelog_08.rst
-doc/build/changelog/changelog_09.rst
-doc/build/changelog/index.rst
-doc/build/changelog/migration_04.rst
-doc/build/changelog/migration_05.rst
-doc/build/changelog/migration_06.rst
-doc/build/changelog/migration_07.rst
-doc/build/changelog/migration_08.rst
-doc/build/changelog/migration_09.rst
-doc/build/core/compiler.rst
-doc/build/core/connections.rst
-doc/build/core/constraints.rst
-doc/build/core/ddl.rst
-doc/build/core/defaults.rst
-doc/build/core/dml.rst
-doc/build/core/engines.rst
-doc/build/core/event.rst
-doc/build/core/events.rst
-doc/build/core/exceptions.rst
-doc/build/core/expression_api.rst
-doc/build/core/functions.rst
-doc/build/core/index.rst
-doc/build/core/inspection.rst
-doc/build/core/interfaces.rst
-doc/build/core/internals.rst
-doc/build/core/metadata.rst
-doc/build/core/pooling.rst
-doc/build/core/reflection.rst
-doc/build/core/schema.rst
-doc/build/core/selectable.rst
-doc/build/core/serializer.rst
-doc/build/core/sqla_engine_arch.png
-doc/build/core/sqlelement.rst
-doc/build/core/tutorial.rst
-doc/build/core/types.rst
-doc/build/dialects/drizzle.rst
-doc/build/dialects/firebird.rst
-doc/build/dialects/index.rst
-doc/build/dialects/mssql.rst
-doc/build/dialects/mysql.rst
-doc/build/dialects/oracle.rst
-doc/build/dialects/postgresql.rst
-doc/build/dialects/sqlite.rst
-doc/build/dialects/sybase.rst
-doc/build/orm/collections.rst
-doc/build/orm/deprecated.rst
-doc/build/orm/events.rst
-doc/build/orm/examples.rst
-doc/build/orm/exceptions.rst
-doc/build/orm/index.rst
-doc/build/orm/inheritance.rst
-doc/build/orm/internals.rst
-doc/build/orm/loading.rst
-doc/build/orm/mapper_config.rst
-doc/build/orm/query.rst
-doc/build/orm/relationships.rst
-doc/build/orm/session.rst
-doc/build/orm/tutorial.rst
-doc/build/orm/extensions/associationproxy.rst
-doc/build/orm/extensions/automap.rst
-doc/build/orm/extensions/declarative.rst
-doc/build/orm/extensions/horizontal_shard.rst
-doc/build/orm/extensions/hybrid.rst
-doc/build/orm/extensions/index.rst
-doc/build/orm/extensions/instrumentation.rst
-doc/build/orm/extensions/mutable.rst
-doc/build/orm/extensions/orderinglist.rst
-doc/build/static/detectmobile.js
-doc/build/static/docs.css
-doc/build/static/init.js
-doc/build/templates/genindex.mako
-doc/build/templates/layout.mako
-doc/build/templates/page.mako
-doc/build/templates/search.mako
-doc/build/templates/static_base.mako
-doc/build/texinputs/Makefile
-doc/build/texinputs/sphinx.sty
-doc/changelog/changelog_01.html
-doc/changelog/changelog_02.html
-doc/changelog/changelog_03.html
-doc/changelog/changelog_04.html
-doc/changelog/changelog_05.html
-doc/changelog/changelog_06.html
-doc/changelog/changelog_07.html
-doc/changelog/changelog_08.html
-doc/changelog/changelog_09.html
-doc/changelog/index.html
-doc/changelog/migration_04.html
-doc/changelog/migration_05.html
-doc/changelog/migration_06.html
-doc/changelog/migration_07.html
-doc/changelog/migration_08.html
-doc/changelog/migration_09.html
-doc/core/compiler.html
-doc/core/connections.html
-doc/core/constraints.html
-doc/core/ddl.html
-doc/core/defaults.html
-doc/core/dml.html
-doc/core/engines.html
-doc/core/event.html
-doc/core/events.html
-doc/core/exceptions.html
-doc/core/expression_api.html
-doc/core/functions.html
-doc/core/index.html
-doc/core/inspection.html
-doc/core/interfaces.html
-doc/core/internals.html
-doc/core/metadata.html
-doc/core/pooling.html
-doc/core/reflection.html
-doc/core/schema.html
-doc/core/selectable.html
-doc/core/serializer.html
-doc/core/sqlelement.html
-doc/core/tutorial.html
-doc/core/types.html
-doc/dialects/drizzle.html
-doc/dialects/firebird.html
-doc/dialects/index.html
-doc/dialects/mssql.html
-doc/dialects/mysql.html
-doc/dialects/oracle.html
-doc/dialects/postgresql.html
-doc/dialects/sqlite.html
-doc/dialects/sybase.html
-doc/orm/collections.html
-doc/orm/deprecated.html
-doc/orm/events.html
-doc/orm/examples.html
-doc/orm/exceptions.html
-doc/orm/index.html
-doc/orm/inheritance.html
-doc/orm/internals.html
-doc/orm/loading.html
-doc/orm/mapper_config.html
-doc/orm/query.html
-doc/orm/relationships.html
-doc/orm/session.html
-doc/orm/tutorial.html
-doc/orm/extensions/associationproxy.html
-doc/orm/extensions/automap.html
-doc/orm/extensions/declarative.html
-doc/orm/extensions/horizontal_shard.html
-doc/orm/extensions/hybrid.html
-doc/orm/extensions/index.html
-doc/orm/extensions/instrumentation.html
-doc/orm/extensions/mutable.html
-doc/orm/extensions/orderinglist.html
-examples/__init__.py
-examples/adjacency_list/__init__.py
-examples/adjacency_list/adjacency_list.py
-examples/association/__init__.py
-examples/association/basic_association.py
-examples/association/dict_of_sets_with_default.py
-examples/association/proxied_association.py
-examples/custom_attributes/__init__.py
-examples/custom_attributes/custom_management.py
-examples/custom_attributes/listen_for_events.py
-examples/dogpile_caching/__init__.py
-examples/dogpile_caching/advanced.py
-examples/dogpile_caching/caching_query.py
-examples/dogpile_caching/environment.py
-examples/dogpile_caching/fixture_data.py
-examples/dogpile_caching/helloworld.py
-examples/dogpile_caching/local_session_caching.py
-examples/dogpile_caching/model.py
-examples/dogpile_caching/relationship_caching.py
-examples/dynamic_dict/__init__.py
-examples/dynamic_dict/dynamic_dict.py
-examples/elementtree/__init__.py
-examples/elementtree/adjacency_list.py
-examples/elementtree/optimized_al.py
-examples/elementtree/pickle.py
-examples/elementtree/test.xml
-examples/elementtree/test2.xml
-examples/elementtree/test3.xml
-examples/generic_associations/__init__.py
-examples/generic_associations/discriminator_on_association.py
-examples/generic_associations/generic_fk.py
-examples/generic_associations/table_per_association.py
-examples/generic_associations/table_per_related.py
-examples/graphs/__init__.py
-examples/graphs/directed_graph.py
-examples/inheritance/__init__.py
-examples/inheritance/concrete.py
-examples/inheritance/joined.py
-examples/inheritance/single.py
-examples/join_conditions/__init__.py
-examples/join_conditions/cast.py
-examples/join_conditions/threeway.py
-examples/large_collection/__init__.py
-examples/large_collection/large_collection.py
-examples/materialized_paths/__init__.py
-examples/materialized_paths/materialized_paths.py
-examples/nested_sets/__init__.py
-examples/nested_sets/nested_sets.py
-examples/postgis/__init__.py
-examples/postgis/postgis.py
-examples/sharding/__init__.py
-examples/sharding/attribute_shard.py
-examples/versioned_history/__init__.py
-examples/versioned_history/history_meta.py
-examples/versioned_history/test_versioning.py
-examples/versioned_rows/__init__.py
-examples/versioned_rows/versioned_map.py
-examples/versioned_rows/versioned_rows.py
-examples/vertical/__init__.py
-examples/vertical/dictlike-polymorphic.py
-examples/vertical/dictlike.py
-lib/SQLAlchemy.egg-info/PKG-INFO
-lib/SQLAlchemy.egg-info/SOURCES.txt
-lib/SQLAlchemy.egg-info/dependency_links.txt
-lib/SQLAlchemy.egg-info/top_level.txt
-lib/sqlalchemy/__init__.py
-lib/sqlalchemy/events.py
-lib/sqlalchemy/exc.py
-lib/sqlalchemy/inspection.py
-lib/sqlalchemy/interfaces.py
-lib/sqlalchemy/log.py
-lib/sqlalchemy/pool.py
-lib/sqlalchemy/processors.py
-lib/sqlalchemy/schema.py
-lib/sqlalchemy/types.py
-lib/sqlalchemy/cextension/processors.c
-lib/sqlalchemy/cextension/resultproxy.c
-lib/sqlalchemy/cextension/utils.c
-lib/sqlalchemy/connectors/__init__.py
-lib/sqlalchemy/connectors/mxodbc.py
-lib/sqlalchemy/connectors/mysqldb.py
-lib/sqlalchemy/connectors/pyodbc.py
-lib/sqlalchemy/connectors/zxJDBC.py
-lib/sqlalchemy/databases/__init__.py
-lib/sqlalchemy/dialects/__init__.py
-lib/sqlalchemy/dialects/postgres.py
-lib/sqlalchemy/dialects/type_migration_guidelines.txt
-lib/sqlalchemy/dialects/drizzle/__init__.py
-lib/sqlalchemy/dialects/drizzle/base.py
-lib/sqlalchemy/dialects/drizzle/mysqldb.py
-lib/sqlalchemy/dialects/firebird/__init__.py
-lib/sqlalchemy/dialects/firebird/base.py
-lib/sqlalchemy/dialects/firebird/fdb.py
-lib/sqlalchemy/dialects/firebird/kinterbasdb.py
-lib/sqlalchemy/dialects/mssql/__init__.py
-lib/sqlalchemy/dialects/mssql/adodbapi.py
-lib/sqlalchemy/dialects/mssql/base.py
-lib/sqlalchemy/dialects/mssql/information_schema.py
-lib/sqlalchemy/dialects/mssql/mxodbc.py
-lib/sqlalchemy/dialects/mssql/pymssql.py
-lib/sqlalchemy/dialects/mssql/pyodbc.py
-lib/sqlalchemy/dialects/mssql/zxjdbc.py
-lib/sqlalchemy/dialects/mysql/__init__.py
-lib/sqlalchemy/dialects/mysql/base.py
-lib/sqlalchemy/dialects/mysql/cymysql.py
-lib/sqlalchemy/dialects/mysql/gaerdbms.py
-lib/sqlalchemy/dialects/mysql/mysqlconnector.py
-lib/sqlalchemy/dialects/mysql/mysqldb.py
-lib/sqlalchemy/dialects/mysql/oursql.py
-lib/sqlalchemy/dialects/mysql/pymysql.py
-lib/sqlalchemy/dialects/mysql/pyodbc.py
-lib/sqlalchemy/dialects/mysql/zxjdbc.py
-lib/sqlalchemy/dialects/oracle/__init__.py
-lib/sqlalchemy/dialects/oracle/base.py
-lib/sqlalchemy/dialects/oracle/cx_oracle.py
-lib/sqlalchemy/dialects/oracle/zxjdbc.py
-lib/sqlalchemy/dialects/postgresql/__init__.py
-lib/sqlalchemy/dialects/postgresql/base.py
-lib/sqlalchemy/dialects/postgresql/constraints.py
-lib/sqlalchemy/dialects/postgresql/hstore.py
-lib/sqlalchemy/dialects/postgresql/json.py
-lib/sqlalchemy/dialects/postgresql/pg8000.py
-lib/sqlalchemy/dialects/postgresql/psycopg2.py
-lib/sqlalchemy/dialects/postgresql/pypostgresql.py
-lib/sqlalchemy/dialects/postgresql/ranges.py
-lib/sqlalchemy/dialects/postgresql/zxjdbc.py
-lib/sqlalchemy/dialects/sqlite/__init__.py
-lib/sqlalchemy/dialects/sqlite/base.py
-lib/sqlalchemy/dialects/sqlite/pysqlite.py
-lib/sqlalchemy/dialects/sybase/__init__.py
-lib/sqlalchemy/dialects/sybase/base.py
-lib/sqlalchemy/dialects/sybase/mxodbc.py
-lib/sqlalchemy/dialects/sybase/pyodbc.py
-lib/sqlalchemy/dialects/sybase/pysybase.py
-lib/sqlalchemy/engine/__init__.py
-lib/sqlalchemy/engine/base.py
-lib/sqlalchemy/engine/default.py
-lib/sqlalchemy/engine/interfaces.py
-lib/sqlalchemy/engine/reflection.py
-lib/sqlalchemy/engine/result.py
-lib/sqlalchemy/engine/strategies.py
-lib/sqlalchemy/engine/threadlocal.py
-lib/sqlalchemy/engine/url.py
-lib/sqlalchemy/engine/util.py
-lib/sqlalchemy/event/__init__.py
-lib/sqlalchemy/event/api.py
-lib/sqlalchemy/event/attr.py
-lib/sqlalchemy/event/base.py
-lib/sqlalchemy/event/legacy.py
-lib/sqlalchemy/event/registry.py
-lib/sqlalchemy/ext/__init__.py
-lib/sqlalchemy/ext/associationproxy.py
-lib/sqlalchemy/ext/automap.py
-lib/sqlalchemy/ext/compiler.py
-lib/sqlalchemy/ext/horizontal_shard.py
-lib/sqlalchemy/ext/hybrid.py
-lib/sqlalchemy/ext/instrumentation.py
-lib/sqlalchemy/ext/mutable.py
-lib/sqlalchemy/ext/orderinglist.py
-lib/sqlalchemy/ext/serializer.py
-lib/sqlalchemy/ext/declarative/__init__.py
-lib/sqlalchemy/ext/declarative/api.py
-lib/sqlalchemy/ext/declarative/base.py
-lib/sqlalchemy/ext/declarative/clsregistry.py
-lib/sqlalchemy/orm/__init__.py
-lib/sqlalchemy/orm/attributes.py
-lib/sqlalchemy/orm/base.py
-lib/sqlalchemy/orm/collections.py
-lib/sqlalchemy/orm/dependency.py
-lib/sqlalchemy/orm/deprecated_interfaces.py
-lib/sqlalchemy/orm/descriptor_props.py
-lib/sqlalchemy/orm/dynamic.py
-lib/sqlalchemy/orm/evaluator.py
-lib/sqlalchemy/orm/events.py
-lib/sqlalchemy/orm/exc.py
-lib/sqlalchemy/orm/identity.py
-lib/sqlalchemy/orm/instrumentation.py
-lib/sqlalchemy/orm/interfaces.py
-lib/sqlalchemy/orm/loading.py
-lib/sqlalchemy/orm/mapper.py
-lib/sqlalchemy/orm/path_registry.py
-lib/sqlalchemy/orm/persistence.py
-lib/sqlalchemy/orm/properties.py
-lib/sqlalchemy/orm/query.py
-lib/sqlalchemy/orm/relationships.py
-lib/sqlalchemy/orm/scoping.py
-lib/sqlalchemy/orm/session.py
-lib/sqlalchemy/orm/state.py
-lib/sqlalchemy/orm/strategies.py
-lib/sqlalchemy/orm/strategy_options.py
-lib/sqlalchemy/orm/sync.py
-lib/sqlalchemy/orm/unitofwork.py
-lib/sqlalchemy/orm/util.py
-lib/sqlalchemy/sql/__init__.py
-lib/sqlalchemy/sql/annotation.py
-lib/sqlalchemy/sql/base.py
-lib/sqlalchemy/sql/compiler.py
-lib/sqlalchemy/sql/ddl.py
-lib/sqlalchemy/sql/default_comparator.py
-lib/sqlalchemy/sql/dml.py
-lib/sqlalchemy/sql/elements.py
-lib/sqlalchemy/sql/expression.py
-lib/sqlalchemy/sql/functions.py
-lib/sqlalchemy/sql/naming.py
-lib/sqlalchemy/sql/operators.py
-lib/sqlalchemy/sql/schema.py
-lib/sqlalchemy/sql/selectable.py
-lib/sqlalchemy/sql/sqltypes.py
-lib/sqlalchemy/sql/type_api.py
-lib/sqlalchemy/sql/util.py
-lib/sqlalchemy/sql/visitors.py
-lib/sqlalchemy/testing/__init__.py
-lib/sqlalchemy/testing/assertions.py
-lib/sqlalchemy/testing/assertsql.py
-lib/sqlalchemy/testing/config.py
-lib/sqlalchemy/testing/distutils_run.py
-lib/sqlalchemy/testing/engines.py
-lib/sqlalchemy/testing/entities.py
-lib/sqlalchemy/testing/exclusions.py
-lib/sqlalchemy/testing/fixtures.py
-lib/sqlalchemy/testing/mock.py
-lib/sqlalchemy/testing/pickleable.py
-lib/sqlalchemy/testing/profiling.py
-lib/sqlalchemy/testing/requirements.py
-lib/sqlalchemy/testing/runner.py
-lib/sqlalchemy/testing/schema.py
-lib/sqlalchemy/testing/util.py
-lib/sqlalchemy/testing/warnings.py
-lib/sqlalchemy/testing/plugin/__init__.py
-lib/sqlalchemy/testing/plugin/noseplugin.py
-lib/sqlalchemy/testing/plugin/plugin_base.py
-lib/sqlalchemy/testing/plugin/pytestplugin.py
-lib/sqlalchemy/testing/suite/__init__.py
-lib/sqlalchemy/testing/suite/test_ddl.py
-lib/sqlalchemy/testing/suite/test_insert.py
-lib/sqlalchemy/testing/suite/test_reflection.py
-lib/sqlalchemy/testing/suite/test_results.py
-lib/sqlalchemy/testing/suite/test_select.py
-lib/sqlalchemy/testing/suite/test_sequence.py
-lib/sqlalchemy/testing/suite/test_types.py
-lib/sqlalchemy/testing/suite/test_update_delete.py
-lib/sqlalchemy/util/__init__.py
-lib/sqlalchemy/util/_collections.py
-lib/sqlalchemy/util/compat.py
-lib/sqlalchemy/util/deprecations.py
-lib/sqlalchemy/util/langhelpers.py
-lib/sqlalchemy/util/queue.py
-lib/sqlalchemy/util/topological.py
-test/__init__.py
-test/binary_data_one.dat
-test/binary_data_two.dat
-test/conftest.py
-test/requirements.py
-test/aaa_profiling/__init__.py
-test/aaa_profiling/test_compiler.py
-test/aaa_profiling/test_memusage.py
-test/aaa_profiling/test_orm.py
-test/aaa_profiling/test_pool.py
-test/aaa_profiling/test_resultset.py
-test/aaa_profiling/test_zoomark.py
-test/aaa_profiling/test_zoomark_orm.py
-test/base/__init__.py
-test/base/test_dependency.py
-test/base/test_events.py
-test/base/test_except.py
-test/base/test_inspect.py
-test/base/test_utils.py
-test/dialect/__init__.py
-test/dialect/test_firebird.py
-test/dialect/test_mxodbc.py
-test/dialect/test_oracle.py
-test/dialect/test_pyodbc.py
-test/dialect/test_sqlite.py
-test/dialect/test_suite.py
-test/dialect/test_sybase.py
-test/dialect/mssql/__init__.py
-test/dialect/mssql/test_compiler.py
-test/dialect/mssql/test_engine.py
-test/dialect/mssql/test_query.py
-test/dialect/mssql/test_reflection.py
-test/dialect/mssql/test_types.py
-test/dialect/mysql/__init__.py
-test/dialect/mysql/test_compiler.py
-test/dialect/mysql/test_dialect.py
-test/dialect/mysql/test_query.py
-test/dialect/mysql/test_reflection.py
-test/dialect/mysql/test_types.py
-test/dialect/postgresql/__init__.py
-test/dialect/postgresql/test_compiler.py
-test/dialect/postgresql/test_dialect.py
-test/dialect/postgresql/test_query.py
-test/dialect/postgresql/test_reflection.py
-test/dialect/postgresql/test_types.py
-test/engine/__init__.py
-test/engine/test_bind.py
-test/engine/test_ddlevents.py
-test/engine/test_execute.py
-test/engine/test_logging.py
-test/engine/test_parseconnect.py
-test/engine/test_pool.py
-test/engine/test_processors.py
-test/engine/test_reconnect.py
-test/engine/test_reflection.py
-test/engine/test_transaction.py
-test/ext/__init__.py
-test/ext/test_associationproxy.py
-test/ext/test_automap.py
-test/ext/test_compiler.py
-test/ext/test_extendedattr.py
-test/ext/test_horizontal_shard.py
-test/ext/test_hybrid.py
-test/ext/test_mutable.py
-test/ext/test_orderinglist.py
-test/ext/test_serializer.py
-test/ext/declarative/__init__.py
-test/ext/declarative/test_basic.py
-test/ext/declarative/test_clsregistry.py
-test/ext/declarative/test_inheritance.py
-test/ext/declarative/test_mixin.py
-test/ext/declarative/test_reflection.py
-test/orm/__init__.py
-test/orm/_fixtures.py
-test/orm/test_association.py
-test/orm/test_assorted_eager.py
-test/orm/test_attributes.py
-test/orm/test_backref_mutations.py
-test/orm/test_bind.py
-test/orm/test_bundle.py
-test/orm/test_cascade.py
-test/orm/test_collection.py
-test/orm/test_compile.py
-test/orm/test_composites.py
-test/orm/test_cycles.py
-test/orm/test_default_strategies.py
-test/orm/test_defaults.py
-test/orm/test_deferred.py
-test/orm/test_deprecations.py
-test/orm/test_descriptor.py
-test/orm/test_dynamic.py
-test/orm/test_eager_relations.py
-test/orm/test_evaluator.py
-test/orm/test_events.py
-test/orm/test_expire.py
-test/orm/test_froms.py
-test/orm/test_generative.py
-test/orm/test_hasparent.py
-test/orm/test_immediate_load.py
-test/orm/test_inspect.py
-test/orm/test_instrumentation.py
-test/orm/test_joins.py
-test/orm/test_lazy_relations.py
-test/orm/test_load_on_fks.py
-test/orm/test_loading.py
-test/orm/test_lockmode.py
-test/orm/test_manytomany.py
-test/orm/test_mapper.py
-test/orm/test_merge.py
-test/orm/test_naturalpks.py
-test/orm/test_of_type.py
-test/orm/test_onetoone.py
-test/orm/test_options.py
-test/orm/test_pickled.py
-test/orm/test_query.py
-test/orm/test_rel_fn.py
-test/orm/test_relationships.py
-test/orm/test_scoping.py
-test/orm/test_selectable.py
-test/orm/test_session.py
-test/orm/test_subquery_relations.py
-test/orm/test_sync.py
-test/orm/test_transaction.py
-test/orm/test_unitofwork.py
-test/orm/test_unitofworkv2.py
-test/orm/test_update_delete.py
-test/orm/test_utils.py
-test/orm/test_validators.py
-test/orm/test_versioning.py
-test/orm/inheritance/__init__.py
-test/orm/inheritance/_poly_fixtures.py
-test/orm/inheritance/test_abc_inheritance.py
-test/orm/inheritance/test_abc_polymorphic.py
-test/orm/inheritance/test_assorted_poly.py
-test/orm/inheritance/test_basic.py
-test/orm/inheritance/test_concrete.py
-test/orm/inheritance/test_magazine.py
-test/orm/inheritance/test_manytomany.py
-test/orm/inheritance/test_poly_linked_list.py
-test/orm/inheritance/test_poly_persistence.py
-test/orm/inheritance/test_polymorphic_rel.py
-test/orm/inheritance/test_productspec.py
-test/orm/inheritance/test_relationship.py
-test/orm/inheritance/test_selects.py
-test/orm/inheritance/test_single.py
-test/orm/inheritance/test_with_poly.py
-test/perf/invalidate_stresstest.py
-test/perf/orm2010.py
-test/sql/__init__.py
-test/sql/test_case_statement.py
-test/sql/test_compiler.py
-test/sql/test_constraints.py
-test/sql/test_cte.py
-test/sql/test_ddlemit.py
-test/sql/test_defaults.py
-test/sql/test_delete.py
-test/sql/test_functions.py
-test/sql/test_generative.py
-test/sql/test_insert.py
-test/sql/test_inspect.py
-test/sql/test_join_rewriting.py
-test/sql/test_labels.py
-test/sql/test_metadata.py
-test/sql/test_operators.py
-test/sql/test_query.py
-test/sql/test_quote.py
-test/sql/test_returning.py
-test/sql/test_rowcount.py
-test/sql/test_selectable.py
-test/sql/test_text.py
-test/sql/test_type_expressions.py
-test/sql/test_types.py
-test/sql/test_unicode.py
-test/sql/test_update.py
\ No newline at end of file
diff --git a/lib/python3.4/site-packages/SQLAlchemy-0.9.7.egg-info/dependency_links.txt b/lib/python3.4/site-packages/SQLAlchemy-0.9.7.egg-info/dependency_links.txt
deleted file mode 100644
index 8b13789..0000000
--- a/lib/python3.4/site-packages/SQLAlchemy-0.9.7.egg-info/dependency_links.txt
+++ /dev/null
@@ -1 +0,0 @@
-
diff --git a/lib/python3.4/site-packages/SQLAlchemy-0.9.7.egg-info/installed-files.txt b/lib/python3.4/site-packages/SQLAlchemy-0.9.7.egg-info/installed-files.txt
deleted file mode 100644
index 1e75335..0000000
--- a/lib/python3.4/site-packages/SQLAlchemy-0.9.7.egg-info/installed-files.txt
+++ /dev/null
@@ -1,366 +0,0 @@
-../sqlalchemy/__init__.py
-../sqlalchemy/events.py
-../sqlalchemy/exc.py
-../sqlalchemy/inspection.py
-../sqlalchemy/interfaces.py
-../sqlalchemy/log.py
-../sqlalchemy/pool.py
-../sqlalchemy/processors.py
-../sqlalchemy/schema.py
-../sqlalchemy/types.py
-../sqlalchemy/connectors/__init__.py
-../sqlalchemy/connectors/mxodbc.py
-../sqlalchemy/connectors/mysqldb.py
-../sqlalchemy/connectors/pyodbc.py
-../sqlalchemy/connectors/zxJDBC.py
-../sqlalchemy/databases/__init__.py
-../sqlalchemy/dialects/__init__.py
-../sqlalchemy/dialects/postgres.py
-../sqlalchemy/dialects/drizzle/__init__.py
-../sqlalchemy/dialects/drizzle/base.py
-../sqlalchemy/dialects/drizzle/mysqldb.py
-../sqlalchemy/dialects/firebird/__init__.py
-../sqlalchemy/dialects/firebird/base.py
-../sqlalchemy/dialects/firebird/fdb.py
-../sqlalchemy/dialects/firebird/kinterbasdb.py
-../sqlalchemy/dialects/mssql/__init__.py
-../sqlalchemy/dialects/mssql/adodbapi.py
-../sqlalchemy/dialects/mssql/base.py
-../sqlalchemy/dialects/mssql/information_schema.py
-../sqlalchemy/dialects/mssql/mxodbc.py
-../sqlalchemy/dialects/mssql/pymssql.py
-../sqlalchemy/dialects/mssql/pyodbc.py
-../sqlalchemy/dialects/mssql/zxjdbc.py
-../sqlalchemy/dialects/mysql/__init__.py
-../sqlalchemy/dialects/mysql/base.py
-../sqlalchemy/dialects/mysql/cymysql.py
-../sqlalchemy/dialects/mysql/gaerdbms.py
-../sqlalchemy/dialects/mysql/mysqlconnector.py
-../sqlalchemy/dialects/mysql/mysqldb.py
-../sqlalchemy/dialects/mysql/oursql.py
-../sqlalchemy/dialects/mysql/pymysql.py
-../sqlalchemy/dialects/mysql/pyodbc.py
-../sqlalchemy/dialects/mysql/zxjdbc.py
-../sqlalchemy/dialects/oracle/__init__.py
-../sqlalchemy/dialects/oracle/base.py
-../sqlalchemy/dialects/oracle/cx_oracle.py
-../sqlalchemy/dialects/oracle/zxjdbc.py
-../sqlalchemy/dialects/postgresql/__init__.py
-../sqlalchemy/dialects/postgresql/base.py
-../sqlalchemy/dialects/postgresql/constraints.py
-../sqlalchemy/dialects/postgresql/hstore.py
-../sqlalchemy/dialects/postgresql/json.py
-../sqlalchemy/dialects/postgresql/pg8000.py
-../sqlalchemy/dialects/postgresql/psycopg2.py
-../sqlalchemy/dialects/postgresql/pypostgresql.py
-../sqlalchemy/dialects/postgresql/ranges.py
-../sqlalchemy/dialects/postgresql/zxjdbc.py
-../sqlalchemy/dialects/sqlite/__init__.py
-../sqlalchemy/dialects/sqlite/base.py
-../sqlalchemy/dialects/sqlite/pysqlite.py
-../sqlalchemy/dialects/sybase/__init__.py
-../sqlalchemy/dialects/sybase/base.py
-../sqlalchemy/dialects/sybase/mxodbc.py
-../sqlalchemy/dialects/sybase/pyodbc.py
-../sqlalchemy/dialects/sybase/pysybase.py
-../sqlalchemy/engine/__init__.py
-../sqlalchemy/engine/base.py
-../sqlalchemy/engine/default.py
-../sqlalchemy/engine/interfaces.py
-../sqlalchemy/engine/reflection.py
-../sqlalchemy/engine/result.py
-../sqlalchemy/engine/strategies.py
-../sqlalchemy/engine/threadlocal.py
-../sqlalchemy/engine/url.py
-../sqlalchemy/engine/util.py
-../sqlalchemy/event/__init__.py
-../sqlalchemy/event/api.py
-../sqlalchemy/event/attr.py
-../sqlalchemy/event/base.py
-../sqlalchemy/event/legacy.py
-../sqlalchemy/event/registry.py
-../sqlalchemy/ext/__init__.py
-../sqlalchemy/ext/associationproxy.py
-../sqlalchemy/ext/automap.py
-../sqlalchemy/ext/compiler.py
-../sqlalchemy/ext/horizontal_shard.py
-../sqlalchemy/ext/hybrid.py
-../sqlalchemy/ext/instrumentation.py
-../sqlalchemy/ext/mutable.py
-../sqlalchemy/ext/orderinglist.py
-../sqlalchemy/ext/serializer.py
-../sqlalchemy/ext/declarative/__init__.py
-../sqlalchemy/ext/declarative/api.py
-../sqlalchemy/ext/declarative/base.py
-../sqlalchemy/ext/declarative/clsregistry.py
-../sqlalchemy/orm/__init__.py
-../sqlalchemy/orm/attributes.py
-../sqlalchemy/orm/base.py
-../sqlalchemy/orm/collections.py
-../sqlalchemy/orm/dependency.py
-../sqlalchemy/orm/deprecated_interfaces.py
-../sqlalchemy/orm/descriptor_props.py
-../sqlalchemy/orm/dynamic.py
-../sqlalchemy/orm/evaluator.py
-../sqlalchemy/orm/events.py
-../sqlalchemy/orm/exc.py
-../sqlalchemy/orm/identity.py
-../sqlalchemy/orm/instrumentation.py
-../sqlalchemy/orm/interfaces.py
-../sqlalchemy/orm/loading.py
-../sqlalchemy/orm/mapper.py
-../sqlalchemy/orm/path_registry.py
-../sqlalchemy/orm/persistence.py
-../sqlalchemy/orm/properties.py
-../sqlalchemy/orm/query.py
-../sqlalchemy/orm/relationships.py
-../sqlalchemy/orm/scoping.py
-../sqlalchemy/orm/session.py
-../sqlalchemy/orm/state.py
-../sqlalchemy/orm/strategies.py
-../sqlalchemy/orm/strategy_options.py
-../sqlalchemy/orm/sync.py
-../sqlalchemy/orm/unitofwork.py
-../sqlalchemy/orm/util.py
-../sqlalchemy/sql/__init__.py
-../sqlalchemy/sql/annotation.py
-../sqlalchemy/sql/base.py
-../sqlalchemy/sql/compiler.py
-../sqlalchemy/sql/ddl.py
-../sqlalchemy/sql/default_comparator.py
-../sqlalchemy/sql/dml.py
-../sqlalchemy/sql/elements.py
-../sqlalchemy/sql/expression.py
-../sqlalchemy/sql/functions.py
-../sqlalchemy/sql/naming.py
-../sqlalchemy/sql/operators.py
-../sqlalchemy/sql/schema.py
-../sqlalchemy/sql/selectable.py
-../sqlalchemy/sql/sqltypes.py
-../sqlalchemy/sql/type_api.py
-../sqlalchemy/sql/util.py
-../sqlalchemy/sql/visitors.py
-../sqlalchemy/testing/__init__.py
-../sqlalchemy/testing/assertions.py
-../sqlalchemy/testing/assertsql.py
-../sqlalchemy/testing/config.py
-../sqlalchemy/testing/distutils_run.py
-../sqlalchemy/testing/engines.py
-../sqlalchemy/testing/entities.py
-../sqlalchemy/testing/exclusions.py
-../sqlalchemy/testing/fixtures.py
-../sqlalchemy/testing/mock.py
-../sqlalchemy/testing/pickleable.py
-../sqlalchemy/testing/profiling.py
-../sqlalchemy/testing/requirements.py
-../sqlalchemy/testing/runner.py
-../sqlalchemy/testing/schema.py
-../sqlalchemy/testing/util.py
-../sqlalchemy/testing/warnings.py
-../sqlalchemy/testing/plugin/__init__.py
-../sqlalchemy/testing/plugin/noseplugin.py
-../sqlalchemy/testing/plugin/plugin_base.py
-../sqlalchemy/testing/plugin/pytestplugin.py
-../sqlalchemy/testing/suite/__init__.py
-../sqlalchemy/testing/suite/test_ddl.py
-../sqlalchemy/testing/suite/test_insert.py
-../sqlalchemy/testing/suite/test_reflection.py
-../sqlalchemy/testing/suite/test_results.py
-../sqlalchemy/testing/suite/test_select.py
-../sqlalchemy/testing/suite/test_sequence.py
-../sqlalchemy/testing/suite/test_types.py
-../sqlalchemy/testing/suite/test_update_delete.py
-../sqlalchemy/util/__init__.py
-../sqlalchemy/util/_collections.py
-../sqlalchemy/util/compat.py
-../sqlalchemy/util/deprecations.py
-../sqlalchemy/util/langhelpers.py
-../sqlalchemy/util/queue.py
-../sqlalchemy/util/topological.py
-../sqlalchemy/__pycache__/__init__.cpython-34.pyc
-../sqlalchemy/__pycache__/events.cpython-34.pyc
-../sqlalchemy/__pycache__/exc.cpython-34.pyc
-../sqlalchemy/__pycache__/inspection.cpython-34.pyc
-../sqlalchemy/__pycache__/interfaces.cpython-34.pyc
-../sqlalchemy/__pycache__/log.cpython-34.pyc
-../sqlalchemy/__pycache__/pool.cpython-34.pyc
-../sqlalchemy/__pycache__/processors.cpython-34.pyc
-../sqlalchemy/__pycache__/schema.cpython-34.pyc
-../sqlalchemy/__pycache__/types.cpython-34.pyc
-../sqlalchemy/connectors/__pycache__/__init__.cpython-34.pyc
-../sqlalchemy/connectors/__pycache__/mxodbc.cpython-34.pyc
-../sqlalchemy/connectors/__pycache__/mysqldb.cpython-34.pyc
-../sqlalchemy/connectors/__pycache__/pyodbc.cpython-34.pyc
-../sqlalchemy/connectors/__pycache__/zxJDBC.cpython-34.pyc
-../sqlalchemy/databases/__pycache__/__init__.cpython-34.pyc
-../sqlalchemy/dialects/__pycache__/__init__.cpython-34.pyc
-../sqlalchemy/dialects/__pycache__/postgres.cpython-34.pyc
-../sqlalchemy/dialects/drizzle/__pycache__/__init__.cpython-34.pyc
-../sqlalchemy/dialects/drizzle/__pycache__/base.cpython-34.pyc
-../sqlalchemy/dialects/drizzle/__pycache__/mysqldb.cpython-34.pyc
-../sqlalchemy/dialects/firebird/__pycache__/__init__.cpython-34.pyc
-../sqlalchemy/dialects/firebird/__pycache__/base.cpython-34.pyc
-../sqlalchemy/dialects/firebird/__pycache__/fdb.cpython-34.pyc
-../sqlalchemy/dialects/firebird/__pycache__/kinterbasdb.cpython-34.pyc
-../sqlalchemy/dialects/mssql/__pycache__/__init__.cpython-34.pyc
-../sqlalchemy/dialects/mssql/__pycache__/adodbapi.cpython-34.pyc
-../sqlalchemy/dialects/mssql/__pycache__/base.cpython-34.pyc
-../sqlalchemy/dialects/mssql/__pycache__/information_schema.cpython-34.pyc
-../sqlalchemy/dialects/mssql/__pycache__/mxodbc.cpython-34.pyc
-../sqlalchemy/dialects/mssql/__pycache__/pymssql.cpython-34.pyc
-../sqlalchemy/dialects/mssql/__pycache__/pyodbc.cpython-34.pyc
-../sqlalchemy/dialects/mssql/__pycache__/zxjdbc.cpython-34.pyc
-../sqlalchemy/dialects/mysql/__pycache__/__init__.cpython-34.pyc
-../sqlalchemy/dialects/mysql/__pycache__/base.cpython-34.pyc
-../sqlalchemy/dialects/mysql/__pycache__/cymysql.cpython-34.pyc
-../sqlalchemy/dialects/mysql/__pycache__/gaerdbms.cpython-34.pyc
-../sqlalchemy/dialects/mysql/__pycache__/mysqlconnector.cpython-34.pyc
-../sqlalchemy/dialects/mysql/__pycache__/mysqldb.cpython-34.pyc
-../sqlalchemy/dialects/mysql/__pycache__/oursql.cpython-34.pyc
-../sqlalchemy/dialects/mysql/__pycache__/pymysql.cpython-34.pyc
-../sqlalchemy/dialects/mysql/__pycache__/pyodbc.cpython-34.pyc
-../sqlalchemy/dialects/mysql/__pycache__/zxjdbc.cpython-34.pyc
-../sqlalchemy/dialects/oracle/__pycache__/__init__.cpython-34.pyc
-../sqlalchemy/dialects/oracle/__pycache__/base.cpython-34.pyc
-../sqlalchemy/dialects/oracle/__pycache__/cx_oracle.cpython-34.pyc
-../sqlalchemy/dialects/oracle/__pycache__/zxjdbc.cpython-34.pyc
-../sqlalchemy/dialects/postgresql/__pycache__/__init__.cpython-34.pyc
-../sqlalchemy/dialects/postgresql/__pycache__/base.cpython-34.pyc
-../sqlalchemy/dialects/postgresql/__pycache__/constraints.cpython-34.pyc
-../sqlalchemy/dialects/postgresql/__pycache__/hstore.cpython-34.pyc
-../sqlalchemy/dialects/postgresql/__pycache__/json.cpython-34.pyc
-../sqlalchemy/dialects/postgresql/__pycache__/pg8000.cpython-34.pyc
-../sqlalchemy/dialects/postgresql/__pycache__/psycopg2.cpython-34.pyc
-../sqlalchemy/dialects/postgresql/__pycache__/pypostgresql.cpython-34.pyc
-../sqlalchemy/dialects/postgresql/__pycache__/ranges.cpython-34.pyc
-../sqlalchemy/dialects/postgresql/__pycache__/zxjdbc.cpython-34.pyc
-../sqlalchemy/dialects/sqlite/__pycache__/__init__.cpython-34.pyc
-../sqlalchemy/dialects/sqlite/__pycache__/base.cpython-34.pyc
-../sqlalchemy/dialects/sqlite/__pycache__/pysqlite.cpython-34.pyc
-../sqlalchemy/dialects/sybase/__pycache__/__init__.cpython-34.pyc
-../sqlalchemy/dialects/sybase/__pycache__/base.cpython-34.pyc
-../sqlalchemy/dialects/sybase/__pycache__/mxodbc.cpython-34.pyc
-../sqlalchemy/dialects/sybase/__pycache__/pyodbc.cpython-34.pyc
-../sqlalchemy/dialects/sybase/__pycache__/pysybase.cpython-34.pyc
-../sqlalchemy/engine/__pycache__/__init__.cpython-34.pyc
-../sqlalchemy/engine/__pycache__/base.cpython-34.pyc
-../sqlalchemy/engine/__pycache__/default.cpython-34.pyc
-../sqlalchemy/engine/__pycache__/interfaces.cpython-34.pyc
-../sqlalchemy/engine/__pycache__/reflection.cpython-34.pyc
-../sqlalchemy/engine/__pycache__/result.cpython-34.pyc
-../sqlalchemy/engine/__pycache__/strategies.cpython-34.pyc
-../sqlalchemy/engine/__pycache__/threadlocal.cpython-34.pyc
-../sqlalchemy/engine/__pycache__/url.cpython-34.pyc
-../sqlalchemy/engine/__pycache__/util.cpython-34.pyc
-../sqlalchemy/event/__pycache__/__init__.cpython-34.pyc
-../sqlalchemy/event/__pycache__/api.cpython-34.pyc
-../sqlalchemy/event/__pycache__/attr.cpython-34.pyc
-../sqlalchemy/event/__pycache__/base.cpython-34.pyc
-../sqlalchemy/event/__pycache__/legacy.cpython-34.pyc
-../sqlalchemy/event/__pycache__/registry.cpython-34.pyc
-../sqlalchemy/ext/__pycache__/__init__.cpython-34.pyc
-../sqlalchemy/ext/__pycache__/associationproxy.cpython-34.pyc
-../sqlalchemy/ext/__pycache__/automap.cpython-34.pyc
-../sqlalchemy/ext/__pycache__/compiler.cpython-34.pyc
-../sqlalchemy/ext/__pycache__/horizontal_shard.cpython-34.pyc
-../sqlalchemy/ext/__pycache__/hybrid.cpython-34.pyc
-../sqlalchemy/ext/__pycache__/instrumentation.cpython-34.pyc
-../sqlalchemy/ext/__pycache__/mutable.cpython-34.pyc
-../sqlalchemy/ext/__pycache__/orderinglist.cpython-34.pyc
-../sqlalchemy/ext/__pycache__/serializer.cpython-34.pyc
-../sqlalchemy/ext/declarative/__pycache__/__init__.cpython-34.pyc
-../sqlalchemy/ext/declarative/__pycache__/api.cpython-34.pyc
-../sqlalchemy/ext/declarative/__pycache__/base.cpython-34.pyc
-../sqlalchemy/ext/declarative/__pycache__/clsregistry.cpython-34.pyc
-../sqlalchemy/orm/__pycache__/__init__.cpython-34.pyc
-../sqlalchemy/orm/__pycache__/attributes.cpython-34.pyc
-../sqlalchemy/orm/__pycache__/base.cpython-34.pyc
-../sqlalchemy/orm/__pycache__/collections.cpython-34.pyc
-../sqlalchemy/orm/__pycache__/dependency.cpython-34.pyc
-../sqlalchemy/orm/__pycache__/deprecated_interfaces.cpython-34.pyc
-../sqlalchemy/orm/__pycache__/descriptor_props.cpython-34.pyc
-../sqlalchemy/orm/__pycache__/dynamic.cpython-34.pyc
-../sqlalchemy/orm/__pycache__/evaluator.cpython-34.pyc
-../sqlalchemy/orm/__pycache__/events.cpython-34.pyc
-../sqlalchemy/orm/__pycache__/exc.cpython-34.pyc
-../sqlalchemy/orm/__pycache__/identity.cpython-34.pyc
-../sqlalchemy/orm/__pycache__/instrumentation.cpython-34.pyc
-../sqlalchemy/orm/__pycache__/interfaces.cpython-34.pyc
-../sqlalchemy/orm/__pycache__/loading.cpython-34.pyc
-../sqlalchemy/orm/__pycache__/mapper.cpython-34.pyc
-../sqlalchemy/orm/__pycache__/path_registry.cpython-34.pyc
-../sqlalchemy/orm/__pycache__/persistence.cpython-34.pyc
-../sqlalchemy/orm/__pycache__/properties.cpython-34.pyc
-../sqlalchemy/orm/__pycache__/query.cpython-34.pyc
-../sqlalchemy/orm/__pycache__/relationships.cpython-34.pyc
-../sqlalchemy/orm/__pycache__/scoping.cpython-34.pyc
-../sqlalchemy/orm/__pycache__/session.cpython-34.pyc
-../sqlalchemy/orm/__pycache__/state.cpython-34.pyc
-../sqlalchemy/orm/__pycache__/strategies.cpython-34.pyc
-../sqlalchemy/orm/__pycache__/strategy_options.cpython-34.pyc
-../sqlalchemy/orm/__pycache__/sync.cpython-34.pyc
-../sqlalchemy/orm/__pycache__/unitofwork.cpython-34.pyc
-../sqlalchemy/orm/__pycache__/util.cpython-34.pyc
-../sqlalchemy/sql/__pycache__/__init__.cpython-34.pyc
-../sqlalchemy/sql/__pycache__/annotation.cpython-34.pyc
-../sqlalchemy/sql/__pycache__/base.cpython-34.pyc
-../sqlalchemy/sql/__pycache__/compiler.cpython-34.pyc
-../sqlalchemy/sql/__pycache__/ddl.cpython-34.pyc
-../sqlalchemy/sql/__pycache__/default_comparator.cpython-34.pyc
-../sqlalchemy/sql/__pycache__/dml.cpython-34.pyc
-../sqlalchemy/sql/__pycache__/elements.cpython-34.pyc
-../sqlalchemy/sql/__pycache__/expression.cpython-34.pyc
-../sqlalchemy/sql/__pycache__/functions.cpython-34.pyc
-../sqlalchemy/sql/__pycache__/naming.cpython-34.pyc
-../sqlalchemy/sql/__pycache__/operators.cpython-34.pyc
-../sqlalchemy/sql/__pycache__/schema.cpython-34.pyc
-../sqlalchemy/sql/__pycache__/selectable.cpython-34.pyc
-../sqlalchemy/sql/__pycache__/sqltypes.cpython-34.pyc
-../sqlalchemy/sql/__pycache__/type_api.cpython-34.pyc
-../sqlalchemy/sql/__pycache__/util.cpython-34.pyc
-../sqlalchemy/sql/__pycache__/visitors.cpython-34.pyc
-../sqlalchemy/testing/__pycache__/__init__.cpython-34.pyc
-../sqlalchemy/testing/__pycache__/assertions.cpython-34.pyc
-../sqlalchemy/testing/__pycache__/assertsql.cpython-34.pyc
-../sqlalchemy/testing/__pycache__/config.cpython-34.pyc
-../sqlalchemy/testing/__pycache__/distutils_run.cpython-34.pyc
-../sqlalchemy/testing/__pycache__/engines.cpython-34.pyc
-../sqlalchemy/testing/__pycache__/entities.cpython-34.pyc
-../sqlalchemy/testing/__pycache__/exclusions.cpython-34.pyc
-../sqlalchemy/testing/__pycache__/fixtures.cpython-34.pyc
-../sqlalchemy/testing/__pycache__/mock.cpython-34.pyc
-../sqlalchemy/testing/__pycache__/pickleable.cpython-34.pyc
-../sqlalchemy/testing/__pycache__/profiling.cpython-34.pyc
-../sqlalchemy/testing/__pycache__/requirements.cpython-34.pyc
-../sqlalchemy/testing/__pycache__/runner.cpython-34.pyc
-../sqlalchemy/testing/__pycache__/schema.cpython-34.pyc
-../sqlalchemy/testing/__pycache__/util.cpython-34.pyc
-../sqlalchemy/testing/__pycache__/warnings.cpython-34.pyc
-../sqlalchemy/testing/plugin/__pycache__/__init__.cpython-34.pyc
-../sqlalchemy/testing/plugin/__pycache__/noseplugin.cpython-34.pyc
-../sqlalchemy/testing/plugin/__pycache__/plugin_base.cpython-34.pyc
-../sqlalchemy/testing/plugin/__pycache__/pytestplugin.cpython-34.pyc
-../sqlalchemy/testing/suite/__pycache__/__init__.cpython-34.pyc
-../sqlalchemy/testing/suite/__pycache__/test_ddl.cpython-34.pyc
-../sqlalchemy/testing/suite/__pycache__/test_insert.cpython-34.pyc
-../sqlalchemy/testing/suite/__pycache__/test_reflection.cpython-34.pyc
-../sqlalchemy/testing/suite/__pycache__/test_results.cpython-34.pyc
-../sqlalchemy/testing/suite/__pycache__/test_select.cpython-34.pyc
-../sqlalchemy/testing/suite/__pycache__/test_sequence.cpython-34.pyc
-../sqlalchemy/testing/suite/__pycache__/test_types.cpython-34.pyc
-../sqlalchemy/testing/suite/__pycache__/test_update_delete.cpython-34.pyc
-../sqlalchemy/util/__pycache__/__init__.cpython-34.pyc
-../sqlalchemy/util/__pycache__/_collections.cpython-34.pyc
-../sqlalchemy/util/__pycache__/compat.cpython-34.pyc
-../sqlalchemy/util/__pycache__/deprecations.cpython-34.pyc
-../sqlalchemy/util/__pycache__/langhelpers.cpython-34.pyc
-../sqlalchemy/util/__pycache__/queue.cpython-34.pyc
-../sqlalchemy/util/__pycache__/topological.cpython-34.pyc
-../sqlalchemy/cprocessors.cpython-34m.so
-../sqlalchemy/cresultproxy.cpython-34m.so
-../sqlalchemy/cutils.cpython-34m.so
-./
-dependency_links.txt
-PKG-INFO
-SOURCES.txt
-top_level.txt
diff --git a/lib/python3.4/site-packages/SQLAlchemy-1.0.12.dist-info/DESCRIPTION.rst b/lib/python3.4/site-packages/SQLAlchemy-1.0.12.dist-info/DESCRIPTION.rst
new file mode 100644
index 0000000..479eaf5
--- /dev/null
+++ b/lib/python3.4/site-packages/SQLAlchemy-1.0.12.dist-info/DESCRIPTION.rst
@@ -0,0 +1,137 @@
+SQLAlchemy
+==========
+
+The Python SQL Toolkit and Object Relational Mapper
+
+Introduction
+-------------
+
+SQLAlchemy is the Python SQL toolkit and Object Relational Mapper
+that gives application developers the full power and
+flexibility of SQL. SQLAlchemy provides a full suite
+of well known enterprise-level persistence patterns,
+designed for efficient and high-performing database
+access, adapted into a simple and Pythonic domain
+language.
+
+Major SQLAlchemy features include:
+
+* An industrial strength ORM, built
+ from the core on the identity map, unit of work,
+ and data mapper patterns. These patterns
+ allow transparent persistence of objects
+ using a declarative configuration system.
+ Domain models
+ can be constructed and manipulated naturally,
+ and changes are synchronized with the
+ current transaction automatically.
+* A relationally-oriented query system, exposing
+ the full range of SQL's capabilities
+ explicitly, including joins, subqueries,
+ correlation, and most everything else,
+ in terms of the object model.
+ Writing queries with the ORM uses the same
+ techniques of relational composition you use
+ when writing SQL. While you can drop into
+ literal SQL at any time, it's virtually never
+ needed.
+* A comprehensive and flexible system
+ of eager loading for related collections and objects.
+ Collections are cached within a session,
+ and can be loaded on individual access, all
+ at once using joins, or by query per collection
+ across the full result set.
+* A Core SQL construction system and DBAPI
+ interaction layer. The SQLAlchemy Core is
+ separate from the ORM and is a full database
+ abstraction layer in its own right, and includes
+ an extensible Python-based SQL expression
+ language, schema metadata, connection pooling,
+ type coercion, and custom types.
+* All primary and foreign key constraints are
+ assumed to be composite and natural. Surrogate
+ integer primary keys are of course still the
+ norm, but SQLAlchemy never assumes or hardcodes
+ to this model.
+* Database introspection and generation. Database
+ schemas can be "reflected" in one step into
+ Python structures representing database metadata;
+ those same structures can then generate
+ CREATE statements right back out - all within
+ the Core, independent of the ORM.
+
+SQLAlchemy's philosophy:
+
+* SQL databases behave less and less like object
+ collections the more size and performance start to
+ matter; object collections behave less and less like
+ tables and rows the more abstraction starts to matter.
+ SQLAlchemy aims to accommodate both of these
+ principles.
+* An ORM doesn't need to hide the "R". A relational
+ database provides rich, set-based functionality
+ that should be fully exposed. SQLAlchemy's
+ ORM provides an open-ended set of patterns
+ that allow a developer to construct a custom
+ mediation layer between a domain model and
+ a relational schema, turning the so-called
+ "object relational impedance" issue into
+ a distant memory.
+* The developer, in all cases, makes all decisions
+ regarding the design, structure, and naming conventions
+ of both the object model as well as the relational
+ schema. SQLAlchemy only provides the means
+ to automate the execution of these decisions.
+* With SQLAlchemy, there's no such thing as
+ "the ORM generated a bad query" - you
+ retain full control over the structure of
+ queries, including how joins are organized,
+ how subqueries and correlation is used, what
+ columns are requested. Everything SQLAlchemy
+ does is ultimately the result of a developer-
+ initiated decision.
+* Don't use an ORM if the problem doesn't need one.
+ SQLAlchemy consists of a Core and separate ORM
+ component. The Core offers a full SQL expression
+ language that allows Pythonic construction
+ of SQL constructs that render directly to SQL
+ strings for a target database, returning
+ result sets that are essentially enhanced DBAPI
+ cursors.
+* Transactions should be the norm. With SQLAlchemy's
+ ORM, nothing goes to permanent storage until
+ commit() is called. SQLAlchemy encourages applications
+ to create a consistent means of delineating
+ the start and end of a series of operations.
+* Never render a literal value in a SQL statement.
+ Bound parameters are used to the greatest degree
+ possible, allowing query optimizers to cache
+ query plans effectively and making SQL injection
+ attacks a non-issue.
+
+Documentation
+-------------
+
+Latest documentation is at:
+
+http://www.sqlalchemy.org/docs/
+
+Installation / Requirements
+---------------------------
+
+Full documentation for installation is at
+`Installation `_.
+
+Getting Help / Development / Bug reporting
+------------------------------------------
+
+Please refer to the `SQLAlchemy Community Guide `_.
+
+License
+-------
+
+SQLAlchemy is distributed under the `MIT license
+`_.
+
+
+
diff --git a/lib/python3.4/site-packages/SQLAlchemy-1.0.12.dist-info/INSTALLER b/lib/python3.4/site-packages/SQLAlchemy-1.0.12.dist-info/INSTALLER
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ b/lib/python3.4/site-packages/SQLAlchemy-1.0.12.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/lib/python3.4/site-packages/SQLAlchemy-1.0.12.dist-info/METADATA b/lib/python3.4/site-packages/SQLAlchemy-1.0.12.dist-info/METADATA
new file mode 100644
index 0000000..f993bb0
--- /dev/null
+++ b/lib/python3.4/site-packages/SQLAlchemy-1.0.12.dist-info/METADATA
@@ -0,0 +1,157 @@
+Metadata-Version: 2.0
+Name: SQLAlchemy
+Version: 1.0.12
+Summary: Database Abstraction Library
+Home-page: http://www.sqlalchemy.org
+Author: Mike Bayer
+Author-email: mike_mp@zzzcomputing.com
+License: MIT License
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: Jython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Topic :: Database :: Front-Ends
+Classifier: Operating System :: OS Independent
+
+SQLAlchemy
+==========
+
+The Python SQL Toolkit and Object Relational Mapper
+
+Introduction
+-------------
+
+SQLAlchemy is the Python SQL toolkit and Object Relational Mapper
+that gives application developers the full power and
+flexibility of SQL. SQLAlchemy provides a full suite
+of well known enterprise-level persistence patterns,
+designed for efficient and high-performing database
+access, adapted into a simple and Pythonic domain
+language.
+
+Major SQLAlchemy features include:
+
+* An industrial strength ORM, built
+ from the core on the identity map, unit of work,
+ and data mapper patterns. These patterns
+ allow transparent persistence of objects
+ using a declarative configuration system.
+ Domain models
+ can be constructed and manipulated naturally,
+ and changes are synchronized with the
+ current transaction automatically.
+* A relationally-oriented query system, exposing
+ the full range of SQL's capabilities
+ explicitly, including joins, subqueries,
+ correlation, and most everything else,
+ in terms of the object model.
+ Writing queries with the ORM uses the same
+ techniques of relational composition you use
+ when writing SQL. While you can drop into
+ literal SQL at any time, it's virtually never
+ needed.
+* A comprehensive and flexible system
+ of eager loading for related collections and objects.
+ Collections are cached within a session,
+ and can be loaded on individual access, all
+ at once using joins, or by query per collection
+ across the full result set.
+* A Core SQL construction system and DBAPI
+ interaction layer. The SQLAlchemy Core is
+ separate from the ORM and is a full database
+ abstraction layer in its own right, and includes
+ an extensible Python-based SQL expression
+ language, schema metadata, connection pooling,
+ type coercion, and custom types.
+* All primary and foreign key constraints are
+ assumed to be composite and natural. Surrogate
+ integer primary keys are of course still the
+ norm, but SQLAlchemy never assumes or hardcodes
+ to this model.
+* Database introspection and generation. Database
+ schemas can be "reflected" in one step into
+ Python structures representing database metadata;
+ those same structures can then generate
+ CREATE statements right back out - all within
+ the Core, independent of the ORM.
+
+SQLAlchemy's philosophy:
+
+* SQL databases behave less and less like object
+ collections the more size and performance start to
+ matter; object collections behave less and less like
+ tables and rows the more abstraction starts to matter.
+ SQLAlchemy aims to accommodate both of these
+ principles.
+* An ORM doesn't need to hide the "R". A relational
+ database provides rich, set-based functionality
+ that should be fully exposed. SQLAlchemy's
+ ORM provides an open-ended set of patterns
+ that allow a developer to construct a custom
+ mediation layer between a domain model and
+ a relational schema, turning the so-called
+ "object relational impedance" issue into
+ a distant memory.
+* The developer, in all cases, makes all decisions
+ regarding the design, structure, and naming conventions
+ of both the object model as well as the relational
+ schema. SQLAlchemy only provides the means
+ to automate the execution of these decisions.
+* With SQLAlchemy, there's no such thing as
+ "the ORM generated a bad query" - you
+ retain full control over the structure of
+ queries, including how joins are organized,
+ how subqueries and correlation is used, what
+ columns are requested. Everything SQLAlchemy
+ does is ultimately the result of a developer-
+ initiated decision.
+* Don't use an ORM if the problem doesn't need one.
+ SQLAlchemy consists of a Core and separate ORM
+ component. The Core offers a full SQL expression
+ language that allows Pythonic construction
+ of SQL constructs that render directly to SQL
+ strings for a target database, returning
+ result sets that are essentially enhanced DBAPI
+ cursors.
+* Transactions should be the norm. With SQLAlchemy's
+ ORM, nothing goes to permanent storage until
+ commit() is called. SQLAlchemy encourages applications
+ to create a consistent means of delineating
+ the start and end of a series of operations.
+* Never render a literal value in a SQL statement.
+ Bound parameters are used to the greatest degree
+ possible, allowing query optimizers to cache
+ query plans effectively and making SQL injection
+ attacks a non-issue.
+
+Documentation
+-------------
+
+Latest documentation is at:
+
+http://www.sqlalchemy.org/docs/
+
+Installation / Requirements
+---------------------------
+
+Full documentation for installation is at
+`Installation `_.
+
+Getting Help / Development / Bug reporting
+------------------------------------------
+
+Please refer to the `SQLAlchemy Community Guide `_.
+
+License
+-------
+
+SQLAlchemy is distributed under the `MIT license
+`_.
+
+
+
diff --git a/lib/python3.4/site-packages/SQLAlchemy-1.0.12.dist-info/RECORD b/lib/python3.4/site-packages/SQLAlchemy-1.0.12.dist-info/RECORD
new file mode 100644
index 0000000..ae021a3
--- /dev/null
+++ b/lib/python3.4/site-packages/SQLAlchemy-1.0.12.dist-info/RECORD
@@ -0,0 +1,376 @@
+SQLAlchemy-1.0.12.dist-info/DESCRIPTION.rst,sha256=ZN8fj2owI_rw0Emr3_RXqoNfTFkThjiZy7xcCzg1W_g,5013
+SQLAlchemy-1.0.12.dist-info/METADATA,sha256=fntYBelbmQAxIrj5_YGLpIGPzwQBxiA_6kJwVdrwMF4,5786
+SQLAlchemy-1.0.12.dist-info/RECORD,,
+SQLAlchemy-1.0.12.dist-info/WHEEL,sha256=HslHw5cSLCuyOLxj8duGAooHNvXnupcmoBU1NzRPr2w,104
+SQLAlchemy-1.0.12.dist-info/metadata.json,sha256=JpEwQiqyWE4fnbo3thN5jZ_9e7jYut521x-nYPY27A4,965
+SQLAlchemy-1.0.12.dist-info/top_level.txt,sha256=rp-ZgB7D8G11ivXON5VGPjupT1voYmWqkciDt5Uaw_Q,11
+sqlalchemy/__init__.py,sha256=fTurvwmGkoRt_zdwxoZNWTHg6VdzvBpeHyPmUnexOK4,2112
+sqlalchemy/cprocessors.cpython-34m.so,sha256=9c-YOtWdmfG-k2sVd7Z2mpBiNFXtvi29UgfOxNPOOPM,42824
+sqlalchemy/cresultproxy.cpython-34m.so,sha256=NW56qP26QzyMYvdFfLv15pXoJRQSlDDoxIBzfcMzmQI,54056
+sqlalchemy/cutils.cpython-34m.so,sha256=Nr_Lwl_q1cujcQksdw2oLYMsxh7lbgn06TUhjfNZNZQ,25280
+sqlalchemy/events.py,sha256=j8yref-XfuJxkPKbvnZmB4jeUAIujPcbLAzD2cKV4f4,43944
+sqlalchemy/exc.py,sha256=NhA5R5nDdducWkp0MXtlQ0-Q6iF_rhqkHWblIfuSYGk,11706
+sqlalchemy/inspection.py,sha256=zMa-2nt-OQ0Op1dqq0Z2XCnpdAFSTkqif5Kdi8Wz8AU,3093
+sqlalchemy/interfaces.py,sha256=XSx5y-HittAzc79lU4C7rPbTtSW_Hc2c89NqCy50tsQ,10967
+sqlalchemy/log.py,sha256=opX7UORq5N6_jWxN9aHX9OpiirwAcRA0qq-u5m4SMkQ,6712
+sqlalchemy/pool.py,sha256=-F51TIJYl0XGTV2_sdpV8C1m0jTTQaq0nAezdmSgr84,47220
+sqlalchemy/processors.py,sha256=Li1kdC-I0v03JxeOz4V7u4HAevK6LledyCPvaL06mYc,5220
+sqlalchemy/schema.py,sha256=rZzZJJ8dT9trLSYknFpHm0N1kRERYwhqHH3QD31SJjc,1182
+sqlalchemy/types.py,sha256=qcoy5xKaurDV4kaXr489GL2sz8FKkWX21Us3ZCqeasg,1650
+sqlalchemy/connectors/__init__.py,sha256=97YbriYu5mcljh7opc1JOScRlf3Tk8ldbn5urBVm4WY,278
+sqlalchemy/connectors/mxodbc.py,sha256=-0iqw2k8e-o3OkAKzoCWuAaEPxlEjslvfRM9hnVXENM,5348
+sqlalchemy/connectors/pyodbc.py,sha256=pG2yf3cEDtTr-w_m4to6jF5l8hZk6MJv69K3cg84NfY,6264
+sqlalchemy/connectors/zxJDBC.py,sha256=2KK_sVSgMsdW0ufZqAwgXjd1FsMb4hqbiUQRAkM0RYg,1868
+sqlalchemy/databases/__init__.py,sha256=BaQyAuMjXNpZYV47hCseHrDtPzTfSw-iqUQYxMWJddw,817
+sqlalchemy/dialects/__init__.py,sha256=7SMul8PL3gkbJRUwAwovHLae5qBBApRF-VcRwU-VtdU,1012
+sqlalchemy/dialects/postgres.py,sha256=heNVHys6E91DIBepXT3ls_4_6N8HTTahrZ49W5IR3M0,614
+sqlalchemy/dialects/firebird/__init__.py,sha256=QYmQ0SaGfq3YjDraCV9ALwqVW5A3KDUF0F6air_qp3Q,664
+sqlalchemy/dialects/firebird/base.py,sha256=IT0prWkh1TFSTke-BqGdVMGdof53zmWWk6zbJZ_TuuI,28170 +sqlalchemy/dialects/firebird/fdb.py,sha256=l4s6_8Z0HvqxgqGz0LNcKWP1qUmEc3M2XM718_drN34,4325 +sqlalchemy/dialects/firebird/kinterbasdb.py,sha256=kCsn2ed4u9fyjcyfEI3rXQdKvL05z9wtf5YjW9-NrvI,6299 +sqlalchemy/dialects/mssql/__init__.py,sha256=G12xmirGZgMzfUKZCA8BFfaCmqUDuYca9Fu2VP_eaks,1081 +sqlalchemy/dialects/mssql/adodbapi.py,sha256=dHZgS3pEDX39ixhlDfTtDcjCq6rdjF85VS7rIZ1TfYo,2493 +sqlalchemy/dialects/mssql/base.py,sha256=xqRmK_npoyH5gl626EjazVnu9TEArmrBIFme_avYFUg,66855 +sqlalchemy/dialects/mssql/information_schema.py,sha256=pwuTsgOCY5eSBW9w-g-pyJDRfyuZ_rOEXXNYRuAroCE,6418 +sqlalchemy/dialects/mssql/mxodbc.py,sha256=G9LypIeEizgxeShtDu2M7Vwm8NopnzaTmnZMD49mYeg,3856 +sqlalchemy/dialects/mssql/pymssql.py,sha256=w92w4YQzXdHb53AjCrBcIRHsf6jmie1iN9H7gJNGX4k,3079 +sqlalchemy/dialects/mssql/pyodbc.py,sha256=KRke1Hizrg3r5iYqxdBI0axXVQ_pZR_UPxLaAdF0mKk,9473 +sqlalchemy/dialects/mssql/zxjdbc.py,sha256=u4uBgwk0LbI7_I5CIvM3C4bBb0pmrw2_DqRh_ehJTkI,2282 +sqlalchemy/dialects/mysql/__init__.py,sha256=3cQ2juPT8LsZTicPa2J-0rCQjQIQaPgyBzxjV3O_7xs,1171 +sqlalchemy/dialects/mysql/base.py,sha256=rwC8fnhGZaAnsPB1Jhg4sTcrWE2hjxrZJ5deCS0rAOc,122869 +sqlalchemy/dialects/mysql/cymysql.py,sha256=nqsdQA8LBLIc6eilgX6qwkjm7szsUoqMTVYwK9kkfsE,2349 +sqlalchemy/dialects/mysql/gaerdbms.py,sha256=2MxtTsIqlpq_J32HHqDzz-5vu-mC51Lb7PvyGkJa73M,3387 +sqlalchemy/dialects/mysql/mysqlconnector.py,sha256=DMDm684Shk-ijVo7w-yidopYw7EC6EiOmJY56EPawok,5323 +sqlalchemy/dialects/mysql/mysqldb.py,sha256=McqROngxAknbLOXoUAG9o9mP9FQBLs-ouD-JqqI2Ses,6564 +sqlalchemy/dialects/mysql/oursql.py,sha256=rmdr-r66iJ2amqFeGvCohvE8WCl_i6R9KcgVG0uXOQs,8124 +sqlalchemy/dialects/mysql/pymysql.py,sha256=e-qehI-sASmAjEa0ajHqjZjlyJYWsb3RPQY4iBR5pz0,1504 +sqlalchemy/dialects/mysql/pyodbc.py,sha256=Ze9IOKw6ANVQj25IlmSGR8aaJhM0pMuRtbzKF7UsZCY,2665 +sqlalchemy/dialects/mysql/zxjdbc.py,sha256=LIhe2mHSRVgi8I7qmiTMVBRSpuWJVnuDtpHTUivIx0M,3942 +sqlalchemy/dialects/oracle/__init__.py,sha256=UhF2ZyPfT3EFAnP8ZjGng6GnWSzmAkjMax0Lucpn0Bg,797 +sqlalchemy/dialects/oracle/base.py,sha256=2KJO-sU2CVKK1rij6bAQ5ZFJv203_NmzT8dE5qor9wc,55961 +sqlalchemy/dialects/oracle/cx_oracle.py,sha256=-d5tHbNcCyjbgVtAvWfHgSY2yA8C9bvCzxhwkdWFNe0,38635 +sqlalchemy/dialects/oracle/zxjdbc.py,sha256=nC7XOCY3NdTLrEyIacNTnLDCaeVjWn59q8UYssJL8Wo,8112 +sqlalchemy/dialects/postgresql/__init__.py,sha256=SjCtM5b3EaGyRaTyg_i82sh_qjkLEIVUXW91XDihiCM,1299 +sqlalchemy/dialects/postgresql/base.py,sha256=xhdLeHuWioTv9LYW41pcIPsEjD2fyeh7JflkLKmZMB8,104230 +sqlalchemy/dialects/postgresql/constraints.py,sha256=8UDx_2TNQgqIUSRETZPhgninJigQ6rMfdRNI6vIt3Is,3119 +sqlalchemy/dialects/postgresql/hstore.py,sha256=n8Wsd7Uldk3bbg66tTa0NKjVqjhJUbF1mVeUsM7keXA,11402 +sqlalchemy/dialects/postgresql/json.py,sha256=MTlIGinMDa8iaVbZMOzYnremo0xL4tn2wyGTPwnvX6U,12215 +sqlalchemy/dialects/postgresql/pg8000.py,sha256=x6o3P8Ad0wKsuF9qeyip39BKc5ORJZ4nWxv-8qOdj0E,8375 +sqlalchemy/dialects/postgresql/psycopg2.py,sha256=4ac0upErNRJz6YWJYNbATCU3ncWFvat5kal_Cuq-Jhw,26953 +sqlalchemy/dialects/postgresql/psycopg2cffi.py,sha256=8R3POkJH8z8a2DxwKNmfmQOsxFqsg4tU_OnjGj3OfDA,1651 +sqlalchemy/dialects/postgresql/pypostgresql.py,sha256=raQRfZb8T9-c-jmq1w86Wci5QyiXgf_9_71OInT_sAw,2655 +sqlalchemy/dialects/postgresql/ranges.py,sha256=MihdGXMdmCM6ToIlrj7OJx9Qh_8BX8bv5PSaAepHmII,4814 +sqlalchemy/dialects/postgresql/zxjdbc.py,sha256=AhEGRiAy8q-GM0BStFcsLBgSwjxHkkwy2-BSroIoADo,1397 
+sqlalchemy/dialects/sqlite/__init__.py,sha256=0wW0VOhE_RtFDpRcbwvvo3XtD6Y2-SDgG4K7468eh_w,736 +sqlalchemy/dialects/sqlite/base.py,sha256=_L9-854ITf8Fl2BgUymF9fKjDFvXSo7Pb2yuz1CMkDo,55007 +sqlalchemy/dialects/sqlite/pysqlcipher.py,sha256=sgXCqn8ZtNIeTDwyo253Kj5mn4TPlIW3AZCNNmURi2A,4129 +sqlalchemy/dialects/sqlite/pysqlite.py,sha256=G-Cg-iI-ErYsVjOH4UlQTEY9pLnLOLV89ik8q0-reuY,14980 +sqlalchemy/dialects/sybase/__init__.py,sha256=gwCgFR_C_hoj0Re7PiaW3zmKSWaLpsd96UVXdM7EnTM,894 +sqlalchemy/dialects/sybase/base.py,sha256=Xpl3vEd5VDyvoIRMg0DZa48Or--yBSrhaZ2CbTSCt0w,28853 +sqlalchemy/dialects/sybase/mxodbc.py,sha256=E_ask6yFSjyhNPvv7gQsvA41WmyxbBvRGWjCyPVr9Gs,901 +sqlalchemy/dialects/sybase/pyodbc.py,sha256=0a_gKwrIweJGcz3ZRYuQZb5BIvwjGmFEYBo9wGk66kI,2102 +sqlalchemy/dialects/sybase/pysybase.py,sha256=tu2V_EbtgxWYOvt-ybo5_lLiBQzsIFaAtF8e7S1_-rk,3208 +sqlalchemy/engine/__init__.py,sha256=fyIFw2R5wfLQzSbfE9Jz-28ZDP5RyB-5elNH92uTZYM,18803 +sqlalchemy/engine/base.py,sha256=cRqbbG0QuUG-NGs3GOPVQsU0WLsw5bLT0Y07Yf8OOfU,79399 +sqlalchemy/engine/default.py,sha256=U_yaliCazUHp6cfk_NVzhB4F_zOJSyy959rHyk40J4M,36548 +sqlalchemy/engine/interfaces.py,sha256=CmPYM_oDp1zAPH13sKmufO4Tuha6KA-fXRQq-K_3YTE,35908 +sqlalchemy/engine/reflection.py,sha256=jly5YN-cyjoBDxHs9qO6Mlgm1OZSb2NBNFALwZMEGxE,28590 +sqlalchemy/engine/result.py,sha256=ot5RQxa6kjoScXRUR-DTl0iJJISBhmyNTj1JZkZiNsk,44027 +sqlalchemy/engine/strategies.py,sha256=mwy-CTrnXzyaIA1TRQBQ_Z2O8wN0lnTNZwDefEWCR9A,8929 +sqlalchemy/engine/threadlocal.py,sha256=y4wOLjtbeY-dvp2GcJDtos6F2jzfP11JVAaSFwZ0zRM,4191 +sqlalchemy/engine/url.py,sha256=ZhS_Iqiu6V1kfIM2pcv3ud9fOPXkFOHBv8wiLOqbJhc,8228 +sqlalchemy/engine/util.py,sha256=Tvb9sIkyd6qOwIA-RsBmo5j877UXa5x-jQmhqnhHWRA,2338 +sqlalchemy/event/__init__.py,sha256=KnUVp-NVX6k276ntGffxgkjVmIWR22FSlzrbAKqQ6S4,419 +sqlalchemy/event/api.py,sha256=O2udbj5D7HdXcvsGBQk6-dK9CAFfePTypWOrUdqmhYY,5990 +sqlalchemy/event/attr.py,sha256=VfRJJl4RD24mQaIoDwArWL2hsGOX6ISSU6vKusVMNO0,12053 +sqlalchemy/event/base.py,sha256=DWDKZV19fFsLavu2cXOxXV8NhO3XuCbKcKamBKyXuME,9540 +sqlalchemy/event/legacy.py,sha256=ACnVeBUt8uwVfh1GNRu22cWCADC3CWZdrsBKzAd6UQQ,5814 +sqlalchemy/event/registry.py,sha256=13wx1qdEmcQeCoAmgf_WQEMuR43h3v7iyd2Re54QdOE,7786 +sqlalchemy/ext/__init__.py,sha256=smCZIGgjJprT4ddhuYSLZ8PrTn4NdXPP3j03a038SdE,322 +sqlalchemy/ext/associationproxy.py,sha256=y61Y4UIZNBit5lqk2WzdHTCXIWRrBg3hHbRVsqXjnqE,33422 +sqlalchemy/ext/automap.py,sha256=Aet-3zk2vbsJVLqigwZJYau0hB1D6Y21K65QVWeB5pc,41567 +sqlalchemy/ext/baked.py,sha256=BnVaB4pkQxHk-Fyz4nUw225vCxO_zrDuVC6t5cSF9x8,16967 +sqlalchemy/ext/compiler.py,sha256=aSSlySoTsqN-JkACWFIhv3pq2CuZwxKm6pSDfQoc10Q,16257 +sqlalchemy/ext/horizontal_shard.py,sha256=XEBYIfs0YrTt_2vRuaBY6C33ZOZMUHQb2E4X2s3Szns,4814 +sqlalchemy/ext/hybrid.py,sha256=wNXvuYEEmKy-Nc6z7fu1c2gNWCMOiQA0N14Y3FCq5lo,27989 +sqlalchemy/ext/instrumentation.py,sha256=HRgNiuYJ90_uSKC1iDwsEl8_KXscMQkEb9KeElk-yLE,14856 +sqlalchemy/ext/mutable.py,sha256=lx7b_ewFVe7O6I4gTXdi9M6C6TqxWCFiViqCM2VwUac,25444 +sqlalchemy/ext/orderinglist.py,sha256=UCkuZxTWAQ0num-b5oNm8zNJAmVuIFcbFXt5e7JPx-U,13816 +sqlalchemy/ext/serializer.py,sha256=fK3N1miYF16PSIZDjLFS2zI7y-scZ9qtmopXIfzPqrA,5586 +sqlalchemy/ext/declarative/__init__.py,sha256=Jpwf2EukqwNe4RzDfCmX1p-hQ6pPhJEIL_xunaER3tw,756 +sqlalchemy/ext/declarative/api.py,sha256=PdoO_jh50TWaMvXqnjNh-vX42VqB75ZyliluilphvsU,23317 +sqlalchemy/ext/declarative/base.py,sha256=96SJBOfxpTMsU2jAHrvuXbsjUUJ7TvbLm11R8Hy2Irc,25231 
+sqlalchemy/ext/declarative/clsregistry.py,sha256=jaLLSr-66XvLnA1Z9kxjKatH_XHxWchqEXMKwvjKAXk,10817 +sqlalchemy/orm/__init__.py,sha256=UzDockQEVMaWvr-FE4y1rptrMb5uX5k8v_UNQs82qFY,8033 +sqlalchemy/orm/attributes.py,sha256=OmXkppJEZxRGc0acZZZkSbUhdfDl8ry3Skmvzl3OtLQ,56510 +sqlalchemy/orm/base.py,sha256=F0aRZGK2_1F8phwBHnVYaChkAb-nnTRoFE1VKSvmAwA,14634 +sqlalchemy/orm/collections.py,sha256=TFutWIn_c07DI48FDOKMsFMnAoQB3BG2FnEMGzEF3iI,53549 +sqlalchemy/orm/dependency.py,sha256=phB8nS1788FSd4dWa2j9d4uj6QFlRL7nzcXvh3Bb7Zo,46192 +sqlalchemy/orm/deprecated_interfaces.py,sha256=A63t6ivbZB3Wq8vWgL8I05uTRR6whcWnIPkquuTIPXU,18254 +sqlalchemy/orm/descriptor_props.py,sha256=uk5r77w1VUWVgn0bkgOItkAlMh9FRgeT6OCgOHz3_bM,25141 +sqlalchemy/orm/dynamic.py,sha256=I_YP7X-H9HLjeFHmYgsOas6JPdqg0Aqe0kaltt4HVzA,13283 +sqlalchemy/orm/evaluator.py,sha256=Hozggsd_Fi0YyqHrr9-tldtOA9NLX0MVBF4e2vSM6GY,4731 +sqlalchemy/orm/events.py,sha256=yRaoXlBL78b3l11itTrAy42UhLu42-7cgXKCFUGNXSg,69410 +sqlalchemy/orm/exc.py,sha256=P5lxi5RMFokiHL136VBK0AP3UmAlJcSDHtzgo-M6Kgs,5439 +sqlalchemy/orm/identity.py,sha256=zsb8xOZaPYKvs4sGhyxW21mILQDrtdSuzD4sTyeKdJs,9021 +sqlalchemy/orm/instrumentation.py,sha256=xtq9soM3mpMws7xqNJIFYXqKw65p2nnxCTfmMpuvpeI,17510 +sqlalchemy/orm/interfaces.py,sha256=AqitvZ_BBkB6L503uhdH55nxHplleJ2kQMwM7xKq9Sc,21552 +sqlalchemy/orm/loading.py,sha256=cjC8DQ5g8_rMxroYrYHfW5s35Z5OFSNBUu0-LpxW7hI,22878 +sqlalchemy/orm/mapper.py,sha256=sfooeslzwWAKN7WNIQoZ2Y3u_mCyIxd0tebp4yEUu8k,115074 +sqlalchemy/orm/path_registry.py,sha256=8Pah0P8yPVUyRjoET7DvIMGtM5PC8HZJC4GtxAyqVAs,8370 +sqlalchemy/orm/persistence.py,sha256=WzUUNm1UGm5mGxbv94hLTQowEDNoXfU1VoyGnoKeN_g,51028 +sqlalchemy/orm/properties.py,sha256=HR3eoY3Ze3FUPPNCXM_FruWz4pEMWrGlqtCGiK2G1qE,10426 +sqlalchemy/orm/query.py,sha256=2q2XprzbZhIlAbs0vihIr9dgqfJtcbrjNewgE9q26gE,147616 +sqlalchemy/orm/relationships.py,sha256=79LRGGz8MxsKsAlv0vuZ6MYZXzDXXtfiOCZg-IQ9hiU,116992 +sqlalchemy/orm/scoping.py,sha256=Ao-K4iqg4pBp7Si5JOAlro5zUL_r500TC3lVLcFMLDs,6421 +sqlalchemy/orm/session.py,sha256=yctpvCsLUcFv9Sy8keT1SElZ2VH5DNScYtO7Z77ptYI,111314 +sqlalchemy/orm/state.py,sha256=4LwwftOtPQldH12SKZV2UFgzqPOCj40QfQ08knZs0_E,22984 +sqlalchemy/orm/strategies.py,sha256=rdLEs2pPrF8nqcQqezyG-fGdmE11r22fUva4ES3KGOE,58529 +sqlalchemy/orm/strategy_options.py,sha256=_z7ZblWCnXh8bZpGSOXDoUwtdUqnXdCaWfKXYDgCuH0,34973 +sqlalchemy/orm/sync.py,sha256=B-d-H1Gzw1TkflpvgJeQghwTzqObzhZCQdvEdSPyDeE,5451 +sqlalchemy/orm/unitofwork.py,sha256=EQvZ7RZ-u5wJT51BWTeMJJi-tt22YRnmqywGUCn0Qrc,23343 +sqlalchemy/orm/util.py,sha256=Mj3NXDd8Mwp4O5Vr5zvRGFUZRlB65WpExdDBFJp04wQ,38092 +sqlalchemy/sql/__init__.py,sha256=IFCJYIilmmAQRnSDhv9Y6LQUSpx6pUU5zp9VT7sOx0c,1737 +sqlalchemy/sql/annotation.py,sha256=8ncgAVUo5QCoinApKjREi8esWNMFklcBqie8Q42KsaQ,6136 +sqlalchemy/sql/base.py,sha256=TuXOp7z0Q30qKAjhgcsts6WGvRbvg6F7OBojMQAxjX0,20990 +sqlalchemy/sql/compiler.py,sha256=G0Ft_Dmq1AousO66eagPhI0g9Vkqui_c_LjqY0AbImU,100710 +sqlalchemy/sql/crud.py,sha256=X86dyvzEnbj0-oeJO5ufi6zXxbSKBtDeu5JHlNg-BJU,19837 +sqlalchemy/sql/ddl.py,sha256=nkjd_B4lKwC2GeyPjE0ZtRB9RKXccQL1g1XoZ4p69sM,37540 +sqlalchemy/sql/default_comparator.py,sha256=QaowWtW4apULq_aohDvmj97j0sDtHQQjMRdNxXm83vk,10447 +sqlalchemy/sql/dml.py,sha256=7846H52IMJfMYi5Jd-Cv6Hy9hZM4dkonXbjfBjl5ED4,33330 +sqlalchemy/sql/elements.py,sha256=MLeecC5dMqeekZmFbPn0J-ODKJj5DBDE5v6kuSkq66I,132898 +sqlalchemy/sql/expression.py,sha256=vFZ9MmBlC9Fg8IYzLMAwXgcsnXZhkZbUstY6dO8BFGY,5833 +sqlalchemy/sql/functions.py,sha256=ZYKyvPnVKZMtHyyjyNwK0M5UWPrZmFz3vtTqHN-8658,18533 
+sqlalchemy/sql/naming.py,sha256=foE2lAzngLCFXCeHrpv0S4zT23GCnZLCiata2MPo0kE,4662 +sqlalchemy/sql/operators.py,sha256=UeZgb7eRhWd4H7OfJZkx0ZWOjvo5chIUXQsBAIeeTDY,23013 +sqlalchemy/sql/schema.py,sha256=awhLY5YjUBah8ZYxW9FBfe6lH0v4fW0UJLTNApnx7E0,145511 +sqlalchemy/sql/selectable.py,sha256=o1Hom00WGHjI21Mdb5fkX-f0k2nksQNb_txT0KWK1zQ,118995 +sqlalchemy/sql/sqltypes.py,sha256=JGxizqIjO1WFuZpppWj1Yi5cvCyBczb1JqUQeuhQn8s,54879 +sqlalchemy/sql/type_api.py,sha256=Xe6yH4slgdLA8HRjT19GBOou51SS9o4oUhyK0xfn04c,42846 +sqlalchemy/sql/util.py,sha256=7AsOsyhIq2eSLMWtwvqfTLc2MdCotGzEKQKFE3wk5sk,20382 +sqlalchemy/sql/visitors.py,sha256=4ipGvAkqFaSAWgyNuKjx5x_ms8GIy9aq-wC5pj4-Z3g,10271 +sqlalchemy/testing/__init__.py,sha256=MwKimX0atzs_SmG2j74GXLiyI8O56e3DLq96tcoL0TM,1095 +sqlalchemy/testing/assertions.py,sha256=r1I2nHC599VZcY-5g0JYRQl8bl9kjkf6WFOooOmJ2eE,16112 +sqlalchemy/testing/assertsql.py,sha256=-fP9Iuhdu52BJoT1lEj_KED8jy5ay_XiJu7i4Ry9eWA,12335 +sqlalchemy/testing/config.py,sha256=nqvVm55Vk0BVNjk1Wj3aYR65j_EEEepfB-W9QSFLU-k,2469 +sqlalchemy/testing/distutils_run.py,sha256=tkURrZRwgFiSwseKm1iJRkSjKf2Rtsb3pOXRWtACTHI,247 +sqlalchemy/testing/engines.py,sha256=u6GlDMXt0FKqVTQe_QJ5JXAnkA6W-xdw6Fe_5gMAQhg,9359 +sqlalchemy/testing/entities.py,sha256=IXqTgAihV-1TZyxL0MWdZzu4rFtxdbWKWFetIJWNGM4,2992 +sqlalchemy/testing/exclusions.py,sha256=WuH_tVK5fZJWe8Hu2LzNB4HNQMa_iAUaGC-_6mHUdIM,12570 +sqlalchemy/testing/fixtures.py,sha256=q4nK-81z2EWs17TjeJtPmnaJUCtDdoUiIU7jgLq3l_w,10721 +sqlalchemy/testing/mock.py,sha256=vj5q-GzJrLW6mMVDLqsppxBu_p7K49VvjfiVt5tn0o8,630 +sqlalchemy/testing/pickleable.py,sha256=8I8M4H1XN29pZPMxZdYkmpKWfwzPsUn6WK5FX4UP9L4,2641 +sqlalchemy/testing/profiling.py,sha256=Q_wOTS5JtcGBcs2eCYIvoRoDS_FW_HcfEW3hXWB87Zg,8392 +sqlalchemy/testing/provision.py,sha256=mU9g6JZEHIshqUkE6PWu-t61FVPs_cUJtEtVFRavj9g,9377 +sqlalchemy/testing/replay_fixture.py,sha256=iAxg7XsFkKSCcJnrNPQNJfjMxOgeBAa-ShOkywWPJ4w,5429 +sqlalchemy/testing/requirements.py,sha256=aIdvbfugMzrlVdldEbpcwretX-zjiukPhPUSZgulrzU,19949 +sqlalchemy/testing/runner.py,sha256=hpNH6MNTif4TnBRySxpm92KgFwDK0mOa8eF7wZXumTI,1607 +sqlalchemy/testing/schema.py,sha256=agOzrIMvmuUCeVZY5mYjJ1eJmOP69-wa0gZALtNtJBk,3446 +sqlalchemy/testing/util.py,sha256=IJ688AWzichtXVwWgYf_A4BUbcXPGsK6BQP5fvY3h-U,7544 +sqlalchemy/testing/warnings.py,sha256=-KskRAh1RkJ_69UIY_WR7i15u21U3gDLQ6nKlnJT7_w,987 +sqlalchemy/testing/plugin/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +sqlalchemy/testing/plugin/bootstrap.py,sha256=Iw8R-d1gqoz_NKFtPyGfdX56QPcQHny_9Lvwov65aVY,1634 +sqlalchemy/testing/plugin/noseplugin.py,sha256=In79x6zs9DOngfoYpaHojihWlSd4PeS7Nwzh3M_KNM4,2847 +sqlalchemy/testing/plugin/plugin_base.py,sha256=h4RI4nPNdNq9kYABp6IP89Eknm29q8usgO-nWb8Eobc,17120 +sqlalchemy/testing/plugin/pytestplugin.py,sha256=Pbc62y7Km0PHXd4M9dm5ThBwrlXkM4WtIX-W1pOaM84,5812 +sqlalchemy/testing/suite/__init__.py,sha256=wqCTrb28i5FwhQZOyXVlnz3mA94iQOUBio7lszkFq-g,471 +sqlalchemy/testing/suite/test_ddl.py,sha256=Baw0ou9nKdADmrRuXgWzF1FZx0rvkkw3JHc6yw5BN0M,1838 +sqlalchemy/testing/suite/test_dialect.py,sha256=ORQPXUt53XtO-5ENlWgs8BpsSdPBDjyMRl4W2UjXLI4,1165 +sqlalchemy/testing/suite/test_insert.py,sha256=nP0mgVpsVs72MHMADmihB1oXLbFBpsYsLGO3BlQ7RLU,8132 +sqlalchemy/testing/suite/test_reflection.py,sha256=HtJRsJ_vuNMrOhnPTvuIvRg66OakSaSpeCU36zhaSPg,24616 +sqlalchemy/testing/suite/test_results.py,sha256=oAcO1tD0I7c9ErMeSvSZBZfz1IBDMJHJTf64Y1pBodk,6685 +sqlalchemy/testing/suite/test_select.py,sha256=u0wAz1g-GrAFdZpG4zwSrVckVtjULvjlbd0Z1U1jHAA,5729 
+sqlalchemy/testing/suite/test_sequence.py,sha256=fmBR4Pc5tOLSkXFxfcqwGx1z3xaxeJeUyqDnTakKTBU,3831 +sqlalchemy/testing/suite/test_types.py,sha256=UKa-ZPdpz16mVKvT-9ISRAfqdrqiKaE7IA-_phQQuxo,17088 +sqlalchemy/testing/suite/test_update_delete.py,sha256=r5p467r-EUsjEcWGfUE0VPIfN4LLXZpLRnnyBLyyjl4,1582 +sqlalchemy/util/__init__.py,sha256=G06a5vBxg27RtWzY6dPZHt1FO8qtOiy_2C9PHTTMblI,2520 +sqlalchemy/util/_collections.py,sha256=JZkeYK4GcIE1A5s6MAvHhmUp_X4wp6r7vMGT-iMftZ8,27842 +sqlalchemy/util/compat.py,sha256=80OXp3D-F_R-pLf7s-zITPlfCqG1s_5o6KTlY1g2p0Q,6821 +sqlalchemy/util/deprecations.py,sha256=D_LTsfb9jHokJtPEWNDRMJOc372xRGNjputAiTIysRU,4403 +sqlalchemy/util/langhelpers.py,sha256=Nhe3Y9ieK6JaFYejjYosVOjOSSIBT2V385Hu6HGcyZk,41607 +sqlalchemy/util/queue.py,sha256=rs3W0LDhKt7M_dlQEjYpI9KS-bzQmmwN38LE_-RRVvU,6548 +sqlalchemy/util/topological.py,sha256=xKsYjjAat4p8cdqRHKwibLzr6WONbPTC0X8Mqg7jYno,2794 +/var/lib/lxc/openmedialibrary/rootfs/srv/client/platform_linux64/p34/lib/python3.4/site-packages/SQLAlchemy-1.0.12.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +sqlalchemy/dialects/postgresql/__pycache__/constraints.cpython-34.pyc,, +sqlalchemy/testing/__pycache__/profiling.cpython-34.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/hstore.cpython-34.pyc,, +sqlalchemy/testing/__pycache__/runner.cpython-34.pyc,, +sqlalchemy/dialects/mssql/__pycache__/mxodbc.cpython-34.pyc,, +sqlalchemy/util/__pycache__/__init__.cpython-34.pyc,, +sqlalchemy/testing/plugin/__pycache__/plugin_base.cpython-34.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/zxjdbc.cpython-34.pyc,, +sqlalchemy/ext/declarative/__pycache__/clsregistry.cpython-34.pyc,, +sqlalchemy/dialects/oracle/__pycache__/__init__.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/properties.cpython-34.pyc,, +sqlalchemy/testing/__pycache__/config.cpython-34.pyc,, +sqlalchemy/sql/__pycache__/ddl.cpython-34.pyc,, +sqlalchemy/testing/suite/__pycache__/test_select.cpython-34.pyc,, +sqlalchemy/testing/__pycache__/exclusions.cpython-34.pyc,, +sqlalchemy/util/__pycache__/topological.cpython-34.pyc,, +sqlalchemy/sql/__pycache__/sqltypes.cpython-34.pyc,, +sqlalchemy/ext/__pycache__/horizontal_shard.cpython-34.pyc,, +sqlalchemy/dialects/oracle/__pycache__/zxjdbc.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/state.cpython-34.pyc,, +sqlalchemy/engine/__pycache__/threadlocal.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/persistence.cpython-34.pyc,, +sqlalchemy/ext/__pycache__/instrumentation.cpython-34.pyc,, +sqlalchemy/sql/__pycache__/crud.cpython-34.pyc,, +sqlalchemy/dialects/sybase/__pycache__/mxodbc.cpython-34.pyc,, +sqlalchemy/dialects/mysql/__pycache__/mysqlconnector.cpython-34.pyc,, +sqlalchemy/testing/__pycache__/assertsql.cpython-34.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/psycopg2cffi.cpython-34.pyc,, +sqlalchemy/__pycache__/exc.cpython-34.pyc,, +sqlalchemy/testing/__pycache__/mock.cpython-34.pyc,, +sqlalchemy/engine/__pycache__/result.cpython-34.pyc,, +sqlalchemy/engine/__pycache__/base.cpython-34.pyc,, +sqlalchemy/dialects/mssql/__pycache__/pyodbc.cpython-34.pyc,, +sqlalchemy/testing/__pycache__/warnings.cpython-34.pyc,, +sqlalchemy/event/__pycache__/base.cpython-34.pyc,, +sqlalchemy/sql/__pycache__/expression.cpython-34.pyc,, +sqlalchemy/dialects/sqlite/__pycache__/pysqlite.cpython-34.pyc,, +sqlalchemy/testing/suite/__pycache__/test_insert.cpython-34.pyc,, +sqlalchemy/dialects/sqlite/__pycache__/base.cpython-34.pyc,, +sqlalchemy/dialects/oracle/__pycache__/cx_oracle.cpython-34.pyc,, 
+sqlalchemy/orm/__pycache__/unitofwork.cpython-34.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/__init__.cpython-34.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/base.cpython-34.pyc,, +sqlalchemy/testing/suite/__pycache__/test_update_delete.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/dynamic.cpython-34.pyc,, +sqlalchemy/testing/suite/__pycache__/__init__.cpython-34.pyc,, +sqlalchemy/sql/__pycache__/functions.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/deprecated_interfaces.cpython-34.pyc,, +sqlalchemy/testing/__pycache__/fixtures.cpython-34.pyc,, +sqlalchemy/dialects/mysql/__pycache__/zxjdbc.cpython-34.pyc,, +sqlalchemy/dialects/sqlite/__pycache__/pysqlcipher.cpython-34.pyc,, +sqlalchemy/dialects/mssql/__pycache__/adodbapi.cpython-34.pyc,, +sqlalchemy/testing/suite/__pycache__/test_types.cpython-34.pyc,, +sqlalchemy/dialects/firebird/__pycache__/kinterbasdb.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/strategies.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/sync.cpython-34.pyc,, +sqlalchemy/ext/__pycache__/serializer.cpython-34.pyc,, +sqlalchemy/sql/__pycache__/naming.cpython-34.pyc,, +sqlalchemy/engine/__pycache__/default.cpython-34.pyc,, +sqlalchemy/__pycache__/processors.cpython-34.pyc,, +sqlalchemy/testing/__pycache__/provision.cpython-34.pyc,, +sqlalchemy/testing/__pycache__/replay_fixture.cpython-34.pyc,, +sqlalchemy/dialects/__pycache__/__init__.cpython-34.pyc,, +sqlalchemy/testing/plugin/__pycache__/bootstrap.cpython-34.pyc,, +sqlalchemy/dialects/firebird/__pycache__/__init__.cpython-34.pyc,, +sqlalchemy/dialects/mysql/__pycache__/gaerdbms.cpython-34.pyc,, +sqlalchemy/dialects/mssql/__pycache__/information_schema.cpython-34.pyc,, +sqlalchemy/dialects/mysql/__pycache__/base.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/base.cpython-34.pyc,, +sqlalchemy/dialects/mysql/__pycache__/oursql.cpython-34.pyc,, +sqlalchemy/dialects/mssql/__pycache__/zxjdbc.cpython-34.pyc,, +sqlalchemy/testing/suite/__pycache__/test_dialect.cpython-34.pyc,, +sqlalchemy/engine/__pycache__/url.cpython-34.pyc,, +sqlalchemy/testing/suite/__pycache__/test_reflection.cpython-34.pyc,, +sqlalchemy/testing/__pycache__/util.cpython-34.pyc,, +sqlalchemy/dialects/sqlite/__pycache__/__init__.cpython-34.pyc,, +sqlalchemy/dialects/mssql/__pycache__/base.cpython-34.pyc,, +sqlalchemy/event/__pycache__/__init__.cpython-34.pyc,, +sqlalchemy/dialects/firebird/__pycache__/fdb.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/identity.cpython-34.pyc,, +sqlalchemy/sql/__pycache__/dml.cpython-34.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/psycopg2.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/dependency.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/relationships.cpython-34.pyc,, +sqlalchemy/dialects/mysql/__pycache__/__init__.cpython-34.pyc,, +sqlalchemy/ext/__pycache__/hybrid.cpython-34.pyc,, +sqlalchemy/engine/__pycache__/__init__.cpython-34.pyc,, +sqlalchemy/__pycache__/__init__.cpython-34.pyc,, +sqlalchemy/ext/__pycache__/orderinglist.cpython-34.pyc,, +sqlalchemy/dialects/sybase/__pycache__/__init__.cpython-34.pyc,, +sqlalchemy/__pycache__/inspection.cpython-34.pyc,, +sqlalchemy/ext/__pycache__/baked.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/events.cpython-34.pyc,, +sqlalchemy/testing/__pycache__/assertions.cpython-34.pyc,, +sqlalchemy/testing/__pycache__/distutils_run.cpython-34.pyc,, +sqlalchemy/testing/plugin/__pycache__/noseplugin.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/mapper.cpython-34.pyc,, +sqlalchemy/connectors/__pycache__/mxodbc.cpython-34.pyc,, +sqlalchemy/testing/__pycache__/engines.cpython-34.pyc,, 
+sqlalchemy/__pycache__/types.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/util.cpython-34.pyc,, +sqlalchemy/engine/__pycache__/strategies.cpython-34.pyc,, +sqlalchemy/databases/__pycache__/__init__.cpython-34.pyc,, +sqlalchemy/sql/__pycache__/__init__.cpython-34.pyc,, +sqlalchemy/event/__pycache__/registry.cpython-34.pyc,, +sqlalchemy/connectors/__pycache__/zxJDBC.cpython-34.pyc,, +sqlalchemy/__pycache__/log.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/strategy_options.cpython-34.pyc,, +sqlalchemy/testing/__pycache__/schema.cpython-34.pyc,, +sqlalchemy/ext/__pycache__/automap.cpython-34.pyc,, +sqlalchemy/sql/__pycache__/selectable.cpython-34.pyc,, +sqlalchemy/sql/__pycache__/visitors.cpython-34.pyc,, +sqlalchemy/engine/__pycache__/interfaces.cpython-34.pyc,, +sqlalchemy/ext/__pycache__/associationproxy.cpython-34.pyc,, +sqlalchemy/dialects/sybase/__pycache__/pysybase.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/descriptor_props.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/session.cpython-34.pyc,, +sqlalchemy/__pycache__/events.cpython-34.pyc,, +sqlalchemy/event/__pycache__/attr.cpython-34.pyc,, +sqlalchemy/dialects/mysql/__pycache__/pyodbc.cpython-34.pyc,, +sqlalchemy/testing/plugin/__pycache__/__init__.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/collections.cpython-34.pyc,, +sqlalchemy/testing/__pycache__/requirements.cpython-34.pyc,, +sqlalchemy/__pycache__/schema.cpython-34.pyc,, +sqlalchemy/ext/declarative/__pycache__/__init__.cpython-34.pyc,, +sqlalchemy/dialects/mysql/__pycache__/pymysql.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/loading.cpython-34.pyc,, +sqlalchemy/sql/__pycache__/schema.cpython-34.pyc,, +sqlalchemy/testing/suite/__pycache__/test_ddl.cpython-34.pyc,, +sqlalchemy/testing/__pycache__/__init__.cpython-34.pyc,, +sqlalchemy/__pycache__/pool.cpython-34.pyc,, +sqlalchemy/dialects/sybase/__pycache__/pyodbc.cpython-34.pyc,, +sqlalchemy/sql/__pycache__/base.cpython-34.pyc,, +sqlalchemy/testing/plugin/__pycache__/pytestplugin.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/__init__.cpython-34.pyc,, +sqlalchemy/connectors/__pycache__/__init__.cpython-34.pyc,, +sqlalchemy/ext/__pycache__/mutable.cpython-34.pyc,, +sqlalchemy/dialects/oracle/__pycache__/base.cpython-34.pyc,, +sqlalchemy/testing/suite/__pycache__/test_results.cpython-34.pyc,, +sqlalchemy/testing/suite/__pycache__/test_sequence.cpython-34.pyc,, +sqlalchemy/dialects/mssql/__pycache__/__init__.cpython-34.pyc,, +sqlalchemy/ext/declarative/__pycache__/api.cpython-34.pyc,, +sqlalchemy/sql/__pycache__/operators.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/attributes.cpython-34.pyc,, +sqlalchemy/sql/__pycache__/default_comparator.cpython-34.pyc,, +sqlalchemy/engine/__pycache__/util.cpython-34.pyc,, +sqlalchemy/connectors/__pycache__/pyodbc.cpython-34.pyc,, +sqlalchemy/util/__pycache__/queue.cpython-34.pyc,, +sqlalchemy/dialects/sybase/__pycache__/base.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/query.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/evaluator.cpython-34.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/json.cpython-34.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/pypostgresql.cpython-34.pyc,, +sqlalchemy/dialects/mysql/__pycache__/mysqldb.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/scoping.cpython-34.pyc,, +sqlalchemy/dialects/__pycache__/postgres.cpython-34.pyc,, +sqlalchemy/engine/__pycache__/reflection.cpython-34.pyc,, +sqlalchemy/event/__pycache__/legacy.cpython-34.pyc,, +sqlalchemy/util/__pycache__/langhelpers.cpython-34.pyc,, +sqlalchemy/__pycache__/interfaces.cpython-34.pyc,, 
+sqlalchemy/sql/__pycache__/compiler.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/exc.cpython-34.pyc,, +sqlalchemy/sql/__pycache__/type_api.cpython-34.pyc,, +sqlalchemy/dialects/firebird/__pycache__/base.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/instrumentation.cpython-34.pyc,, +sqlalchemy/util/__pycache__/_collections.cpython-34.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/ranges.cpython-34.pyc,, +sqlalchemy/sql/__pycache__/util.cpython-34.pyc,, +sqlalchemy/sql/__pycache__/elements.cpython-34.pyc,, +sqlalchemy/testing/__pycache__/pickleable.cpython-34.pyc,, +sqlalchemy/dialects/mssql/__pycache__/pymssql.cpython-34.pyc,, +sqlalchemy/ext/__pycache__/__init__.cpython-34.pyc,, +sqlalchemy/ext/declarative/__pycache__/base.cpython-34.pyc,, +sqlalchemy/dialects/mysql/__pycache__/cymysql.cpython-34.pyc,, +sqlalchemy/sql/__pycache__/annotation.cpython-34.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/pg8000.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/path_registry.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/interfaces.cpython-34.pyc,, +sqlalchemy/util/__pycache__/deprecations.cpython-34.pyc,, +sqlalchemy/ext/__pycache__/compiler.cpython-34.pyc,, +sqlalchemy/util/__pycache__/compat.cpython-34.pyc,, +sqlalchemy/event/__pycache__/api.cpython-34.pyc,, +sqlalchemy/testing/__pycache__/entities.cpython-34.pyc,, diff --git a/lib/python3.4/site-packages/SQLAlchemy-1.0.12.dist-info/WHEEL b/lib/python3.4/site-packages/SQLAlchemy-1.0.12.dist-info/WHEEL new file mode 100644 index 0000000..db40973 --- /dev/null +++ b/lib/python3.4/site-packages/SQLAlchemy-1.0.12.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.29.0) +Root-Is-Purelib: false +Tag: cp34-cp34m-linux_x86_64 + diff --git a/lib/python3.4/site-packages/SQLAlchemy-1.0.12.dist-info/metadata.json b/lib/python3.4/site-packages/SQLAlchemy-1.0.12.dist-info/metadata.json new file mode 100644 index 0000000..ef57ea4 --- /dev/null +++ b/lib/python3.4/site-packages/SQLAlchemy-1.0.12.dist-info/metadata.json @@ -0,0 +1 @@ +{"classifiers": ["Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Programming Language :: Python", "Programming Language :: Python :: 3", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: Jython", "Programming Language :: Python :: Implementation :: PyPy", "Topic :: Database :: Front-Ends", "Operating System :: OS Independent"], "extensions": {"python.details": {"contacts": [{"email": "mike_mp@zzzcomputing.com", "name": "Mike Bayer", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "http://www.sqlalchemy.org"}}}, "generator": "bdist_wheel (0.29.0)", "license": "MIT License", "metadata_version": "2.0", "name": "SQLAlchemy", "summary": "Database Abstraction Library", "test_requires": [{"requires": ["mock", "pytest (>=2.5.2)", "pytest-xdist"]}], "version": "1.0.12"} \ No newline at end of file diff --git a/lib/python3.4/site-packages/SQLAlchemy-0.9.7.egg-info/top_level.txt b/lib/python3.4/site-packages/SQLAlchemy-1.0.12.dist-info/top_level.txt similarity index 100% rename from lib/python3.4/site-packages/SQLAlchemy-0.9.7.egg-info/top_level.txt rename to lib/python3.4/site-packages/SQLAlchemy-1.0.12.dist-info/top_level.txt diff --git a/lib/python3.4/site-packages/ed25519-1.4.dist-info/DESCRIPTION.rst b/lib/python3.4/site-packages/ed25519-1.4.dist-info/DESCRIPTION.rst new file mode 100644 index 0000000..2d8b087 --- 
/dev/null +++ b/lib/python3.4/site-packages/ed25519-1.4.dist-info/DESCRIPTION.rst @@ -0,0 +1,11 @@ +Python bindings to the Ed25519 public-key signature system. + +This offers a comfortable python interface to a C implementation of the +Ed25519 public-key signature system (http://ed25519.cr.yp.to/), using the +portable 'ref' code from the 'SUPERCOP' benchmarking suite. + +This system provides high (128-bit) security, short (32-byte) keys, short +(64-byte) signatures, and fast (2-6ms) operation. Please see the README for +more details. + + diff --git a/lib/python3.4/site-packages/ed25519-1.4.dist-info/INSTALLER b/lib/python3.4/site-packages/ed25519-1.4.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/lib/python3.4/site-packages/ed25519-1.4.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/lib/python3.4/site-packages/ed25519-1.4.egg-info/PKG-INFO b/lib/python3.4/site-packages/ed25519-1.4.dist-info/METADATA similarity index 54% rename from lib/python3.4/site-packages/ed25519-1.4.egg-info/PKG-INFO rename to lib/python3.4/site-packages/ed25519-1.4.dist-info/METADATA index 654981e..10628e3 100644 --- a/lib/python3.4/site-packages/ed25519-1.4.egg-info/PKG-INFO +++ b/lib/python3.4/site-packages/ed25519-1.4.dist-info/METADATA @@ -1,4 +1,4 @@ -Metadata-Version: 1.1 +Metadata-Version: 2.0 Name: ed25519 Version: 1.4 Summary: Ed25519 public-key signatures @@ -6,16 +6,6 @@ Home-page: https://github.com/warner/python-ed25519 Author: Brian Warner Author-email: warner-python-ed25519@lothar.com License: MIT -Description: Python bindings to the Ed25519 public-key signature system. - - This offers a comfortable python interface to a C implementation of the - Ed25519 public-key signature system (http://ed25519.cr.yp.to/), using the - portable 'ref' code from the 'SUPERCOP' benchmarking suite. - - This system provides high (128-bit) security, short (32-byte) keys, short - (64-byte) signatures, and fast (2-6ms) operation. Please see the README for - more details. - Platform: UNKNOWN Classifier: Development Status :: 5 - Production/Stable Classifier: Intended Audience :: Developers @@ -26,3 +16,15 @@ Classifier: Programming Language :: Python :: 2.7 Classifier: Programming Language :: Python :: 3.3 Classifier: Programming Language :: Python :: 3.4 Classifier: Topic :: Security :: Cryptography + +Python bindings to the Ed25519 public-key signature system. + +This offers a comfortable python interface to a C implementation of the +Ed25519 public-key signature system (http://ed25519.cr.yp.to/), using the +portable 'ref' code from the 'SUPERCOP' benchmarking suite. + +This system provides high (128-bit) security, short (32-byte) keys, short +(64-byte) signatures, and fast (2-6ms) operation. Please see the README for +more details. 
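As a brief illustration of the interface summarized above, the following sketch runs a signing round-trip using the keypair/sign/verify API that the README documents; the message bytes are an arbitrary example::

    import ed25519

    # create_keypair() returns a (SigningKey, VerifyingKey) pair.
    signing_key, verifying_key = ed25519.create_keypair()

    sig = signing_key.sign(b"hello world", encoding="base64")
    try:
        # verify() returns silently on success and raises on mismatch.
        verifying_key.verify(sig, b"hello world", encoding="base64")
        print("signature is valid")
    except ed25519.BadSignatureError:
        print("signature is invalid")

Callers therefore branch on the BadSignatureError exception rather than on a boolean return value.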
+ + diff --git a/lib/python3.4/site-packages/ed25519-1.4.dist-info/RECORD b/lib/python3.4/site-packages/ed25519-1.4.dist-info/RECORD new file mode 100644 index 0000000..a77f2e2 --- /dev/null +++ b/lib/python3.4/site-packages/ed25519-1.4.dist-info/RECORD @@ -0,0 +1,17 @@ +ed25519/__init__.py,sha256=0AicD1xQAforRdrUWwmmURJkZ3Gi1lqaifukwZNYJos,401 +ed25519/_ed25519.cpython-34m.so,sha256=lQsx9-Rms4glBHDzWgcSZ4kgoWp-fVN9O3lGztOiNO8,255624 +ed25519/_version.py,sha256=yb119RosJrH_RO02_o3o12GWQvkxx3xD4X7UrJW9vTY,469 +ed25519/keys.py,sha256=AbMFsbxn0qbwmQ6HntpNURsOGq_y4puwFxs6U7Of2eo,7123 +ed25519/test_ed25519.py,sha256=IG8ot-yARHi6PoyJY6ixS1l2L23hE1lCXbSH-XQPCCM,12389 +../../../bin/edsig,sha256=SA1mUUWCjAAaSEe6MKSpVWg-2qXwuiuK3PodCAUwCN0,2853 +ed25519-1.4.dist-info/DESCRIPTION.rst,sha256=8UWGEqjPrB7zPyxLA5Ep6JL58ANbe0Wybqth188exdc,434 +ed25519-1.4.dist-info/METADATA,sha256=5SfqBgerND9vMg8dq81VUTwDclNFkXr30DkD9K95gZU,1114 +ed25519-1.4.dist-info/RECORD,, +ed25519-1.4.dist-info/WHEEL,sha256=HslHw5cSLCuyOLxj8duGAooHNvXnupcmoBU1NzRPr2w,104 +ed25519-1.4.dist-info/metadata.json,sha256=LyRoPQ8zyOxjJH1CoRteHtukVr0HLA_z_rRyigiJl5c,802 +ed25519-1.4.dist-info/top_level.txt,sha256=U3-N9ZJMBO9MUuZLwoiMbsWSkxsd0TfkNSuzO6O_gYY,8 +/var/lib/lxc/openmedialibrary/rootfs/srv/client/platform_linux64/p34/lib/python3.4/site-packages/ed25519-1.4.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +ed25519/__pycache__/test_ed25519.cpython-34.pyc,, +ed25519/__pycache__/__init__.cpython-34.pyc,, +ed25519/__pycache__/keys.cpython-34.pyc,, +ed25519/__pycache__/_version.cpython-34.pyc,, diff --git a/lib/python3.4/site-packages/ed25519-1.4.dist-info/WHEEL b/lib/python3.4/site-packages/ed25519-1.4.dist-info/WHEEL new file mode 100644 index 0000000..db40973 --- /dev/null +++ b/lib/python3.4/site-packages/ed25519-1.4.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.29.0) +Root-Is-Purelib: false +Tag: cp34-cp34m-linux_x86_64 + diff --git a/lib/python3.4/site-packages/ed25519-1.4.dist-info/metadata.json b/lib/python3.4/site-packages/ed25519-1.4.dist-info/metadata.json new file mode 100644 index 0000000..6a558b5 --- /dev/null +++ b/lib/python3.4/site-packages/ed25519-1.4.dist-info/metadata.json @@ -0,0 +1 @@ +{"classifiers": ["Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Programming Language :: Python", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Topic :: Security :: Cryptography"], "extensions": {"python.details": {"contacts": [{"email": "warner-python-ed25519@lothar.com", "name": "Brian Warner", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "https://github.com/warner/python-ed25519"}}}, "generator": "bdist_wheel (0.29.0)", "license": "MIT", "metadata_version": "2.0", "name": "ed25519", "summary": "Ed25519 public-key signatures", "version": "1.4"} \ No newline at end of file diff --git a/lib/python3.4/site-packages/ed25519-1.4.egg-info/top_level.txt b/lib/python3.4/site-packages/ed25519-1.4.dist-info/top_level.txt similarity index 100% rename from lib/python3.4/site-packages/ed25519-1.4.egg-info/top_level.txt rename to lib/python3.4/site-packages/ed25519-1.4.dist-info/top_level.txt diff --git a/lib/python3.4/site-packages/ed25519-1.4.egg-info/SOURCES.txt b/lib/python3.4/site-packages/ed25519-1.4.egg-info/SOURCES.txt deleted file mode 
100644 index ebe0028..0000000 --- a/lib/python3.4/site-packages/ed25519-1.4.egg-info/SOURCES.txt +++ /dev/null @@ -1,39 +0,0 @@ -LICENSE -MANIFEST.in -Makefile -NEWS -README.md -kat-ed25519.txt -kat.py -setup.cfg -test_ed25519_kat.py -versioneer.py -bin/edsig -ed25519.egg-info/PKG-INFO -ed25519.egg-info/SOURCES.txt -ed25519.egg-info/dependency_links.txt -ed25519.egg-info/top_level.txt -src/ed25519/__init__.py -src/ed25519/_version.py -src/ed25519/keys.py -src/ed25519/test_ed25519.py -src/ed25519-glue/ed25519module.c -src/ed25519-supercop-ref/Makefile -src/ed25519-supercop-ref/api.h -src/ed25519-supercop-ref/crypto_int32.h -src/ed25519-supercop-ref/crypto_sign.h -src/ed25519-supercop-ref/crypto_uint32.h -src/ed25519-supercop-ref/crypto_verify_32.h -src/ed25519-supercop-ref/ed25519.c -src/ed25519-supercop-ref/fe25519.c -src/ed25519-supercop-ref/fe25519.h -src/ed25519-supercop-ref/ge25519.c -src/ed25519-supercop-ref/ge25519.h -src/ed25519-supercop-ref/ge25519_base.data -src/ed25519-supercop-ref/sc25519.c -src/ed25519-supercop-ref/sc25519.h -src/ed25519-supercop-ref/sha512-blocks.c -src/ed25519-supercop-ref/sha512-hash.c -src/ed25519-supercop-ref/sha512.h -src/ed25519-supercop-ref/test.c -src/ed25519-supercop-ref/verify.c \ No newline at end of file diff --git a/lib/python3.4/site-packages/ed25519-1.4.egg-info/dependency_links.txt b/lib/python3.4/site-packages/ed25519-1.4.egg-info/dependency_links.txt deleted file mode 100644 index 8b13789..0000000 --- a/lib/python3.4/site-packages/ed25519-1.4.egg-info/dependency_links.txt +++ /dev/null @@ -1 +0,0 @@ - diff --git a/lib/python3.4/site-packages/ed25519-1.4.egg-info/installed-files.txt b/lib/python3.4/site-packages/ed25519-1.4.egg-info/installed-files.txt deleted file mode 100644 index dd4431c..0000000 --- a/lib/python3.4/site-packages/ed25519-1.4.egg-info/installed-files.txt +++ /dev/null @@ -1,15 +0,0 @@ -../ed25519/__init__.py -../ed25519/_version.py -../ed25519/keys.py -../ed25519/test_ed25519.py -../ed25519/__pycache__/__init__.cpython-34.pyc -../ed25519/__pycache__/_version.cpython-34.pyc -../ed25519/__pycache__/keys.cpython-34.pyc -../ed25519/__pycache__/test_ed25519.cpython-34.pyc -../ed25519/_ed25519.cpython-34m.so -./ -PKG-INFO -dependency_links.txt -top_level.txt -SOURCES.txt -../../../../bin/edsig diff --git a/lib/python3.4/site-packages/ed25519/_ed25519.cpython-34m.so b/lib/python3.4/site-packages/ed25519/_ed25519.cpython-34m.so index c224b53..1f12c9b 100755 Binary files a/lib/python3.4/site-packages/ed25519/_ed25519.cpython-34m.so and b/lib/python3.4/site-packages/ed25519/_ed25519.cpython-34m.so differ diff --git a/lib/python3.4/site-packages/ed25519/_ed25519.cpython-35m-x86_64-linux-gnu.so b/lib/python3.4/site-packages/ed25519/_ed25519.cpython-35m-x86_64-linux-gnu.so index 52a9dd7..56dafc8 100755 Binary files a/lib/python3.4/site-packages/ed25519/_ed25519.cpython-35m-x86_64-linux-gnu.so and b/lib/python3.4/site-packages/ed25519/_ed25519.cpython-35m-x86_64-linux-gnu.so differ diff --git a/lib/python3.4/site-packages/pip-8.0.2.dist-info/DESCRIPTION.rst b/lib/python3.4/site-packages/pip-8.0.2.dist-info/DESCRIPTION.rst new file mode 100644 index 0000000..2c149b7 --- /dev/null +++ b/lib/python3.4/site-packages/pip-8.0.2.dist-info/DESCRIPTION.rst @@ -0,0 +1,34 @@ +pip +=== + +The `PyPA recommended +`_ +tool for installing Python packages. + +* `Installation `_ +* `Documentation `_ +* `Changelog `_ +* `Github Page `_ +* `Issue Tracking `_ +* `User mailing list `_ +* `Dev mailing list `_ +* User IRC: #pypa on Freenode. 
+* Dev IRC: #pypa-dev on Freenode. + + +.. image:: https://img.shields.io/pypi/v/pip.svg + :target: https://pypi.python.org/pypi/pip + +.. image:: https://img.shields.io/travis/pypa/pip/develop.svg + :target: http://travis-ci.org/pypa/pip + + +Code of Conduct +--------------- + +Everyone interacting in the pip project's codebases, issue trackers, chat +rooms, and mailing lists is expected to follow the `PyPA Code of Conduct`_. + +.. _PyPA Code of Conduct: https://www.pypa.io/en/latest/code-of-conduct/ + + diff --git a/lib/python3.4/site-packages/pip-8.0.2.dist-info/INSTALLER b/lib/python3.4/site-packages/pip-8.0.2.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/lib/python3.4/site-packages/pip-8.0.2.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/lib/python3.4/site-packages/pip-8.0.2.dist-info/METADATA b/lib/python3.4/site-packages/pip-8.0.2.dist-info/METADATA new file mode 100644 index 0000000..a49cee5 --- /dev/null +++ b/lib/python3.4/site-packages/pip-8.0.2.dist-info/METADATA @@ -0,0 +1,62 @@ +Metadata-Version: 2.0 +Name: pip +Version: 8.0.2 +Summary: The PyPA recommended tool for installing Python packages. +Home-page: https://pip.pypa.io/ +Author: The pip developers +Author-email: python-virtualenv@groups.google.com +License: MIT +Keywords: easy_install distutils setuptools egg virtualenv +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Topic :: Software Development :: Build Tools +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: Implementation :: PyPy +Provides-Extra: testing +Requires-Dist: mock; extra == 'testing' +Requires-Dist: pytest; extra == 'testing' +Requires-Dist: scripttest (>=1.3); extra == 'testing' +Requires-Dist: virtualenv (>=1.10); extra == 'testing' + +pip +=== + +The `PyPA recommended +`_ +tool for installing Python packages. + +* `Installation `_ +* `Documentation `_ +* `Changelog `_ +* `Github Page `_ +* `Issue Tracking `_ +* `User mailing list `_ +* `Dev mailing list `_ +* User IRC: #pypa on Freenode. +* Dev IRC: #pypa-dev on Freenode. + + +.. image:: https://img.shields.io/pypi/v/pip.svg + :target: https://pypi.python.org/pypi/pip + +.. image:: https://img.shields.io/travis/pypa/pip/develop.svg + :target: http://travis-ci.org/pypa/pip + + +Code of Conduct +--------------- + +Everyone interacting in the pip project's codebases, issue trackers, chat +rooms, and mailing lists is expected to follow the `PyPA Code of Conduct`_. + +.. 
_PyPA Code of Conduct: https://www.pypa.io/en/latest/code-of-conduct/ + + diff --git a/lib/python3.4/site-packages/pip-8.0.2.dist-info/RECORD b/lib/python3.4/site-packages/pip-8.0.2.dist-info/RECORD new file mode 100644 index 0000000..5ccf480 --- /dev/null +++ b/lib/python3.4/site-packages/pip-8.0.2.dist-info/RECORD @@ -0,0 +1,113 @@ +pip/__init__.py,sha256=mPGsfFwpIvtXlWhanTBtjdVYuTc0KNDvb2Tr-gHZBvU,10431 +pip/__main__.py,sha256=V6Kh-IEDEFpt1cahRE6MajUF_14qJR_Qsvn4MjWZXzE,584 +pip/basecommand.py,sha256=Zlg6SE42TIjRyt1mct0LCkgNxcKKnss3xvASJyDqucE,11429 +pip/baseparser.py,sha256=Nlc7Un9gat27xtB24SnKL_3pZZOoh62gNNRdS6tDRZY,10465 +pip/cmdoptions.py,sha256=OJhbVR6zQ8kbbGcnv0RTZyvwvFqzKxtmO4lPYymMBKM,15877 +pip/download.py,sha256=srwSU5WnOa59_TPGaCfEWODDSZSRBJUHgU5jkC467MY,31715 +pip/exceptions.py,sha256=4KrgxMQuOpP8JlWc90S0vsJ_Ch-EBeD026knOgk9U8A,7741 +pip/index.py,sha256=VzgEo93kTlHeoPrlgPDg24h2ly0jzdg92OBjkG--gMg,36776 +pip/locations.py,sha256=MqUzS8YI2wDa7oFzTQw4zM4s0Hci05yubxfU_kTXXlU,5632 +pip/pep425tags.py,sha256=nXeMZN4d3h14oVovpI0WholWCNCR0MD2mAERl3YHs08,7240 +pip/status_codes.py,sha256=F6uDG6Gj7RNKQJUDnd87QKqI16Us-t-B0wPF_4QMpWc,156 +pip/wheel.py,sha256=e3iaG7X6Z6eQvfChzGiZHK4yw12Z_PqLSRCi6_AUf4s,32030 +pip/_vendor/__init__.py,sha256=OuNX6SjnmNk4mvSyZXarkqf7LtKke3IH7CWoAi_w-5o,4229 +pip/commands/__init__.py,sha256=BSfOTIkMIBWElsuWJ_uAuCmDQS8b-OHYiyOkk77tWSU,2215 +pip/commands/completion.py,sha256=7JkLif3DF0QGpjMaUjHvF8knJ3IumcED2gWRvMRpFy0,1991 +pip/commands/download.py,sha256=dMRtH0JMBhNGlJWr1qC29vOeiBzG2K0OjOAfzdxSVgA,4804 +pip/commands/freeze.py,sha256=_wHnuHYXC4V0zBLD7LfDhgI_bWL6KdcCgzzQ9bXwDkU,2330 +pip/commands/hash.py,sha256=MCt4jEFyfoce0lVeNEz1x49uaTY-VDkKiBvvxrVcHkw,1597 +pip/commands/help.py,sha256=84HWkEdnGP_AEBHnn8gJP2Te0XTXRKFoXqXopbOZTNo,982 +pip/commands/install.py,sha256=8MOsH3IlL3ovZhTQtZwHhJb19pnkr8eKNE_9klVJ3PU,14971 +pip/commands/list.py,sha256=u76U5TLODQ2g53sSUA4q6WhYus7usbuWuITQJsCnP3E,7412 +pip/commands/search.py,sha256=dJe9rcam1TEfNp9-Gi36WjHc3l4mdj8gheVjqK5BrR0,4605 +pip/commands/show.py,sha256=yxghAwGYaYphL2LJdJbYXVLFr8tBMHnuH8n8s2fWMr4,4903 +pip/commands/uninstall.py,sha256=tz8cXz4WdpUdnt3RvpdQwH6_SNMB50egBIZWa1dwfcc,2884 +pip/commands/wheel.py,sha256=iT92Uo8qpVILl_Yk8L7AtkFVYGmY0ep5oDeyQSpwkLs,7528 +pip/compat/__init__.py,sha256=-k3m7JYe8ztMz2GGCPMc-XK7Uo-RiLdV00dSxWKMjfg,4536 +pip/compat/dictconfig.py,sha256=dRrelPDWrceDSzFT51RTEVY2GuM7UDyc5Igh_tn4Fvk,23096 +pip/models/__init__.py,sha256=0Rs7_RA4DxeOkWT5Cq4CQzDrSEhvYcN3TH2cazr72PE,71 +pip/models/index.py,sha256=pUfbO__v3mD9j-2n_ClwPS8pVyx4l2wIwyvWt8GMCRA,487 +pip/operations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/operations/freeze.py,sha256=Px8en5guEfc6mhYh0cATtT6tlFzqTzLj4ad8gqAkIqw,3925 +pip/req/__init__.py,sha256=vFwZY8_Vc1WU1zFAespg1My_r_AT3n7cN0W9eX0EFqk,276 +pip/req/req_file.py,sha256=h1YmFfD7Opb_ulTTyEp7Osv2d8gbQJ1KGWMptEl_S08,11764 +pip/req/req_install.py,sha256=DYLV95E3U81nRJy4q8qs0fozLiCQZbG8Yg-CbUTwA2w,46670 +pip/req/req_set.py,sha256=0ncBet1v7gbsKeTUPpBj_-6Kowxx-iskw0_kLMGObi4,32236 +pip/req/req_uninstall.py,sha256=fdH2VgCjEC8NRYDS7fRu3ZJaBBUEy-N5muwxDX5MBNM,6897 +pip/utils/__init__.py,sha256=4Tz09B6nsZm6bQ3mR7K-AeDjlMLMFjnehaXH4vG_E-0,26759 +pip/utils/appdirs.py,sha256=KTpZANfjYw5K2tZ0_jNNdP_kMxQAns79qZWelwaJo0c,7896 +pip/utils/build.py,sha256=4smLRrfSCmXmjEnVnMFh2tBEpNcSLRe6J0ejZJ-wWJE,1312 +pip/utils/deprecation.py,sha256=0y-RdGVpnt-_byop0WJOOb509f8jjOzSmKghHorTclU,2282 +pip/utils/filesystem.py,sha256=ZEVBuYM3fqr2_lgOESh4Y7fPFszGD474zVm_M3Mb5Tk,899 
+pip/utils/hashes.py,sha256=oMk7cd3PbJgzpSQyXq1MytMud5f6H5Oa2YY5hYuCq6I,2866 +pip/utils/logging.py,sha256=7yWu4gZw-Qclj7X80QVdpGWkdTWGKT4LiUVKcE04pro,3327 +pip/utils/outdated.py,sha256=fNwOCL5r2EftPGhgCYGMKu032HC8cV-JAr9lp0HmToM,5455 +pip/utils/setuptools_build.py,sha256=8IGop-SZ6lxUl5HMOjLRaDlORPugIH_b_b2Y67x4jQc,240 +pip/utils/ui.py,sha256=fY7lHmQg3Pdnsgkge2mpZMNU9e1jg6unYYs2Ryfulhk,11320 +pip/vcs/__init__.py,sha256=lnea41zMq9HqB1Qo7hxy2IjUzk5WtBvnoloCCMR6Vk4,12349 +pip/vcs/bazaar.py,sha256=tYTwc4b4off8mr0O2o8SiGejqBDJxcbDBMSMd9-ISYc,3803 +pip/vcs/git.py,sha256=jN3vZCn1DqE-RdKGfMqlDhObZ3WFjC21dEav29M62xI,10054 +pip/vcs/mercurial.py,sha256=xG6rDiwHCRytJEs23SIHBXl_SwQo2jkkdD_6rVVP5h4,3472 +pip/vcs/subversion.py,sha256=mGT7sAzuVc1u-9MPoXJNyShnRzhdJpDdGNuhhzUPv6w,8687 +pip-8.0.2.dist-info/DESCRIPTION.rst,sha256=_rptqJIyCNNmh7m8q-4qZfQDc9gqAjMxVITAEfItc08,1060 +pip-8.0.2.dist-info/METADATA,sha256=0KwLFgIzCAQ506gjLJ_VyrUxbw2NC8b-kUbTM8Uo42Y,2212 +pip-8.0.2.dist-info/RECORD,, +pip-8.0.2.dist-info/WHEEL,sha256=GrqQvamwgBV4nLoJe0vhYRSWzWsx7xjlt74FT0SWYfE,110 +pip-8.0.2.dist-info/entry_points.txt,sha256=1-e4WB_Fe8mWHrMi1YQo_s5knbh0lu_uRmd8Wb6MJfY,68 +pip-8.0.2.dist-info/metadata.json,sha256=Sl2y0vogC_Ro8peVqn3OBZJQ_kV0PdQ9QfraUV3XPj0,1393 +pip-8.0.2.dist-info/top_level.txt,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +/var/lib/lxc/openmedialibrary/rootfs/srv/client/platform_linux64/p34/bin/pip,sha256=MEQVvFZcu35ZQaa5ungLLm_kHRGvmD4u-AbMJaTm8vU,271 +/var/lib/lxc/openmedialibrary/rootfs/srv/client/platform_linux64/p34/bin/pip3,sha256=MEQVvFZcu35ZQaa5ungLLm_kHRGvmD4u-AbMJaTm8vU,271 +/var/lib/lxc/openmedialibrary/rootfs/srv/client/platform_linux64/p34/bin/pip3.4,sha256=MEQVvFZcu35ZQaa5ungLLm_kHRGvmD4u-AbMJaTm8vU,271 +/var/lib/lxc/openmedialibrary/rootfs/srv/client/platform_linux64/p34/lib/python3.4/site-packages/pip-8.0.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +pip/commands/__pycache__/download.cpython-34.pyc,, +pip/_vendor/__pycache__/__init__.cpython-34.pyc,, +pip/utils/__pycache__/ui.cpython-34.pyc,, +pip/commands/__pycache__/help.cpython-34.pyc,, +pip/utils/__pycache__/filesystem.cpython-34.pyc,, +pip/commands/__pycache__/freeze.cpython-34.pyc,, +pip/models/__pycache__/__init__.cpython-34.pyc,, +pip/commands/__pycache__/__init__.cpython-34.pyc,, +pip/compat/__pycache__/dictconfig.cpython-34.pyc,, +pip/utils/__pycache__/deprecation.cpython-34.pyc,, +pip/req/__pycache__/__init__.cpython-34.pyc,, +pip/commands/__pycache__/list.cpython-34.pyc,, +pip/utils/__pycache__/hashes.cpython-34.pyc,, +pip/commands/__pycache__/completion.cpython-34.pyc,, +pip/vcs/__pycache__/mercurial.cpython-34.pyc,, +pip/operations/__pycache__/freeze.cpython-34.pyc,, +pip/vcs/__pycache__/git.cpython-34.pyc,, +pip/__pycache__/__main__.cpython-34.pyc,, +pip/commands/__pycache__/hash.cpython-34.pyc,, +pip/__pycache__/download.cpython-34.pyc,, +pip/vcs/__pycache__/__init__.cpython-34.pyc,, +pip/__pycache__/index.cpython-34.pyc,, +pip/__pycache__/wheel.cpython-34.pyc,, +pip/__pycache__/cmdoptions.cpython-34.pyc,, +pip/operations/__pycache__/__init__.cpython-34.pyc,, +pip/__pycache__/baseparser.cpython-34.pyc,, +pip/commands/__pycache__/show.cpython-34.pyc,, +pip/req/__pycache__/req_set.cpython-34.pyc,, +pip/__pycache__/basecommand.cpython-34.pyc,, +pip/__pycache__/__init__.cpython-34.pyc,, +pip/commands/__pycache__/wheel.cpython-34.pyc,, +pip/utils/__pycache__/setuptools_build.cpython-34.pyc,, +pip/vcs/__pycache__/bazaar.cpython-34.pyc,, +pip/__pycache__/exceptions.cpython-34.pyc,, 
+pip/req/__pycache__/req_uninstall.cpython-34.pyc,, +pip/utils/__pycache__/logging.cpython-34.pyc,, +pip/__pycache__/pep425tags.cpython-34.pyc,, +pip/req/__pycache__/req_install.cpython-34.pyc,, +pip/__pycache__/locations.cpython-34.pyc,, +pip/commands/__pycache__/install.cpython-34.pyc,, +pip/utils/__pycache__/build.cpython-34.pyc,, +pip/commands/__pycache__/uninstall.cpython-34.pyc,, +pip/vcs/__pycache__/subversion.cpython-34.pyc,, +pip/utils/__pycache__/outdated.cpython-34.pyc,, +pip/commands/__pycache__/search.cpython-34.pyc,, +pip/__pycache__/status_codes.cpython-34.pyc,, +pip/req/__pycache__/req_file.cpython-34.pyc,, +pip/compat/__pycache__/__init__.cpython-34.pyc,, +pip/models/__pycache__/index.cpython-34.pyc,, +pip/utils/__pycache__/appdirs.cpython-34.pyc,, +pip/utils/__pycache__/__init__.cpython-34.pyc,, diff --git a/lib/python3.4/site-packages/setuptools-18.5.dist-info/WHEEL b/lib/python3.4/site-packages/pip-8.0.2.dist-info/WHEEL similarity index 100% rename from lib/python3.4/site-packages/setuptools-18.5.dist-info/WHEEL rename to lib/python3.4/site-packages/pip-8.0.2.dist-info/WHEEL diff --git a/lib/python3.4/site-packages/pip-8.0.2.dist-info/entry_points.txt b/lib/python3.4/site-packages/pip-8.0.2.dist-info/entry_points.txt new file mode 100644 index 0000000..a237b5e --- /dev/null +++ b/lib/python3.4/site-packages/pip-8.0.2.dist-info/entry_points.txt @@ -0,0 +1,5 @@ +[console_scripts] +pip = pip:main +pip3 = pip:main +pip3.4 = pip:main + diff --git a/lib/python3.4/site-packages/pip-8.0.2.dist-info/metadata.json b/lib/python3.4/site-packages/pip-8.0.2.dist-info/metadata.json new file mode 100644 index 0000000..ccc2ef2 --- /dev/null +++ b/lib/python3.4/site-packages/pip-8.0.2.dist-info/metadata.json @@ -0,0 +1 @@ +{"classifiers": ["Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Topic :: Software Development :: Build Tools", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: Implementation :: PyPy"], "extensions": {"python.commands": {"wrap_console": {"pip": "pip:main", "pip3": "pip:main", "pip3.4": "pip:main"}}, "python.details": {"contacts": [{"email": "python-virtualenv@groups.google.com", "name": "The pip developers", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "https://pip.pypa.io/"}}, "python.exports": {"console_scripts": {"pip": "pip:main", "pip3": "pip:main", "pip3.4": "pip:main"}}}, "extras": ["testing"], "generator": "bdist_wheel (0.26.0)", "keywords": ["easy_install", "distutils", "setuptools", "egg", "virtualenv"], "license": "MIT", "metadata_version": "2.0", "name": "pip", "run_requires": [{"extra": "testing", "requires": ["mock", "pytest", "scripttest (>=1.3)", "virtualenv (>=1.10)"]}], "summary": "The PyPA recommended tool for installing Python packages.", "version": "8.0.2"} \ No newline at end of file diff --git a/lib/python3.4/site-packages/pip-8.0.2.dist-info/top_level.txt b/lib/python3.4/site-packages/pip-8.0.2.dist-info/top_level.txt new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/lib/python3.4/site-packages/pip-8.0.2.dist-info/top_level.txt @@ -0,0 +1 @@ +pip diff --git a/lib/python3.4/site-packages/pip/__init__.py 
b/lib/python3.4/site-packages/pip/__init__.py new file mode 100644 index 0000000..0603ca1 --- /dev/null +++ b/lib/python3.4/site-packages/pip/__init__.py @@ -0,0 +1,315 @@ +#!/usr/bin/env python +from __future__ import absolute_import + +import logging +import os +import optparse +import warnings + +import sys +import re + +from pip.exceptions import InstallationError, CommandError, PipError +from pip.utils import get_installed_distributions, get_prog +from pip.utils import deprecation, dist_is_editable +from pip.vcs import git, mercurial, subversion, bazaar # noqa +from pip.baseparser import ConfigOptionParser, UpdatingDefaultsHelpFormatter +from pip.commands import get_summaries, get_similar_commands +from pip.commands import commands_dict +from pip._vendor.requests.packages.urllib3.exceptions import ( + InsecureRequestWarning, +) + + +# assignment for flake8 to be happy + +# This fixes a peculiarity when importing via __import__ - as we are +# initialising the pip module, "from pip import cmdoptions" is recursive +# and appears not to work properly in that situation. +import pip.cmdoptions +cmdoptions = pip.cmdoptions + +# The version as used in the setup.py and the docs conf.py +__version__ = "8.0.2" + + +logger = logging.getLogger(__name__) + +# Hide the InsecureRequestWarning from urllib3 +warnings.filterwarnings("ignore", category=InsecureRequestWarning) + + +def autocomplete(): + """Command and option completion for the main option parser (and options) + and its subcommands (and options). + + Enable by sourcing one of the completion shell scripts (bash or zsh). + """ + # Don't complete if user hasn't sourced bash_completion file. + if 'PIP_AUTO_COMPLETE' not in os.environ: + return + cwords = os.environ['COMP_WORDS'].split()[1:] + cword = int(os.environ['COMP_CWORD']) + try: + current = cwords[cword - 1] + except IndexError: + current = '' + + subcommands = [cmd for cmd, summary in get_summaries()] + options = [] + # subcommand + try: + subcommand_name = [w for w in cwords if w in subcommands][0] + except IndexError: + subcommand_name = None + + parser = create_main_parser() + # subcommand options + if subcommand_name: + # special case: 'help' subcommand has no options + if subcommand_name == 'help': + sys.exit(1) + # special case: list locally installed dists for uninstall command + if subcommand_name == 'uninstall' and not current.startswith('-'): + installed = [] + lc = current.lower() + for dist in get_installed_distributions(local_only=True): + if dist.key.startswith(lc) and dist.key not in cwords[1:]: + installed.append(dist.key) + # if there are no dists installed, fall back to option completion + if installed: + for dist in installed: + print(dist) + sys.exit(1) + + subcommand = commands_dict[subcommand_name]() + options += [(opt.get_opt_string(), opt.nargs) + for opt in subcommand.parser.option_list_all + if opt.help != optparse.SUPPRESS_HELP] + + # filter out previously specified options from available options + prev_opts = [x.split('=')[0] for x in cwords[1:cword - 1]] + options = [(x, v) for (x, v) in options if x not in prev_opts] + # filter options by current input + options = [(k, v) for k, v in options if k.startswith(current)] + for option in options: + opt_label = option[0] + # append '=' to options which require args + if option[1]: + opt_label += '=' + print(opt_label) + else: + # show main parser options only when necessary + if current.startswith('-') or current.startswith('--'): + opts = [i.option_list for i in parser.option_groups] +
opts.append(parser.option_list) + opts = (o for it in opts for o in it) + + subcommands += [i.get_opt_string() for i in opts + if i.help != optparse.SUPPRESS_HELP] + + print(' '.join([x for x in subcommands if x.startswith(current)])) + sys.exit(1) + + +def create_main_parser(): + parser_kw = { + 'usage': '\n%prog [options]', + 'add_help_option': False, + 'formatter': UpdatingDefaultsHelpFormatter(), + 'name': 'global', + 'prog': get_prog(), + } + + parser = ConfigOptionParser(**parser_kw) + parser.disable_interspersed_args() + + pip_pkg_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) + parser.version = 'pip %s from %s (python %s)' % ( + __version__, pip_pkg_dir, sys.version[:3]) + + # add the general options + gen_opts = cmdoptions.make_option_group(cmdoptions.general_group, parser) + parser.add_option_group(gen_opts) + + parser.main = True # so the help formatter knows + + # create command listing for description + command_summaries = get_summaries() + description = [''] + ['%-27s %s' % (i, j) for i, j in command_summaries] + parser.description = '\n'.join(description) + + return parser + + +def parseopts(args): + parser = create_main_parser() + + # Note: parser calls disable_interspersed_args(), so the result of this + # call is to split the initial args into the general options before the + # subcommand and everything else. + # For example: + # args: ['--timeout=5', 'install', '--user', 'INITools'] + # general_options: ['--timeout=5'] + # args_else: ['install', '--user', 'INITools'] + general_options, args_else = parser.parse_args(args) + + # --version + if general_options.version: + sys.stdout.write(parser.version) + sys.stdout.write(os.linesep) + sys.exit() + + # pip || pip help -> print_help() + if not args_else or (args_else[0] == 'help' and len(args_else) == 1): + parser.print_help() + sys.exit() + + # the subcommand name + cmd_name = args_else[0] + + if cmd_name not in commands_dict: + guess = get_similar_commands(cmd_name) + + msg = ['unknown command "%s"' % cmd_name] + if guess: + msg.append('maybe you meant "%s"' % guess) + + raise CommandError(' - '.join(msg)) + + # all the args without the subcommand + cmd_args = args[:] + cmd_args.remove(cmd_name) + + return cmd_name, cmd_args + + +def check_isolated(args): + isolated = False + + if "--isolated" in args: + isolated = True + + return isolated + + +def main(args=None): + if args is None: + args = sys.argv[1:] + + # Enable our Deprecation Warnings + for deprecation_warning in deprecation.DEPRECATIONS: + warnings.simplefilter("default", deprecation_warning) + + # Configure our deprecation warnings to be sent through loggers + deprecation.install_warning_logger() + + autocomplete() + + try: + cmd_name, cmd_args = parseopts(args) + except PipError as exc: + sys.stderr.write("ERROR: %s" % exc) + sys.stderr.write(os.linesep) + sys.exit(1) + + command = commands_dict[cmd_name](isolated=check_isolated(cmd_args)) + return command.main(cmd_args) + + +# ########################################################### +# # Writing freeze files + +class FrozenRequirement(object): + + def __init__(self, name, req, editable, comments=()): + self.name = name + self.req = req + self.editable = editable + self.comments = comments + + _rev_re = re.compile(r'-r(\d+)$') + _date_re = re.compile(r'-(20\d\d\d\d\d\d)$') + + @classmethod + def from_dist(cls, dist, dependency_links): + location = os.path.normcase(os.path.abspath(dist.location)) + comments = [] + from pip.vcs import vcs, get_src_requirement + if dist_is_editable(dist) and
vcs.get_backend_name(location): + editable = True + try: + req = get_src_requirement(dist, location) + except InstallationError as exc: + logger.warning( + "Error when trying to get requirement for VCS system %s, " + "falling back to uneditable format", exc + ) + req = None + if req is None: + logger.warning( + 'Could not determine repository location of %s', location + ) + comments.append( + '## !! Could not determine repository location' + ) + req = dist.as_requirement() + editable = False + else: + editable = False + req = dist.as_requirement() + specs = req.specs + assert len(specs) == 1 and specs[0][0] in ["==", "==="], \ + 'Expected 1 spec with == or ===; specs = %r; dist = %r' % \ + (specs, dist) + version = specs[0][1] + ver_match = cls._rev_re.search(version) + date_match = cls._date_re.search(version) + if ver_match or date_match: + svn_backend = vcs.get_backend('svn') + if svn_backend: + svn_location = svn_backend().get_location( + dist, + dependency_links, + ) + if not svn_location: + logger.warning( + 'Warning: cannot find svn location for %s', req) + comments.append( + '## FIXME: could not find svn URL in dependency_links ' + 'for this package:' + ) + else: + comments.append( + '# Installing as editable to satisfy requirement %s:' % + req + ) + if ver_match: + rev = ver_match.group(1) + else: + rev = '{%s}' % date_match.group(1) + editable = True + req = '%s@%s#egg=%s' % ( + svn_location, + rev, + cls.egg_name(dist) + ) + return cls(dist.project_name, req, editable, comments) + + @staticmethod + def egg_name(dist): + name = dist.egg_name() + match = re.search(r'-py\d\.\d$', name) + if match: + name = name[:match.start()] + return name + + def __str__(self): + req = self.req + if self.editable: + req = '-e %s' % req + return '\n'.join(list(self.comments) + [str(req)]) + '\n' + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/lib/python3.4/site-packages/pip/__main__.py b/lib/python3.4/site-packages/pip/__main__.py new file mode 100644 index 0000000..5556539 --- /dev/null +++ b/lib/python3.4/site-packages/pip/__main__.py @@ -0,0 +1,19 @@ +from __future__ import absolute_import + +import os +import sys + +# If we are running from a wheel, add the wheel to sys.path +# This allows the usage python pip-*.whl/pip install pip-*.whl +if __package__ == '': + # __file__ is pip-*.whl/pip/__main__.py + # first dirname call strips off '/__main__.py', second strips off '/pip' + # Resulting path is the name of the wheel itself + # Add that to sys.path so we can import pip + path = os.path.dirname(os.path.dirname(__file__)) + sys.path.insert(0, path) + +import pip # noqa + +if __name__ == '__main__': + sys.exit(pip.main()) diff --git a/lib/python3.4/site-packages/pip/_vendor/__init__.py b/lib/python3.4/site-packages/pip/_vendor/__init__.py new file mode 100644 index 0000000..c64896a --- /dev/null +++ b/lib/python3.4/site-packages/pip/_vendor/__init__.py @@ -0,0 +1,104 @@ +""" +pip._vendor is for vendoring dependencies of pip to prevent needing pip to +depend on something external. + +Files inside of pip._vendor should be considered immutable and should only be +updated to versions from upstream. +""" +from __future__ import absolute_import + +import glob +import os.path +import sys + +# Downstream redistributors which have debundled our dependencies should also +# patch this value to be true. This will trigger the additional patching +# to cause things like "six" to be available as pip._vendor.six.
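+# (Illustrative note: on a debundled install such as Debian's, the machinery +# below makes "import pip._vendor.six" fall back to the system "six", after +# which pip._vendor.six and six are the same module object in sys.modules; +# the dependency wheels are found in whatever directory WHEEL_DIR names.)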
+DEBUNDLED = True + +# By default, look in this directory for a bunch of .whl files which we will +# add to the beginning of sys.path before attempting to import anything. This +# is done to support downstream re-distributors like Debian and Fedora who +# wish to create their own Wheels for our dependencies to aid in debundling. +WHEEL_DIR = os.path.abspath(os.path.join(sys.prefix, 'share', 'python-wheels')) + + +# Define a small helper function to alias our vendored modules to the real ones +# if the vendored ones do not exist. The idea for this was taken from +# https://github.com/kennethreitz/requests/pull/2567. +def vendored(modulename): + vendored_name = "{0}.{1}".format(__name__, modulename) + + try: + __import__(vendored_name, globals(), locals(), level=0) + except ImportError: + __import__(modulename, globals(), locals(), level=0) + sys.modules[vendored_name] = sys.modules[modulename] + base, head = vendored_name.rsplit(".", 1) + setattr(sys.modules[base], head, sys.modules[modulename]) + + +# If we're operating in a debundled setup, then we want to go ahead and trigger +# the aliasing of our vendored libraries as well as looking for wheels to add +# to our sys.path. This will typically cause all of this code to be a no-op; +# however, downstream redistributors can enable it in a consistent way across +# all platforms. +if DEBUNDLED: + # Actually look inside of WHEEL_DIR to find .whl files and add them to the + # front of our sys.path. + sys.path[:] = glob.glob(os.path.join(WHEEL_DIR, "*.whl")) + sys.path + + # Actually alias all of our vendored dependencies. + vendored("cachecontrol") + vendored("colorama") + vendored("distlib") + vendored("html5lib") + vendored("lockfile") + vendored("six") + vendored("six.moves") + vendored("six.moves.urllib") + vendored("packaging") + vendored("packaging.version") + vendored("packaging.specifiers") + vendored("pkg_resources") + vendored("progress") + vendored("retrying") + vendored("requests") + vendored("requests.packages") + vendored("requests.packages.urllib3") + vendored("requests.packages.urllib3._collections") + vendored("requests.packages.urllib3.connection") + vendored("requests.packages.urllib3.connectionpool") + vendored("requests.packages.urllib3.contrib") + try: + vendored("requests.packages.urllib3.contrib.ntlmpool") + except ImportError: + pass + try: + vendored("requests.packages.urllib3.contrib.pyopenssl") + except ImportError: + pass + vendored("requests.packages.urllib3.exceptions") + vendored("requests.packages.urllib3.fields") + vendored("requests.packages.urllib3.filepost") + vendored("requests.packages.urllib3.packages") + try: + vendored("requests.packages.urllib3.packages.ordered_dict") + vendored("requests.packages.urllib3.packages.six") + except ImportError: + # Debian already unbundles these from requests. + pass + vendored("requests.packages.urllib3.packages.ssl_match_hostname") + vendored("requests.packages.urllib3.packages.ssl_match_hostname."
+ "_implementation") + vendored("requests.packages.urllib3.poolmanager") + vendored("requests.packages.urllib3.request") + vendored("requests.packages.urllib3.response") + vendored("requests.packages.urllib3.util") + vendored("requests.packages.urllib3.util.connection") + vendored("requests.packages.urllib3.util.request") + vendored("requests.packages.urllib3.util.response") + vendored("requests.packages.urllib3.util.retry") + vendored("requests.packages.urllib3.util.ssl_") + vendored("requests.packages.urllib3.util.timeout") + vendored("requests.packages.urllib3.util.url") diff --git a/lib/python3.4/site-packages/pip/basecommand.py b/lib/python3.4/site-packages/pip/basecommand.py new file mode 100644 index 0000000..a07043a --- /dev/null +++ b/lib/python3.4/site-packages/pip/basecommand.py @@ -0,0 +1,325 @@ +"""Base Command class, and related routines""" +from __future__ import absolute_import + +import logging +import os +import sys +import optparse +import warnings + +from pip import cmdoptions +from pip.index import PackageFinder +from pip.locations import running_under_virtualenv +from pip.download import PipSession +from pip.exceptions import (BadCommand, InstallationError, UninstallationError, + CommandError, PreviousBuildDirError) + +from pip.compat import logging_dictConfig +from pip.baseparser import ConfigOptionParser, UpdatingDefaultsHelpFormatter +from pip.req import InstallRequirement, parse_requirements +from pip.status_codes import ( + SUCCESS, ERROR, UNKNOWN_ERROR, VIRTUALENV_NOT_FOUND, + PREVIOUS_BUILD_DIR_ERROR, +) +from pip.utils import deprecation, get_prog, normalize_path +from pip.utils.logging import IndentingFormatter +from pip.utils.outdated import pip_version_check + + +__all__ = ['Command'] + + +logger = logging.getLogger(__name__) + + +class Command(object): + name = None + usage = None + hidden = False + log_streams = ("ext://sys.stdout", "ext://sys.stderr") + + def __init__(self, isolated=False): + parser_kw = { + 'usage': self.usage, + 'prog': '%s %s' % (get_prog(), self.name), + 'formatter': UpdatingDefaultsHelpFormatter(), + 'add_help_option': False, + 'name': self.name, + 'description': self.__doc__, + 'isolated': isolated, + } + + self.parser = ConfigOptionParser(**parser_kw) + + # Commands should add options to this option group + optgroup_name = '%s Options' % self.name.capitalize() + self.cmd_opts = optparse.OptionGroup(self.parser, optgroup_name) + + # Add the general options + gen_opts = cmdoptions.make_option_group( + cmdoptions.general_group, + self.parser, + ) + self.parser.add_option_group(gen_opts) + + def _build_session(self, options, retries=None, timeout=None): + session = PipSession( + cache=( + normalize_path(os.path.join(options.cache_dir, "http")) + if options.cache_dir else None + ), + retries=retries if retries is not None else options.retries, + insecure_hosts=options.trusted_hosts, + ) + + # Handle custom ca-bundles from the user + if options.cert: + session.verify = options.cert + + # Handle SSL client certificate + if options.client_cert: + session.cert = options.client_cert + + # Handle timeouts + if options.timeout or timeout: + session.timeout = ( + timeout if timeout is not None else options.timeout + ) + + # Handle configured proxies + if options.proxy: + session.proxies = { + "http": options.proxy, + "https": options.proxy, + } + + # Determine if we can prompt the user for authentication or not + session.auth.prompting = not options.no_input + + return session + + def parse_args(self, args): + # factored out for testability + 
return self.parser.parse_args(args) + + def main(self, args): + options, args = self.parse_args(args) + + if options.quiet: + if options.quiet == 1: + level = "WARNING" + elif options.quiet == 2: + level = "ERROR" + else: + level = "CRITICAL" + elif options.verbose: + level = "DEBUG" + else: + level = "INFO" + + logging_dictConfig({ + "version": 1, + "disable_existing_loggers": False, + "filters": { + "exclude_warnings": { + "()": "pip.utils.logging.MaxLevelFilter", + "level": logging.WARNING, + }, + }, + "formatters": { + "indent": { + "()": IndentingFormatter, + "format": "%(message)s", + }, + }, + "handlers": { + "console": { + "level": level, + "class": "pip.utils.logging.ColorizedStreamHandler", + "stream": self.log_streams[0], + "filters": ["exclude_warnings"], + "formatter": "indent", + }, + "console_errors": { + "level": "WARNING", + "class": "pip.utils.logging.ColorizedStreamHandler", + "stream": self.log_streams[1], + "formatter": "indent", + }, + "user_log": { + "level": "DEBUG", + "class": "pip.utils.logging.BetterRotatingFileHandler", + "filename": options.log or "/dev/null", + "delay": True, + "formatter": "indent", + }, + }, + "root": { + "level": level, + "handlers": list(filter(None, [ + "console", + "console_errors", + "user_log" if options.log else None, + ])), + }, + # Disable any logging besides WARNING unless we have DEBUG level + # logging enabled. These use both pip._vendor and the bare names + # for the case where someone unbundles our libraries. + "loggers": dict( + ( + name, + { + "level": ( + "WARNING" + if level in ["INFO", "ERROR"] + else "DEBUG" + ), + }, + ) + for name in ["pip._vendor", "distlib", "requests", "urllib3"] + ), + }) + + if sys.version_info[:2] == (2, 6): + warnings.warn( + "Python 2.6 is no longer supported by the Python core team, " + "please upgrade your Python. A future version of pip will " + "drop support for Python 2.6", + deprecation.Python26DeprecationWarning + ) + + # TODO: try to get these passing down from the command? + # without resorting to os.environ to hold these. + + if options.no_input: + os.environ['PIP_NO_INPUT'] = '1' + + if options.exists_action: + os.environ['PIP_EXISTS_ACTION'] = ' '.join(options.exists_action) + + if options.require_venv: + # If a venv is required, check if it can really be found + if not running_under_virtualenv(): + logger.critical( + 'Could not find an activated virtualenv (required).'
+ ) + sys.exit(VIRTUALENV_NOT_FOUND) + + try: + status = self.run(options, args) + # FIXME: all commands should return an exit status + # and when it is done, isinstance is not needed anymore + if isinstance(status, int): + return status + except PreviousBuildDirError as exc: + logger.critical(str(exc)) + logger.debug('Exception information:', exc_info=True) + + return PREVIOUS_BUILD_DIR_ERROR + except (InstallationError, UninstallationError, BadCommand) as exc: + logger.critical(str(exc)) + logger.debug('Exception information:', exc_info=True) + + return ERROR + except CommandError as exc: + logger.critical('ERROR: %s', exc) + logger.debug('Exception information:', exc_info=True) + + return ERROR + except KeyboardInterrupt: + logger.critical('Operation cancelled by user') + logger.debug('Exception information:', exc_info=True) + + return ERROR + except: + logger.critical('Exception:', exc_info=True) + + return UNKNOWN_ERROR + finally: + # Check if we're using the latest version of pip available + if (not options.disable_pip_version_check and not + getattr(options, "no_index", False)): + with self._build_session( + options, + retries=0, + timeout=min(5, options.timeout)) as session: + pip_version_check(session) + + return SUCCESS + + +class RequirementCommand(Command): + + @staticmethod + def populate_requirement_set(requirement_set, args, options, finder, + session, name, wheel_cache): + """ + Marshal cmd line args into a requirement set. + """ + for filename in options.constraints: + for req in parse_requirements( + filename, + constraint=True, finder=finder, options=options, + session=session, wheel_cache=wheel_cache): + requirement_set.add_requirement(req) + + for req in args: + requirement_set.add_requirement( + InstallRequirement.from_line( + req, None, isolated=options.isolated_mode, + wheel_cache=wheel_cache + ) + ) + + for req in options.editables: + requirement_set.add_requirement( + InstallRequirement.from_editable( + req, + default_vcs=options.default_vcs, + isolated=options.isolated_mode, + wheel_cache=wheel_cache + ) + ) + + found_req_in_file = False + for filename in options.requirements: + for req in parse_requirements( + filename, + finder=finder, options=options, session=session, + wheel_cache=wheel_cache): + found_req_in_file = True + requirement_set.add_requirement(req) + # If --require-hashes was a line in a requirements file, tell + # RequirementSet about it: + requirement_set.require_hashes = options.require_hashes + + if not (args or options.editables or found_req_in_file): + opts = {'name': name} + if options.find_links: + msg = ('You must give at least one requirement to ' + '%(name)s (maybe you meant "pip %(name)s ' + '%(links)s"?)' % + dict(opts, links=' '.join(options.find_links))) + else: + msg = ('You must give at least one requirement ' + 'to %(name)s (see "pip help %(name)s")' % opts) + logger.warning(msg) + + def _build_package_finder(self, options, session): + """ + Create a package finder appropriate to this requirement command. 
+ """ + index_urls = [options.index_url] + options.extra_index_urls + if options.no_index: + logger.info('Ignoring indexes: %s', ','.join(index_urls)) + index_urls = [] + + return PackageFinder( + find_links=options.find_links, + format_control=options.format_control, + index_urls=index_urls, + trusted_hosts=options.trusted_hosts, + allow_all_prereleases=options.pre, + process_dependency_links=options.process_dependency_links, + session=session, + ) diff --git a/lib/python3.4/site-packages/pip/baseparser.py b/lib/python3.4/site-packages/pip/baseparser.py new file mode 100644 index 0000000..ccbf36b --- /dev/null +++ b/lib/python3.4/site-packages/pip/baseparser.py @@ -0,0 +1,292 @@ +"""Base option parser setup""" +from __future__ import absolute_import + +import sys +import optparse +import os +import re +import textwrap +from distutils.util import strtobool + +from pip._vendor.six import string_types +from pip._vendor.six.moves import configparser +from pip.locations import ( + legacy_config_file, config_basename, running_under_virtualenv, + site_config_files +) +from pip.utils import appdirs, get_terminal_size + + +_environ_prefix_re = re.compile(r"^PIP_", re.I) + + +class PrettyHelpFormatter(optparse.IndentedHelpFormatter): + """A prettier/less verbose help formatter for optparse.""" + + def __init__(self, *args, **kwargs): + # help position must be aligned with __init__.parseopts.description + kwargs['max_help_position'] = 30 + kwargs['indent_increment'] = 1 + kwargs['width'] = get_terminal_size()[0] - 2 + optparse.IndentedHelpFormatter.__init__(self, *args, **kwargs) + + def format_option_strings(self, option): + return self._format_option_strings(option, ' <%s>', ', ') + + def _format_option_strings(self, option, mvarfmt=' <%s>', optsep=', '): + """ + Return a comma-separated list of option strings and metavars. + + :param option: tuple of (short opt, long opt), e.g: ('-f', '--format') + :param mvarfmt: metavar format string - evaluated as mvarfmt % metavar + :param optsep: separator + """ + opts = [] + + if option._short_opts: + opts.append(option._short_opts[0]) + if option._long_opts: + opts.append(option._long_opts[0]) + if len(opts) > 1: + opts.insert(1, optsep) + + if option.takes_value(): + metavar = option.metavar or option.dest.lower() + opts.append(mvarfmt % metavar.lower()) + + return ''.join(opts) + + def format_heading(self, heading): + if heading == 'Options': + return '' + return heading + ':\n' + + def format_usage(self, usage): + """ + Ensure there is only one newline between usage and the first heading + if there is no description. 
+ """ + msg = '\nUsage: %s\n' % self.indent_lines(textwrap.dedent(usage), " ") + return msg + + def format_description(self, description): + # leave full control over description to us + if description: + if hasattr(self.parser, 'main'): + label = 'Commands' + else: + label = 'Description' + # some doc strings have initial newlines, some don't + description = description.lstrip('\n') + # some doc strings have final newlines and spaces, some don't + description = description.rstrip() + # dedent, then reindent + description = self.indent_lines(textwrap.dedent(description), " ") + description = '%s:\n%s\n' % (label, description) + return description + else: + return '' + + def format_epilog(self, epilog): + # leave full control over epilog to us + if epilog: + return epilog + else: + return '' + + def indent_lines(self, text, indent): + new_lines = [indent + line for line in text.split('\n')] + return "\n".join(new_lines) + + +class UpdatingDefaultsHelpFormatter(PrettyHelpFormatter): + """Custom help formatter for use in ConfigOptionParser. + + This is updates the defaults before expanding them, allowing + them to show up correctly in the help listing. + """ + + def expand_default(self, option): + if self.parser is not None: + self.parser._update_defaults(self.parser.defaults) + return optparse.IndentedHelpFormatter.expand_default(self, option) + + +class CustomOptionParser(optparse.OptionParser): + def insert_option_group(self, idx, *args, **kwargs): + """Insert an OptionGroup at a given position.""" + group = self.add_option_group(*args, **kwargs) + + self.option_groups.pop() + self.option_groups.insert(idx, group) + + return group + + @property + def option_list_all(self): + """Get a list of all options, including those in option groups.""" + res = self.option_list[:] + for i in self.option_groups: + res.extend(i.option_list) + + return res + + +class ConfigOptionParser(CustomOptionParser): + """Custom option parser which updates its defaults by checking the + configuration files and environmental variables""" + + isolated = False + + def __init__(self, *args, **kwargs): + self.config = configparser.RawConfigParser() + self.name = kwargs.pop('name') + self.isolated = kwargs.pop("isolated", False) + self.files = self.get_config_files() + if self.files: + self.config.read(self.files) + assert self.name + optparse.OptionParser.__init__(self, *args, **kwargs) + + def get_config_files(self): + # the files returned by this method will be parsed in order with the + # first files listed being overridden by later files in standard + # ConfigParser fashion + config_file = os.environ.get('PIP_CONFIG_FILE', False) + if config_file == os.devnull: + return [] + + # at the base we have any site-wide configuration + files = list(site_config_files) + + # per-user configuration next + if not self.isolated: + if config_file and os.path.exists(config_file): + files.append(config_file) + else: + # This is the legacy config file, we consider it to be a lower + # priority than the new file location. + files.append(legacy_config_file) + + # This is the new config file, we consider it to be a higher + # priority than the legacy file. 
+ files.append( + os.path.join( + appdirs.user_config_dir("pip"), + config_basename, + ) + ) + + # finally, the virtualenv configuration, which trumps the others + if running_under_virtualenv(): + venv_config_file = os.path.join( + sys.prefix, + config_basename, + ) + if os.path.exists(venv_config_file): + files.append(venv_config_file) + + return files + + def check_default(self, option, key, val): + try: + return option.check_value(key, val) + except optparse.OptionValueError as exc: + print("An error occurred during configuration: %s" % exc) + sys.exit(3) + + def _update_defaults(self, defaults): + """Updates the given defaults with values from the config files and + the environ. Does a little special handling for certain types of + options (lists).""" + # Then go and look for the other sources of configuration: + config = {} + # 1. config files + for section in ('global', self.name): + config.update( + self.normalize_keys(self.get_config_section(section)) + ) + # 2. environment variables + if not self.isolated: + config.update(self.normalize_keys(self.get_environ_vars())) + # Accumulate complex default state. + self.values = optparse.Values(self.defaults) + late_eval = set() + # Then set the options with those values + for key, val in config.items(): + # ignore empty values + if not val: + continue + + option = self.get_option(key) + # Ignore options not present in this parser. E.g. non-globals put + # in [global] by users that want them to apply to all applicable + # commands. + if option is None: + continue + + if option.action in ('store_true', 'store_false', 'count'): + val = strtobool(val) + elif option.action == 'append': + val = val.split() + val = [self.check_default(option, key, v) for v in val] + elif option.action == 'callback': + late_eval.add(option.dest) + opt_str = option.get_opt_string() + val = option.convert_value(opt_str, val) + # From take_action + args = option.callback_args or () + kwargs = option.callback_kwargs or {} + option.callback(option, opt_str, val, self, *args, **kwargs) + else: + val = self.check_default(option, key, val) + + defaults[option.dest] = val + + for key in late_eval: + defaults[key] = getattr(self.values, key) + self.values = None + return defaults + + def normalize_keys(self, items): + """Return a config dictionary with normalized keys regardless of + whether the keys were specified in environment variables or in config + files""" + normalized = {} + for key, val in items: + key = key.replace('_', '-') + if not key.startswith('--'): + key = '--%s' % key # only prefer long opts + normalized[key] = val + return normalized + + def get_config_section(self, name): + """Get a section of a configuration""" + if self.config.has_section(name): + return self.config.items(name) + return [] + + def get_environ_vars(self): + """Returns a generator with all environment variables with prefix PIP_""" + for key, val in os.environ.items(): + if _environ_prefix_re.search(key): + yield (_environ_prefix_re.sub("", key).lower(), val) + + def get_default_values(self): + """Overriding to make updating the defaults after instantiation of + the option parser possible; _update_defaults() does the dirty work.""" + if not self.process_default_values: + # Old, pre-Optik 1.5 behaviour.
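+            # (Illustrative example of the override chain _update_defaults() +            # implements: a "timeout = 60" entry under [global], or the +            # environment variable PIP_TIMEOUT=60, both normalize to the +            # "--timeout" option and replace its built-in default of 15.)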
+ return optparse.Values(self.defaults) + + defaults = self._update_defaults(self.defaults.copy()) # ours + for option in self._get_all_options(): + default = defaults.get(option.dest) + if isinstance(default, string_types): + opt_str = option.get_opt_string() + defaults[option.dest] = option.check_value(opt_str, default) + return optparse.Values(defaults) + + def error(self, msg): + self.print_usage(sys.stderr) + self.exit(2, "%s\n" % msg) diff --git a/lib/python3.4/site-packages/pip/cmdoptions.py b/lib/python3.4/site-packages/pip/cmdoptions.py new file mode 100644 index 0000000..aced0c0 --- /dev/null +++ b/lib/python3.4/site-packages/pip/cmdoptions.py @@ -0,0 +1,618 @@ +""" +shared options and groups + +The principle here is to define options once, but *not* instantiate them +globally. One reason being that options with action='append' can carry state +between parses. pip parses general options twice internally, and shouldn't +pass on state. To be consistent, all options will follow this design. + +""" +from __future__ import absolute_import + +from functools import partial +from optparse import OptionGroup, SUPPRESS_HELP, Option +import warnings + +from pip.index import ( + FormatControl, fmt_ctl_handle_mutual_exclude, fmt_ctl_no_binary, + fmt_ctl_no_use_wheel) +from pip.models import PyPI +from pip.locations import USER_CACHE_DIR, src_prefix +from pip.utils.hashes import STRONG_HASHES + + +def make_option_group(group, parser): + """ + Return an OptionGroup object + group -- assumed to be dict with 'name' and 'options' keys + parser -- an optparse Parser + """ + option_group = OptionGroup(parser, group['name']) + for option in group['options']: + option_group.add_option(option()) + return option_group + + +def resolve_wheel_no_use_binary(options): + if not options.use_wheel: + control = options.format_control + fmt_ctl_no_use_wheel(control) + + +def check_install_build_global(options, check_options=None): + """Disable wheels if per-setup.py call options are set. + + :param options: The OptionParser options to update. + :param check_options: The options to check, if not supplied defaults to + options. + """ + if check_options is None: + check_options = options + + def getname(n): + return getattr(check_options, n, None) + names = ["build_options", "global_options", "install_options"] + if any(map(getname, names)): + control = options.format_control + fmt_ctl_no_binary(control) + warnings.warn( + 'Disabling all use of wheels due to the use of --build-options ' + '/ --global-options / --install-options.', stacklevel=2) + + +########### +# options # +########### + +help_ = partial( + Option, + '-h', '--help', + dest='help', + action='help', + help='Show help.') + +isolated_mode = partial( + Option, + "--isolated", + dest="isolated_mode", + action="store_true", + default=False, + help=( + "Run pip in an isolated mode, ignoring environment variables and user " + "configuration." + ), +) + +require_virtualenv = partial( + Option, + # Run only if inside a virtualenv, bail if not. + '--require-virtualenv', '--require-venv', + dest='require_venv', + action='store_true', + default=False, + help=SUPPRESS_HELP) + +verbose = partial( + Option, + '-v', '--verbose', + dest='verbose', + action='count', + default=0, + help='Give more output. Option is additive, and can be used up to 3 times.' 
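+    # (Sketch of how these factories are consumed, per the module docstring: +    # every parse instantiates a fresh Option, e.g. +    #   option_group.add_option(verbose()) +    # so 'count' and 'append' options cannot leak state between pip's two +    # internal parses.)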
+) + +version = partial( + Option, + '-V', '--version', + dest='version', + action='store_true', + help='Show version and exit.') + +quiet = partial( + Option, + '-q', '--quiet', + dest='quiet', + action='count', + default=0, + help='Give less output.') + +log = partial( + Option, + "--log", "--log-file", "--local-log", + dest="log", + metavar="path", + help="Path to a verbose appending log." +) + +no_input = partial( + Option, + # Don't ask for input + '--no-input', + dest='no_input', + action='store_true', + default=False, + help=SUPPRESS_HELP) + +proxy = partial( + Option, + '--proxy', + dest='proxy', + type='str', + default='', + help="Specify a proxy in the form [user:passwd@]proxy.server:port.") + +retries = partial( + Option, + '--retries', + dest='retries', + type='int', + default=5, + help="Maximum number of retries each connection should attempt " + "(default %default times).") + +timeout = partial( + Option, + '--timeout', '--default-timeout', + metavar='sec', + dest='timeout', + type='float', + default=15, + help='Set the socket timeout (default %default seconds).') + +default_vcs = partial( + Option, + # The default version control system for editables, e.g. 'svn' + '--default-vcs', + dest='default_vcs', + type='str', + default='', + help=SUPPRESS_HELP) + +skip_requirements_regex = partial( + Option, + # A regex to be used to skip requirements + '--skip-requirements-regex', + dest='skip_requirements_regex', + type='str', + default='', + help=SUPPRESS_HELP) + + +def exists_action(): + return Option( + # Option when path already exists + '--exists-action', + dest='exists_action', + type='choice', + choices=['s', 'i', 'w', 'b'], + default=[], + action='append', + metavar='action', + help="Default action when a path already exists: " + "(s)witch, (i)gnore, (w)ipe, (b)ackup.") + + +cert = partial( + Option, + '--cert', + dest='cert', + type='str', + metavar='path', + help="Path to alternate CA bundle.") + +client_cert = partial( + Option, + '--client-cert', + dest='client_cert', + type='str', + default=None, + metavar='path', + help="Path to SSL client certificate, a single file containing the " + "private key and the certificate in PEM format.") + +index_url = partial( + Option, + '-i', '--index-url', '--pypi-url', + dest='index_url', + metavar='URL', + default=PyPI.simple_url, + help='Base URL of Python Package Index (default %default).') + + +def extra_index_url(): + return Option( + '--extra-index-url', + dest='extra_index_urls', + metavar='URL', + action='append', + default=[], + help='Extra URLs of package indexes to use in addition to --index-url.' + ) + + +no_index = partial( + Option, + '--no-index', + dest='no_index', + action='store_true', + default=False, + help='Ignore package index (only looking at --find-links URLs instead).') + + +def find_links(): + return Option( + '-f', '--find-links', + dest='find_links', + action='append', + default=[], + metavar='url', + help="If a url or path to an html file, then parse for links to " + "archives. 
If a local path or file:// url that's a directory, " + "then look for archives in the directory listing.") + + +def allow_external(): + return Option( + "--allow-external", + dest="allow_external", + action="append", + default=[], + metavar="PACKAGE", + help=SUPPRESS_HELP, + ) + + +allow_all_external = partial( + Option, + "--allow-all-external", + dest="allow_all_external", + action="store_true", + default=False, + help=SUPPRESS_HELP, +) + + +def trusted_host(): + return Option( + "--trusted-host", + dest="trusted_hosts", + action="append", + metavar="HOSTNAME", + default=[], + help="Mark this host as trusted, even though it does not have valid " + "or any HTTPS.", + ) + + +# Remove after 7.0 +no_allow_external = partial( + Option, + "--no-allow-external", + dest="allow_all_external", + action="store_false", + default=False, + help=SUPPRESS_HELP, +) + + +# Remove --allow-insecure after 7.0 +def allow_unsafe(): + return Option( + "--allow-unverified", "--allow-insecure", + dest="allow_unverified", + action="append", + default=[], + metavar="PACKAGE", + help=SUPPRESS_HELP, + ) + +# Remove after 7.0 +no_allow_unsafe = partial( + Option, + "--no-allow-insecure", + dest="allow_all_insecure", + action="store_false", + default=False, + help=SUPPRESS_HELP +) + +# Remove after 1.5 +process_dependency_links = partial( + Option, + "--process-dependency-links", + dest="process_dependency_links", + action="store_true", + default=False, + help="Enable the processing of dependency links.", +) + + +def constraints(): + return Option( + '-c', '--constraint', + dest='constraints', + action='append', + default=[], + metavar='file', + help='Constrain versions using the given constraints file. ' + 'This option can be used multiple times.') + + +def requirements(): + return Option( + '-r', '--requirement', + dest='requirements', + action='append', + default=[], + metavar='file', + help='Install from the given requirements file. ' + 'This option can be used multiple times.') + + +def editable(): + return Option( + '-e', '--editable', + dest='editables', + action='append', + default=[], + metavar='path/url', + help=('Install a project in editable mode (i.e. setuptools ' + '"develop mode") from a local project path or a VCS url.'), + ) + +src = partial( + Option, + '--src', '--source', '--source-dir', '--source-directory', + dest='src_dir', + metavar='dir', + default=src_prefix, + help='Directory to check out editable projects into. ' + 'The default in a virtualenv is "<venv path>/src". ' + 'The default for global installs is "<current dir>/src".' +) + +# XXX: deprecated, remove in 9.0 +use_wheel = partial( + Option, + '--use-wheel', + dest='use_wheel', + action='store_true', + default=True, + help=SUPPRESS_HELP, +) + +# XXX: deprecated, remove in 9.0 +no_use_wheel = partial( + Option, + '--no-use-wheel', + dest='use_wheel', + action='store_false', + default=True, + help=('Do not find and prefer wheel archives when searching indexes and ' + 'find-links locations. 
DEPRECATED in favour of --no-binary.'), +) + + +def _get_format_control(values, option): + """Get a format_control object.""" + return getattr(values, option.dest) + + +def _handle_no_binary(option, opt_str, value, parser): + existing = getattr(parser.values, option.dest) + fmt_ctl_handle_mutual_exclude( + value, existing.no_binary, existing.only_binary) + + +def _handle_only_binary(option, opt_str, value, parser): + existing = getattr(parser.values, option.dest) + fmt_ctl_handle_mutual_exclude( + value, existing.only_binary, existing.no_binary) + + +def no_binary(): + return Option( + "--no-binary", dest="format_control", action="callback", + callback=_handle_no_binary, type="str", + default=FormatControl(set(), set()), + help="Do not use binary packages. Can be supplied multiple times, and " + "each time adds to the existing value. Accepts either :all: to " + "disable all binary packages, :none: to empty the set, or one or " + "more package names with commas between them. Note that some " + "packages are tricky to compile and may fail to install when " + "this option is used on them.") + + +def only_binary(): + return Option( + "--only-binary", dest="format_control", action="callback", + callback=_handle_only_binary, type="str", + default=FormatControl(set(), set()), + help="Do not use source packages. Can be supplied multiple times, and " + "each time adds to the existing value. Accepts either :all: to " + "disable all source packages, :none: to empty the set, or one or " + "more package names with commas between them. Packages without " + "binary distributions will fail to install when this option is " + "used on them.") + + +cache_dir = partial( + Option, + "--cache-dir", + dest="cache_dir", + default=USER_CACHE_DIR, + metavar="dir", + help="Store the cache data in <dir>." +) + +no_cache = partial( + Option, + "--no-cache-dir", + dest="cache_dir", + action="store_false", + help="Disable the cache.", +) + +no_deps = partial( + Option, + '--no-deps', '--no-dependencies', + dest='ignore_dependencies', + action='store_true', + default=False, + help="Don't install package dependencies.") + +build_dir = partial( + Option, + '-b', '--build', '--build-dir', '--build-directory', + dest='build_dir', + metavar='dir', + help='Directory to unpack packages into and build in.' +) + +install_options = partial( + Option, + '--install-option', + dest='install_options', + action='append', + metavar='options', + help="Extra arguments to be supplied to the setup.py install " + "command (use like --install-option=\"--install-scripts=/usr/local/" + "bin\"). Use multiple --install-option options to pass multiple " + "options to setup.py install. If you are using an option with a " + "directory path, be sure to use absolute path.") + +global_options = partial( + Option, + '--global-option', + dest='global_options', + action='append', + metavar='options', + help="Extra global options to be supplied to the setup.py " + "call before the install command.") + +no_clean = partial( + Option, + '--no-clean', + action='store_true', + default=False, + help="Don't clean up build directories.") + +pre = partial( + Option, + '--pre', + action='store_true', + default=False, + help="Include pre-release and development versions. 
By default, " + "pip only finds stable versions.") + +disable_pip_version_check = partial( + Option, + "--disable-pip-version-check", + dest="disable_pip_version_check", + action="store_true", + default=False, + help="Don't periodically check PyPI to determine whether a new version " + "of pip is available for download. Implied with --no-index.") + +# Deprecated, Remove later +always_unzip = partial( + Option, + '-Z', '--always-unzip', + dest='always_unzip', + action='store_true', + help=SUPPRESS_HELP, +) + + +def _merge_hash(option, opt_str, value, parser): + """Given a value spelled "algo:digest", append the digest to a list + pointed to in a dict by the algo name.""" + if not parser.values.hashes: + parser.values.hashes = {} + try: + algo, digest = value.split(':', 1) + except ValueError: + parser.error('Arguments to %s must be a hash name ' + 'followed by a value, like --hash=sha256:abcde...' % + opt_str) + if algo not in STRONG_HASHES: + parser.error('Allowed hash algorithms for %s are %s.' % + (opt_str, ', '.join(STRONG_HASHES))) + parser.values.hashes.setdefault(algo, []).append(digest) + + +hash = partial( + Option, + '--hash', + # Hash values eventually end up in InstallRequirement.hashes due to + # __dict__ copying in process_line(). + dest='hashes', + action='callback', + callback=_merge_hash, + type='string', + help="Verify that the package's archive matches this " + 'hash before installing. Example: --hash=sha256:abcdef...') + + +require_hashes = partial( + Option, + '--require-hashes', + dest='require_hashes', + action='store_true', + default=False, + help='Require a hash to check each requirement against, for ' + 'repeatable installs. This option is implied when any package in a ' + 'requirements file has a --hash option.') + + +########## +# groups # +########## + +general_group = { + 'name': 'General Options', + 'options': [ + help_, + isolated_mode, + require_virtualenv, + verbose, + version, + quiet, + log, + no_input, + proxy, + retries, + timeout, + default_vcs, + skip_requirements_regex, + exists_action, + trusted_host, + cert, + client_cert, + cache_dir, + no_cache, + disable_pip_version_check, + ] +} + +non_deprecated_index_group = { + 'name': 'Package Index Options', + 'options': [ + index_url, + extra_index_url, + no_index, + find_links, + process_dependency_links, + ] +} + +index_group = { + 'name': 'Package Index Options (including deprecated options)', + 'options': non_deprecated_index_group['options'] + [ + allow_external, + allow_all_external, + no_allow_external, + allow_unsafe, + no_allow_unsafe, + ] +} diff --git a/lib/python3.4/site-packages/pip/commands/__init__.py b/lib/python3.4/site-packages/pip/commands/__init__.py new file mode 100644 index 0000000..6910f51 --- /dev/null +++ b/lib/python3.4/site-packages/pip/commands/__init__.py @@ -0,0 +1,85 @@ +""" +Package containing all pip commands +""" +from __future__ import absolute_import + +from pip.commands.completion import CompletionCommand +from pip.commands.download import DownloadCommand +from pip.commands.freeze import FreezeCommand +from pip.commands.hash import HashCommand +from pip.commands.help import HelpCommand +from pip.commands.list import ListCommand +from pip.commands.search import SearchCommand +from pip.commands.show import ShowCommand +from pip.commands.install import InstallCommand +from pip.commands.uninstall import UninstallCommand +from pip.commands.wheel import WheelCommand + + +commands_dict = { + CompletionCommand.name: CompletionCommand, + FreezeCommand.name: FreezeCommand, + 
HashCommand.name: HashCommand, + HelpCommand.name: HelpCommand, + SearchCommand.name: SearchCommand, + ShowCommand.name: ShowCommand, + InstallCommand.name: InstallCommand, + UninstallCommand.name: UninstallCommand, + DownloadCommand.name: DownloadCommand, + ListCommand.name: ListCommand, + WheelCommand.name: WheelCommand, +} + + +commands_order = [ + InstallCommand, + DownloadCommand, + UninstallCommand, + FreezeCommand, + ListCommand, + ShowCommand, + SearchCommand, + WheelCommand, + HashCommand, + HelpCommand, +] + + +def get_summaries(ignore_hidden=True, ordered=True): + """Yields sorted (command name, command summary) tuples.""" + + if ordered: + cmditems = _sort_commands(commands_dict, commands_order) + else: + cmditems = commands_dict.items() + + for name, command_class in cmditems: + if ignore_hidden and command_class.hidden: + continue + + yield (name, command_class.summary) + + +def get_similar_commands(name): + """Command name auto-correct.""" + from difflib import get_close_matches + + name = name.lower() + + close_commands = get_close_matches(name, commands_dict.keys()) + + if close_commands: + return close_commands[0] + else: + return False + + +def _sort_commands(cmddict, order): + def keyfn(key): + try: + return order.index(key[1]) + except ValueError: + # unordered items should come last + return 0xff + + return sorted(cmddict.items(), key=keyfn) diff --git a/lib/python3.4/site-packages/pip/commands/completion.py b/lib/python3.4/site-packages/pip/commands/completion.py new file mode 100644 index 0000000..10f19e3 --- /dev/null +++ b/lib/python3.4/site-packages/pip/commands/completion.py @@ -0,0 +1,68 @@ +from __future__ import absolute_import + +import sys +from pip.basecommand import Command + +BASE_COMPLETION = """ +# pip %(shell)s completion start%(script)s# pip %(shell)s completion end +""" + +COMPLETION_SCRIPTS = { + 'bash': """ +_pip_completion() +{ + COMPREPLY=( $( COMP_WORDS="${COMP_WORDS[*]}" \\ + COMP_CWORD=$COMP_CWORD \\ + PIP_AUTO_COMPLETE=1 $1 ) ) +} +complete -o default -F _pip_completion pip +""", 'zsh': """ +function _pip_completion { + local words cword + read -Ac words + read -cn cword + reply=( $( COMP_WORDS="$words[*]" \\ + COMP_CWORD=$(( cword-1 )) \\ + PIP_AUTO_COMPLETE=1 $words[1] ) ) +} +compctl -K _pip_completion pip +"""} + + +class CompletionCommand(Command): + """A helper command to be used for command completion.""" + name = 'completion' + summary = 'A helper command to be used for command completion' + hidden = True + + def __init__(self, *args, **kw): + super(CompletionCommand, self).__init__(*args, **kw) + + cmd_opts = self.cmd_opts + + cmd_opts.add_option( + '--bash', '-b', + action='store_const', + const='bash', + dest='shell', + help='Emit completion code for bash') + cmd_opts.add_option( + '--zsh', '-z', + action='store_const', + const='zsh', + dest='shell', + help='Emit completion code for zsh') + + self.parser.insert_option_group(0, cmd_opts) + + def run(self, options, args): + """Prints the completion code of the given shell""" + shells = COMPLETION_SCRIPTS.keys() + shell_options = ['--' + shell for shell in sorted(shells)] + if options.shell in shells: + script = COMPLETION_SCRIPTS.get(options.shell, '') + print(BASE_COMPLETION % {'script': script, 'shell': options.shell}) + else: + sys.stderr.write( + 'ERROR: You must pass %s\n' % ' or '.join(shell_options) + ) diff --git a/lib/python3.4/site-packages/pip/commands/download.py b/lib/python3.4/site-packages/pip/commands/download.py new file mode 100644 index 0000000..4155e05 --- 
/dev/null +++ b/lib/python3.4/site-packages/pip/commands/download.py @@ -0,0 +1,136 @@ +from __future__ import absolute_import + +import logging +import os + +from pip.req import RequirementSet +from pip.basecommand import RequirementCommand +from pip import cmdoptions +from pip.utils import ensure_dir, normalize_path +from pip.utils.build import BuildDirectory +from pip.utils.filesystem import check_path_owner + + +logger = logging.getLogger(__name__) + + +class DownloadCommand(RequirementCommand): + """ + Download packages from: + + - PyPI (and other indexes) using requirement specifiers. + - VCS project urls. + - Local project directories. + - Local or remote source archives. + + pip also supports downloading from "requirements files", which provide + an easy way to specify a whole environment to be downloaded. + """ + name = 'download' + + usage = """ + %prog [options] <requirement specifier> [package-index-options] ... + %prog [options] -r <requirements file> [package-index-options] ... + %prog [options] [-e] <vcs project url> ... + %prog [options] [-e] <local project path> ... + %prog [options] <archive url/path> ...""" + + summary = 'Download packages.' + + def __init__(self, *args, **kw): + super(DownloadCommand, self).__init__(*args, **kw) + + cmd_opts = self.cmd_opts + + cmd_opts.add_option(cmdoptions.constraints()) + cmd_opts.add_option(cmdoptions.editable()) + cmd_opts.add_option(cmdoptions.requirements()) + cmd_opts.add_option(cmdoptions.build_dir()) + cmd_opts.add_option(cmdoptions.no_deps()) + cmd_opts.add_option(cmdoptions.global_options()) + cmd_opts.add_option(cmdoptions.no_binary()) + cmd_opts.add_option(cmdoptions.only_binary()) + cmd_opts.add_option(cmdoptions.src()) + cmd_opts.add_option(cmdoptions.pre()) + cmd_opts.add_option(cmdoptions.no_clean()) + cmd_opts.add_option(cmdoptions.require_hashes()) + + cmd_opts.add_option( + '-d', '--dest', '--destination-dir', '--destination-directory', + dest='download_dir', + metavar='dir', + default=os.curdir, + help=("Download packages into <dir>."), + ) + + index_opts = cmdoptions.make_option_group( + cmdoptions.non_deprecated_index_group, + self.parser, + ) + + self.parser.insert_option_group(0, index_opts) + self.parser.insert_option_group(0, cmd_opts) + + def run(self, options, args): + options.ignore_installed = True + options.src_dir = os.path.abspath(options.src_dir) + options.download_dir = normalize_path(options.download_dir) + + ensure_dir(options.download_dir) + + with self._build_session(options) as session: + + finder = self._build_package_finder(options, session) + build_delete = (not (options.no_clean or options.build_dir)) + if options.cache_dir and not check_path_owner(options.cache_dir): + logger.warning( + "The directory '%s' or its parent directory is not owned " + "by the current user and caching wheels has been " + "disabled. Check the permissions and owner of that " + "directory. 
If executing pip with sudo, you may want " + "sudo's -H flag.", + options.cache_dir, + ) + options.cache_dir = None + + with BuildDirectory(options.build_dir, + delete=build_delete) as build_dir: + + requirement_set = RequirementSet( + build_dir=build_dir, + src_dir=options.src_dir, + download_dir=options.download_dir, + ignore_installed=True, + ignore_dependencies=options.ignore_dependencies, + session=session, + isolated=options.isolated_mode, + require_hashes=options.require_hashes + ) + self.populate_requirement_set( + requirement_set, + args, + options, + finder, + session, + self.name, + None + ) + + if not requirement_set.has_requirements: + return + + requirement_set.prepare_files(finder) + + downloaded = ' '.join([ + req.name for req in requirement_set.successfully_downloaded + ]) + if downloaded: + logger.info( + 'Successfully downloaded %s', downloaded + ) + + # Clean up + if not options.no_clean: + requirement_set.cleanup_files() + + return requirement_set diff --git a/lib/python3.4/site-packages/pip/commands/freeze.py b/lib/python3.4/site-packages/pip/commands/freeze.py new file mode 100644 index 0000000..ef4c624 --- /dev/null +++ b/lib/python3.4/site-packages/pip/commands/freeze.py @@ -0,0 +1,71 @@ +from __future__ import absolute_import + +import sys + +import pip +from pip.basecommand import Command +from pip.operations.freeze import freeze +from pip.wheel import WheelCache + + +class FreezeCommand(Command): + """ + Output installed packages in requirements format. + + packages are listed in a case-insensitive sorted order. + """ + name = 'freeze' + usage = """ + %prog [options]""" + summary = 'Output installed packages in requirements format.' + log_streams = ("ext://sys.stderr", "ext://sys.stderr") + + def __init__(self, *args, **kw): + super(FreezeCommand, self).__init__(*args, **kw) + + self.cmd_opts.add_option( + '-r', '--requirement', + dest='requirement', + action='store', + default=None, + metavar='file', + help="Use the order in the given requirements file and its " + "comments when generating output.") + self.cmd_opts.add_option( + '-f', '--find-links', + dest='find_links', + action='append', + default=[], + metavar='URL', + help='URL for finding packages, which will be added to the ' + 'output.') + self.cmd_opts.add_option( + '-l', '--local', + dest='local', + action='store_true', + default=False, + help='If in a virtualenv that has global access, do not output ' + 'globally-installed packages.') + self.cmd_opts.add_option( + '--user', + dest='user', + action='store_true', + default=False, + help='Only output packages installed in user-site.') + + self.parser.insert_option_group(0, self.cmd_opts) + + def run(self, options, args): + format_control = pip.index.FormatControl(set(), set()) + wheel_cache = WheelCache(options.cache_dir, format_control) + freeze_kwargs = dict( + requirement=options.requirement, + find_links=options.find_links, + local_only=options.local, + user_only=options.user, + skip_regex=options.skip_requirements_regex, + isolated=options.isolated_mode, + wheel_cache=wheel_cache) + + for line in freeze(**freeze_kwargs): + sys.stdout.write(line + '\n') diff --git a/lib/python3.4/site-packages/pip/commands/hash.py b/lib/python3.4/site-packages/pip/commands/hash.py new file mode 100644 index 0000000..27cca0b --- /dev/null +++ b/lib/python3.4/site-packages/pip/commands/hash.py @@ -0,0 +1,57 @@ +from __future__ import absolute_import + +import hashlib +import logging +import sys + +from pip.basecommand import Command +from pip.status_codes import ERROR 
+from pip.utils import read_chunks +from pip.utils.hashes import FAVORITE_HASH, STRONG_HASHES + + +logger = logging.getLogger(__name__) + + +class HashCommand(Command): + """ + Compute a hash of a local package archive. + + These can be used with --hash in a requirements file to do repeatable + installs. + + """ + name = 'hash' + usage = '%prog [options] <file> ...' + summary = 'Compute hashes of package archives.' + + def __init__(self, *args, **kw): + super(HashCommand, self).__init__(*args, **kw) + self.cmd_opts.add_option( + '-a', '--algorithm', + dest='algorithm', + choices=STRONG_HASHES, + action='store', + default=FAVORITE_HASH, + help='The hash algorithm to use: one of %s' % + ', '.join(STRONG_HASHES)) + self.parser.insert_option_group(0, self.cmd_opts) + + def run(self, options, args): + if not args: + self.parser.print_usage(sys.stderr) + return ERROR + + algorithm = options.algorithm + for path in args: + logger.info('%s:\n--hash=%s:%s', + path, algorithm, _hash_of_file(path, algorithm)) + + +def _hash_of_file(path, algorithm): + """Return the hash digest of a file.""" + with open(path, 'rb') as archive: + hash = hashlib.new(algorithm) + for chunk in read_chunks(archive): + hash.update(chunk) + return hash.hexdigest() diff --git a/lib/python3.4/site-packages/pip/commands/help.py b/lib/python3.4/site-packages/pip/commands/help.py new file mode 100644 index 0000000..11722f1 --- /dev/null +++ b/lib/python3.4/site-packages/pip/commands/help.py @@ -0,0 +1,35 @@ +from __future__ import absolute_import + +from pip.basecommand import Command, SUCCESS +from pip.exceptions import CommandError + + +class HelpCommand(Command): + """Show help for commands""" + name = 'help' + usage = """ + %prog <command>""" + summary = 'Show help for commands.' + + def run(self, options, args): + from pip.commands import commands_dict, get_similar_commands + + try: + # 'pip help' with no args is handled by pip.__init__.parseopts() + cmd_name = args[0] # the command we need help for + except IndexError: + return SUCCESS + + if cmd_name not in commands_dict: + guess = get_similar_commands(cmd_name) + + msg = ['unknown command "%s"' % cmd_name] + if guess: + msg.append('maybe you meant "%s"' % guess) + + raise CommandError(' - '.join(msg)) + + command = commands_dict[cmd_name]() + command.parser.print_help() + + return SUCCESS diff --git a/lib/python3.4/site-packages/pip/commands/install.py b/lib/python3.4/site-packages/pip/commands/install.py new file mode 100644 index 0000000..7ddde93 --- /dev/null +++ b/lib/python3.4/site-packages/pip/commands/install.py @@ -0,0 +1,386 @@ +from __future__ import absolute_import + +import logging +import operator +import os +import tempfile +import shutil +import warnings +try: + import wheel +except ImportError: + wheel = None + +from pip.req import RequirementSet +from pip.basecommand import RequirementCommand +from pip.locations import virtualenv_no_global, distutils_scheme +from pip.exceptions import ( + InstallationError, CommandError, PreviousBuildDirError, +) +from pip import cmdoptions +from pip.utils import ensure_dir +from pip.utils.build import BuildDirectory +from pip.utils.deprecation import RemovedInPip10Warning +from pip.utils.filesystem import check_path_owner +from pip.wheel import WheelCache, WheelBuilder + + +logger = logging.getLogger(__name__) + + +class InstallCommand(RequirementCommand): + """ + Install packages from: + + - PyPI (and other indexes) using requirement specifiers. + - VCS project urls. + - Local project directories. + - Local or remote source archives. 
+ + pip also supports installing from "requirements files", which provide + an easy way to specify a whole environment to be installed. + """ + name = 'install' + + usage = """ + %prog [options] [package-index-options] ... + %prog [options] -r [package-index-options] ... + %prog [options] [-e] ... + %prog [options] [-e] ... + %prog [options] ...""" + + summary = 'Install packages.' + + def __init__(self, *args, **kw): + super(InstallCommand, self).__init__(*args, **kw) + + cmd_opts = self.cmd_opts + + cmd_opts.add_option(cmdoptions.constraints()) + cmd_opts.add_option(cmdoptions.editable()) + cmd_opts.add_option(cmdoptions.requirements()) + cmd_opts.add_option(cmdoptions.build_dir()) + + cmd_opts.add_option( + '-t', '--target', + dest='target_dir', + metavar='dir', + default=None, + help='Install packages into . ' + 'By default this will not replace existing files/folders in ' + '. Use --upgrade to replace existing packages in ' + 'with new versions.' + ) + + cmd_opts.add_option( + '-d', '--download', '--download-dir', '--download-directory', + dest='download_dir', + metavar='dir', + default=None, + help=("Download packages into instead of installing them, " + "regardless of what's already installed."), + ) + + cmd_opts.add_option(cmdoptions.src()) + + cmd_opts.add_option( + '-U', '--upgrade', + dest='upgrade', + action='store_true', + help='Upgrade all specified packages to the newest available ' + 'version. This process is recursive regardless of whether ' + 'a dependency is already satisfied.' + ) + + cmd_opts.add_option( + '--force-reinstall', + dest='force_reinstall', + action='store_true', + help='When upgrading, reinstall all packages even if they are ' + 'already up-to-date.') + + cmd_opts.add_option( + '-I', '--ignore-installed', + dest='ignore_installed', + action='store_true', + help='Ignore the installed packages (reinstalling instead).') + + cmd_opts.add_option(cmdoptions.no_deps()) + + cmd_opts.add_option(cmdoptions.install_options()) + cmd_opts.add_option(cmdoptions.global_options()) + + cmd_opts.add_option( + '--user', + dest='use_user_site', + action='store_true', + help="Install to the Python user install directory for your " + "platform. Typically ~/.local/, or %APPDATA%\Python on " + "Windows. (See the Python documentation for site.USER_BASE " + "for full details.)") + + cmd_opts.add_option( + '--egg', + dest='as_egg', + action='store_true', + help="Install packages as eggs, not 'flat', like pip normally " + "does. This option is not about installing *from* eggs. 
" + "(WARNING: Because this option overrides pip's normal install" + " logic, requirements files may not behave as expected.)") + + cmd_opts.add_option( + '--root', + dest='root_path', + metavar='dir', + default=None, + help="Install everything relative to this alternate root " + "directory.") + + cmd_opts.add_option( + '--prefix', + dest='prefix_path', + metavar='dir', + default=None, + help="Installation prefix where lib, bin and other top-level " + "folders are placed") + + cmd_opts.add_option( + "--compile", + action="store_true", + dest="compile", + default=True, + help="Compile py files to pyc", + ) + + cmd_opts.add_option( + "--no-compile", + action="store_false", + dest="compile", + help="Do not compile py files to pyc", + ) + + cmd_opts.add_option(cmdoptions.use_wheel()) + cmd_opts.add_option(cmdoptions.no_use_wheel()) + cmd_opts.add_option(cmdoptions.no_binary()) + cmd_opts.add_option(cmdoptions.only_binary()) + cmd_opts.add_option(cmdoptions.pre()) + cmd_opts.add_option(cmdoptions.no_clean()) + cmd_opts.add_option(cmdoptions.require_hashes()) + + index_opts = cmdoptions.make_option_group( + cmdoptions.index_group, + self.parser, + ) + + self.parser.insert_option_group(0, index_opts) + self.parser.insert_option_group(0, cmd_opts) + + def run(self, options, args): + cmdoptions.resolve_wheel_no_use_binary(options) + cmdoptions.check_install_build_global(options) + + if options.allow_external: + warnings.warn( + "--allow-external has been deprecated and will be removed in " + "the future. Due to changes in the repository protocol, it no " + "longer has any effect.", + RemovedInPip10Warning, + ) + + if options.allow_all_external: + warnings.warn( + "--allow-all-external has been deprecated and will be removed " + "in the future. Due to changes in the repository protocol, it " + "no longer has any effect.", + RemovedInPip10Warning, + ) + + if options.allow_unverified: + warnings.warn( + "--allow-unverified has been deprecated and will be removed " + "in the future. Due to changes in the repository protocol, it " + "no longer has any effect.", + RemovedInPip10Warning, + ) + + if options.download_dir: + warnings.warn( + "pip install --download has been deprecated and will be " + "removed in the future. Pip now has a download command that " + "should be used instead.", + RemovedInPip10Warning, + ) + options.ignore_installed = True + + if options.build_dir: + options.build_dir = os.path.abspath(options.build_dir) + + options.src_dir = os.path.abspath(options.src_dir) + install_options = options.install_options or [] + if options.use_user_site: + if options.prefix_path: + raise CommandError( + "Can not combine '--user' and '--prefix' as they imply " + "different installation locations" + ) + if virtualenv_no_global(): + raise InstallationError( + "Can not perform a '--user' install. User site-packages " + "are not visible in this virtualenv." + ) + install_options.append('--user') + install_options.append('--prefix=') + + temp_target_dir = None + if options.target_dir: + options.ignore_installed = True + temp_target_dir = tempfile.mkdtemp() + options.target_dir = os.path.abspath(options.target_dir) + if (os.path.exists(options.target_dir) and not + os.path.isdir(options.target_dir)): + raise CommandError( + "Target path exists but is not a directory, will not " + "continue." 
+ ) + install_options.append('--home=' + temp_target_dir) + + global_options = options.global_options or [] + + with self._build_session(options) as session: + + finder = self._build_package_finder(options, session) + build_delete = (not (options.no_clean or options.build_dir)) + wheel_cache = WheelCache(options.cache_dir, options.format_control) + if options.cache_dir and not check_path_owner(options.cache_dir): + logger.warning( + "The directory '%s' or its parent directory is not owned " + "by the current user and caching wheels has been " + "disabled. check the permissions and owner of that " + "directory. If executing pip with sudo, you may want " + "sudo's -H flag.", + options.cache_dir, + ) + options.cache_dir = None + + with BuildDirectory(options.build_dir, + delete=build_delete) as build_dir: + requirement_set = RequirementSet( + build_dir=build_dir, + src_dir=options.src_dir, + download_dir=options.download_dir, + upgrade=options.upgrade, + as_egg=options.as_egg, + ignore_installed=options.ignore_installed, + ignore_dependencies=options.ignore_dependencies, + force_reinstall=options.force_reinstall, + use_user_site=options.use_user_site, + target_dir=temp_target_dir, + session=session, + pycompile=options.compile, + isolated=options.isolated_mode, + wheel_cache=wheel_cache, + require_hashes=options.require_hashes, + ) + + self.populate_requirement_set( + requirement_set, args, options, finder, session, self.name, + wheel_cache + ) + + if not requirement_set.has_requirements: + return + + try: + if (options.download_dir or not wheel or not + options.cache_dir): + # on -d don't do complex things like building + # wheels, and don't try to build wheels when wheel is + # not installed. + requirement_set.prepare_files(finder) + else: + # build wheels before install. + wb = WheelBuilder( + requirement_set, + finder, + build_options=[], + global_options=[], + ) + # Ignore the result: a failed wheel will be + # installed from the sdist/vcs whatever. + wb.build(autobuilding=True) + + if not options.download_dir: + requirement_set.install( + install_options, + global_options, + root=options.root_path, + prefix=options.prefix_path, + ) + reqs = sorted( + requirement_set.successfully_installed, + key=operator.attrgetter('name')) + items = [] + for req in reqs: + item = req.name + try: + if hasattr(req, 'installed_version'): + if req.installed_version: + item += '-' + req.installed_version + except Exception: + pass + items.append(item) + installed = ' '.join(items) + if installed: + logger.info('Successfully installed %s', installed) + else: + downloaded = ' '.join([ + req.name + for req in requirement_set.successfully_downloaded + ]) + if downloaded: + logger.info( + 'Successfully downloaded %s', downloaded + ) + except PreviousBuildDirError: + options.no_clean = True + raise + finally: + # Clean up + if not options.no_clean: + requirement_set.cleanup_files() + + if options.target_dir: + ensure_dir(options.target_dir) + + lib_dir = distutils_scheme('', home=temp_target_dir)['purelib'] + + for item in os.listdir(lib_dir): + target_item_dir = os.path.join(options.target_dir, item) + if os.path.exists(target_item_dir): + if not options.upgrade: + logger.warning( + 'Target directory %s already exists. Specify ' + '--upgrade to force replacement.', + target_item_dir + ) + continue + if os.path.islink(target_item_dir): + logger.warning( + 'Target directory %s already exists and is ' + 'a link. 
Pip will not automatically replace ' + 'links, please remove if replacement is ' + 'desired.', + target_item_dir + ) + continue + if os.path.isdir(target_item_dir): + shutil.rmtree(target_item_dir) + else: + os.remove(target_item_dir) + + shutil.move( + os.path.join(lib_dir, item), + target_item_dir + ) + shutil.rmtree(temp_target_dir) + return requirement_set diff --git a/lib/python3.4/site-packages/pip/commands/list.py b/lib/python3.4/site-packages/pip/commands/list.py new file mode 100644 index 0000000..5346488 --- /dev/null +++ b/lib/python3.4/site-packages/pip/commands/list.py @@ -0,0 +1,209 @@ +from __future__ import absolute_import + +import logging +import warnings + +from pip.basecommand import Command +from pip.exceptions import CommandError +from pip.index import PackageFinder +from pip.utils import ( + get_installed_distributions, dist_is_editable) +from pip.utils.deprecation import RemovedInPip10Warning +from pip.cmdoptions import make_option_group, index_group + + +logger = logging.getLogger(__name__) + + +class ListCommand(Command): + """ + List installed packages, including editables. + + Packages are listed in a case-insensitive sorted order. + """ + name = 'list' + usage = """ + %prog [options]""" + summary = 'List installed packages.' + + def __init__(self, *args, **kw): + super(ListCommand, self).__init__(*args, **kw) + + cmd_opts = self.cmd_opts + + cmd_opts.add_option( + '-o', '--outdated', + action='store_true', + default=False, + help='List outdated packages') + cmd_opts.add_option( + '-u', '--uptodate', + action='store_true', + default=False, + help='List uptodate packages') + cmd_opts.add_option( + '-e', '--editable', + action='store_true', + default=False, + help='List editable projects.') + cmd_opts.add_option( + '-l', '--local', + action='store_true', + default=False, + help=('If in a virtualenv that has global access, do not list ' + 'globally-installed packages.'), + ) + self.cmd_opts.add_option( + '--user', + dest='user', + action='store_true', + default=False, + help='Only output packages installed in user-site.') + + cmd_opts.add_option( + '--pre', + action='store_true', + default=False, + help=("Include pre-release and development versions. By default, " + "pip only finds stable versions."), + ) + + index_opts = make_option_group(index_group, self.parser) + + self.parser.insert_option_group(0, index_opts) + self.parser.insert_option_group(0, cmd_opts) + + def _build_package_finder(self, options, index_urls, session): + """ + Create a package finder appropriate to this list command. + """ + return PackageFinder( + find_links=options.find_links, + index_urls=index_urls, + allow_all_prereleases=options.pre, + trusted_hosts=options.trusted_hosts, + process_dependency_links=options.process_dependency_links, + session=session, + ) + + def run(self, options, args): + if options.allow_external: + warnings.warn( + "--allow-external has been deprecated and will be removed in " + "the future. Due to changes in the repository protocol, it no " + "longer has any effect.", + RemovedInPip10Warning, + ) + + if options.allow_all_external: + warnings.warn( + "--allow-all-external has been deprecated and will be removed " + "in the future. Due to changes in the repository protocol, it " + "no longer has any effect.", + RemovedInPip10Warning, + ) + + if options.allow_unverified: + warnings.warn( + "--allow-unverified has been deprecated and will be removed " + "in the future. 
Due to changes in the repository protocol, it " + "no longer has any effect.", + RemovedInPip10Warning, + ) + if options.outdated and options.uptodate: + raise CommandError( + "Options --outdated and --uptodate cannot be combined.") + + if options.outdated: + self.run_outdated(options) + elif options.uptodate: + self.run_uptodate(options) + else: + self.run_listing(options) + + def run_outdated(self, options): + for dist, latest_version, typ in sorted( + self.find_packages_latest_versions(options), + key=lambda p: p[0].project_name.lower()): + if latest_version > dist.parsed_version: + logger.info( + '%s - Latest: %s [%s]', + self.output_package(dist), latest_version, typ, + ) + + def find_packages_latest_versions(self, options): + index_urls = [options.index_url] + options.extra_index_urls + if options.no_index: + logger.info('Ignoring indexes: %s', ','.join(index_urls)) + index_urls = [] + + dependency_links = [] + for dist in get_installed_distributions( + local_only=options.local, + user_only=options.user, + editables_only=options.editable): + if dist.has_metadata('dependency_links.txt'): + dependency_links.extend( + dist.get_metadata_lines('dependency_links.txt'), + ) + + with self._build_session(options) as session: + finder = self._build_package_finder(options, index_urls, session) + finder.add_dependency_links(dependency_links) + + installed_packages = get_installed_distributions( + local_only=options.local, + user_only=options.user, + editables_only=options.editable, + ) + for dist in installed_packages: + typ = 'unknown' + all_candidates = finder.find_all_candidates(dist.key) + if not options.pre: + # Remove prereleases + all_candidates = [candidate for candidate in all_candidates + if not candidate.version.is_prerelease] + + if not all_candidates: + continue + best_candidate = max(all_candidates, + key=finder._candidate_sort_key) + remote_version = best_candidate.version + if best_candidate.location.is_wheel: + typ = 'wheel' + else: + typ = 'sdist' + yield dist, remote_version, typ + + def run_listing(self, options): + installed_packages = get_installed_distributions( + local_only=options.local, + user_only=options.user, + editables_only=options.editable, + ) + self.output_package_listing(installed_packages) + + def output_package(self, dist): + if dist_is_editable(dist): + return '%s (%s, %s)' % ( + dist.project_name, + dist.version, + dist.location, + ) + else: + return '%s (%s)' % (dist.project_name, dist.version) + + def output_package_listing(self, installed_packages): + installed_packages = sorted( + installed_packages, + key=lambda dist: dist.project_name.lower(), + ) + for dist in installed_packages: + logger.info(self.output_package(dist)) + + def run_uptodate(self, options): + uptodate = [] + for dist, version, typ in self.find_packages_latest_versions(options): + if dist.parsed_version == version: + uptodate.append(dist) + self.output_package_listing(uptodate) diff --git a/lib/python3.4/site-packages/pip/commands/search.py b/lib/python3.4/site-packages/pip/commands/search.py new file mode 100644 index 0000000..977eb36 --- /dev/null +++ b/lib/python3.4/site-packages/pip/commands/search.py @@ -0,0 +1,139 @@ +from __future__ import absolute_import + +import logging +import sys +import textwrap + +from pip.basecommand import Command, SUCCESS +from pip.download import PipXmlrpcTransport +from pip.models import PyPI +from pip.utils import get_terminal_size +from pip.utils.logging import indent_log +from pip.exceptions import CommandError +from pip.status_codes import 
NO_MATCHES_FOUND
+from pip._vendor import pkg_resources
+from pip._vendor.six.moves import xmlrpc_client
+
+
+logger = logging.getLogger(__name__)
+
+
+class SearchCommand(Command):
+    """Search for PyPI packages whose name or summary contains <query>."""
+    name = 'search'
+    usage = """
+      %prog [options] <query>"""
+    summary = 'Search PyPI for packages.'
+
+    def __init__(self, *args, **kw):
+        super(SearchCommand, self).__init__(*args, **kw)
+        self.cmd_opts.add_option(
+            '--index',
+            dest='index',
+            metavar='URL',
+            default=PyPI.pypi_url,
+            help='Base URL of Python Package Index (default %default)')
+
+        self.parser.insert_option_group(0, self.cmd_opts)
+
+    def run(self, options, args):
+        if not args:
+            raise CommandError('Missing required argument (search query).')
+        query = args
+        pypi_hits = self.search(query, options)
+        hits = transform_hits(pypi_hits)
+
+        terminal_width = None
+        if sys.stdout.isatty():
+            terminal_width = get_terminal_size()[0]
+
+        print_results(hits, terminal_width=terminal_width)
+        if pypi_hits:
+            return SUCCESS
+        return NO_MATCHES_FOUND
+
+    def search(self, query, options):
+        index_url = options.index
+        with self._build_session(options) as session:
+            transport = PipXmlrpcTransport(index_url, session)
+            pypi = xmlrpc_client.ServerProxy(index_url, transport)
+            hits = pypi.search({'name': query, 'summary': query}, 'or')
+            return hits
+
+
+def transform_hits(hits):
+    """
+    The list from pypi is really a list of versions. We want a list of
+    packages with the list of versions stored inline. This converts the
+    list from pypi into one we can use.
+    """
+    packages = {}
+    for hit in hits:
+        name = hit['name']
+        summary = hit['summary']
+        version = hit['version']
+        score = hit['_pypi_ordering']
+        if score is None:
+            score = 0
+
+        if name not in packages.keys():
+            packages[name] = {
+                'name': name,
+                'summary': summary,
+                'versions': [version],
+                'score': score,
+            }
+        else:
+            packages[name]['versions'].append(version)
+
+            # if this is the highest version, replace summary and score
+            if version == highest_version(packages[name]['versions']):
+                packages[name]['summary'] = summary
+                packages[name]['score'] = score
+
+    # each record has a unique name now, so we will convert the dict into a
+    # list sorted by score
+    package_list = sorted(
+        packages.values(),
+        key=lambda x: x['score'],
+        reverse=True,
+    )
+    return package_list
+
+
+def print_results(hits, name_column_width=None, terminal_width=None):
+    if not hits:
+        return
+    if name_column_width is None:
+        name_column_width = max((len(hit['name']) for hit in hits)) + 4
+    installed_packages = [p.project_name for p in pkg_resources.working_set]
+    for hit in hits:
+        name = hit['name']
+        summary = hit['summary'] or ''
+        if terminal_width is not None:
+            # wrap and indent summary to fit terminal
+            summary = textwrap.wrap(
+                summary,
+                terminal_width - name_column_width - 5,
+            )
+            summary = ('\n' + ' ' * (name_column_width + 3)).join(summary)
+        line = '%s - %s' % (name.ljust(name_column_width), summary)
+        try:
+            logger.info(line)
+            if name in installed_packages:
+                dist = pkg_resources.get_distribution(name)
+                with indent_log():
+                    latest = highest_version(hit['versions'])
+                    if dist.version == latest:
+                        logger.info('INSTALLED: %s (latest)', dist.version)
+                    else:
+                        logger.info('INSTALLED: %s', dist.version)
+                        logger.info('LATEST:    %s', latest)
+        except UnicodeEncodeError:
+            pass
+
+
+def highest_version(versions):
+    return next(iter(
+        sorted(versions, key=pkg_resources.parse_version, reverse=True)
+    ))
diff --git a/lib/python3.4/site-packages/pip/commands/show.py b/lib/python3.4/site-packages/pip/commands/show.py
new file mode 100644
index 0000000..9df0cc5
--- /dev/null
+++ b/lib/python3.4/site-packages/pip/commands/show.py
@@ -0,0 +1,131 @@
+from __future__ import absolute_import
+
+from email.parser import FeedParser
+import logging
+import os
+
+from pip.basecommand import Command
+from pip.status_codes import SUCCESS, ERROR
+from pip._vendor import pkg_resources
+
+
+logger = logging.getLogger(__name__)
+
+
+class ShowCommand(Command):
+    """Show information about one or more installed packages."""
+    name = 'show'
+    usage = """
+      %prog [options] <package> ..."""
+    summary = 'Show information about installed packages.'
+
+    def __init__(self, *args, **kw):
+        super(ShowCommand, self).__init__(*args, **kw)
+        self.cmd_opts.add_option(
+            '-f', '--files',
+            dest='files',
+            action='store_true',
+            default=False,
+            help='Show the full list of installed files for each package.')
+
+        self.parser.insert_option_group(0, self.cmd_opts)
+
+    def run(self, options, args):
+        if not args:
+            logger.warning('ERROR: Please provide a package name or names.')
+            return ERROR
+        query = args
+
+        results = search_packages_info(query)
+        if not print_results(results, options.files):
+            return ERROR
+        return SUCCESS
+
+
+def search_packages_info(query):
+    """
+    Gather details from installed distributions. Print distribution name,
+    version, location, and installed files. Installed files requires a
+    pip generated 'installed-files.txt' in the distribution's '.egg-info'
+    directory.
+    """
+    installed = dict(
+        [(p.project_name.lower(), p) for p in pkg_resources.working_set])
+    query_names = [name.lower() for name in query]
+    for dist in [installed[pkg] for pkg in query_names if pkg in installed]:
+        package = {
+            'name': dist.project_name,
+            'version': dist.version,
+            'location': dist.location,
+            'requires': [dep.project_name for dep in dist.requires()],
+        }
+        file_list = None
+        metadata = None
+        if isinstance(dist, pkg_resources.DistInfoDistribution):
+            # RECORDs should be part of .dist-info metadatas
+            if dist.has_metadata('RECORD'):
+                lines = dist.get_metadata_lines('RECORD')
+                paths = [l.split(',')[0] for l in lines]
+                paths = [os.path.join(dist.location, p) for p in paths]
+                file_list = [os.path.relpath(p, dist.location) for p in paths]
+
+            if dist.has_metadata('METADATA'):
+                metadata = dist.get_metadata('METADATA')
+        else:
+            # Otherwise use pip's log for .egg-info's
+            if dist.has_metadata('installed-files.txt'):
+                paths = dist.get_metadata_lines('installed-files.txt')
+                paths = [os.path.join(dist.egg_info, p) for p in paths]
+                file_list = [os.path.relpath(p, dist.location) for p in paths]
+
+            if dist.has_metadata('PKG-INFO'):
+                metadata = dist.get_metadata('PKG-INFO')
+
+        if dist.has_metadata('entry_points.txt'):
+            entry_points = dist.get_metadata_lines('entry_points.txt')
+            package['entry_points'] = entry_points
+
+        # @todo: Should pkg_resources.Distribution have a
+        # `get_pkg_info` method?
+        feed_parser = FeedParser()
+        feed_parser.feed(metadata)
+        pkg_info_dict = feed_parser.close()
+        for key in ('metadata-version', 'summary',
+                    'home-page', 'author', 'author-email', 'license'):
+            package[key] = pkg_info_dict.get(key)
+
+        if file_list:
+            package['files'] = sorted(file_list)
+        yield package
+
+
+def print_results(distributions, list_all_files):
+    """
+    Print the information from installed distributions found.
+    """
+    results_printed = False
+    for dist in distributions:
+        results_printed = True
+        logger.info("---")
+        logger.info("Metadata-Version: %s", dist.get('metadata-version'))
+        logger.info("Name: %s", dist['name'])
+        logger.info("Version: %s", dist['version'])
+        logger.info("Summary: %s", dist.get('summary'))
+        logger.info("Home-page: %s", dist.get('home-page'))
+        logger.info("Author: %s", dist.get('author'))
+        logger.info("Author-email: %s", dist.get('author-email'))
+        logger.info("License: %s", dist.get('license'))
+        logger.info("Location: %s", dist['location'])
+        logger.info("Requires: %s", ', '.join(dist['requires']))
+        if list_all_files:
+            logger.info("Files:")
+            if 'files' in dist:
+                for line in dist['files']:
+                    logger.info("  %s", line.strip())
+            else:
+                logger.info("Cannot locate installed-files.txt")
+        if 'entry_points' in dist:
+            logger.info("Entry-points:")
+            for line in dist['entry_points']:
+                logger.info("  %s", line.strip())
+    return results_printed
diff --git a/lib/python3.4/site-packages/pip/commands/uninstall.py b/lib/python3.4/site-packages/pip/commands/uninstall.py
new file mode 100644
index 0000000..8ba1a7c
--- /dev/null
+++ b/lib/python3.4/site-packages/pip/commands/uninstall.py
@@ -0,0 +1,76 @@
+from __future__ import absolute_import
+
+import pip
+from pip.wheel import WheelCache
+from pip.req import InstallRequirement, RequirementSet, parse_requirements
+from pip.basecommand import Command
+from pip.exceptions import InstallationError
+
+
+class UninstallCommand(Command):
+    """
+    Uninstall packages.
+
+    pip is able to uninstall most installed packages. Known exceptions are:
+
+    - Pure distutils packages installed with ``python setup.py install``, which
+      leave behind no metadata to determine what files were installed.
+    - Script wrappers installed by ``python setup.py develop``.
+    """
+    name = 'uninstall'
+    usage = """
+      %prog [options] <package> ...
+      %prog [options] -r <requirements file> ..."""
+    summary = 'Uninstall packages.'
+
+    def __init__(self, *args, **kw):
+        super(UninstallCommand, self).__init__(*args, **kw)
+        self.cmd_opts.add_option(
+            '-r', '--requirement',
+            dest='requirements',
+            action='append',
+            default=[],
+            metavar='file',
+            help='Uninstall all the packages listed in the given requirements '
+                 'file. This option can be used multiple times.',
+        )
+        self.cmd_opts.add_option(
+            '-y', '--yes',
+            dest='yes',
+            action='store_true',
+            help="Don't ask for confirmation of uninstall deletions.")
+
+        self.parser.insert_option_group(0, self.cmd_opts)
+
+    def run(self, options, args):
+        with self._build_session(options) as session:
+            format_control = pip.index.FormatControl(set(), set())
+            wheel_cache = WheelCache(options.cache_dir, format_control)
+            requirement_set = RequirementSet(
+                build_dir=None,
+                src_dir=None,
+                download_dir=None,
+                isolated=options.isolated_mode,
+                session=session,
+                wheel_cache=wheel_cache,
+            )
+            for name in args:
+                requirement_set.add_requirement(
+                    InstallRequirement.from_line(
+                        name, isolated=options.isolated_mode,
+                        wheel_cache=wheel_cache
+                    )
+                )
+            for filename in options.requirements:
+                for req in parse_requirements(
+                        filename,
+                        options=options,
+                        session=session,
+                        wheel_cache=wheel_cache):
+                    requirement_set.add_requirement(req)
+            if not requirement_set.has_requirements:
+                raise InstallationError(
+                    'You must give at least one requirement to %(name)s (see '
+                    '"pip help %(name)s")' % dict(name=self.name)
+                )
+            requirement_set.uninstall(auto_confirm=options.yes)
diff --git a/lib/python3.4/site-packages/pip/commands/wheel.py b/lib/python3.4/site-packages/pip/commands/wheel.py
new file mode 100644
index 0000000..1d77fe6
--- /dev/null
+++ b/lib/python3.4/site-packages/pip/commands/wheel.py
@@ -0,0 +1,204 @@
+# -*- coding: utf-8 -*-
+from __future__ import absolute_import
+
+import logging
+import os
+import warnings
+
+from pip.basecommand import RequirementCommand
+from pip.exceptions import CommandError, PreviousBuildDirError
+from pip.req import RequirementSet
+from pip.utils import import_or_raise
+from pip.utils.build import BuildDirectory
+from pip.utils.deprecation import RemovedInPip10Warning
+from pip.wheel import WheelCache, WheelBuilder
+from pip import cmdoptions
+
+
+logger = logging.getLogger(__name__)
+
+
+class WheelCommand(RequirementCommand):
+    """
+    Build Wheel archives for your requirements and dependencies.
+
+    Wheel is a built-package format, and offers the advantage of not
+    recompiling your software during every install. For more details, see the
+    wheel docs: http://wheel.readthedocs.org/en/latest.
+
+    Requirements: setuptools>=0.8, and wheel.
+
+    'pip wheel' uses the bdist_wheel setuptools extension from the wheel
+    package to build individual wheels.
+
+    """
+
+    name = 'wheel'
+    usage = """
+      %prog [options] <requirement specifier> ...
+      %prog [options] -r <requirements file> ...
+      %prog [options] [-e] <vcs project url> ...
+      %prog [options] [-e] <local project path> ...
+      %prog [options] <archive url/path> ..."""
+
+    summary = 'Build wheels from your requirements.'
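+    # Illustrative example (editorial, not from the upstream pip source):
+    # a typical invocation is `pip wheel -r requirements.txt -w ./wheelhouse`,
+    # which resolves each requirement and writes its .whl into ./wheelhouse;
+    # the -w/--wheel-dir option defined below defaults to the current
+    # directory.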
+
+    def __init__(self, *args, **kw):
+        super(WheelCommand, self).__init__(*args, **kw)
+
+        cmd_opts = self.cmd_opts
+
+        cmd_opts.add_option(
+            '-w', '--wheel-dir',
+            dest='wheel_dir',
+            metavar='dir',
+            default=os.curdir,
+            help=("Build wheels into <dir>, where the default is the "
+                  "current working directory."),
+        )
+        cmd_opts.add_option(cmdoptions.use_wheel())
+        cmd_opts.add_option(cmdoptions.no_use_wheel())
+        cmd_opts.add_option(cmdoptions.no_binary())
+        cmd_opts.add_option(cmdoptions.only_binary())
+        cmd_opts.add_option(
+            '--build-option',
+            dest='build_options',
+            metavar='options',
+            action='append',
+            help="Extra arguments to be supplied to 'setup.py bdist_wheel'.")
+        cmd_opts.add_option(cmdoptions.constraints())
+        cmd_opts.add_option(cmdoptions.editable())
+        cmd_opts.add_option(cmdoptions.requirements())
+        cmd_opts.add_option(cmdoptions.src())
+        cmd_opts.add_option(cmdoptions.no_deps())
+        cmd_opts.add_option(cmdoptions.build_dir())
+
+        cmd_opts.add_option(
+            '--global-option',
+            dest='global_options',
+            action='append',
+            metavar='options',
+            help="Extra global options to be supplied to the setup.py "
+                 "call before the 'bdist_wheel' command.")
+
+        cmd_opts.add_option(
+            '--pre',
+            action='store_true',
+            default=False,
+            help=("Include pre-release and development versions. By default, "
+                  "pip only finds stable versions."),
+        )
+
+        cmd_opts.add_option(cmdoptions.no_clean())
+        cmd_opts.add_option(cmdoptions.require_hashes())
+
+        index_opts = cmdoptions.make_option_group(
+            cmdoptions.index_group,
+            self.parser,
+        )
+
+        self.parser.insert_option_group(0, index_opts)
+        self.parser.insert_option_group(0, cmd_opts)
+
+    def check_required_packages(self):
+        import_or_raise(
+            'wheel.bdist_wheel',
+            CommandError,
+            "'pip wheel' requires the 'wheel' package. To fix this, run: "
+            "pip install wheel"
+        )
+        pkg_resources = import_or_raise(
+            'pkg_resources',
+            CommandError,
+            "'pip wheel' requires setuptools >= 0.8 for dist-info support."
+            " To fix this, run: pip install --upgrade setuptools"
+        )
+        if not hasattr(pkg_resources, 'DistInfoDistribution'):
+            raise CommandError(
+                "'pip wheel' requires setuptools >= 0.8 for dist-info "
+                "support. To fix this, run: pip install --upgrade "
+                "setuptools"
+            )
+
+    def run(self, options, args):
+        self.check_required_packages()
+        cmdoptions.resolve_wheel_no_use_binary(options)
+        cmdoptions.check_install_build_global(options)
+
+        if options.allow_external:
+            warnings.warn(
+                "--allow-external has been deprecated and will be removed in "
+                "the future. Due to changes in the repository protocol, it no "
+                "longer has any effect.",
+                RemovedInPip10Warning,
+            )
+
+        if options.allow_all_external:
+            warnings.warn(
+                "--allow-all-external has been deprecated and will be removed "
+                "in the future. Due to changes in the repository protocol, it "
+                "no longer has any effect.",
+                RemovedInPip10Warning,
+            )
+
+        if options.allow_unverified:
+            warnings.warn(
+                "--allow-unverified has been deprecated and will be removed "
+                "in the future. 
Due to changes in the repository protocol, it " + "no longer has any effect.", + RemovedInPip10Warning, + ) + + index_urls = [options.index_url] + options.extra_index_urls + if options.no_index: + logger.info('Ignoring indexes: %s', ','.join(index_urls)) + index_urls = [] + + if options.build_dir: + options.build_dir = os.path.abspath(options.build_dir) + + with self._build_session(options) as session: + finder = self._build_package_finder(options, session) + build_delete = (not (options.no_clean or options.build_dir)) + wheel_cache = WheelCache(options.cache_dir, options.format_control) + with BuildDirectory(options.build_dir, + delete=build_delete) as build_dir: + requirement_set = RequirementSet( + build_dir=build_dir, + src_dir=options.src_dir, + download_dir=None, + ignore_dependencies=options.ignore_dependencies, + ignore_installed=True, + isolated=options.isolated_mode, + session=session, + wheel_cache=wheel_cache, + wheel_download_dir=options.wheel_dir, + require_hashes=options.require_hashes + ) + + self.populate_requirement_set( + requirement_set, args, options, finder, session, self.name, + wheel_cache + ) + + if not requirement_set.has_requirements: + return + + try: + # build wheels + wb = WheelBuilder( + requirement_set, + finder, + build_options=options.build_options or [], + global_options=options.global_options or [], + ) + if not wb.build(): + raise CommandError( + "Failed to build one or more wheels" + ) + except PreviousBuildDirError: + options.no_clean = True + raise + finally: + if not options.no_clean: + requirement_set.cleanup_files() diff --git a/lib/python3.4/site-packages/pip/compat/__init__.py b/lib/python3.4/site-packages/pip/compat/__init__.py new file mode 100644 index 0000000..3b01763 --- /dev/null +++ b/lib/python3.4/site-packages/pip/compat/__init__.py @@ -0,0 +1,158 @@ +"""Stuff that differs in different Python versions and platform +distributions.""" +from __future__ import absolute_import, division + +import os +import sys + +from pip._vendor.six import text_type + +try: + from logging.config import dictConfig as logging_dictConfig +except ImportError: + from pip.compat.dictconfig import dictConfig as logging_dictConfig + +try: + import ipaddress +except ImportError: + try: + from pip._vendor import ipaddress + except ImportError: + import ipaddr as ipaddress + ipaddress.ip_address = ipaddress.IPAddress + ipaddress.ip_network = ipaddress.IPNetwork + + +try: + import sysconfig + + def get_stdlib(): + paths = [ + sysconfig.get_path("stdlib"), + sysconfig.get_path("platstdlib"), + ] + return set(filter(bool, paths)) +except ImportError: + from distutils import sysconfig + + def get_stdlib(): + paths = [ + sysconfig.get_python_lib(standard_lib=True), + sysconfig.get_python_lib(standard_lib=True, plat_specific=True), + ] + return set(filter(bool, paths)) + + +__all__ = [ + "logging_dictConfig", "ipaddress", "uses_pycache", "console_to_str", + "native_str", "get_path_uid", "stdlib_pkgs", "WINDOWS", "samefile" +] + + +if sys.version_info >= (3, 4): + uses_pycache = True + from importlib.util import cache_from_source +else: + import imp + uses_pycache = hasattr(imp, 'cache_from_source') + if uses_pycache: + cache_from_source = imp.cache_from_source + else: + cache_from_source = None + + +if sys.version_info >= (3,): + def console_to_str(s): + try: + return s.decode(sys.__stdout__.encoding) + except UnicodeDecodeError: + return s.decode('utf_8') + + def native_str(s, replace=False): + if isinstance(s, bytes): + return s.decode('utf-8', 'replace' if replace else 
'strict')
+        return s
+
+else:
+    def console_to_str(s):
+        return s
+
+    def native_str(s, replace=False):
+        # Replace is ignored -- unicode to UTF-8 can't fail
+        if isinstance(s, text_type):
+            return s.encode('utf-8')
+        return s
+
+
+def total_seconds(td):
+    if hasattr(td, "total_seconds"):
+        return td.total_seconds()
+    else:
+        val = td.microseconds + (td.seconds + td.days * 24 * 3600) * 10 ** 6
+        return val / 10 ** 6
+
+
+def get_path_uid(path):
+    """
+    Return path's uid.
+
+    Does not follow symlinks:
+        https://github.com/pypa/pip/pull/935#discussion_r5307003
+
+    Placed this function in compat due to differences on AIX and
+    Jython, that should eventually go away.
+
+    :raises OSError: When path is a symlink or can't be read.
+    """
+    if hasattr(os, 'O_NOFOLLOW'):
+        fd = os.open(path, os.O_RDONLY | os.O_NOFOLLOW)
+        file_uid = os.fstat(fd).st_uid
+        os.close(fd)
+    else:  # AIX and Jython
+        # WARNING: time of check vulnerability, but best we can do w/o NOFOLLOW
+        if not os.path.islink(path):
+            # older versions of Jython don't have `os.fstat`
+            file_uid = os.stat(path).st_uid
+        else:
+            # raise OSError for parity with os.O_NOFOLLOW above
+            raise OSError(
+                "%s is a symlink; Will not return uid for symlinks" % path
+            )
+    return file_uid
+
+
+def expanduser(path):
+    """
+    Expand ~ and ~user constructions.
+
+    Includes a workaround for http://bugs.python.org/issue14768
+    """
+    expanded = os.path.expanduser(path)
+    if path.startswith('~/') and expanded.startswith('//'):
+        expanded = expanded[1:]
+    return expanded
+
+
+# packages in the stdlib that may have installation metadata, but should not be
+# considered 'installed'. this theoretically could be determined based on
+# dist.location (py27:`sysconfig.get_paths()['stdlib']`,
+# py26:sysconfig.get_config_vars('LIBDEST')), but fear platform variation may
+# make this ineffective, so hard-coding
+stdlib_pkgs = ['python', 'wsgiref']
+if sys.version_info >= (2, 7):
+    stdlib_pkgs.extend(['argparse'])
+
+
+# windows detection, covers cpython and ironpython
+WINDOWS = (sys.platform.startswith("win") or
+           (sys.platform == 'cli' and os.name == 'nt'))
+
+
+def samefile(file1, file2):
+    """Provide an alternative for os.path.samefile on Windows/Python2"""
+    if hasattr(os.path, 'samefile'):
+        return os.path.samefile(file1, file2)
+    else:
+        path1 = os.path.normcase(os.path.abspath(file1))
+        path2 = os.path.normcase(os.path.abspath(file2))
+        return path1 == path2
diff --git a/lib/python3.4/site-packages/pip/compat/dictconfig.py b/lib/python3.4/site-packages/pip/compat/dictconfig.py
new file mode 100644
index 0000000..ec684aa
--- /dev/null
+++ b/lib/python3.4/site-packages/pip/compat/dictconfig.py
@@ -0,0 +1,565 @@
+# This is a copy of the Python logging.config.dictconfig module,
+# reproduced with permission. It is provided here for backwards
+# compatibility for Python versions prior to 2.7.
+#
+# Copyright 2009-2010 by Vinay Sajip. All Rights Reserved.
+#
+# Permission to use, copy, modify, and distribute this software and its
+# documentation for any purpose and without fee is hereby granted,
+# provided that the above copyright notice appear in all copies and that
+# both that copyright notice and this permission notice appear in
+# supporting documentation, and that the name of Vinay Sajip
+# not be used in advertising or publicity pertaining to distribution
+# of the software without specific, written prior permission.
+# VINAY SAJIP DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING
+# ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS.
IN NO EVENT SHALL +# VINAY SAJIP BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR +# ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER +# IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. +from __future__ import absolute_import + +import logging.handlers +import re +import sys +import types + +from pip._vendor import six + +# flake8: noqa + +IDENTIFIER = re.compile('^[a-z_][a-z0-9_]*$', re.I) + + +def valid_ident(s): + m = IDENTIFIER.match(s) + if not m: + raise ValueError('Not a valid Python identifier: %r' % s) + return True + +# +# This function is defined in logging only in recent versions of Python +# +try: + from logging import _checkLevel +except ImportError: + def _checkLevel(level): + if isinstance(level, int): + rv = level + elif str(level) == level: + if level not in logging._levelNames: + raise ValueError('Unknown level: %r' % level) + rv = logging._levelNames[level] + else: + raise TypeError('Level not an integer or a ' + 'valid string: %r' % level) + return rv + +# The ConvertingXXX classes are wrappers around standard Python containers, +# and they serve to convert any suitable values in the container. The +# conversion converts base dicts, lists and tuples to their wrapped +# equivalents, whereas strings which match a conversion format are converted +# appropriately. +# +# Each wrapper should have a configurator attribute holding the actual +# configurator to use for conversion. + + +class ConvertingDict(dict): + """A converting dictionary wrapper.""" + + def __getitem__(self, key): + value = dict.__getitem__(self, key) + result = self.configurator.convert(value) + # If the converted value is different, save for next time + if value is not result: + self[key] = result + if type(result) in (ConvertingDict, ConvertingList, + ConvertingTuple): + result.parent = self + result.key = key + return result + + def get(self, key, default=None): + value = dict.get(self, key, default) + result = self.configurator.convert(value) + # If the converted value is different, save for next time + if value is not result: + self[key] = result + if type(result) in (ConvertingDict, ConvertingList, + ConvertingTuple): + result.parent = self + result.key = key + return result + + def pop(self, key, default=None): + value = dict.pop(self, key, default) + result = self.configurator.convert(value) + if value is not result: + if type(result) in (ConvertingDict, ConvertingList, + ConvertingTuple): + result.parent = self + result.key = key + return result + + +class ConvertingList(list): + """A converting list wrapper.""" + def __getitem__(self, key): + value = list.__getitem__(self, key) + result = self.configurator.convert(value) + # If the converted value is different, save for next time + if value is not result: + self[key] = result + if type(result) in (ConvertingDict, ConvertingList, + ConvertingTuple): + result.parent = self + result.key = key + return result + + def pop(self, idx=-1): + value = list.pop(self, idx) + result = self.configurator.convert(value) + if value is not result: + if type(result) in (ConvertingDict, ConvertingList, + ConvertingTuple): + result.parent = self + return result + + +class ConvertingTuple(tuple): + """A converting tuple wrapper.""" + def __getitem__(self, key): + value = tuple.__getitem__(self, key) + result = self.configurator.convert(value) + if value is not result: + if type(result) in (ConvertingDict, ConvertingList, + 
ConvertingTuple):
+                result.parent = self
+                result.key = key
+        return result
+
+
+class BaseConfigurator(object):
+    """
+    The configurator base class which defines some useful defaults.
+    """
+
+    CONVERT_PATTERN = re.compile(r'^(?P<prefix>[a-z]+)://(?P<suffix>.*)$')
+
+    WORD_PATTERN = re.compile(r'^\s*(\w+)\s*')
+    DOT_PATTERN = re.compile(r'^\.\s*(\w+)\s*')
+    INDEX_PATTERN = re.compile(r'^\[\s*(\w+)\s*\]\s*')
+    DIGIT_PATTERN = re.compile(r'^\d+$')
+
+    value_converters = {
+        'ext' : 'ext_convert',
+        'cfg' : 'cfg_convert',
+    }
+
+    # We might want to use a different one, e.g. importlib
+    importer = __import__
+
+    def __init__(self, config):
+        self.config = ConvertingDict(config)
+        self.config.configurator = self
+
+    def resolve(self, s):
+        """
+        Resolve strings to objects using standard import and attribute
+        syntax.
+        """
+        name = s.split('.')
+        used = name.pop(0)
+        try:
+            found = self.importer(used)
+            for frag in name:
+                used += '.' + frag
+                try:
+                    found = getattr(found, frag)
+                except AttributeError:
+                    self.importer(used)
+                    found = getattr(found, frag)
+            return found
+        except ImportError:
+            e, tb = sys.exc_info()[1:]
+            v = ValueError('Cannot resolve %r: %s' % (s, e))
+            v.__cause__, v.__traceback__ = e, tb
+            raise v
+
+    def ext_convert(self, value):
+        """Default converter for the ext:// protocol."""
+        return self.resolve(value)
+
+    def cfg_convert(self, value):
+        """Default converter for the cfg:// protocol."""
+        rest = value
+        m = self.WORD_PATTERN.match(rest)
+        if m is None:
+            raise ValueError("Unable to convert %r" % value)
+        else:
+            rest = rest[m.end():]
+            d = self.config[m.groups()[0]]
+            # print d, rest
+            while rest:
+                m = self.DOT_PATTERN.match(rest)
+                if m:
+                    d = d[m.groups()[0]]
+                else:
+                    m = self.INDEX_PATTERN.match(rest)
+                    if m:
+                        idx = m.groups()[0]
+                        if not self.DIGIT_PATTERN.match(idx):
+                            d = d[idx]
+                        else:
+                            try:
+                                n = int(idx)  # try as number first (most likely)
+                                d = d[n]
+                            except TypeError:
+                                d = d[idx]
+                if m:
+                    rest = rest[m.end():]
+                else:
+                    raise ValueError('Unable to convert '
+                                     '%r at %r' % (value, rest))
+        # rest should be empty
+        return d
+
+    def convert(self, value):
+        """
+        Convert values to an appropriate type. dicts, lists and tuples are
+        replaced by their converting alternatives. Strings are checked to
+        see if they have a conversion format and are converted if they do.
+ """ + if not isinstance(value, ConvertingDict) and isinstance(value, dict): + value = ConvertingDict(value) + value.configurator = self + elif not isinstance(value, ConvertingList) and isinstance(value, list): + value = ConvertingList(value) + value.configurator = self + elif not isinstance(value, ConvertingTuple) and\ + isinstance(value, tuple): + value = ConvertingTuple(value) + value.configurator = self + elif isinstance(value, six.string_types): # str for py3k + m = self.CONVERT_PATTERN.match(value) + if m: + d = m.groupdict() + prefix = d['prefix'] + converter = self.value_converters.get(prefix, None) + if converter: + suffix = d['suffix'] + converter = getattr(self, converter) + value = converter(suffix) + return value + + def configure_custom(self, config): + """Configure an object with a user-supplied factory.""" + c = config.pop('()') + if not hasattr(c, '__call__') and hasattr(types, 'ClassType') and type(c) != types.ClassType: + c = self.resolve(c) + props = config.pop('.', None) + # Check for valid identifiers + kwargs = dict((k, config[k]) for k in config if valid_ident(k)) + result = c(**kwargs) + if props: + for name, value in props.items(): + setattr(result, name, value) + return result + + def as_tuple(self, value): + """Utility function which converts lists to tuples.""" + if isinstance(value, list): + value = tuple(value) + return value + + +class DictConfigurator(BaseConfigurator): + """ + Configure logging using a dictionary-like object to describe the + configuration. + """ + + def configure(self): + """Do the configuration.""" + + config = self.config + if 'version' not in config: + raise ValueError("dictionary doesn't specify a version") + if config['version'] != 1: + raise ValueError("Unsupported version: %s" % config['version']) + incremental = config.pop('incremental', False) + EMPTY_DICT = {} + logging._acquireLock() + try: + if incremental: + handlers = config.get('handlers', EMPTY_DICT) + # incremental handler config only if handler name + # ties in to logging._handlers (Python 2.7) + if sys.version_info[:2] == (2, 7): + for name in handlers: + if name not in logging._handlers: + raise ValueError('No handler found with ' + 'name %r' % name) + else: + try: + handler = logging._handlers[name] + handler_config = handlers[name] + level = handler_config.get('level', None) + if level: + handler.setLevel(_checkLevel(level)) + except StandardError as e: + raise ValueError('Unable to configure handler ' + '%r: %s' % (name, e)) + loggers = config.get('loggers', EMPTY_DICT) + for name in loggers: + try: + self.configure_logger(name, loggers[name], True) + except StandardError as e: + raise ValueError('Unable to configure logger ' + '%r: %s' % (name, e)) + root = config.get('root', None) + if root: + try: + self.configure_root(root, True) + except StandardError as e: + raise ValueError('Unable to configure root ' + 'logger: %s' % e) + else: + disable_existing = config.pop('disable_existing_loggers', True) + + logging._handlers.clear() + del logging._handlerList[:] + + # Do formatters first - they don't refer to anything else + formatters = config.get('formatters', EMPTY_DICT) + for name in formatters: + try: + formatters[name] = self.configure_formatter( + formatters[name]) + except StandardError as e: + raise ValueError('Unable to configure ' + 'formatter %r: %s' % (name, e)) + # Next, do filters - they don't refer to anything else, either + filters = config.get('filters', EMPTY_DICT) + for name in filters: + try: + filters[name] = self.configure_filter(filters[name]) 
+ except StandardError as e: + raise ValueError('Unable to configure ' + 'filter %r: %s' % (name, e)) + + # Next, do handlers - they refer to formatters and filters + # As handlers can refer to other handlers, sort the keys + # to allow a deterministic order of configuration + handlers = config.get('handlers', EMPTY_DICT) + for name in sorted(handlers): + try: + handler = self.configure_handler(handlers[name]) + handler.name = name + handlers[name] = handler + except StandardError as e: + raise ValueError('Unable to configure handler ' + '%r: %s' % (name, e)) + # Next, do loggers - they refer to handlers and filters + + # we don't want to lose the existing loggers, + # since other threads may have pointers to them. + # existing is set to contain all existing loggers, + # and as we go through the new configuration we + # remove any which are configured. At the end, + # what's left in existing is the set of loggers + # which were in the previous configuration but + # which are not in the new configuration. + root = logging.root + existing = list(root.manager.loggerDict) + # The list needs to be sorted so that we can + # avoid disabling child loggers of explicitly + # named loggers. With a sorted list it is easier + # to find the child loggers. + existing.sort() + # We'll keep the list of existing loggers + # which are children of named loggers here... + child_loggers = [] + # now set up the new ones... + loggers = config.get('loggers', EMPTY_DICT) + for name in loggers: + if name in existing: + i = existing.index(name) + prefixed = name + "." + pflen = len(prefixed) + num_existing = len(existing) + i = i + 1 # look at the entry after name + while (i < num_existing) and\ + (existing[i][:pflen] == prefixed): + child_loggers.append(existing[i]) + i = i + 1 + existing.remove(name) + try: + self.configure_logger(name, loggers[name]) + except StandardError as e: + raise ValueError('Unable to configure logger ' + '%r: %s' % (name, e)) + + # Disable any old loggers. There's no point deleting + # them as other threads may continue to hold references + # and by disabling them, you stop them doing any logging. + # However, don't disable children of named loggers, as that's + # probably not what was intended by the user. + for log in existing: + logger = root.manager.loggerDict[log] + if log in child_loggers: + logger.level = logging.NOTSET + logger.handlers = [] + logger.propagate = True + elif disable_existing: + logger.disabled = True + + # And finally, do the root logger + root = config.get('root', None) + if root: + try: + self.configure_root(root) + except StandardError as e: + raise ValueError('Unable to configure root ' + 'logger: %s' % e) + finally: + logging._releaseLock() + + def configure_formatter(self, config): + """Configure a formatter from a dictionary.""" + if '()' in config: + factory = config['()'] # for use in exception handler + try: + result = self.configure_custom(config) + except TypeError as te: + if "'format'" not in str(te): + raise + # Name of parameter changed from fmt to format. + # Retry with old name. + # This is so that code can be used with older Python versions + #(e.g. 
by Django) + config['fmt'] = config.pop('format') + config['()'] = factory + result = self.configure_custom(config) + else: + fmt = config.get('format', None) + dfmt = config.get('datefmt', None) + result = logging.Formatter(fmt, dfmt) + return result + + def configure_filter(self, config): + """Configure a filter from a dictionary.""" + if '()' in config: + result = self.configure_custom(config) + else: + name = config.get('name', '') + result = logging.Filter(name) + return result + + def add_filters(self, filterer, filters): + """Add filters to a filterer from a list of names.""" + for f in filters: + try: + filterer.addFilter(self.config['filters'][f]) + except StandardError as e: + raise ValueError('Unable to add filter %r: %s' % (f, e)) + + def configure_handler(self, config): + """Configure a handler from a dictionary.""" + formatter = config.pop('formatter', None) + if formatter: + try: + formatter = self.config['formatters'][formatter] + except StandardError as e: + raise ValueError('Unable to set formatter ' + '%r: %s' % (formatter, e)) + level = config.pop('level', None) + filters = config.pop('filters', None) + if '()' in config: + c = config.pop('()') + if not hasattr(c, '__call__') and hasattr(types, 'ClassType') and type(c) != types.ClassType: + c = self.resolve(c) + factory = c + else: + klass = self.resolve(config.pop('class')) + # Special case for handler which refers to another handler + if issubclass(klass, logging.handlers.MemoryHandler) and\ + 'target' in config: + try: + config['target'] = self.config['handlers'][config['target']] + except StandardError as e: + raise ValueError('Unable to set target handler ' + '%r: %s' % (config['target'], e)) + elif issubclass(klass, logging.handlers.SMTPHandler) and\ + 'mailhost' in config: + config['mailhost'] = self.as_tuple(config['mailhost']) + elif issubclass(klass, logging.handlers.SysLogHandler) and\ + 'address' in config: + config['address'] = self.as_tuple(config['address']) + factory = klass + kwargs = dict((k, config[k]) for k in config if valid_ident(k)) + try: + result = factory(**kwargs) + except TypeError as te: + if "'stream'" not in str(te): + raise + # The argument name changed from strm to stream + # Retry with old name. + # This is so that code can be used with older Python versions + #(e.g. by Django) + kwargs['strm'] = kwargs.pop('stream') + result = factory(**kwargs) + if formatter: + result.setFormatter(formatter) + if level is not None: + result.setLevel(_checkLevel(level)) + if filters: + self.add_filters(result, filters) + return result + + def add_handlers(self, logger, handlers): + """Add handlers to a logger from a list of names.""" + for h in handlers: + try: + logger.addHandler(self.config['handlers'][h]) + except StandardError as e: + raise ValueError('Unable to add handler %r: %s' % (h, e)) + + def common_logger_config(self, logger, config, incremental=False): + """ + Perform configuration which is common to root and non-root loggers. 
+ """ + level = config.get('level', None) + if level is not None: + logger.setLevel(_checkLevel(level)) + if not incremental: + # Remove any existing handlers + for h in logger.handlers[:]: + logger.removeHandler(h) + handlers = config.get('handlers', None) + if handlers: + self.add_handlers(logger, handlers) + filters = config.get('filters', None) + if filters: + self.add_filters(logger, filters) + + def configure_logger(self, name, config, incremental=False): + """Configure a non-root logger from a dictionary.""" + logger = logging.getLogger(name) + self.common_logger_config(logger, config, incremental) + propagate = config.get('propagate', None) + if propagate is not None: + logger.propagate = propagate + + def configure_root(self, config, incremental=False): + """Configure a root logger from a dictionary.""" + root = logging.getLogger() + self.common_logger_config(root, config, incremental) + +dictConfigClass = DictConfigurator + + +def dictConfig(config): + """Configure logging using a dictionary.""" + dictConfigClass(config).configure() diff --git a/lib/python3.4/site-packages/pip/download.py b/lib/python3.4/site-packages/pip/download.py new file mode 100644 index 0000000..e447b01 --- /dev/null +++ b/lib/python3.4/site-packages/pip/download.py @@ -0,0 +1,894 @@ +from __future__ import absolute_import + +import cgi +import email.utils +import getpass +import json +import logging +import mimetypes +import os +import platform +import re +import shutil +import sys +import tempfile + +try: + import ssl # noqa + HAS_TLS = True +except ImportError: + HAS_TLS = False + +from pip._vendor.six.moves.urllib import parse as urllib_parse +from pip._vendor.six.moves.urllib import request as urllib_request + +import pip + +from pip.exceptions import InstallationError, HashMismatch +from pip.models import PyPI +from pip.utils import (splitext, rmtree, format_size, display_path, + backup_dir, ask_path_exists, unpack_file, + ARCHIVE_EXTENSIONS, consume, call_subprocess) +from pip.utils.filesystem import check_path_owner +from pip.utils.logging import indent_log +from pip.utils.setuptools_build import SETUPTOOLS_SHIM +from pip.utils.ui import DownloadProgressBar, DownloadProgressSpinner +from pip.locations import write_delete_marker_file +from pip.vcs import vcs +from pip._vendor import requests, six +from pip._vendor.requests.adapters import BaseAdapter, HTTPAdapter +from pip._vendor.requests.auth import AuthBase, HTTPBasicAuth +from pip._vendor.requests.models import Response +from pip._vendor.requests.structures import CaseInsensitiveDict +from pip._vendor.requests.packages import urllib3 +from pip._vendor.cachecontrol import CacheControlAdapter +from pip._vendor.cachecontrol.caches import FileCache +from pip._vendor.lockfile import LockError +from pip._vendor.six.moves import xmlrpc_client + + +__all__ = ['get_file_content', + 'is_url', 'url_to_path', 'path_to_url', + 'is_archive_file', 'unpack_vcs_link', + 'unpack_file_url', 'is_vcs_url', 'is_file_url', + 'unpack_http_url', 'unpack_url'] + + +logger = logging.getLogger(__name__) + + +def user_agent(): + """ + Return a string representing the user agent. 
+ """ + data = { + "installer": {"name": "pip", "version": pip.__version__}, + "python": platform.python_version(), + "implementation": { + "name": platform.python_implementation(), + }, + } + + if data["implementation"]["name"] == 'CPython': + data["implementation"]["version"] = platform.python_version() + elif data["implementation"]["name"] == 'PyPy': + if sys.pypy_version_info.releaselevel == 'final': + pypy_version_info = sys.pypy_version_info[:3] + else: + pypy_version_info = sys.pypy_version_info + data["implementation"]["version"] = ".".join( + [str(x) for x in pypy_version_info] + ) + elif data["implementation"]["name"] == 'Jython': + # Complete Guess + data["implementation"]["version"] = platform.python_version() + elif data["implementation"]["name"] == 'IronPython': + # Complete Guess + data["implementation"]["version"] = platform.python_version() + + if sys.platform.startswith("linux"): + distro = dict(filter( + lambda x: x[1], + zip(["name", "version", "id"], platform.linux_distribution()), + )) + libc = dict(filter( + lambda x: x[1], + zip(["lib", "version"], platform.libc_ver()), + )) + if libc: + distro["libc"] = libc + if distro: + data["distro"] = distro + + if sys.platform.startswith("darwin") and platform.mac_ver()[0]: + data["distro"] = {"name": "OS X", "version": platform.mac_ver()[0]} + + if platform.system(): + data.setdefault("system", {})["name"] = platform.system() + + if platform.release(): + data.setdefault("system", {})["release"] = platform.release() + + if platform.machine(): + data["cpu"] = platform.machine() + + # Python 2.6 doesn't have ssl.OPENSSL_VERSION. + if HAS_TLS and sys.version_info[:2] > (2, 6): + data["openssl_version"] = ssl.OPENSSL_VERSION + + return "{data[installer][name]}/{data[installer][version]} {json}".format( + data=data, + json=json.dumps(data, separators=(",", ":"), sort_keys=True), + ) + + +class MultiDomainBasicAuth(AuthBase): + + def __init__(self, prompting=True): + self.prompting = prompting + self.passwords = {} + + def __call__(self, req): + parsed = urllib_parse.urlparse(req.url) + + # Get the netloc without any embedded credentials + netloc = parsed.netloc.rsplit("@", 1)[-1] + + # Set the url of the request to the url without any credentials + req.url = urllib_parse.urlunparse(parsed[:1] + (netloc,) + parsed[2:]) + + # Use any stored credentials that we have for this netloc + username, password = self.passwords.get(netloc, (None, None)) + + # Extract credentials embedded in the url if we have none stored + if username is None: + username, password = self.parse_credentials(parsed.netloc) + + if username or password: + # Store the username and password + self.passwords[netloc] = (username, password) + + # Send the basic auth with this request + req = HTTPBasicAuth(username or "", password or "")(req) + + # Attach a hook to handle 401 responses + req.register_hook("response", self.handle_401) + + return req + + def handle_401(self, resp, **kwargs): + # We only care about 401 responses, anything else we want to just + # pass through the actual response + if resp.status_code != 401: + return resp + + # We are not able to prompt the user so simple return the response + if not self.prompting: + return resp + + parsed = urllib_parse.urlparse(resp.url) + + # Prompt the user for a new username and password + username = six.moves.input("User for %s: " % parsed.netloc) + password = getpass.getpass("Password: ") + + # Store the new username and password to use for future requests + if username or password: + self.passwords[parsed.netloc] 
= (username, password) + + # Consume content and release the original connection to allow our new + # request to reuse the same one. + resp.content + resp.raw.release_conn() + + # Add our new username and password to the request + req = HTTPBasicAuth(username or "", password or "")(resp.request) + + # Send our new request + new_resp = resp.connection.send(req, **kwargs) + new_resp.history.append(resp) + + return new_resp + + def parse_credentials(self, netloc): + if "@" in netloc: + userinfo = netloc.rsplit("@", 1)[0] + if ":" in userinfo: + return userinfo.split(":", 1) + return userinfo, None + return None, None + + +class LocalFSAdapter(BaseAdapter): + + def send(self, request, stream=None, timeout=None, verify=None, cert=None, + proxies=None): + pathname = url_to_path(request.url) + + resp = Response() + resp.status_code = 200 + resp.url = request.url + + try: + stats = os.stat(pathname) + except OSError as exc: + resp.status_code = 404 + resp.raw = exc + else: + modified = email.utils.formatdate(stats.st_mtime, usegmt=True) + content_type = mimetypes.guess_type(pathname)[0] or "text/plain" + resp.headers = CaseInsensitiveDict({ + "Content-Type": content_type, + "Content-Length": stats.st_size, + "Last-Modified": modified, + }) + + resp.raw = open(pathname, "rb") + resp.close = resp.raw.close + + return resp + + def close(self): + pass + + +class SafeFileCache(FileCache): + """ + A file based cache which is safe to use even when the target directory may + not be accessible or writable. + """ + + def __init__(self, *args, **kwargs): + super(SafeFileCache, self).__init__(*args, **kwargs) + + # Check to ensure that the directory containing our cache directory + # is owned by the user current executing pip. If it does not exist + # we will check the parent directory until we find one that does exist. + # If it is not owned by the user executing pip then we will disable + # the cache and log a warning. + if not check_path_owner(self.directory): + logger.warning( + "The directory '%s' or its parent directory is not owned by " + "the current user and the cache has been disabled. Please " + "check the permissions and owner of that directory. If " + "executing pip with sudo, you may want sudo's -H flag.", + self.directory, + ) + + # Set our directory to None to disable the Cache + self.directory = None + + def get(self, *args, **kwargs): + # If we don't have a directory, then the cache should be a no-op. + if self.directory is None: + return + + try: + return super(SafeFileCache, self).get(*args, **kwargs) + except (LockError, OSError, IOError): + # We intentionally silence this error, if we can't access the cache + # then we can just skip caching and process the request as if + # caching wasn't enabled. + pass + + def set(self, *args, **kwargs): + # If we don't have a directory, then the cache should be a no-op. + if self.directory is None: + return + + try: + return super(SafeFileCache, self).set(*args, **kwargs) + except (LockError, OSError, IOError): + # We intentionally silence this error, if we can't access the cache + # then we can just skip caching and process the request as if + # caching wasn't enabled. + pass + + def delete(self, *args, **kwargs): + # If we don't have a directory, then the cache should be a no-op. 
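+        # As in get() and set() above, any locking or filesystem error below
+        # is deliberately swallowed so that a broken cache never aborts the
+        # actual request.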
+        if self.directory is None:
+            return
+
+        try:
+            return super(SafeFileCache, self).delete(*args, **kwargs)
+        except (LockError, OSError, IOError):
+            # We intentionally silence this error; if we can't access the
+            # cache then we can just skip caching and process the request as
+            # if caching wasn't enabled.
+            pass
+
+
+class InsecureHTTPAdapter(HTTPAdapter):
+
+    def cert_verify(self, conn, url, verify, cert):
+        conn.cert_reqs = 'CERT_NONE'
+        conn.ca_certs = None
+
+
+class PipSession(requests.Session):
+
+    timeout = None
+
+    def __init__(self, *args, **kwargs):
+        retries = kwargs.pop("retries", 0)
+        cache = kwargs.pop("cache", None)
+        insecure_hosts = kwargs.pop("insecure_hosts", [])
+
+        super(PipSession, self).__init__(*args, **kwargs)
+
+        # Attach our User Agent to the request
+        self.headers["User-Agent"] = user_agent()
+
+        # Attach our Authentication handler to the session
+        self.auth = MultiDomainBasicAuth()
+
+        # Create our urllib3.Retry instance which will allow us to customize
+        # how we handle retries.
+        retries = urllib3.Retry(
+            # Set the total number of retries that a particular request can
+            # have.
+            total=retries,
+
+            # A 503 error from PyPI typically means that the Fastly -> Origin
+            # connection got interrupted in some way. A 503 error in general
+            # is typically considered a transient error, so we'll go ahead and
+            # retry it.
+            status_forcelist=[503],
+
+            # Add a small amount of backoff between failed requests in
+            # order to prevent hammering the service.
+            backoff_factor=0.25,
+        )
+
+        # We want to _only_ cache responses on securely fetched origins. We do
+        # this because we can't validate the response of an insecurely fetched
+        # origin, and we don't want someone to be able to poison the cache and
+        # require manual eviction from the cache to fix it.
+        if cache:
+            secure_adapter = CacheControlAdapter(
+                cache=SafeFileCache(cache, use_dir_lock=True),
+                max_retries=retries,
+            )
+        else:
+            secure_adapter = HTTPAdapter(max_retries=retries)
+
+        # Our Insecure HTTPAdapter disables HTTPS validation. It does not
+        # support caching (see above) so we'll use it for all http:// URLs as
+        # well as any https:// host that we've marked as ignoring TLS errors
+        # for.
+        insecure_adapter = InsecureHTTPAdapter(max_retries=retries)
+
+        self.mount("https://", secure_adapter)
+        self.mount("http://", insecure_adapter)
+
+        # Enable file:// urls
+        self.mount("file://", LocalFSAdapter())
+
+        # We want to use a non-validating adapter for any requests which are
+        # deemed insecure.
+        for host in insecure_hosts:
+            self.mount("https://{0}/".format(host), insecure_adapter)
+
+    def request(self, method, url, *args, **kwargs):
+        # Allow setting a default timeout on a session
+        kwargs.setdefault("timeout", self.timeout)
+
+        # Dispatch the actual request
+        return super(PipSession, self).request(method, url, *args, **kwargs)
+
+
+def get_file_content(url, comes_from=None, session=None):
+    """Gets the content of a file; it may be a filename, file: URL, or
+    http: URL. Returns (location, content).
+    Content is unicode."""
+    if session is None:
+        raise TypeError(
+            "get_file_content() missing 1 required keyword argument: 'session'"
+        )
+
+    match = _scheme_re.search(url)
+    if match:
+        scheme = match.group(1).lower()
+        if (scheme == 'file' and comes_from and
+                comes_from.startswith('http')):
+            raise InstallationError(
+                'Requirements file %s references URL %s, which is local'
+                % (comes_from, url))
+        if scheme == 'file':
+            path = url.split(':', 1)[1]
+            path = path.replace('\\', '/')
+            match = _url_slash_drive_re.match(path)
+            if match:
+                path = match.group(1) + ':' + path.split('|', 1)[1]
+            path = urllib_parse.unquote(path)
+            if path.startswith('/'):
+                path = '/' + path.lstrip('/')
+            url = path
+        else:
+            # FIXME: catch some errors
+            resp = session.get(url)
+            resp.raise_for_status()
+
+            if six.PY3:
+                return resp.url, resp.text
+            else:
+                return resp.url, resp.content
+    try:
+        with open(url) as f:
+            content = f.read()
+    except IOError as exc:
+        raise InstallationError(
+            'Could not open requirements file: %s' % str(exc)
+        )
+    return url, content
+
+
+_scheme_re = re.compile(r'^(http|https|file):', re.I)
+_url_slash_drive_re = re.compile(r'/*([a-z])\|', re.I)
+
+
+def is_url(name):
+    """Returns true if the name looks like a URL"""
+    if ':' not in name:
+        return False
+    scheme = name.split(':', 1)[0].lower()
+    return scheme in ['http', 'https', 'file', 'ftp'] + vcs.all_schemes
+
+
+def url_to_path(url):
+    """
+    Convert a file: URL to a path.
+    """
+    assert url.startswith('file:'), (
+        "You can only turn file: urls into filenames (not %r)" % url)
+
+    _, netloc, path, _, _ = urllib_parse.urlsplit(url)
+
+    # if we have a UNC path, prepend UNC share notation
+    if netloc:
+        netloc = '\\\\' + netloc
+
+    path = urllib_request.url2pathname(netloc + path)
+    return path
+
+
+def path_to_url(path):
+    """
+    Convert a path to a file: URL. The path will be made absolute and have
+    quoted path parts.
+    """
+    path = os.path.normpath(os.path.abspath(path))
+    url = urllib_parse.urljoin('file:', urllib_request.pathname2url(path))
+    return url
+
+
+def is_archive_file(name):
+    """Return True if `name` is considered an archive file."""
+    ext = splitext(name)[1].lower()
+    if ext in ARCHIVE_EXTENSIONS:
+        return True
+    return False
+
+
+def unpack_vcs_link(link, location):
+    vcs_backend = _get_used_vcs_backend(link)
+    vcs_backend.unpack(location)
+
+
+def _get_used_vcs_backend(link):
+    for backend in vcs.backends:
+        if link.scheme in backend.schemes:
+            vcs_backend = backend(link.url)
+            return vcs_backend
+
+
+def is_vcs_url(link):
+    return bool(_get_used_vcs_backend(link))
+
+
+def is_file_url(link):
+    return link.url.lower().startswith('file:')
+
+
+def is_dir_url(link):
+    """Return whether a file:// Link points to a directory.
+
+    ``link`` must not have any other scheme but file://. Call is_file_url()
+    first.
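+
+    A hypothetical example::
+
+        is_dir_url(Link('file:///tmp/project'))         # True for a directory
+        is_dir_url(Link('file:///tmp/pkg-1.0.tar.gz'))  # False for a file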
+ + """ + link_path = url_to_path(link.url_without_fragment) + return os.path.isdir(link_path) + + +def _progress_indicator(iterable, *args, **kwargs): + return iterable + + +def _download_url(resp, link, content_file, hashes): + try: + total_length = int(resp.headers['content-length']) + except (ValueError, KeyError, TypeError): + total_length = 0 + + cached_resp = getattr(resp, "from_cache", False) + + if logger.getEffectiveLevel() > logging.INFO: + show_progress = False + elif cached_resp: + show_progress = False + elif total_length > (40 * 1000): + show_progress = True + elif not total_length: + show_progress = True + else: + show_progress = False + + show_url = link.show_url + + def resp_read(chunk_size): + try: + # Special case for urllib3. + for chunk in resp.raw.stream( + chunk_size, + # We use decode_content=False here because we don't + # want urllib3 to mess with the raw bytes we get + # from the server. If we decompress inside of + # urllib3 then we cannot verify the checksum + # because the checksum will be of the compressed + # file. This breakage will only occur if the + # server adds a Content-Encoding header, which + # depends on how the server was configured: + # - Some servers will notice that the file isn't a + # compressible file and will leave the file alone + # and with an empty Content-Encoding + # - Some servers will notice that the file is + # already compressed and will leave the file + # alone and will add a Content-Encoding: gzip + # header + # - Some servers won't notice anything at all and + # will take a file that's already been compressed + # and compress it again and set the + # Content-Encoding: gzip header + # + # By setting this not to decode automatically we + # hope to eliminate problems with the second case. + decode_content=False): + yield chunk + except AttributeError: + # Standard file-like object. + while True: + chunk = resp.raw.read(chunk_size) + if not chunk: + break + yield chunk + + def written_chunks(chunks): + for chunk in chunks: + content_file.write(chunk) + yield chunk + + progress_indicator = _progress_indicator + + if link.netloc == PyPI.netloc: + url = show_url + else: + url = link.url_without_fragment + + if show_progress: # We don't show progress on cached responses + if total_length: + logger.info("Downloading %s (%s)", url, format_size(total_length)) + progress_indicator = DownloadProgressBar(max=total_length).iter + else: + logger.info("Downloading %s", url) + progress_indicator = DownloadProgressSpinner().iter + elif cached_resp: + logger.info("Using cached %s", url) + else: + logger.info("Downloading %s", url) + + logger.debug('Downloading from URL %s', link) + + downloaded_chunks = written_chunks(progress_indicator(resp_read(4096), + 4096)) + if hashes: + hashes.check_against_chunks(downloaded_chunks) + else: + consume(downloaded_chunks) + + +def _copy_file(filename, location, link): + copy = True + download_location = os.path.join(location, link.filename) + if os.path.exists(download_location): + response = ask_path_exists( + 'The file %s exists. 
(i)gnore, (w)ipe, (b)ackup ' %
+            display_path(download_location), ('i', 'w', 'b'))
+        if response == 'i':
+            copy = False
+        elif response == 'w':
+            logger.warning('Deleting %s', display_path(download_location))
+            os.remove(download_location)
+        elif response == 'b':
+            dest_file = backup_dir(download_location)
+            logger.warning(
+                'Backing up %s to %s',
+                display_path(download_location),
+                display_path(dest_file),
+            )
+            shutil.move(download_location, dest_file)
+    if copy:
+        shutil.copy(filename, download_location)
+        logger.info('Saved %s', display_path(download_location))
+
+
+def unpack_http_url(link, location, download_dir=None,
+                    session=None, hashes=None):
+    if session is None:
+        raise TypeError(
+            "unpack_http_url() missing 1 required keyword argument: 'session'"
+        )
+
+    temp_dir = tempfile.mkdtemp('-unpack', 'pip-')
+
+    # If a download dir is specified, is the file already downloaded there?
+    already_downloaded_path = None
+    if download_dir:
+        already_downloaded_path = _check_download_dir(link,
+                                                      download_dir,
+                                                      hashes)
+
+    if already_downloaded_path:
+        from_path = already_downloaded_path
+        content_type = mimetypes.guess_type(from_path)[0]
+    else:
+        # let's download to a tmp dir
+        from_path, content_type = _download_http_url(link,
+                                                     session,
+                                                     temp_dir,
+                                                     hashes)
+
+    # Unpack the archive to the build dir location. Even when only
+    # downloading archives, they have to be unpacked to parse dependencies.
+    unpack_file(from_path, location, content_type, link)
+
+    # a download dir is specified; let's copy the archive there
+    if download_dir and not already_downloaded_path:
+        _copy_file(from_path, download_dir, link)
+
+    if not already_downloaded_path:
+        os.unlink(from_path)
+    rmtree(temp_dir)
+
+
+def unpack_file_url(link, location, download_dir=None, hashes=None):
+    """Unpack link into location.
+
+    If download_dir is provided and link points to a file, make a copy
+    of the link file inside download_dir.
+    """
+    link_path = url_to_path(link.url_without_fragment)
+
+    # If it's a url to a local directory
+    if is_dir_url(link):
+        if os.path.isdir(location):
+            rmtree(location)
+        shutil.copytree(link_path, location, symlinks=True)
+        if download_dir:
+            logger.info('Link is a directory, ignoring download_dir')
+        return
+
+    # If --require-hashes is off, `hashes` is either empty, the
+    # link's embedded hash, or MissingHashes; it is required to
+    # match. If --require-hashes is on, we are satisfied by any
+    # hash in `hashes` matching: a URL-based or an option-based
+    # one; no internet-sourced hash will be in `hashes`.
+    if hashes:
+        hashes.check_against_path(link_path)
+
+    # If a download dir is specified, is the file already there and valid?
+    already_downloaded_path = None
+    if download_dir:
+        already_downloaded_path = _check_download_dir(link,
+                                                      download_dir,
+                                                      hashes)
+
+    if already_downloaded_path:
+        from_path = already_downloaded_path
+    else:
+        from_path = link_path
+
+    content_type = mimetypes.guess_type(from_path)[0]
+
+    # Unpack the archive to the build dir location. Even when only
+    # downloading archives, they have to be unpacked to parse dependencies.
+    unpack_file(from_path, location, content_type, link)
+
+    # a download dir is specified and not already downloaded
+    if download_dir and not already_downloaded_path:
+        _copy_file(from_path, download_dir, link)
+
+
+def _copy_dist_from_dir(link_path, location):
+    """Copy distribution files in `link_path` to `location`.
+
+    Invoked when user requests to install a local directory. E.g.:
+
+        pip install .
+        pip install ~/dev/git-repos/python-prompt-toolkit
+
+    """
+
+    # Note: Copying everything with `shutil.copytree` is VERY SLOW if the
+    # directory holds a lot of data, so this builds an sdist and unpacks
+    # that instead. See https://github.com/pypa/pip/issues/2195
+
+    if os.path.isdir(location):
+        rmtree(location)
+
+    # build an sdist
+    setup_py = 'setup.py'
+    sdist_args = [sys.executable]
+    sdist_args.append('-c')
+    sdist_args.append(SETUPTOOLS_SHIM % setup_py)
+    sdist_args.append('sdist')
+    sdist_args += ['--dist-dir', location]
+    logger.info('Running setup.py sdist for %s', link_path)
+
+    with indent_log():
+        call_subprocess(sdist_args, cwd=link_path, show_stdout=False)
+
+    # unpack sdist into `location`
+    sdist = os.path.join(location, os.listdir(location)[0])
+    logger.info('Unpacking sdist %s into %s', sdist, location)
+    unpack_file(sdist, location, content_type=None, link=None)
+
+
+class PipXmlrpcTransport(xmlrpc_client.Transport):
+    """Provide a `xmlrpclib.Transport` implementation via a `PipSession`
+    object.
+    """
+    def __init__(self, index_url, session, use_datetime=False):
+        xmlrpc_client.Transport.__init__(self, use_datetime)
+        index_parts = urllib_parse.urlparse(index_url)
+        self._scheme = index_parts.scheme
+        self._session = session
+
+    def request(self, host, handler, request_body, verbose=False):
+        parts = (self._scheme, host, handler, None, None, None)
+        url = urllib_parse.urlunparse(parts)
+        try:
+            headers = {'Content-Type': 'text/xml'}
+            response = self._session.post(url, data=request_body,
+                                          headers=headers, stream=True)
+            response.raise_for_status()
+            self.verbose = verbose
+            return self.parse_response(response.raw)
+        except requests.HTTPError as exc:
+            logger.critical(
+                "HTTP error %s while getting %s",
+                exc.response.status_code, url,
+            )
+            raise
+
+
+def unpack_url(link, location, download_dir=None,
+               only_download=False, session=None, hashes=None):
+    """Unpack link.
+    If link is a VCS link:
+      if only_download, export into download_dir and ignore location
+      else unpack into location
+    for other types of link:
+      - unpack into location
+      - if download_dir, copy the file into download_dir
+      - if only_download, mark location for deletion
+
+    :param hashes: A Hashes object, one of whose embedded hashes must match,
+        or HashMismatch will be raised. If the Hashes is empty, no matches are
+        required, and unhashable types of requirements (like VCS ones, which
+        would ordinarily raise HashUnsupported) are allowed.
+    """
+    # non-editable vcs urls
+    if is_vcs_url(link):
+        unpack_vcs_link(link, location)
+
+    # file urls
+    elif is_file_url(link):
+        unpack_file_url(link, location, download_dir, hashes=hashes)
+
+    # http urls
+    else:
+        if session is None:
+            session = PipSession()
+
+        unpack_http_url(
+            link,
+            location,
+            download_dir,
+            session,
+            hashes=hashes
+        )
+    if only_download:
+        write_delete_marker_file(location)
+
+
+def _download_http_url(link, session, temp_dir, hashes):
+    """Download link url into temp_dir using provided session"""
+    target_url = link.url.split('#', 1)[0]
+    try:
+        resp = session.get(
+            target_url,
+            # We use Accept-Encoding: identity here because requests
+            # defaults to accepting compressed responses. This breaks in
+            # a variety of ways depending on how the server is configured.
+            # - Some servers will notice that the file isn't a compressible
+            #   file and will leave the file alone and with an empty
+            #   Content-Encoding
+            # - Some servers will notice that the file is already
+            #   compressed and will leave the file alone and will add a
+            #   Content-Encoding: gzip header
+            # - Some servers won't notice anything at all and will take
+            #   a file that's already been compressed and compress it again
+            #   and set the Content-Encoding: gzip header
+            # By setting this to request only the identity encoding we're
+            # hoping to eliminate the third case. Hopefully there does not
+            # exist a server which, when given a file, will notice it is
+            # already compressed and that you're not asking for a
+            # compressed file, and will then decompress it before sending,
+            # because if that's the case I don't think it'll ever be
+            # possible to make this work.
+            headers={"Accept-Encoding": "identity"},
+            stream=True,
+        )
+        resp.raise_for_status()
+    except requests.HTTPError as exc:
+        logger.critical(
+            "HTTP error %s while getting %s", exc.response.status_code, link,
+        )
+        raise
+
+    content_type = resp.headers.get('content-type', '')
+    filename = link.filename  # fallback
+    # Have a look at the Content-Disposition header for a better guess
+    content_disposition = resp.headers.get('content-disposition')
+    if content_disposition:
+        type, params = cgi.parse_header(content_disposition)
+        # We use ``or`` here because we don't want to use an "empty" value
+        # from the filename param.
+        filename = params.get('filename') or filename
+    ext = splitext(filename)[1]
+    if not ext:
+        ext = mimetypes.guess_extension(content_type)
+        if ext:
+            filename += ext
+    if not ext and link.url != resp.url:
+        ext = os.path.splitext(resp.url)[1]
+        if ext:
+            filename += ext
+    file_path = os.path.join(temp_dir, filename)
+    with open(file_path, 'wb') as content_file:
+        _download_url(resp, link, content_file, hashes)
+    return file_path, content_type
+
+
+def _check_download_dir(link, download_dir, hashes):
+    """Check download_dir for a previously downloaded file with a correct
+    hash. If a correct file is found, return its path; otherwise return None.
+    """
+    download_path = os.path.join(download_dir, link.filename)
+    if os.path.exists(download_path):
+        # If already downloaded, does its hash match?
+        logger.info('File was already downloaded %s', download_path)
+        if hashes:
+            try:
+                hashes.check_against_path(download_path)
+            except HashMismatch:
+                logger.warning(
+                    'Previously-downloaded file %s has bad hash.
' + 'Re-downloading.', + download_path + ) + os.unlink(download_path) + return None + return download_path + return None diff --git a/lib/python3.4/site-packages/pip/exceptions.py b/lib/python3.4/site-packages/pip/exceptions.py new file mode 100644 index 0000000..e9b639f --- /dev/null +++ b/lib/python3.4/site-packages/pip/exceptions.py @@ -0,0 +1,236 @@ +"""Exceptions used throughout package""" +from __future__ import absolute_import + +from itertools import chain, groupby, repeat + +from pip._vendor.six import iteritems + + +class PipError(Exception): + """Base pip exception""" + + +class InstallationError(PipError): + """General exception during installation""" + + +class UninstallationError(PipError): + """General exception during uninstallation""" + + +class DistributionNotFound(InstallationError): + """Raised when a distribution cannot be found to satisfy a requirement""" + + +class RequirementsFileParseError(InstallationError): + """Raised when a general error occurs parsing a requirements file line.""" + + +class BestVersionAlreadyInstalled(PipError): + """Raised when the most up-to-date version of a package is already + installed.""" + + +class BadCommand(PipError): + """Raised when virtualenv or a command is not found""" + + +class CommandError(PipError): + """Raised when there is an error in command-line arguments""" + + +class PreviousBuildDirError(PipError): + """Raised when there's a previous conflicting build directory""" + + +class InvalidWheelFilename(InstallationError): + """Invalid wheel filename.""" + + +class UnsupportedWheel(InstallationError): + """Unsupported wheel.""" + + +class HashErrors(InstallationError): + """Multiple HashError instances rolled into one for reporting""" + + def __init__(self): + self.errors = [] + + def append(self, error): + self.errors.append(error) + + def __str__(self): + lines = [] + self.errors.sort(key=lambda e: e.order) + for cls, errors_of_cls in groupby(self.errors, lambda e: e.__class__): + lines.append(cls.head) + lines.extend(e.body() for e in errors_of_cls) + if lines: + return '\n'.join(lines) + + def __nonzero__(self): + return bool(self.errors) + + def __bool__(self): + return self.__nonzero__() + + +class HashError(InstallationError): + """ + A failure to verify a package against known-good hashes + + :cvar order: An int sorting hash exception classes by difficulty of + recovery (lower being harder), so the user doesn't bother fretting + about unpinned packages when he has deeper issues, like VCS + dependencies, to deal with. Also keeps error reports in a + deterministic order. + :cvar head: A section heading for display above potentially many + exceptions of this kind + :ivar req: The InstallRequirement that triggered this error. This is + pasted on after the exception is instantiated, because it's not + typically available earlier. + + """ + req = None + head = '' + + def body(self): + """Return a summary of me for display under the heading. + + This default implementation simply prints a description of the + triggering requirement. + + :param req: The InstallRequirement that provoked this error, with + populate_link() having already been called + + """ + return ' %s' % self._requirement_name() + + def __str__(self): + return '%s\n%s' % (self.head, self.body()) + + def _requirement_name(self): + """Return a description of the requirement that triggered me. 
+
+        This default implementation returns the long description of the req,
+        with line numbers.
+        """
+        return str(self.req) if self.req else 'unknown package'
+
+
+class VcsHashUnsupported(HashError):
+    """A hash was provided for a version-control-system-based requirement, but
+    we don't have a method for hashing those."""
+
+    order = 0
+    head = ("Can't verify hashes for these requirements because we don't "
+            "have a way to hash version control repositories:")
+
+
+class DirectoryUrlHashUnsupported(HashError):
+    """A hash was provided for a file:// requirement that points to a
+    directory, but we don't have a method for hashing those."""
+
+    order = 1
+    head = ("Can't verify hashes for these file:// requirements because they "
+            "point to directories:")
+
+
+class HashMissing(HashError):
+    """A hash was needed for a requirement but is absent."""
+
+    order = 2
+    head = ('Hashes are required in --require-hashes mode, but they are '
+            'missing from some requirements. Here is a list of those '
+            'requirements along with the hashes their downloaded archives '
+            'actually had. Add lines like these to your requirements files to '
+            'prevent tampering. (If you did not enable --require-hashes '
+            'manually, note that it turns on automatically when any package '
+            'has a hash.)')
+
+    def __init__(self, gotten_hash):
+        """
+        :param gotten_hash: The hash of the (possibly malicious) archive we
+            just downloaded
+        """
+        self.gotten_hash = gotten_hash
+
+    def body(self):
+        from pip.utils.hashes import FAVORITE_HASH  # Dodge circular import.
+
+        package_name = (self.req.req if self.req and
+                        # In case someone feeds something
+                        # downright stupid to
+                        # InstallRequirement's constructor:
+                        getattr(self.req, 'req', None)
+                        else 'unknown package')
+        return '    %s --hash=%s:%s' % (package_name,
+                                        FAVORITE_HASH,
+                                        self.gotten_hash)
+
+
+class HashUnpinned(HashError):
+    """A requirement had a hash specified but was not pinned to a specific
+    version."""
+
+    order = 3
+    head = ('In --require-hashes mode, all requirements must have their '
+            'versions pinned with ==. These do not:')
+
+
+class HashMismatch(HashError):
+    """
+    Distribution file hash values don't match.
+
+    :ivar package_name: The name of the package that triggered the hash
+        mismatch. Feel free to write to this after the exception is raised to
+        improve its error message.
+
+    """
+    order = 4
+    head = ('THESE PACKAGES DO NOT MATCH THE HASHES FROM THE REQUIREMENTS '
+            'FILE. If you have updated the package versions, please update '
+            'the hashes. Otherwise, examine the package contents carefully; '
+            'someone may have tampered with them.')
+
+    def __init__(self, allowed, gots):
+        """
+        :param allowed: A dict of algorithm names pointing to lists of allowed
+            hex digests
+        :param gots: A dict of algorithm names pointing to hashes we
+            actually got from the files under suspicion
+        """
+        self.allowed = allowed
+        self.gots = gots
+
+    def body(self):
+        return '    %s:\n%s' % (self._requirement_name(),
+                                self._hash_comparison())
+
+    def _hash_comparison(self):
+        """
+        Return a comparison of actual and expected hash values.
+
+        Example::
+
+            Expected sha256 abcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcde
+                        or 123451234512345123451234512345123451234512345
+            Got        bcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdef
+
+        """
+        def hash_then_or(hash_name):
+            # For now, all the decent hashes have 6-char names, so we can get
+            # away with hard-coding space literals.
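+            # chain() yields the real hash name once, then an endless supply
+            # of aligned '    or' prefixes for any remaining allowed digests.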
+            return chain([hash_name], repeat('    or'))
+
+        lines = []
+        for hash_name, expecteds in iteritems(self.allowed):
+            prefix = hash_then_or(hash_name)
+            lines.extend(('        Expected %s %s' % (next(prefix), e))
+                         for e in expecteds)
+            lines.append('             Got %s\n' %
+                         self.gots[hash_name].hexdigest())
+            prefix = '    or'
+        return '\n'.join(lines)
diff --git a/lib/python3.4/site-packages/pip/index.py b/lib/python3.4/site-packages/pip/index.py
new file mode 100644
index 0000000..18c8fc6
--- /dev/null
+++ b/lib/python3.4/site-packages/pip/index.py
@@ -0,0 +1,1033 @@
+"""Routines related to PyPI, indexes"""
+from __future__ import absolute_import
+
+import logging
+import cgi
+from collections import namedtuple
+import itertools
+import sys
+import os
+import re
+import mimetypes
+import posixpath
+import warnings
+
+from pip._vendor.six.moves.urllib import parse as urllib_parse
+from pip._vendor.six.moves.urllib import request as urllib_request
+
+from pip.compat import ipaddress
+from pip.utils import (
+    cached_property, splitext, normalize_path,
+    ARCHIVE_EXTENSIONS, SUPPORTED_EXTENSIONS, canonicalize_name)
+from pip.utils.deprecation import RemovedInPip9Warning
+from pip.utils.logging import indent_log
+from pip.exceptions import (
+    DistributionNotFound, BestVersionAlreadyInstalled, InvalidWheelFilename,
+    UnsupportedWheel,
+)
+from pip.download import HAS_TLS, is_url, path_to_url, url_to_path
+from pip.wheel import Wheel, wheel_ext
+from pip.pep425tags import supported_tags
+from pip._vendor import html5lib, requests, six
+from pip._vendor.packaging.version import parse as parse_version
+from pip._vendor.requests.exceptions import SSLError
+
+
+__all__ = ['FormatControl', 'fmt_ctl_handle_mutual_exclude', 'PackageFinder']
+
+
+SECURE_ORIGINS = [
+    # protocol, hostname, port
+    # Taken from Chrome's list of secure origins (See: http://bit.ly/1qrySKC)
+    ("https", "*", "*"),
+    ("*", "localhost", "*"),
+    ("*", "127.0.0.0/8", "*"),
+    ("*", "::1/128", "*"),
+    ("file", "*", None),
+    # ssh is always secure.
+    ("ssh", "*", "*"),
+]
+
+
+logger = logging.getLogger(__name__)
+
+
+class InstallationCandidate(object):
+
+    def __init__(self, project, version, location):
+        self.project = project
+        self.version = parse_version(version)
+        self.location = location
+        self._key = (self.project, self.version, self.location)
+
+    def __repr__(self):
+        return "<InstallationCandidate({0!r}, {1!r}, {2!r})>".format(
+            self.project, self.version, self.location,
+        )
+
+    def __hash__(self):
+        return hash(self._key)
+
+    def __lt__(self, other):
+        return self._compare(other, lambda s, o: s < o)
+
+    def __le__(self, other):
+        return self._compare(other, lambda s, o: s <= o)
+
+    def __eq__(self, other):
+        return self._compare(other, lambda s, o: s == o)
+
+    def __ge__(self, other):
+        return self._compare(other, lambda s, o: s >= o)
+
+    def __gt__(self, other):
+        return self._compare(other, lambda s, o: s > o)
+
+    def __ne__(self, other):
+        return self._compare(other, lambda s, o: s != o)
+
+    def _compare(self, other, method):
+        if not isinstance(other, InstallationCandidate):
+            return NotImplemented
+
+        return method(self._key, other._key)
+
+
+class PackageFinder(object):
+    """This finds packages.
+
+    This is meant to match easy_install's technique for looking for
+    packages, by reading pages and looking for appropriate links.
+    """
+
+    def __init__(self, find_links, index_urls, allow_all_prereleases=False,
+                 trusted_hosts=None, process_dependency_links=False,
+                 session=None, format_control=None):
+        """Create a PackageFinder.
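+
+        A minimal construction sketch (hypothetical values, assuming the
+        PipSession from pip.download)::
+
+            finder = PackageFinder(
+                find_links=[],
+                index_urls=['https://pypi.python.org/simple/'],
+                session=PipSession(),
+            )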
+
+        :param format_control: A FormatControl object or None. Used to control
+            the selection of source packages / binary packages when consulting
+            the index and links.
+        """
+        if session is None:
+            raise TypeError(
+                "PackageFinder() missing 1 required keyword argument: "
+                "'session'"
+            )
+
+        # Build find_links. If an argument starts with ~, it may be
+        # a local file relative to a home directory. So try normalizing
+        # it and if it exists, use the normalized version.
+        # This is deliberately conservative - it might be fine just to
+        # blindly normalize anything starting with a ~...
+        self.find_links = []
+        for link in find_links:
+            if link.startswith('~'):
+                new_link = normalize_path(link)
+                if os.path.exists(new_link):
+                    link = new_link
+            self.find_links.append(link)
+
+        self.index_urls = index_urls
+        self.dependency_links = []
+
+        # These are boring links that have already been logged somehow:
+        self.logged_links = set()
+
+        self.format_control = format_control or FormatControl(set(), set())
+
+        # Domains that we won't emit warnings for when not using HTTPS
+        self.secure_origins = [
+            ("*", host, "*")
+            for host in (trusted_hosts if trusted_hosts else [])
+        ]
+
+        # Do we want to allow _all_ pre-releases?
+        self.allow_all_prereleases = allow_all_prereleases
+
+        # Do we process dependency links?
+        self.process_dependency_links = process_dependency_links
+
+        # The Session we'll use to make requests
+        self.session = session
+
+        # If we don't have TLS enabled, then WARN if anyplace we're looking
+        # relies on TLS.
+        if not HAS_TLS:
+            for link in itertools.chain(self.index_urls, self.find_links):
+                parsed = urllib_parse.urlparse(link)
+                if parsed.scheme == "https":
+                    logger.warning(
+                        "pip is configured with locations that require "
+                        "TLS/SSL, however the ssl module in Python is not "
+                        "available."
+                    )
+                    break
+
+    def add_dependency_links(self, links):
+        # FIXME: this shouldn't be a global list; it should only
+        # apply to requirements of the package that specifies the
+        # dependency_links value
+        # FIXME: also, we should track comes_from (i.e., use Link)
+        if self.process_dependency_links:
+            warnings.warn(
+                "Dependency Links processing has been deprecated and will be "
+                "removed in a future release.",
+                RemovedInPip9Warning,
+            )
+            self.dependency_links.extend(links)
+
+    @staticmethod
+    def _sort_locations(locations, expand_dir=False):
+        """
+        Sort locations into "files" (archives) and "urls", and return
+        a pair of lists (files, urls).
+        """
+        files = []
+        urls = []
+
+        # puts the url for the given file path into the appropriate list
+        def sort_path(path):
+            url = path_to_url(path)
+            if mimetypes.guess_type(url, strict=False)[0] == 'text/html':
+                urls.append(url)
+            else:
+                files.append(url)
+
+        for url in locations:
+
+            is_local_path = os.path.exists(url)
+            is_file_url = url.startswith('file:')
+
+            if is_local_path or is_file_url:
+                if is_local_path:
+                    path = url
+                else:
+                    path = url_to_path(url)
+                if os.path.isdir(path):
+                    if expand_dir:
+                        path = os.path.realpath(path)
+                        for item in os.listdir(path):
+                            sort_path(os.path.join(path, item))
+                    elif is_file_url:
+                        urls.append(url)
+                elif os.path.isfile(path):
+                    sort_path(path)
+                else:
+                    logger.warning(
+                        "Url '%s' is ignored: it is neither a file "
+                        "nor a directory.", url)
+            elif is_url(url):
+                # Only add url with clear scheme
+                urls.append(url)
+            else:
+                logger.warning(
+                    "Url '%s' is ignored. It is either a non-existing "
+                    "path or lacks a specific scheme.", url)
+
+        return files, urls
+
+    def _candidate_sort_key(self, candidate):
+        """
+        Function used to generate link sort key for link tuples.
+        The greater the return value, the more preferred it is.
+        If not finding wheels, then sorted by version only.
+        If finding wheels, then the sort order is by version, then:
+          1. existing installs
+          2. wheels ordered via Wheel.support_index_min()
+          3. source archives
+        Note: it was considered to embed this logic into the Link
+              comparison operators, but then different sdist links
+              with the same version would have to be considered equal.
+        """
+        support_num = len(supported_tags)
+        if candidate.location.is_wheel:
+            # can raise InvalidWheelFilename
+            wheel = Wheel(candidate.location.filename)
+            if not wheel.supported():
+                raise UnsupportedWheel(
+                    "%s is not a supported wheel for this platform. It "
+                    "can't be sorted." % wheel.filename
+                )
+            pri = -(wheel.support_index_min())
+        else:  # sdist
+            pri = -(support_num)
+        return (candidate.version, pri)
+
+    def _validate_secure_origin(self, logger, location):
+        # Determine if this url used a secure transport mechanism
+        parsed = urllib_parse.urlparse(str(location))
+        origin = (parsed.scheme, parsed.hostname, parsed.port)
+
+        # The protocol to use to see if the protocol matches.
+        # Don't count the repository type as part of the protocol: in
+        # cases such as "git+ssh", only use "ssh". (I.e., Only verify against
+        # the last scheme.)
+        protocol = origin[0].rsplit('+', 1)[-1]
+
+        # Determine if our origin is a secure origin by looking through our
+        # hardcoded list of secure origins, as well as any additional ones
+        # configured on this PackageFinder instance.
+        for secure_origin in (SECURE_ORIGINS + self.secure_origins):
+            if protocol != secure_origin[0] and secure_origin[0] != "*":
+                continue
+
+            try:
+                # We need to do this decode dance to ensure that we have a
+                # unicode object, even on Python 2.x.
+                addr = ipaddress.ip_address(
+                    origin[1]
+                    if (
+                        isinstance(origin[1], six.text_type) or
+                        origin[1] is None
+                    )
+                    else origin[1].decode("utf8")
+                )
+                network = ipaddress.ip_network(
+                    secure_origin[1]
+                    if isinstance(secure_origin[1], six.text_type)
+                    else secure_origin[1].decode("utf8")
+                )
+            except ValueError:
+                # We don't have a valid address and network, so
+                # we'll check this origin against hostnames.
+                if origin[1] != secure_origin[1] and secure_origin[1] != "*":
+                    continue
+            else:
+                # We have a valid address and network, so see if the address
+                # is contained within the network.
+                if addr not in network:
+                    continue
+
+            # Check to see if the port matches
+            if (origin[2] != secure_origin[2] and
+                    secure_origin[2] != "*" and
+                    secure_origin[2] is not None):
+                continue
+
+            # If we've gotten here, then this origin matches the current
+            # secure origin and we should return True
+            return True
+
+        # If we've gotten to this point, then the origin isn't secure and we
+        # will not accept it as a valid location to search. We will however
+        # log a warning that we are ignoring it.
+        logger.warning(
+            "The repository located at %s is not a trusted or secure host and "
+            "is being ignored. If this repository is available via HTTPS it "
+            "is recommended to use HTTPS instead, otherwise you may silence "
+            "this warning and allow it anyway with '--trusted-host %s'.",
+            parsed.hostname,
+            parsed.hostname,
+        )
+
+        return False
+
+    def _get_index_urls_locations(self, project_name):
+        """Returns the locations found via self.index_urls
+
+        Checks the url_name on the main (first in the list) index and
+        uses this url_name to produce all locations.
+        """
+
+        def mkurl_pypi_url(url):
+            loc = posixpath.join(url, urllib_parse.quote(project_name.lower()))
+            # For maximum compatibility with easy_install, ensure the path
+            # ends in a trailing slash. Although this isn't in the spec
+            # (and PyPI can handle it without the slash) some other index
+            # implementations might break if they relied on easy_install's
+            # behavior.
+            if not loc.endswith('/'):
+                loc = loc + '/'
+            return loc
+
+        return [mkurl_pypi_url(url) for url in self.index_urls]
+
+    def find_all_candidates(self, project_name):
+        """Find all available InstallationCandidate for project_name
+
+        This checks index_urls, find_links and dependency_links.
+        All versions found are returned as an InstallationCandidate list.
+
+        See _link_package_versions for details on which files are accepted
+        """
+        index_locations = self._get_index_urls_locations(project_name)
+        index_file_loc, index_url_loc = self._sort_locations(index_locations)
+        fl_file_loc, fl_url_loc = self._sort_locations(
+            self.find_links, expand_dir=True)
+        dep_file_loc, dep_url_loc = self._sort_locations(self.dependency_links)
+
+        file_locations = (
+            Link(url) for url in itertools.chain(
+                index_file_loc, fl_file_loc, dep_file_loc)
+        )
+
+        # We trust every url that the user has given us whether it was given
+        # via --index-url or --find-links
+        # We explicitly do not trust links that came from dependency_links
+        # We want to filter out anything which does not have a secure origin.
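+        # (find_links and dependency_links URLs get the same secure-origin
+        # screening as index URLs here.)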
+ url_locations = [ + link for link in itertools.chain( + (Link(url) for url in index_url_loc), + (Link(url) for url in fl_url_loc), + (Link(url) for url in dep_url_loc), + ) + if self._validate_secure_origin(logger, link) + ] + + logger.debug('%d location(s) to search for versions of %s:', + len(url_locations), project_name) + + for location in url_locations: + logger.debug('* %s', location) + + canonical_name = canonicalize_name(project_name) + formats = fmt_ctl_formats(self.format_control, canonical_name) + search = Search(project_name, canonical_name, formats) + find_links_versions = self._package_versions( + # We trust every directly linked archive in find_links + (Link(url, '-f') for url in self.find_links), + search + ) + + page_versions = [] + for page in self._get_pages(url_locations, project_name): + logger.debug('Analyzing links from page %s', page.url) + with indent_log(): + page_versions.extend( + self._package_versions(page.links, search) + ) + + dependency_versions = self._package_versions( + (Link(url) for url in self.dependency_links), search + ) + if dependency_versions: + logger.debug( + 'dependency_links found: %s', + ', '.join([ + version.location.url for version in dependency_versions + ]) + ) + + file_versions = self._package_versions(file_locations, search) + if file_versions: + file_versions.sort(reverse=True) + logger.debug( + 'Local files found: %s', + ', '.join([ + url_to_path(candidate.location.url) + for candidate in file_versions + ]) + ) + + # This is an intentional priority ordering + return ( + file_versions + find_links_versions + page_versions + + dependency_versions + ) + + def find_requirement(self, req, upgrade): + """Try to find a Link matching req + + Expects req, an InstallRequirement and upgrade, a boolean + Returns a Link if found, + Raises DistributionNotFound or BestVersionAlreadyInstalled otherwise + """ + all_candidates = self.find_all_candidates(req.name) + + # Filter out anything which doesn't match our specifier + compatible_versions = set( + req.specifier.filter( + # We turn the version object into a str here because otherwise + # when we're debundled but setuptools isn't, Python will see + # packaging.version.Version and + # pkg_resources._vendor.packaging.version.Version as different + # types. This way we'll use a str as a common data interchange + # format. If we stop using the pkg_resources provided specifier + # and start using our own, we can drop the cast to str(). + [str(c.version) for c in all_candidates], + prereleases=( + self.allow_all_prereleases + if self.allow_all_prereleases else None + ), + ) + ) + applicable_candidates = [ + # Again, converting to str to deal with debundling. 
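+            # (A candidate is kept iff its rendered version string survived
+            # the specifier filtering above.)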
+            c for c in all_candidates if str(c.version) in compatible_versions
+        ]
+
+        if applicable_candidates:
+            best_candidate = max(applicable_candidates,
+                                 key=self._candidate_sort_key)
+        else:
+            best_candidate = None
+
+        if req.satisfied_by is not None:
+            installed_version = parse_version(req.satisfied_by.version)
+        else:
+            installed_version = None
+
+        if installed_version is None and best_candidate is None:
+            logger.critical(
+                'Could not find a version that satisfies the requirement %s '
+                '(from versions: %s)',
+                req,
+                ', '.join(
+                    sorted(
+                        set(str(c.version) for c in all_candidates),
+                        key=parse_version,
+                    )
+                )
+            )
+
+            raise DistributionNotFound(
+                'No matching distribution found for %s' % req
+            )
+
+        best_installed = False
+        if installed_version and (
+                best_candidate is None or
+                best_candidate.version <= installed_version):
+            best_installed = True
+
+        if not upgrade and installed_version is not None:
+            if best_installed:
+                logger.debug(
+                    'Existing installed version (%s) is most up-to-date and '
+                    'satisfies requirement',
+                    installed_version,
+                )
+            else:
+                logger.debug(
+                    'Existing installed version (%s) satisfies requirement '
+                    '(most up-to-date version is %s)',
+                    installed_version,
+                    best_candidate.version,
+                )
+            return None
+
+        if best_installed:
+            # We have an existing version, and it's the best version
+            logger.debug(
+                'Installed version (%s) is most up-to-date (past versions: '
+                '%s)',
+                installed_version,
+                ', '.join(sorted(compatible_versions, key=parse_version)) or
+                "none",
+            )
+            raise BestVersionAlreadyInstalled
+
+        logger.debug(
+            'Using version %s (newest of versions: %s)',
+            best_candidate.version,
+            ', '.join(sorted(compatible_versions, key=parse_version))
+        )
+        return best_candidate.location
+
+    def _get_pages(self, locations, project_name):
+        """
+        Yields HTMLPage objects from the given locations, skipping
+        locations that have errors.
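+        Each location is fetched at most once; duplicate locations are
+        skipped.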
+ """ + seen = set() + for location in locations: + if location in seen: + continue + seen.add(location) + + page = self._get_page(location) + if page is None: + continue + + yield page + + _py_version_re = re.compile(r'-py([123]\.?[0-9]?)$') + + def _sort_links(self, links): + """ + Returns elements of links in order, non-egg links first, egg links + second, while eliminating duplicates + """ + eggs, no_eggs = [], [] + seen = set() + for link in links: + if link not in seen: + seen.add(link) + if link.egg_fragment: + eggs.append(link) + else: + no_eggs.append(link) + return no_eggs + eggs + + def _package_versions(self, links, search): + result = [] + for link in self._sort_links(links): + v = self._link_package_versions(link, search) + if v is not None: + result.append(v) + return result + + def _log_skipped_link(self, link, reason): + if link not in self.logged_links: + logger.debug('Skipping link %s; %s', link, reason) + self.logged_links.add(link) + + def _link_package_versions(self, link, search): + """Return an InstallationCandidate or None""" + + version = None + if link.egg_fragment: + egg_info = link.egg_fragment + ext = link.ext + else: + egg_info, ext = link.splitext() + if not ext: + self._log_skipped_link(link, 'not a file') + return + if ext not in SUPPORTED_EXTENSIONS: + self._log_skipped_link( + link, 'unsupported archive format: %s' % ext) + return + if "binary" not in search.formats and ext == wheel_ext: + self._log_skipped_link( + link, 'No binaries permitted for %s' % search.supplied) + return + if "macosx10" in link.path and ext == '.zip': + self._log_skipped_link(link, 'macosx10 one') + return + if ext == wheel_ext: + try: + wheel = Wheel(link.filename) + except InvalidWheelFilename: + self._log_skipped_link(link, 'invalid wheel filename') + return + if canonicalize_name(wheel.name) != search.canonical: + self._log_skipped_link( + link, 'wrong project name (not %s)' % search.supplied) + return + if not wheel.supported(): + self._log_skipped_link( + link, 'it is not compatible with this Python') + return + + version = wheel.version + + # This should be up by the search.ok_binary check, but see issue 2700. + if "source" not in search.formats and ext != wheel_ext: + self._log_skipped_link( + link, 'No sources permitted for %s' % search.supplied) + return + + if not version: + version = egg_info_matches(egg_info, search.supplied, link) + if version is None: + self._log_skipped_link( + link, 'wrong project name (not %s)' % search.supplied) + return + + match = self._py_version_re.search(version) + if match: + version = version[:match.start()] + py_version = match.group(1) + if py_version != sys.version[:3]: + self._log_skipped_link( + link, 'Python version is incorrect') + return + logger.debug('Found link %s, version: %s', link, version) + + return InstallationCandidate(search.supplied, version, link) + + def _get_page(self, link): + return HTMLPage.get_page(link, session=self.session) + + +def egg_info_matches( + egg_info, search_name, link, + _egg_info_re=re.compile(r'([a-z0-9_.]+)-([a-z0-9_.!+-]+)', re.I)): + """Pull the version part out of a string. + + :param egg_info: The string to parse. E.g. foo-2.1 + :param search_name: The name of the package this belongs to. None to + infer the name. Note that this cannot unambiguously parse strings + like foo-2-2 which might be foo, 2-2 or foo-2, 2. + :param link: The link the string came from, for logging on failure. 
+ """ + match = _egg_info_re.search(egg_info) + if not match: + logger.debug('Could not parse version from link: %s', link) + return None + if search_name is None: + full_match = match.group(0) + return full_match[full_match.index('-'):] + name = match.group(0).lower() + # To match the "safe" name that pkg_resources creates: + name = name.replace('_', '-') + # project name and version must be separated by a dash + look_for = search_name.lower() + "-" + if name.startswith(look_for): + return match.group(0)[len(look_for):] + else: + return None + + +class HTMLPage(object): + """Represents one page, along with its URL""" + + def __init__(self, content, url, headers=None): + # Determine if we have any encoding information in our headers + encoding = None + if headers and "Content-Type" in headers: + content_type, params = cgi.parse_header(headers["Content-Type"]) + + if "charset" in params: + encoding = params['charset'] + + self.content = content + self.parsed = html5lib.parse( + self.content, + encoding=encoding, + namespaceHTMLElements=False, + ) + self.url = url + self.headers = headers + + def __str__(self): + return self.url + + @classmethod + def get_page(cls, link, skip_archives=True, session=None): + if session is None: + raise TypeError( + "get_page() missing 1 required keyword argument: 'session'" + ) + + url = link.url + url = url.split('#', 1)[0] + + # Check for VCS schemes that do not support lookup as web pages. + from pip.vcs import VcsSupport + for scheme in VcsSupport.schemes: + if url.lower().startswith(scheme) and url[len(scheme)] in '+:': + logger.debug('Cannot look at %s URL %s', scheme, link) + return None + + try: + if skip_archives: + filename = link.filename + for bad_ext in ARCHIVE_EXTENSIONS: + if filename.endswith(bad_ext): + content_type = cls._get_content_type( + url, session=session, + ) + if content_type.lower().startswith('text/html'): + break + else: + logger.debug( + 'Skipping page %s because of Content-Type: %s', + link, + content_type, + ) + return + + logger.debug('Getting page %s', url) + + # Tack index.html onto file:// URLs that point to directories + (scheme, netloc, path, params, query, fragment) = \ + urllib_parse.urlparse(url) + if (scheme == 'file' and + os.path.isdir(urllib_request.url2pathname(path))): + # add trailing slash if not present so urljoin doesn't trim + # final segment + if not url.endswith('/'): + url += '/' + url = urllib_parse.urljoin(url, 'index.html') + logger.debug(' file: URL is directory, getting %s', url) + + resp = session.get( + url, + headers={ + "Accept": "text/html", + "Cache-Control": "max-age=600", + }, + ) + resp.raise_for_status() + + # The check for archives above only works if the url ends with + # something that looks like an archive. However that is not a + # requirement of an url. Unless we issue a HEAD request on every + # url we cannot know ahead of time for sure if something is HTML + # or not. However we can check after we've downloaded it. 
+ content_type = resp.headers.get('Content-Type', 'unknown') + if not content_type.lower().startswith("text/html"): + logger.debug( + 'Skipping page %s because of Content-Type: %s', + link, + content_type, + ) + return + + inst = cls(resp.content, resp.url, resp.headers) + except requests.HTTPError as exc: + cls._handle_fail(link, exc, url) + except SSLError as exc: + reason = ("There was a problem confirming the ssl certificate: " + "%s" % exc) + cls._handle_fail(link, reason, url, meth=logger.info) + except requests.ConnectionError as exc: + cls._handle_fail(link, "connection error: %s" % exc, url) + except requests.Timeout: + cls._handle_fail(link, "timed out", url) + else: + return inst + + @staticmethod + def _handle_fail(link, reason, url, meth=None): + if meth is None: + meth = logger.debug + + meth("Could not fetch URL %s: %s - skipping", link, reason) + + @staticmethod + def _get_content_type(url, session): + """Get the Content-Type of the given url, using a HEAD request""" + scheme, netloc, path, query, fragment = urllib_parse.urlsplit(url) + if scheme not in ('http', 'https'): + # FIXME: some warning or something? + # assertion error? + return '' + + resp = session.head(url, allow_redirects=True) + resp.raise_for_status() + + return resp.headers.get("Content-Type", "") + + @cached_property + def base_url(self): + bases = [ + x for x in self.parsed.findall(".//base") + if x.get("href") is not None + ] + if bases and bases[0].get("href"): + return bases[0].get("href") + else: + return self.url + + @property + def links(self): + """Yields all links in the page""" + for anchor in self.parsed.findall(".//a"): + if anchor.get("href"): + href = anchor.get("href") + url = self.clean_link( + urllib_parse.urljoin(self.base_url, href) + ) + yield Link(url, self) + + _clean_re = re.compile(r'[^a-z0-9$&+,/:;=?@.#%_\\|-]', re.I) + + def clean_link(self, url): + """Makes sure a link is fully encoded. 
That is, if a ' ' shows up in + the link, it will be rewritten to %20 (while not over-quoting + % or other characters).""" + return self._clean_re.sub( + lambda match: '%%%2x' % ord(match.group(0)), url) + + +class Link(object): + + def __init__(self, url, comes_from=None): + + # url can be a UNC windows share + if url.startswith('\\\\'): + url = path_to_url(url) + + self.url = url + self.comes_from = comes_from + + def __str__(self): + if self.comes_from: + return '%s (from %s)' % (self.url, self.comes_from) + else: + return str(self.url) + + def __repr__(self): + return '<Link %s>' % self + + def __eq__(self, other): + if not isinstance(other, Link): + return NotImplemented + return self.url == other.url + + def __ne__(self, other): + if not isinstance(other, Link): + return NotImplemented + return self.url != other.url + + def __lt__(self, other): + if not isinstance(other, Link): + return NotImplemented + return self.url < other.url + + def __le__(self, other): + if not isinstance(other, Link): + return NotImplemented + return self.url <= other.url + + def __gt__(self, other): + if not isinstance(other, Link): + return NotImplemented + return self.url > other.url + + def __ge__(self, other): + if not isinstance(other, Link): + return NotImplemented + return self.url >= other.url + + def __hash__(self): + return hash(self.url) + + @property + def filename(self): + _, netloc, path, _, _ = urllib_parse.urlsplit(self.url) + name = posixpath.basename(path.rstrip('/')) or netloc + name = urllib_parse.unquote(name) + assert name, ('URL %r produced no filename' % self.url) + return name + + @property + def scheme(self): + return urllib_parse.urlsplit(self.url)[0] + + @property + def netloc(self): + return urllib_parse.urlsplit(self.url)[1] + + @property + def path(self): + return urllib_parse.unquote(urllib_parse.urlsplit(self.url)[2]) + + def splitext(self): + return splitext(posixpath.basename(self.path.rstrip('/'))) + + @property + def ext(self): + return self.splitext()[1] + + @property + def url_without_fragment(self): + scheme, netloc, path, query, fragment = urllib_parse.urlsplit(self.url) + return urllib_parse.urlunsplit((scheme, netloc, path, query, None)) + + _egg_fragment_re = re.compile(r'#egg=([^&]*)') + + @property + def egg_fragment(self): + match = self._egg_fragment_re.search(self.url) + if not match: + return None + return match.group(1) + + _hash_re = re.compile( + r'(sha1|sha224|sha384|sha256|sha512|md5)=([a-f0-9]+)' + ) + + @property + def hash(self): + match = self._hash_re.search(self.url) + if match: + return match.group(2) + return None + + @property + def hash_name(self): + match = self._hash_re.search(self.url) + if match: + return match.group(1) + return None + + @property + def show_url(self): + return posixpath.basename(self.url.split('#', 1)[0].split('?', 1)[0]) + + @property + def is_wheel(self): + return self.ext == wheel_ext + + @property + def is_artifact(self): + """ + Determines if this points to an actual artifact (e.g. a tarball) or if + it points to an "abstract" thing like a path or a VCS location. + """ + from pip.vcs import vcs + + if self.scheme in vcs.all_schemes: + return False + + return True + + +FormatControl = namedtuple('FormatControl', 'no_binary only_binary') +"""This object has two fields, no_binary and only_binary. + +If a field is falsy, it isn't set. If it is {':all:'}, it should match all +packages except those listed in the other field. Only one field can be set +to {':all:'} at a time.
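# Editor's note: what the _clean_re substitution in clean_link() above does.
# Any character outside the allowed set is percent-encoded via its ordinal,
# so a space becomes %20 while '%' itself is allowed and existing escapes
# pass through untouched:
import re

_clean_re = re.compile(r'[^a-z0-9$&+,/:;=?@.#%_\\|-]', re.I)

def clean_link(url):
    return _clean_re.sub(lambda match: '%%%2x' % ord(match.group(0)), url)

print(clean_link('https://host/some path/pkg-1.0.tar.gz'))
# -> https://host/some%20path/pkg-1.0.tar.gz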
The rest of the time exact package name matches +are listed, with any given package only showing up in one field at a time. +""" + + +def fmt_ctl_handle_mutual_exclude(value, target, other): + new = value.split(',') + while ':all:' in new: + other.clear() + target.clear() + target.add(':all:') + del new[:new.index(':all:') + 1] + if ':none:' not in new: + # Without a none, we want to discard everything as :all: covers it + return + for name in new: + if name == ':none:': + target.clear() + continue + name = canonicalize_name(name) + other.discard(name) + target.add(name) + + +def fmt_ctl_formats(fmt_ctl, canonical_name): + result = set(["binary", "source"]) + if canonical_name in fmt_ctl.only_binary: + result.discard('source') + elif canonical_name in fmt_ctl.no_binary: + result.discard('binary') + elif ':all:' in fmt_ctl.only_binary: + result.discard('source') + elif ':all:' in fmt_ctl.no_binary: + result.discard('binary') + return frozenset(result) + + +def fmt_ctl_no_binary(fmt_ctl): + fmt_ctl_handle_mutual_exclude( + ':all:', fmt_ctl.no_binary, fmt_ctl.only_binary) + + +def fmt_ctl_no_use_wheel(fmt_ctl): + fmt_ctl_no_binary(fmt_ctl) + warnings.warn( + '--no-use-wheel is deprecated and will be removed in the future. ' + ' Please use --no-binary :all: instead.', DeprecationWarning, + stacklevel=2) + + +Search = namedtuple('Search', 'supplied canonical formats') +"""Capture key aspects of a search. + +:attribute supplied: The user supplied package. +:attribute canonical: The canonical package name. +:attribute formats: The formats allowed for this package. Should be a set + with 'binary' or 'source' or both in it. +""" diff --git a/lib/python3.4/site-packages/pip/locations.py b/lib/python3.4/site-packages/pip/locations.py new file mode 100644 index 0000000..1bd0fae --- /dev/null +++ b/lib/python3.4/site-packages/pip/locations.py @@ -0,0 +1,182 @@ +"""Locations where we look for configs, install stuff, etc""" +from __future__ import absolute_import + +import os +import os.path +import site +import sys + +from distutils import sysconfig +from distutils.command.install import install, SCHEME_KEYS # noqa + +from pip.compat import WINDOWS, expanduser +from pip.utils import appdirs + + +# Application Directories +USER_CACHE_DIR = appdirs.user_cache_dir("pip") + + +DELETE_MARKER_MESSAGE = '''\ +This file is placed here by pip to indicate the source was put +here by pip. + +Once this package is successfully installed this source code will be +deleted (unless you remove this file). +''' +PIP_DELETE_MARKER_FILENAME = 'pip-delete-this-directory.txt' + + +def write_delete_marker_file(directory): + """ + Write the pip delete marker file into this directory. + """ + filepath = os.path.join(directory, PIP_DELETE_MARKER_FILENAME) + with open(filepath, 'w') as marker_fp: + marker_fp.write(DELETE_MARKER_MESSAGE) + + +def running_under_virtualenv(): + """ + Return True if we're running inside a virtualenv, False otherwise. + + """ + if hasattr(sys, 'real_prefix'): + return True + elif sys.prefix != getattr(sys, "base_prefix", sys.prefix): + return True + + return False + + +def virtualenv_no_global(): + """ + Return True if in a venv and no system site packages. 
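# Editor's note: how the no_binary/only_binary pair resolves for a package,
# mirroring fmt_ctl_formats() above in a standalone form (the package names
# are illustrative):
from collections import namedtuple

FormatControl = namedtuple('FormatControl', 'no_binary only_binary')

def formats_for(fmt_ctl, canonical_name):
    result = {'binary', 'source'}
    if canonical_name in fmt_ctl.only_binary:
        result.discard('source')
    elif canonical_name in fmt_ctl.no_binary:
        result.discard('binary')
    elif ':all:' in fmt_ctl.only_binary:
        result.discard('source')
    elif ':all:' in fmt_ctl.no_binary:
        result.discard('binary')
    return frozenset(result)

# --only-binary :all: with lxml exempted via --no-binary lxml
fc = FormatControl(no_binary={'lxml'}, only_binary={':all:'})
print(formats_for(fc, 'lxml'))      # frozenset({'source'})
print(formats_for(fc, 'requests'))  # frozenset({'binary'})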
+ """ + # this mirrors the logic in virtualenv.py for locating the + # no-global-site-packages.txt file + site_mod_dir = os.path.dirname(os.path.abspath(site.__file__)) + no_global_file = os.path.join(site_mod_dir, 'no-global-site-packages.txt') + if running_under_virtualenv() and os.path.isfile(no_global_file): + return True + + +if running_under_virtualenv(): + src_prefix = os.path.join(sys.prefix, 'src') +else: + # FIXME: keep src in cwd for now (it is not a temporary folder) + try: + src_prefix = os.path.join(os.getcwd(), 'src') + except OSError: + # In case the current working directory has been renamed or deleted + sys.exit( + "The folder you are executing pip from can no longer be found." + ) + +# under Mac OS X + virtualenv sys.prefix is not properly resolved +# it is something like /path/to/python/bin/.. +# Note: using realpath due to tmp dirs on OSX being symlinks +src_prefix = os.path.abspath(src_prefix) + +# FIXME doesn't account for venv linked to global site-packages + +site_packages = sysconfig.get_python_lib() +user_site = site.USER_SITE +user_dir = expanduser('~') +if WINDOWS: + bin_py = os.path.join(sys.prefix, 'Scripts') + bin_user = os.path.join(user_site, 'Scripts') + # buildout uses 'bin' on Windows too? + if not os.path.exists(bin_py): + bin_py = os.path.join(sys.prefix, 'bin') + bin_user = os.path.join(user_site, 'bin') + + config_basename = 'pip.ini' + + legacy_storage_dir = os.path.join(user_dir, 'pip') + legacy_config_file = os.path.join( + legacy_storage_dir, + config_basename, + ) +else: + bin_py = os.path.join(sys.prefix, 'bin') + bin_user = os.path.join(user_site, 'bin') + + config_basename = 'pip.conf' + + legacy_storage_dir = os.path.join(user_dir, '.pip') + legacy_config_file = os.path.join( + legacy_storage_dir, + config_basename, + ) + + # Forcing to use /usr/local/bin for standard Mac OS X framework installs + # Also log to ~/Library/Logs/ for use with the Console.app log viewer + if sys.platform[:6] == 'darwin' and sys.prefix[:16] == '/System/Library/': + bin_py = '/usr/local/bin' + +site_config_files = [ + os.path.join(path, config_basename) + for path in appdirs.site_config_dirs('pip') +] + + +def distutils_scheme(dist_name, user=False, home=None, root=None, + isolated=False, prefix=None): + """ + Return a distutils install scheme + """ + from distutils.dist import Distribution + + scheme = {} + + if isolated: + extra_dist_args = {"script_args": ["--no-user-cfg"]} + else: + extra_dist_args = {} + dist_args = {'name': dist_name} + dist_args.update(extra_dist_args) + + d = Distribution(dist_args) + d.parse_config_files() + i = d.get_command_obj('install', create=True) + # NOTE: setting user or home has the side-effect of creating the home dir + # or user base for installations during finalize_options() + # ideally, we'd prefer a scheme class that has no side-effects. + assert not (user and prefix), "user={0} prefix={1}".format(user, prefix) + i.user = user or i.user + if user: + i.prefix = "" + i.prefix = prefix or i.prefix + i.home = home or i.home + i.root = root or i.root + i.finalize_options() + for key in SCHEME_KEYS: + scheme[key] = getattr(i, 'install_' + key) + + # install_lib specified in setup.cfg should install *everything* + # into there (i.e. it takes precedence over both purelib and + # platlib). 
Note, i.install_lib is *always* set after + # finalize_options(); we only want to override here if the user + # has explicitly requested it hence going back to the config + if 'install_lib' in d.get_option_dict('install'): + scheme.update(dict(purelib=i.install_lib, platlib=i.install_lib)) + + if running_under_virtualenv(): + scheme['headers'] = os.path.join( + sys.prefix, + 'include', + 'site', + 'python' + sys.version[:3], + dist_name, + ) + + if root is not None: + path_no_drive = os.path.splitdrive( + os.path.abspath(scheme["headers"]))[1] + scheme["headers"] = os.path.join( + root, + path_no_drive[1:], + ) + + return scheme diff --git a/lib/python3.4/site-packages/pip/models/__init__.py b/lib/python3.4/site-packages/pip/models/__init__.py new file mode 100644 index 0000000..1d727d7 --- /dev/null +++ b/lib/python3.4/site-packages/pip/models/__init__.py @@ -0,0 +1,4 @@ +from pip.models.index import Index, PyPI + + +__all__ = ["Index", "PyPI"] diff --git a/lib/python3.4/site-packages/pip/models/index.py b/lib/python3.4/site-packages/pip/models/index.py new file mode 100644 index 0000000..be99119 --- /dev/null +++ b/lib/python3.4/site-packages/pip/models/index.py @@ -0,0 +1,16 @@ +from pip._vendor.six.moves.urllib import parse as urllib_parse + + +class Index(object): + def __init__(self, url): + self.url = url + self.netloc = urllib_parse.urlsplit(url).netloc + self.simple_url = self.url_to_path('simple') + self.pypi_url = self.url_to_path('pypi') + self.pip_json_url = self.url_to_path('pypi/pip/json') + + def url_to_path(self, path): + return urllib_parse.urljoin(self.url, path) + + +PyPI = Index('https://pypi.python.org/') diff --git a/lib/python3.4/site-packages/pip/operations/__init__.py b/lib/python3.4/site-packages/pip/operations/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/lib/python3.4/site-packages/pip/operations/freeze.py b/lib/python3.4/site-packages/pip/operations/freeze.py new file mode 100644 index 0000000..74ff7d3 --- /dev/null +++ b/lib/python3.4/site-packages/pip/operations/freeze.py @@ -0,0 +1,112 @@ +from __future__ import absolute_import + +import logging +import re + +import pip +from pip.compat import stdlib_pkgs +from pip.req import InstallRequirement +from pip.utils import get_installed_distributions +from pip._vendor import pkg_resources + + +logger = logging.getLogger(__name__) + +# packages to exclude from freeze output +freeze_excludes = stdlib_pkgs + ['setuptools', 'pip', 'distribute'] + + +def freeze( + requirement=None, + find_links=None, local_only=None, user_only=None, skip_regex=None, + default_vcs=None, + isolated=False, + wheel_cache=None): + find_links = find_links or [] + skip_match = None + + if skip_regex: + skip_match = re.compile(skip_regex) + + dependency_links = [] + + for dist in pkg_resources.working_set: + if dist.has_metadata('dependency_links.txt'): + dependency_links.extend( + dist.get_metadata_lines('dependency_links.txt') + ) + for link in find_links: + if '#egg=' in link: + dependency_links.append(link) + for link in find_links: + yield '-f %s' % link + installations = {} + for dist in get_installed_distributions(local_only=local_only, + skip=freeze_excludes, + user_only=user_only): + req = pip.FrozenRequirement.from_dist( + dist, + dependency_links + ) + installations[req.name] = req + + if requirement: + with open(requirement) as req_file: + for line in req_file: + if (not line.strip() or + line.strip().startswith('#') or + (skip_match and skip_match.search(line)) or + line.startswith(( + '-r', 
'--requirement', + '-Z', '--always-unzip', + '-f', '--find-links', + '-i', '--index-url', + '--extra-index-url'))): + yield line.rstrip() + continue + + if line.startswith('-e') or line.startswith('--editable'): + if line.startswith('-e'): + line = line[2:].strip() + else: + line = line[len('--editable'):].strip().lstrip('=') + line_req = InstallRequirement.from_editable( + line, + default_vcs=default_vcs, + isolated=isolated, + wheel_cache=wheel_cache, + ) + else: + line_req = InstallRequirement.from_line( + line, + isolated=isolated, + wheel_cache=wheel_cache, + ) + + if not line_req.name: + logger.info( + "Skipping line because it's not clear what it " + "would install: %s", + line.strip(), + ) + logger.info( + " (add #egg=PackageName to the URL to avoid" + " this warning)" + ) + elif line_req.name not in installations: + logger.warning( + "Requirement file contains %s, but that package is" + " not installed", + line.strip(), + ) + else: + yield str(installations[line_req.name]).rstrip() + del installations[line_req.name] + + yield( + '## The following requirements were added by ' + 'pip freeze:' + ) + for installation in sorted( + installations.values(), key=lambda x: x.name.lower()): + yield str(installation).rstrip() diff --git a/lib/python3.4/site-packages/pip/pep425tags.py b/lib/python3.4/site-packages/pip/pep425tags.py new file mode 100644 index 0000000..2d91ccc --- /dev/null +++ b/lib/python3.4/site-packages/pip/pep425tags.py @@ -0,0 +1,222 @@ +"""Generate and work with PEP 425 Compatibility Tags.""" +from __future__ import absolute_import + +import re +import sys +import warnings +import platform +import logging + +try: + import sysconfig +except ImportError: # pragma nocover + # Python < 2.7 + import distutils.sysconfig as sysconfig +import distutils.util + + +logger = logging.getLogger(__name__) + + +_osx_arch_pat = re.compile(r'(.+)_(\d+)_(\d+)_(.+)') + + +def get_config_var(var): + try: + return sysconfig.get_config_var(var) + except IOError as e: # Issue #1074 + warnings.warn("{0}".format(e), RuntimeWarning) + return None + + +def get_abbr_impl(): + """Return abbreviated implementation name.""" + if hasattr(sys, 'pypy_version_info'): + pyimpl = 'pp' + elif sys.platform.startswith('java'): + pyimpl = 'jy' + elif sys.platform == 'cli': + pyimpl = 'ip' + else: + pyimpl = 'cp' + return pyimpl + + +def get_impl_ver(): + """Return implementation version.""" + impl_ver = get_config_var("py_version_nodot") + if not impl_ver or get_abbr_impl() == 'pp': + impl_ver = ''.join(map(str, get_impl_version_info())) + return impl_ver + + +def get_impl_version_info(): + """Return sys.version_info-like tuple for use in decrementing the minor + version.""" + if get_abbr_impl() == 'pp': + # as per https://github.com/pypa/pip/issues/2882 + return (sys.version_info[0], sys.pypy_version_info.major, + sys.pypy_version_info.minor) + else: + return sys.version_info[0], sys.version_info[1] + + +def get_impl_tag(): + """ + Returns the Tag for this specific implementation. 
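# Editor's note: how get_abbr_impl() and get_impl_ver() combine into the
# value returned by get_impl_tag(), e.g. "cp34" on CPython 3.4. This is a
# CPython-oriented approximation; the PyPy/Jython/IronPython branches above
# are omitted:
import sys

impl = 'pp' if hasattr(sys, 'pypy_version_info') else 'cp'
print('{0}{1}{2}'.format(impl, sys.version_info[0], sys.version_info[1]))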
+ """ + return "{0}{1}".format(get_abbr_impl(), get_impl_ver()) + + +def get_flag(var, fallback, expected=True, warn=True): + """Use a fallback method for determining SOABI flags if the needed config + var is unset or unavailable.""" + val = get_config_var(var) + if val is None: + if warn: + logger.debug("Config variable '%s' is unset, Python ABI tag may " + "be incorrect", var) + return fallback() + return val == expected + + +def get_abi_tag(): + """Return the ABI tag based on SOABI (if available) or emulate SOABI + (CPython 2, PyPy).""" + soabi = get_config_var('SOABI') + impl = get_abbr_impl() + if not soabi and impl in ('cp', 'pp') and hasattr(sys, 'maxunicode'): + d = '' + m = '' + u = '' + if get_flag('Py_DEBUG', + lambda: hasattr(sys, 'gettotalrefcount'), + warn=(impl == 'cp')): + d = 'd' + if get_flag('WITH_PYMALLOC', + lambda: impl == 'cp', + warn=(impl == 'cp')): + m = 'm' + if get_flag('Py_UNICODE_SIZE', + lambda: sys.maxunicode == 0x10ffff, + expected=4, + warn=(impl == 'cp' and + sys.version_info < (3, 3))) \ + and sys.version_info < (3, 3): + u = 'u' + abi = '%s%s%s%s%s' % (impl, get_impl_ver(), d, m, u) + elif soabi and soabi.startswith('cpython-'): + abi = 'cp' + soabi.split('-')[1] + elif soabi: + abi = soabi.replace('.', '_').replace('-', '_') + else: + abi = None + return abi + + +def get_platform(): + """Return our platform name 'win32', 'linux_x86_64'""" + if sys.platform == 'darwin': + # distutils.util.get_platform() returns the release based on the value + # of MACOSX_DEPLOYMENT_TARGET on which Python was built, which may + # be signficantly older than the user's current machine. + release, _, machine = platform.mac_ver() + split_ver = release.split('.') + return 'macosx_{0}_{1}_{2}'.format(split_ver[0], split_ver[1], machine) + # XXX remove distutils dependency + return distutils.util.get_platform().replace('.', '_').replace('-', '_') + + +def get_supported(versions=None, noarch=False): + """Return a list of supported tags for each version specified in + `versions`. + + :param versions: a list of string versions, of the form ["33", "32"], + or None. The first version will be assumed to support our ABI. + """ + supported = [] + + # Versions must be given with respect to the preference + if versions is None: + versions = [] + version_info = get_impl_version_info() + major = version_info[:-1] + # Support all previous minor Python versions. 
+ for minor in range(version_info[-1], -1, -1): + versions.append(''.join(map(str, major + (minor,)))) + + impl = get_abbr_impl() + + abis = [] + + abi = get_abi_tag() + if abi: + abis[0:0] = [abi] + + abi3s = set() + import imp + for suffix in imp.get_suffixes(): + if suffix[0].startswith('.abi'): + abi3s.add(suffix[0].split('.', 2)[1]) + + abis.extend(sorted(list(abi3s))) + + abis.append('none') + + if not noarch: + arch = get_platform() + if sys.platform == 'darwin': + # support macosx-10.6-intel on macosx-10.9-x86_64 + match = _osx_arch_pat.match(arch) + if match: + name, major, minor, actual_arch = match.groups() + actual_arches = [actual_arch] + if actual_arch in ('i386', 'ppc'): + actual_arches.append('fat') + if actual_arch in ('i386', 'x86_64'): + actual_arches.append('intel') + if actual_arch in ('ppc64', 'x86_64'): + actual_arches.append('fat64') + if actual_arch in ('i386', 'ppc', 'x86_64'): + actual_arches.append('fat32') + if actual_arch in ('i386', 'x86_64', 'intel', 'ppc', 'ppc64'): + actual_arches.append('universal') + tpl = '{0}_{1}_%i_%s'.format(name, major) + arches = [] + for m in reversed(range(int(minor) + 1)): + for a in actual_arches: + arches.append(tpl % (m, a)) + else: + # arch pattern didn't match (?!) + arches = [arch] + else: + arches = [arch] + + # Current version, current API (built specifically for our Python): + for abi in abis: + for arch in arches: + supported.append(('%s%s' % (impl, versions[0]), abi, arch)) + + # Has binaries, does not use the Python API: + supported.append(('py%s' % (versions[0][0]), 'none', arch)) + + # No abi / arch, but requires our implementation: + for i, version in enumerate(versions): + supported.append(('%s%s' % (impl, version), 'none', 'any')) + if i == 0: + # Tagged specifically as being cross-version compatible + # (with just the major version specified) + supported.append(('%s%s' % (impl, versions[0][0]), 'none', 'any')) + + # No abi / arch, generic Python + for i, version in enumerate(versions): + supported.append(('py%s' % (version,), 'none', 'any')) + if i == 0: + supported.append(('py%s' % (version[0]), 'none', 'any')) + + return supported + +supported_tags = get_supported() +supported_tags_noarch = get_supported(noarch=True) + +implementation_tag = get_impl_tag() diff --git a/lib/python3.4/site-packages/pip/req/__init__.py b/lib/python3.4/site-packages/pip/req/__init__.py new file mode 100644 index 0000000..00185a4 --- /dev/null +++ b/lib/python3.4/site-packages/pip/req/__init__.py @@ -0,0 +1,10 @@ +from __future__ import absolute_import + +from .req_install import InstallRequirement +from .req_set import RequirementSet, Requirements +from .req_file import parse_requirements + +__all__ = [ + "RequirementSet", "Requirements", "InstallRequirement", + "parse_requirements", +] diff --git a/lib/python3.4/site-packages/pip/req/req_file.py b/lib/python3.4/site-packages/pip/req/req_file.py new file mode 100644 index 0000000..c92c930 --- /dev/null +++ b/lib/python3.4/site-packages/pip/req/req_file.py @@ -0,0 +1,338 @@ +""" +Requirements file parsing +""" + +from __future__ import absolute_import + +import os +import re +import shlex +import optparse +import warnings + +from pip._vendor.six.moves.urllib import parse as urllib_parse +from pip._vendor.six.moves import filterfalse + +import pip +from pip.download import get_file_content +from pip.req.req_install import InstallRequirement +from pip.exceptions import (RequirementsFileParseError) +from pip.utils.deprecation import RemovedInPip10Warning +from pip import cmdoptions + 
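# Editor's note: a sketch of how a wheel's tag tuples are matched against
# the ordered list produced by get_supported(); the tag values here are
# illustrative, not computed:
supported = [
    ('cp34', 'cp34m', 'linux_x86_64'),
    ('cp34', 'none', 'any'),
    ('py3', 'none', 'any'),
]
wheel_tags = [('py2', 'none', 'any'), ('py3', 'none', 'any')]
best = min(supported.index(tag) for tag in wheel_tags if tag in supported)
print(best)  # 2 -- a lower index means a more specific, preferred match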
+__all__ = ['parse_requirements'] + +SCHEME_RE = re.compile(r'^(http|https|file):', re.I) +COMMENT_RE = re.compile(r'(^|\s)+#.*$') + +SUPPORTED_OPTIONS = [ + cmdoptions.constraints, + cmdoptions.editable, + cmdoptions.requirements, + cmdoptions.no_index, + cmdoptions.index_url, + cmdoptions.find_links, + cmdoptions.extra_index_url, + cmdoptions.allow_external, + cmdoptions.allow_all_external, + cmdoptions.no_allow_external, + cmdoptions.allow_unsafe, + cmdoptions.no_allow_unsafe, + cmdoptions.use_wheel, + cmdoptions.no_use_wheel, + cmdoptions.always_unzip, + cmdoptions.no_binary, + cmdoptions.only_binary, + cmdoptions.pre, + cmdoptions.process_dependency_links, + cmdoptions.trusted_host, + cmdoptions.require_hashes, +] + +# options to be passed to requirements +SUPPORTED_OPTIONS_REQ = [ + cmdoptions.install_options, + cmdoptions.global_options, + cmdoptions.hash, +] + +# the 'dest' string values +SUPPORTED_OPTIONS_REQ_DEST = [o().dest for o in SUPPORTED_OPTIONS_REQ] + + +def parse_requirements(filename, finder=None, comes_from=None, options=None, + session=None, constraint=False, wheel_cache=None): + """Parse a requirements file and yield InstallRequirement instances. + + :param filename: Path or url of requirements file. + :param finder: Instance of pip.index.PackageFinder. + :param comes_from: Origin description of requirements. + :param options: cli options. + :param session: Instance of pip.download.PipSession. + :param constraint: If true, parsing a constraint file rather than + a requirements file. + :param wheel_cache: Instance of pip.wheel.WheelCache + """ + if session is None: + raise TypeError( + "parse_requirements() missing 1 required keyword argument: " + "'session'" + ) + + _, content = get_file_content( + filename, comes_from=comes_from, session=session + ) + + lines_enum = preprocess(content, options) + + for line_number, line in lines_enum: + req_iter = process_line(line, filename, line_number, finder, + comes_from, options, session, wheel_cache, + constraint=constraint) + for req in req_iter: + yield req + + +def preprocess(content, options): + """Split, filter, and join lines, and return a line iterator + + :param content: the content of the requirements file + :param options: cli options + """ + lines_enum = enumerate(content.splitlines(), start=1) + lines_enum = join_lines(lines_enum) + lines_enum = ignore_comments(lines_enum) + lines_enum = skip_regex(lines_enum, options) + return lines_enum + + +def process_line(line, filename, line_number, finder=None, comes_from=None, + options=None, session=None, wheel_cache=None, + constraint=False): + """Process a single requirements line; this can result in creating/yielding + requirements, or updating the finder. + + For lines that contain requirements, the only options that have an effect + are from SUPPORTED_OPTIONS_REQ, and they are scoped to the + requirement. Other options from SUPPORTED_OPTIONS may be present, but are + ignored. + + For lines that do not contain requirements, the only options that have an + effect are from SUPPORTED_OPTIONS. Options from SUPPORTED_OPTIONS_REQ may + be present, but are ignored. These lines may contain multiple options + (although our docs imply only one is supported), and all are parsed and + affect the finder. + + :param constraint: If True, parsing a constraints file.
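# Editor's note: what COMMENT_RE (defined above) strips once the pipeline in
# preprocess() runs -- trailing comments and whole-line comments alike:
import re

COMMENT_RE = re.compile(r'(^|\s)+#.*$')
print(COMMENT_RE.sub('', 'six==1.10.0  # pinned deliberately'))  # 'six==1.10.0'
print(COMMENT_RE.sub('', '# a whole-line comment'))              # ''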
+ :param options: OptionParser options that we may update + """ + parser = build_parser() + defaults = parser.get_default_values() + defaults.index_url = None + if finder: + # `finder.format_control` will be updated during parsing + defaults.format_control = finder.format_control + args_str, options_str = break_args_options(line) + opts, _ = parser.parse_args(shlex.split(options_str), defaults) + + # preserve for the nested code path + line_comes_from = '%s %s (line %s)' % ( + '-c' if constraint else '-r', filename, line_number) + + # yield a line requirement + if args_str: + isolated = options.isolated_mode if options else False + if options: + cmdoptions.check_install_build_global(options, opts) + # get the options that apply to requirements + req_options = {} + for dest in SUPPORTED_OPTIONS_REQ_DEST: + if dest in opts.__dict__ and opts.__dict__[dest]: + req_options[dest] = opts.__dict__[dest] + yield InstallRequirement.from_line( + args_str, line_comes_from, constraint=constraint, + isolated=isolated, options=req_options, wheel_cache=wheel_cache + ) + + # yield an editable requirement + elif opts.editables: + isolated = options.isolated_mode if options else False + default_vcs = options.default_vcs if options else None + yield InstallRequirement.from_editable( + opts.editables[0], comes_from=line_comes_from, + constraint=constraint, default_vcs=default_vcs, isolated=isolated, + wheel_cache=wheel_cache + ) + + # parse a nested requirements file + elif opts.requirements or opts.constraints: + if opts.requirements: + req_path = opts.requirements[0] + nested_constraint = False + else: + req_path = opts.constraints[0] + nested_constraint = True + # original file is over http + if SCHEME_RE.search(filename): + # do a url join so relative paths work + req_path = urllib_parse.urljoin(filename, req_path) + # original file and nested file are paths + elif not SCHEME_RE.search(req_path): + # do a join so relative paths work + req_path = os.path.join(os.path.dirname(filename), req_path) + # TODO: Why not use `comes_from='-r {} (line {})'` here as well? + parser = parse_requirements( + req_path, finder, comes_from, options, session, + constraint=nested_constraint, wheel_cache=wheel_cache + ) + for req in parser: + yield req + + # percolate hash-checking option upward + elif opts.require_hashes: + options.require_hashes = opts.require_hashes + + # set finder options + elif finder: + if opts.allow_external: + warnings.warn( + "--allow-external has been deprecated and will be removed in " + "the future. Due to changes in the repository protocol, it no " + "longer has any effect.", + RemovedInPip10Warning, + ) + + if opts.allow_all_external: + warnings.warn( + "--allow-all-external has been deprecated and will be removed " + "in the future. Due to changes in the repository protocol, it " + "no longer has any effect.", + RemovedInPip10Warning, + ) + + if opts.allow_unverified: + warnings.warn( + "--allow-unverified has been deprecated and will be removed " + "in the future. 
Due to changes in the repository protocol, it " + "no longer has any effect.", + RemovedInPip10Warning, + ) + + if opts.index_url: + finder.index_urls = [opts.index_url] + if opts.use_wheel is False: + finder.use_wheel = False + pip.index.fmt_ctl_no_use_wheel(finder.format_control) + if opts.no_index is True: + finder.index_urls = [] + if opts.extra_index_urls: + finder.index_urls.extend(opts.extra_index_urls) + if opts.find_links: + # FIXME: it would be nice to keep track of the source + # of the find_links: support a find-links local path + # relative to a requirements file. + value = opts.find_links[0] + req_dir = os.path.dirname(os.path.abspath(filename)) + relative_to_reqs_file = os.path.join(req_dir, value) + if os.path.exists(relative_to_reqs_file): + value = relative_to_reqs_file + finder.find_links.append(value) + if opts.pre: + finder.allow_all_prereleases = True + if opts.process_dependency_links: + finder.process_dependency_links = True + if opts.trusted_hosts: + finder.secure_origins.extend( + ("*", host, "*") for host in opts.trusted_hosts) + + +def break_args_options(line): + """Break up the line into an args and options string. We only want to shlex + (and then optparse) the options, not the args. args can contain markers + which are corrupted by shlex. + """ + tokens = line.split(' ') + args = [] + options = tokens[:] + for token in tokens: + if token.startswith('-') or token.startswith('--'): + break + else: + args.append(token) + options.pop(0) + return ' '.join(args), ' '.join(options) + + +def build_parser(): + """ + Return a parser for parsing requirement lines + """ + parser = optparse.OptionParser(add_help_option=False) + + option_factories = SUPPORTED_OPTIONS + SUPPORTED_OPTIONS_REQ + for option_factory in option_factories: + option = option_factory() + parser.add_option(option) + + # By default optparse sys.exits on parsing errors. We want to wrap + # that in our own exception. + def parser_exit(self, msg): + raise RequirementsFileParseError(msg) + parser.exit = parser_exit + + return parser + + +def join_lines(lines_enum): + """Joins a line ending in '\' with the previous line (except when following + comments). The joined line takes on the index of the first line. + """ + primary_line_number = None + new_line = [] + for line_number, line in lines_enum: + if not line.endswith('\\') or COMMENT_RE.match(line): + if COMMENT_RE.match(line): + # this ensures comments are always matched later + line = ' ' + line + if new_line: + new_line.append(line) + yield primary_line_number, ''.join(new_line) + new_line = [] + else: + yield line_number, line + else: + if not new_line: + primary_line_number = line_number + new_line.append(line.strip('\\')) + + # last line contains \ + if new_line: + yield primary_line_number, ''.join(new_line) + + # TODO: handle space after '\'. + + +def ignore_comments(lines_enum): + """ + Strips comments and filter empty lines. 
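# Editor's note: break_args_options() above in isolation -- the line splits
# at the first option-like token, so an environment marker in the args part
# never reaches shlex:
def break_args_options(line):
    tokens = line.split(' ')
    args = []
    options = tokens[:]
    for token in tokens:
        if token.startswith('-') or token.startswith('--'):
            break
        else:
            args.append(token)
            options.pop(0)
    return ' '.join(args), ' '.join(options)

print(break_args_options("requests==2.9.1; python_version<'3.0' --no-binary :all:"))
# -> ("requests==2.9.1; python_version<'3.0'", '--no-binary :all:')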
+ """ + for line_number, line in lines_enum: + line = COMMENT_RE.sub('', line) + line = line.strip() + if line: + yield line_number, line + + +def skip_regex(lines_enum, options): + """ + Skip lines that match '--skip-requirements-regex' pattern + + Note: the regex pattern is only built once + """ + skip_regex = options.skip_requirements_regex if options else None + if skip_regex: + pattern = re.compile(skip_regex) + lines_enum = filterfalse( + lambda e: pattern.search(e[1]), + lines_enum) + return lines_enum diff --git a/lib/python3.4/site-packages/pip/req/req_install.py b/lib/python3.4/site-packages/pip/req/req_install.py new file mode 100644 index 0000000..3b48431 --- /dev/null +++ b/lib/python3.4/site-packages/pip/req/req_install.py @@ -0,0 +1,1217 @@ +from __future__ import absolute_import + +import logging +import os +import re +import shutil +import sys +import tempfile +import traceback +import warnings +import zipfile + +from distutils import sysconfig +from distutils.util import change_root +from email.parser import FeedParser + +from pip._vendor import pkg_resources, six +from pip._vendor.distlib.markers import interpret as markers_interpret +from pip._vendor.packaging import specifiers +from pip._vendor.six.moves import configparser + +import pip.wheel + +from pip.compat import native_str, get_stdlib, WINDOWS +from pip.download import is_url, url_to_path, path_to_url, is_archive_file +from pip.exceptions import ( + InstallationError, UninstallationError, UnsupportedWheel, +) +from pip.locations import ( + bin_py, running_under_virtualenv, PIP_DELETE_MARKER_FILENAME, bin_user, +) +from pip.utils import ( + display_path, rmtree, ask_path_exists, backup_dir, is_installable_dir, + dist_in_usersite, dist_in_site_packages, egg_link_path, + call_subprocess, read_text_file, FakeFile, _make_build_dir, ensure_dir, + get_installed_version, canonicalize_name, normalize_path, dist_is_local, +) + +from pip.utils.hashes import Hashes +from pip.utils.deprecation import RemovedInPip10Warning +from pip.utils.logging import indent_log +from pip.utils.setuptools_build import SETUPTOOLS_SHIM +from pip.utils.ui import open_spinner +from pip.req.req_uninstall import UninstallPathSet +from pip.vcs import vcs +from pip.wheel import move_wheel_files, Wheel +from pip._vendor.packaging.version import Version + + +logger = logging.getLogger(__name__) + +operators = specifiers.Specifier._operators.keys() + + +def _strip_extras(path): + m = re.match(r'^(.+)(\[[^\]]+\])$', path) + extras = None + if m: + path_no_extras = m.group(1) + extras = m.group(2) + else: + path_no_extras = path + + return path_no_extras, extras + + +class InstallRequirement(object): + + def __init__(self, req, comes_from, source_dir=None, editable=False, + link=None, as_egg=False, update=True, editable_options=None, + pycompile=True, markers=None, isolated=False, options=None, + wheel_cache=None, constraint=False): + self.extras = () + if isinstance(req, six.string_types): + try: + req = pkg_resources.Requirement.parse(req) + except pkg_resources.RequirementParseError: + if os.path.sep in req: + add_msg = "It looks like a path. Does it exist ?" + elif '=' in req and not any(op in req for op in operators): + add_msg = "= is not a valid operator. Did you mean == ?" 
+ else: + add_msg = traceback.format_exc() + raise InstallationError( + "Invalid requirement: '%s'\n%s" % (req, add_msg)) + self.extras = req.extras + + self.req = req + self.comes_from = comes_from + self.constraint = constraint + self.source_dir = source_dir + self.editable = editable + + if editable_options is None: + editable_options = {} + + self.editable_options = editable_options + self._wheel_cache = wheel_cache + self.link = self.original_link = link + self.as_egg = as_egg + self.markers = markers + self._egg_info_path = None + # This holds the pkg_resources.Distribution object if this requirement + # is already available: + self.satisfied_by = None + # This hold the pkg_resources.Distribution object if this requirement + # conflicts with another installed distribution: + self.conflicts_with = None + # Temporary build location + self._temp_build_dir = None + # Used to store the global directory where the _temp_build_dir should + # have been created. Cf _correct_build_location method. + self._ideal_build_dir = None + # True if the editable should be updated: + self.update = update + # Set to True after successful installation + self.install_succeeded = None + # UninstallPathSet of uninstalled distribution (for possible rollback) + self.uninstalled = None + # Set True if a legitimate do-nothing-on-uninstall has happened - e.g. + # system site packages, stdlib packages. + self.nothing_to_uninstall = False + self.use_user_site = False + self.target_dir = None + self.options = options if options else {} + self.pycompile = pycompile + # Set to True after successful preparation of this requirement + self.prepared = False + + self.isolated = isolated + + @classmethod + def from_editable(cls, editable_req, comes_from=None, default_vcs=None, + isolated=False, options=None, wheel_cache=None, + constraint=False): + from pip.index import Link + + name, url, extras_override, editable_options = parse_editable( + editable_req, default_vcs) + if url.startswith('file:'): + source_dir = url_to_path(url) + else: + source_dir = None + + res = cls(name, comes_from, source_dir=source_dir, + editable=True, + link=Link(url), + constraint=constraint, + editable_options=editable_options, + isolated=isolated, + options=options if options else {}, + wheel_cache=wheel_cache) + + if extras_override is not None: + res.extras = extras_override + + return res + + @classmethod + def from_line( + cls, name, comes_from=None, isolated=False, options=None, + wheel_cache=None, constraint=False): + """Creates an InstallRequirement from a name, which might be a + requirement, directory containing 'setup.py', filename, or URL. + """ + from pip.index import Link + + if is_url(name): + marker_sep = '; ' + else: + marker_sep = ';' + if marker_sep in name: + name, markers = name.split(marker_sep, 1) + markers = markers.strip() + if not markers: + markers = None + else: + markers = None + name = name.strip() + req = None + path = os.path.normpath(os.path.abspath(name)) + link = None + extras = None + + if is_url(name): + link = Link(name) + else: + p, extras = _strip_extras(path) + if (os.path.isdir(p) and + (os.path.sep in name or name.startswith('.'))): + + if not is_installable_dir(p): + raise InstallationError( + "Directory %r is not installable. File 'setup.py' " + "not found." 
% name + ) + link = Link(path_to_url(p)) + elif is_archive_file(p): + if not os.path.isfile(p): + logger.warning( + 'Requirement %r looks like a filename, but the ' + 'file does not exist', + name + ) + link = Link(path_to_url(p)) + + # it's a local file, dir, or url + if link: + # Handle relative file URLs + if link.scheme == 'file' and re.search(r'\.\./', link.url): + link = Link( + path_to_url(os.path.normpath(os.path.abspath(link.path)))) + # wheel file + if link.is_wheel: + wheel = Wheel(link.filename) # can raise InvalidWheelFilename + if not wheel.supported(): + raise UnsupportedWheel( + "%s is not a supported wheel on this platform." % + wheel.filename + ) + req = "%s==%s" % (wheel.name, wheel.version) + else: + # set the req to the egg fragment. when it's not there, this + # will become an 'unnamed' requirement + req = link.egg_fragment + + # a requirement specifier + else: + req = name + + options = options if options else {} + res = cls(req, comes_from, link=link, markers=markers, + isolated=isolated, options=options, + wheel_cache=wheel_cache, constraint=constraint) + + if extras: + res.extras = pkg_resources.Requirement.parse('__placeholder__' + + extras).extras + + return res + + def __str__(self): + if self.req: + s = str(self.req) + if self.link: + s += ' from %s' % self.link.url + else: + s = self.link.url if self.link else None + if self.satisfied_by is not None: + s += ' in %s' % display_path(self.satisfied_by.location) + if self.comes_from: + if isinstance(self.comes_from, six.string_types): + comes_from = self.comes_from + else: + comes_from = self.comes_from.from_path() + if comes_from: + s += ' (from %s)' % comes_from + return s + + def __repr__(self): + return '<%s object: %s editable=%r>' % ( + self.__class__.__name__, str(self), self.editable) + + def populate_link(self, finder, upgrade, require_hashes): + """Ensure that if a link can be found for this, that it is found. + + Note that self.link may still be None - if Upgrade is False and the + requirement is already installed. + + If require_hashes is True, don't use the wheel cache, because cached + wheels, always built locally, have different hashes than the files + downloaded from the index server and thus throw false hash mismatches. + Furthermore, cached wheels at present have undeterministic contents due + to file modification times. + """ + if self.link is None: + self.link = finder.find_requirement(self, upgrade) + if self._wheel_cache is not None and not require_hashes: + old_link = self.link + self.link = self._wheel_cache.cached_wheel(self.link, self.name) + if old_link != self.link: + logger.debug('Using cached wheel link: %s', self.link) + + @property + def specifier(self): + return self.req.specifier + + @property + def is_pinned(self): + """Return whether I am pinned to an exact version. + + For example, some-package==1.2 is pinned; some-package>1.2 is not. 
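# Editor's note: is_pinned above, exercised standalone. This assumes the
# vendored packaging library that the imports at the top of this module
# reference; only a single == or === specifier counts as pinned:
from pip._vendor.packaging.specifiers import SpecifierSet

for raw in ('==1.2', '>1.2', '>=1.0,<2.0'):
    spec = SpecifierSet(raw)
    pinned = (len(spec) == 1 and
              next(iter(spec)).operator in ('==', '==='))
    print(raw, pinned)  # ==1.2 True, >1.2 False, >=1.0,<2.0 False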
+ """ + specifiers = self.specifier + return (len(specifiers) == 1 and + next(iter(specifiers)).operator in ('==', '===')) + + def from_path(self): + if self.req is None: + return None + s = str(self.req) + if self.comes_from: + if isinstance(self.comes_from, six.string_types): + comes_from = self.comes_from + else: + comes_from = self.comes_from.from_path() + if comes_from: + s += '->' + comes_from + return s + + def build_location(self, build_dir): + if self._temp_build_dir is not None: + return self._temp_build_dir + if self.req is None: + # for requirement via a path to a directory: the name of the + # package is not available yet so we create a temp directory + # Once run_egg_info will have run, we'll be able + # to fix it via _correct_build_location + self._temp_build_dir = tempfile.mkdtemp('-build', 'pip-') + self._ideal_build_dir = build_dir + return self._temp_build_dir + if self.editable: + name = self.name.lower() + else: + name = self.name + # FIXME: Is there a better place to create the build_dir? (hg and bzr + # need this) + if not os.path.exists(build_dir): + logger.debug('Creating directory %s', build_dir) + _make_build_dir(build_dir) + return os.path.join(build_dir, name) + + def _correct_build_location(self): + """Move self._temp_build_dir to self._ideal_build_dir/self.req.name + + For some requirements (e.g. a path to a directory), the name of the + package is not available until we run egg_info, so the build_location + will return a temporary directory and store the _ideal_build_dir. + + This is only called by self.egg_info_path to fix the temporary build + directory. + """ + if self.source_dir is not None: + return + assert self.req is not None + assert self._temp_build_dir + assert self._ideal_build_dir + old_location = self._temp_build_dir + self._temp_build_dir = None + new_location = self.build_location(self._ideal_build_dir) + if os.path.exists(new_location): + raise InstallationError( + 'A package already exists in %s; please remove it to continue' + % display_path(new_location)) + logger.debug( + 'Moving package %s from %s to new location %s', + self, display_path(old_location), display_path(new_location), + ) + shutil.move(old_location, new_location) + self._temp_build_dir = new_location + self._ideal_build_dir = None + self.source_dir = new_location + self._egg_info_path = None + + @property + def name(self): + if self.req is None: + return None + return native_str(self.req.project_name) + + @property + def setup_py(self): + assert self.source_dir, "No source dir for %s" % self + try: + import setuptools # noqa + except ImportError: + if get_installed_version('setuptools') is None: + add_msg = "Please install setuptools." 
+ else: + add_msg = traceback.format_exc() + # Setuptools is not available + raise InstallationError( + "Could not import setuptools which is required to " + "install from a source distribution.\n%s" % add_msg + ) + + setup_file = 'setup.py' + + if self.editable_options and 'subdirectory' in self.editable_options: + setup_py = os.path.join(self.source_dir, + self.editable_options['subdirectory'], + setup_file) + + else: + setup_py = os.path.join(self.source_dir, setup_file) + + # Python2 __file__ should not be unicode + if six.PY2 and isinstance(setup_py, six.text_type): + setup_py = setup_py.encode(sys.getfilesystemencoding()) + + return setup_py + + def run_egg_info(self): + assert self.source_dir + if self.name: + logger.debug( + 'Running setup.py (path:%s) egg_info for package %s', + self.setup_py, self.name, + ) + else: + logger.debug( + 'Running setup.py (path:%s) egg_info for package from %s', + self.setup_py, self.link, + ) + + with indent_log(): + script = SETUPTOOLS_SHIM % self.setup_py + base_cmd = [sys.executable, '-c', script] + if self.isolated: + base_cmd += ["--no-user-cfg"] + egg_info_cmd = base_cmd + ['egg_info'] + # We can't put the .egg-info files at the root, because then the + # source code will be mistaken for an installed egg, causing + # problems + if self.editable: + egg_base_option = [] + else: + egg_info_dir = os.path.join(self.source_dir, 'pip-egg-info') + ensure_dir(egg_info_dir) + egg_base_option = ['--egg-base', 'pip-egg-info'] + cwd = self.source_dir + if self.editable_options and \ + 'subdirectory' in self.editable_options: + cwd = os.path.join(cwd, self.editable_options['subdirectory']) + call_subprocess( + egg_info_cmd + egg_base_option, + cwd=cwd, + show_stdout=False, + command_level=logging.DEBUG, + command_desc='python setup.py egg_info') + + if not self.req: + if isinstance( + pkg_resources.parse_version(self.pkg_info()["Version"]), + Version): + op = "==" + else: + op = "===" + self.req = pkg_resources.Requirement.parse( + "".join([ + self.pkg_info()["Name"], + op, + self.pkg_info()["Version"], + ])) + self._correct_build_location() + else: + metadata_name = canonicalize_name(self.pkg_info()["Name"]) + if canonicalize_name(self.req.project_name) != metadata_name: + logger.warning( + 'Running setup.py (path:%s) egg_info for package %s ' + 'produced metadata for project name %s. Fix your ' + '#egg=%s fragments.', + self.setup_py, self.name, metadata_name, self.name + ) + self.req = pkg_resources.Requirement.parse(metadata_name) + + def egg_info_data(self, filename): + if self.satisfied_by is not None: + if not self.satisfied_by.has_metadata(filename): + return None + return self.satisfied_by.get_metadata(filename) + assert self.source_dir + filename = self.egg_info_path(filename) + if not os.path.exists(filename): + return None + data = read_text_file(filename) + return data + + def egg_info_path(self, filename): + if self._egg_info_path is None: + if self.editable: + base = self.source_dir + else: + base = os.path.join(self.source_dir, 'pip-egg-info') + filenames = os.listdir(base) + if self.editable: + filenames = [] + for root, dirs, files in os.walk(base): + for dir in vcs.dirnames: + if dir in dirs: + dirs.remove(dir) + # Iterate over a copy of ``dirs``, since mutating + # a list while iterating over it can cause trouble. + # (See https://github.com/pypa/pip/pull/462.) 
+ for dir in list(dirs): + # Don't search in anything that looks like a virtualenv + # environment + if ( + os.path.exists( + os.path.join(root, dir, 'bin', 'python') + ) or + os.path.exists( + os.path.join( + root, dir, 'Scripts', 'Python.exe' + ) + )): + dirs.remove(dir) + # Also don't search through tests + elif dir == 'test' or dir == 'tests': + dirs.remove(dir) + filenames.extend([os.path.join(root, dir) + for dir in dirs]) + filenames = [f for f in filenames if f.endswith('.egg-info')] + + if not filenames: + raise InstallationError( + 'No files/directories in %s (from %s)' % (base, filename) + ) + assert filenames, \ + "No files/directories in %s (from %s)" % (base, filename) + + # if we have more than one match, we pick the toplevel one. This + # can easily be the case if there is a dist folder which contains + # an extracted tarball for testing purposes. + if len(filenames) > 1: + filenames.sort( + key=lambda x: x.count(os.path.sep) + + (os.path.altsep and x.count(os.path.altsep) or 0) + ) + self._egg_info_path = os.path.join(base, filenames[0]) + return os.path.join(self._egg_info_path, filename) + + def pkg_info(self): + p = FeedParser() + data = self.egg_info_data('PKG-INFO') + if not data: + logger.warning( + 'No PKG-INFO file found in %s', + display_path(self.egg_info_path('PKG-INFO')), + ) + p.feed(data or '') + return p.close() + + _requirements_section_re = re.compile(r'\[(.*?)\]') + + @property + def installed_version(self): + return get_installed_version(self.name) + + def assert_source_matches_version(self): + assert self.source_dir + version = self.pkg_info()['version'] + if version not in self.req: + logger.warning( + 'Requested %s, but installing version %s', + self, + self.installed_version, + ) + else: + logger.debug( + 'Source in %s has version %s, which satisfies requirement %s', + display_path(self.source_dir), + version, + self, + ) + + def update_editable(self, obtain=True): + if not self.link: + logger.debug( + "Cannot update repository at %s; repository location is " + "unknown", + self.source_dir, + ) + return + assert self.editable + assert self.source_dir + if self.link.scheme == 'file': + # Static paths don't get updated + return + assert '+' in self.link.url, "bad url: %r" % self.link.url + if not self.update: + return + vc_type, url = self.link.url.split('+', 1) + backend = vcs.get_backend(vc_type) + if backend: + vcs_backend = backend(self.link.url) + if obtain: + vcs_backend.obtain(self.source_dir) + else: + vcs_backend.export(self.source_dir) + else: + assert 0, ( + 'Unexpected version control type (in %s): %s' + % (self.link, vc_type)) + + def uninstall(self, auto_confirm=False): + """ + Uninstall the distribution currently satisfying this requirement. + + Prompts before removing or modifying files unless + ``auto_confirm`` is True. + + Refuses to delete or modify files outside of ``sys.prefix`` - + thus uninstallation within a virtual environment can only + modify that virtual environment, even if the virtualenv is + linked to global site-packages. 
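# Editor's note: the tie-break at the end of egg_info_path() above. When
# several .egg-info candidates survive filtering, sorting by separator count
# keeps the shallowest (top-level) one first:
import os

candidates = [os.path.join('dist', 'Foo.egg-info'), 'Foo.egg-info']
candidates.sort(key=lambda x: x.count(os.path.sep))
print(candidates[0])  # -> Foo.egg-info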
+ + """ + if not self.check_if_exists(): + raise UninstallationError( + "Cannot uninstall requirement %s, not installed" % (self.name,) + ) + dist = self.satisfied_by or self.conflicts_with + + dist_path = normalize_path(dist.location) + if not dist_is_local(dist): + logger.info( + "Not uninstalling %s at %s, outside environment %s", + dist.key, + dist_path, + sys.prefix, + ) + self.nothing_to_uninstall = True + return + + if dist_path in get_stdlib(): + logger.info( + "Not uninstalling %s at %s, as it is in the standard library.", + dist.key, + dist_path, + ) + self.nothing_to_uninstall = True + return + + paths_to_remove = UninstallPathSet(dist) + develop_egg_link = egg_link_path(dist) + develop_egg_link_egg_info = '{0}.egg-info'.format( + pkg_resources.to_filename(dist.project_name)) + egg_info_exists = dist.egg_info and os.path.exists(dist.egg_info) + # Special case for distutils installed package + distutils_egg_info = getattr(dist._provider, 'path', None) + + # Uninstall cases order do matter as in the case of 2 installs of the + # same package, pip needs to uninstall the currently detected version + if (egg_info_exists and dist.egg_info.endswith('.egg-info') and + not dist.egg_info.endswith(develop_egg_link_egg_info)): + # if dist.egg_info.endswith(develop_egg_link_egg_info), we + # are in fact in the develop_egg_link case + paths_to_remove.add(dist.egg_info) + if dist.has_metadata('installed-files.txt'): + for installed_file in dist.get_metadata( + 'installed-files.txt').splitlines(): + path = os.path.normpath( + os.path.join(dist.egg_info, installed_file) + ) + paths_to_remove.add(path) + # FIXME: need a test for this elif block + # occurs with --single-version-externally-managed/--record outside + # of pip + elif dist.has_metadata('top_level.txt'): + if dist.has_metadata('namespace_packages.txt'): + namespaces = dist.get_metadata('namespace_packages.txt') + else: + namespaces = [] + for top_level_pkg in [ + p for p + in dist.get_metadata('top_level.txt').splitlines() + if p and p not in namespaces]: + path = os.path.join(dist.location, top_level_pkg) + paths_to_remove.add(path) + paths_to_remove.add(path + '.py') + paths_to_remove.add(path + '.pyc') + paths_to_remove.add(path + '.pyo') + + elif distutils_egg_info: + warnings.warn( + "Uninstalling a distutils installed project ({0}) has been " + "deprecated and will be removed in a future version. This is " + "due to the fact that uninstalling a distutils project will " + "only partially uninstall the project.".format(self.name), + RemovedInPip10Warning, + ) + paths_to_remove.add(distutils_egg_info) + + elif dist.location.endswith('.egg'): + # package installed by easy_install + # We cannot match on dist.egg_name because it can slightly vary + # i.e. 
setuptools-0.6c11-py2.6.egg vs setuptools-0.6rc11-py2.6.egg + paths_to_remove.add(dist.location) + easy_install_egg = os.path.split(dist.location)[1] + easy_install_pth = os.path.join(os.path.dirname(dist.location), + 'easy-install.pth') + paths_to_remove.add_pth(easy_install_pth, './' + easy_install_egg) + + elif develop_egg_link: + # develop egg + with open(develop_egg_link, 'r') as fh: + link_pointer = os.path.normcase(fh.readline().strip()) + assert (link_pointer == dist.location), ( + 'Egg-link %s does not match installed location of %s ' + '(at %s)' % (link_pointer, self.name, dist.location) + ) + paths_to_remove.add(develop_egg_link) + easy_install_pth = os.path.join(os.path.dirname(develop_egg_link), + 'easy-install.pth') + paths_to_remove.add_pth(easy_install_pth, dist.location) + + elif egg_info_exists and dist.egg_info.endswith('.dist-info'): + for path in pip.wheel.uninstallation_paths(dist): + paths_to_remove.add(path) + + else: + logger.debug( + 'Not sure how to uninstall: %s - Check: %s', + dist, dist.location) + + # find distutils scripts= scripts + if dist.has_metadata('scripts') and dist.metadata_isdir('scripts'): + for script in dist.metadata_listdir('scripts'): + if dist_in_usersite(dist): + bin_dir = bin_user + else: + bin_dir = bin_py + paths_to_remove.add(os.path.join(bin_dir, script)) + if WINDOWS: + paths_to_remove.add(os.path.join(bin_dir, script) + '.bat') + + # find console_scripts + if dist.has_metadata('entry_points.txt'): + config = configparser.SafeConfigParser() + config.readfp( + FakeFile(dist.get_metadata_lines('entry_points.txt')) + ) + if config.has_section('console_scripts'): + for name, value in config.items('console_scripts'): + if dist_in_usersite(dist): + bin_dir = bin_user + else: + bin_dir = bin_py + paths_to_remove.add(os.path.join(bin_dir, name)) + if WINDOWS: + paths_to_remove.add( + os.path.join(bin_dir, name) + '.exe' + ) + paths_to_remove.add( + os.path.join(bin_dir, name) + '.exe.manifest' + ) + paths_to_remove.add( + os.path.join(bin_dir, name) + '-script.py' + ) + + paths_to_remove.remove(auto_confirm) + self.uninstalled = paths_to_remove + + def rollback_uninstall(self): + if self.uninstalled: + self.uninstalled.rollback() + else: + logger.error( + "Can't rollback %s, nothing uninstalled.", self.name, + ) + + def commit_uninstall(self): + if self.uninstalled: + self.uninstalled.commit() + elif not self.nothing_to_uninstall: + logger.error( + "Can't commit %s, nothing uninstalled.", self.name, + ) + + def archive(self, build_dir): + assert self.source_dir + create_archive = True + archive_name = '%s-%s.zip' % (self.name, self.pkg_info()["version"]) + archive_path = os.path.join(build_dir, archive_name) + if os.path.exists(archive_path): + response = ask_path_exists( + 'The file %s exists. 
(i)gnore, (w)ipe, (b)ackup ' % + display_path(archive_path), ('i', 'w', 'b')) + if response == 'i': + create_archive = False + elif response == 'w': + logger.warning('Deleting %s', display_path(archive_path)) + os.remove(archive_path) + elif response == 'b': + dest_file = backup_dir(archive_path) + logger.warning( + 'Backing up %s to %s', + display_path(archive_path), + display_path(dest_file), + ) + shutil.move(archive_path, dest_file) + if create_archive: + zip = zipfile.ZipFile( + archive_path, 'w', zipfile.ZIP_DEFLATED, + allowZip64=True + ) + dir = os.path.normcase(os.path.abspath(self.source_dir)) + for dirpath, dirnames, filenames in os.walk(dir): + if 'pip-egg-info' in dirnames: + dirnames.remove('pip-egg-info') + for dirname in dirnames: + dirname = os.path.join(dirpath, dirname) + name = self._clean_zip_name(dirname, dir) + zipdir = zipfile.ZipInfo(self.name + '/' + name + '/') + zipdir.external_attr = 0x1ED << 16 # 0o755 + zip.writestr(zipdir, '') + for filename in filenames: + if filename == PIP_DELETE_MARKER_FILENAME: + continue + filename = os.path.join(dirpath, filename) + name = self._clean_zip_name(filename, dir) + zip.write(filename, self.name + '/' + name) + zip.close() + logger.info('Saved %s', display_path(archive_path)) + + def _clean_zip_name(self, name, prefix): + assert name.startswith(prefix + os.path.sep), ( + "name %r doesn't start with prefix %r" % (name, prefix) + ) + name = name[len(prefix) + 1:] + name = name.replace(os.path.sep, '/') + return name + + def match_markers(self): + if self.markers is not None: + return markers_interpret(self.markers) + else: + return True + + def install(self, install_options, global_options=[], root=None, + prefix=None): + if self.editable: + self.install_editable( + install_options, global_options, prefix=prefix) + return + if self.is_wheel: + version = pip.wheel.wheel_version(self.source_dir) + pip.wheel.check_compatibility(version, self.name) + + self.move_wheel_files(self.source_dir, root=root, prefix=prefix) + self.install_succeeded = True + return + + # Extend the list of global and install options passed on to + # the setup.py call with the ones from the requirements file. + # Options specified in requirements file override those + # specified on the command line, since the last option given + # to setup.py is the one that is used. 
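# Editor's note: the zipdir.external_attr value used by archive() above.
# ZipInfo keeps Unix permission bits in the high 16 bits of external_attr,
# and 0x1ED is simply 0o755 (rwxr-xr-x):
print(oct(0x1ED))        # -> 0o755
print(hex(0x1ED << 16))  # -> 0x1ed0000, the stored attribute value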
+ global_options += self.options.get('global_options', []) + install_options += self.options.get('install_options', []) + + if self.isolated: + global_options = list(global_options) + ["--no-user-cfg"] + + temp_location = tempfile.mkdtemp('-record', 'pip-') + record_filename = os.path.join(temp_location, 'install-record.txt') + try: + install_args = [sys.executable, "-u"] + install_args.append('-c') + install_args.append(SETUPTOOLS_SHIM % self.setup_py) + install_args += list(global_options) + \ + ['install', '--record', record_filename] + + if not self.as_egg: + install_args += ['--single-version-externally-managed'] + + if root is not None: + install_args += ['--root', root] + if prefix is not None: + install_args += ['--prefix', prefix] + + if self.pycompile: + install_args += ["--compile"] + else: + install_args += ["--no-compile"] + + if running_under_virtualenv(): + py_ver_str = 'python' + sysconfig.get_python_version() + install_args += ['--install-headers', + os.path.join(sys.prefix, 'include', 'site', + py_ver_str, self.name)] + msg = 'Running setup.py install for %s' % (self.name,) + with open_spinner(msg) as spinner: + with indent_log(): + call_subprocess( + install_args + install_options, + cwd=self.source_dir, + show_stdout=False, + spinner=spinner, + ) + + if not os.path.exists(record_filename): + logger.debug('Record file %s not found', record_filename) + return + self.install_succeeded = True + if self.as_egg: + # there's no --always-unzip option we can pass to install + # command so we unable to save the installed-files.txt + return + + def prepend_root(path): + if root is None or not os.path.isabs(path): + return path + else: + return change_root(root, path) + + with open(record_filename) as f: + for line in f: + directory = os.path.dirname(line) + if directory.endswith('.egg-info'): + egg_info_dir = prepend_root(directory) + break + else: + logger.warning( + 'Could not find .egg-info directory in install record' + ' for %s', + self, + ) + # FIXME: put the record somewhere + # FIXME: should this be an error? + return + new_lines = [] + with open(record_filename) as f: + for line in f: + filename = line.strip() + if os.path.isdir(filename): + filename += os.path.sep + new_lines.append( + os.path.relpath( + prepend_root(filename), egg_info_dir) + ) + inst_files_path = os.path.join(egg_info_dir, 'installed-files.txt') + with open(inst_files_path, 'w') as f: + f.write('\n'.join(new_lines) + '\n') + finally: + if os.path.exists(record_filename): + os.remove(record_filename) + rmtree(temp_location) + + def ensure_has_source_dir(self, parent_dir): + """Ensure that a source_dir is set. + + This will create a temporary build dir if the name of the requirement + isn't known yet. + + :param parent_dir: The ideal pip parent_dir for the source_dir. + Generally src_dir for editables and build_dir for sdists. 
+ :return: self.source_dir + """ + if self.source_dir is None: + self.source_dir = self.build_location(parent_dir) + return self.source_dir + + def remove_temporary_source(self): + """Remove the source files from this requirement, if they are marked + for deletion""" + if self.source_dir and os.path.exists( + os.path.join(self.source_dir, PIP_DELETE_MARKER_FILENAME)): + logger.debug('Removing source in %s', self.source_dir) + rmtree(self.source_dir) + self.source_dir = None + if self._temp_build_dir and os.path.exists(self._temp_build_dir): + rmtree(self._temp_build_dir) + self._temp_build_dir = None + + def install_editable(self, install_options, + global_options=(), prefix=None): + logger.info('Running setup.py develop for %s', self.name) + + if self.isolated: + global_options = list(global_options) + ["--no-user-cfg"] + + if prefix: + prefix_param = ['--prefix={0}'.format(prefix)] + install_options = list(install_options) + prefix_param + + with indent_log(): + # FIXME: should we do --install-headers here too? + cwd = self.source_dir + if self.editable_options and \ + 'subdirectory' in self.editable_options: + cwd = os.path.join(cwd, self.editable_options['subdirectory']) + call_subprocess( + [ + sys.executable, + '-c', + SETUPTOOLS_SHIM % self.setup_py + ] + + list(global_options) + + ['develop', '--no-deps'] + + list(install_options), + + cwd=cwd, + show_stdout=False) + + self.install_succeeded = True + + def check_if_exists(self): + """Find an installed distribution that satisfies or conflicts + with this requirement, and set self.satisfied_by or + self.conflicts_with appropriately. + """ + if self.req is None: + return False + try: + self.satisfied_by = pkg_resources.get_distribution(self.req) + except pkg_resources.DistributionNotFound: + return False + except pkg_resources.VersionConflict: + existing_dist = pkg_resources.get_distribution( + self.req.project_name + ) + if self.use_user_site: + if dist_in_usersite(existing_dist): + self.conflicts_with = existing_dist + elif (running_under_virtualenv() and + dist_in_site_packages(existing_dist)): + raise InstallationError( + "Will not install to the user site because it will " + "lack sys.path precedence to %s in %s" % + (existing_dist.project_name, existing_dist.location) + ) + else: + self.conflicts_with = existing_dist + return True + + @property + def is_wheel(self): + return self.link and self.link.is_wheel + + def move_wheel_files(self, wheeldir, root=None, prefix=None): + move_wheel_files( + self.name, self.req, wheeldir, + user=self.use_user_site, + home=self.target_dir, + root=root, + prefix=prefix, + pycompile=self.pycompile, + isolated=self.isolated, + ) + + def get_dist(self): + """Return a pkg_resources.Distribution built from self.egg_info_path""" + egg_info = self.egg_info_path('').rstrip('/') + base_dir = os.path.dirname(egg_info) + metadata = pkg_resources.PathMetadata(base_dir, egg_info) + dist_name = os.path.splitext(os.path.basename(egg_info))[0] + return pkg_resources.Distribution( + os.path.dirname(egg_info), + project_name=dist_name, + metadata=metadata) + + @property + def has_hash_options(self): + """Return whether any known-good hashes are specified as options. + + These activate --require-hashes mode; hashes specified as part of a + URL do not. + + """ + return bool(self.options.get('hashes', {})) + + def hashes(self, trust_internet=True): + """Return a hash-comparer that considers my option- and URL-based + hashes to be known-good. 
+
+        Hashes in URLs--ones embedded in the requirements file, not ones
+        downloaded from an index server--are almost peers with ones from
+        flags. They satisfy --require-hashes (whether it was implicitly or
+        explicitly activated) but do not activate it. md5 and sha224 are not
+        allowed in flags, which should nudge people toward good algos. We
+        always OR all hashes together, even ones from URLs.
+
+        :param trust_internet: Whether to trust URL-based (#md5=...) hashes
+            downloaded from the internet, as by populate_link()
+
+        """
+        good_hashes = self.options.get('hashes', {}).copy()
+        link = self.link if trust_internet else self.original_link
+        if link and link.hash:
+            good_hashes.setdefault(link.hash_name, []).append(link.hash)
+        return Hashes(good_hashes)
+
+
+def _strip_postfix(req):
+    """
+    Strip req postfix ( -dev, 0.2, etc )
+    """
+    # FIXME: use package_to_requirement?
+    match = re.search(r'^(.*?)(?:-dev|-\d.*)$', req)
+    if match:
+        # Strip off -dev, -0.2, etc.
+        req = match.group(1)
+    return req
+
+
+def _build_req_from_url(url):
+
+    parts = [p for p in url.split('#', 1)[0].split('/') if p]
+
+    req = None
+    if parts[-2] in ('tags', 'branches', 'tag', 'branch'):
+        req = parts[-3]
+    elif parts[-1] == 'trunk':
+        req = parts[-2]
+    return req
+
+
+def _build_editable_options(req):
+
+    """
+    This method generates a dictionary of the query string
+    parameters contained in a given editable URL.
+    """
+    regexp = re.compile(r"[\?#&](?P<name>[^&=]+)=(?P<value>[^&=]+)")
+    matched = regexp.findall(req)
+
+    if matched:
+        ret = dict()
+        for option in matched:
+            (name, value) = option
+            if name in ret:
+                raise Exception("%s option already defined" % name)
+            ret[name] = value
+        return ret
+    return None
+
+
+def parse_editable(editable_req, default_vcs=None):
+    """Parses an editable requirement into:
+        - a requirement name
+        - an URL
+        - extras
+        - editable options
+    Accepted requirements:
+        svn+http://blahblah@rev#egg=Foobar[baz]&subdirectory=version_subdir
+        .[some_extra]
+    """
+
+    from pip.index import Link
+
+    url = editable_req
+    extras = None
+
+    # If a file path is specified with extras, strip off the extras.
+    m = re.match(r'^(.+)(\[[^\]]+\])$', url)
+    if m:
+        url_no_extras = m.group(1)
+        extras = m.group(2)
+    else:
+        url_no_extras = url
+
+    if os.path.isdir(url_no_extras):
+        if not os.path.exists(os.path.join(url_no_extras, 'setup.py')):
+            raise InstallationError(
+                "Directory %r is not installable. File 'setup.py' not found." %
+                url_no_extras
+            )
+        # Treating it as code that has already been checked out
+        url_no_extras = path_to_url(url_no_extras)
+
+    if url_no_extras.lower().startswith('file:'):
+        package_name = Link(url_no_extras).egg_fragment
+        if extras:
+            return (
+                package_name,
+                url_no_extras,
+                pkg_resources.Requirement.parse(
+                    '__placeholder__' + extras
+                ).extras,
+                {},
+            )
+        else:
+            return package_name, url_no_extras, None, {}
+
+    for version_control in vcs:
+        if url.lower().startswith('%s:' % version_control):
+            url = '%s+%s' % (version_control, url)
+            break
+
+    if '+' not in url:
+        if default_vcs:
+            url = default_vcs + '+' + url
+        else:
+            raise InstallationError(
+                '%s should either be a path to a local project or a VCS url '
+                'beginning with svn+, git+, hg+, or bzr+' %
+                editable_req
+            )
+
+    vc_type = url.split('+', 1)[0].lower()
+
+    if not vcs.get_backend(vc_type):
+        error_message = 'For --editable=%s only ' % editable_req + \
+            ', '.join([backend.name + '+URL' for backend in vcs.backends]) + \
+            ' is currently supported'
+        raise InstallationError(error_message)
+
+    try:
+        options = _build_editable_options(editable_req)
+    except Exception as exc:
+        raise InstallationError(
+            '--editable=%s error in editable options:%s' % (editable_req, exc)
+        )
+    if not options or 'egg' not in options:
+        req = _build_req_from_url(editable_req)
+        if not req:
+            raise InstallationError(
+                '--editable=%s is not the right format; it must have '
+                '#egg=Package' % editable_req
+            )
+    else:
+        req = options['egg']
+
+    package = _strip_postfix(req)
+    return package, url, None, options
diff --git a/lib/python3.4/site-packages/pip/req/req_set.py b/lib/python3.4/site-packages/pip/req/req_set.py
new file mode 100644
index 0000000..b484dd9
--- /dev/null
+++ b/lib/python3.4/site-packages/pip/req/req_set.py
@@ -0,0 +1,745 @@
+from __future__ import absolute_import
+
+from collections import defaultdict
+from itertools import chain
+import logging
+import os
+
+from pip._vendor import pkg_resources
+from pip._vendor import requests
+
+from pip.compat import expanduser
+from pip.download import (is_file_url, is_dir_url, is_vcs_url, url_to_path,
+                          unpack_url)
+from pip.exceptions import (InstallationError, BestVersionAlreadyInstalled,
+                            DistributionNotFound, PreviousBuildDirError,
+                            HashError, HashErrors, HashUnpinned,
+                            DirectoryUrlHashUnsupported, VcsHashUnsupported)
+from pip.req.req_install import InstallRequirement
+from pip.utils import (
+    display_path, dist_in_usersite, ensure_dir, normalize_path)
+from pip.utils.hashes import MissingHashes
+from pip.utils.logging import indent_log
+from pip.vcs import vcs
+
+
+logger = logging.getLogger(__name__)
+
+
+class Requirements(object):
+
+    def __init__(self):
+        self._keys = []
+        self._dict = {}
+
+    def keys(self):
+        return self._keys
+
+    def values(self):
+        return [self._dict[key] for key in self._keys]
+
+    def __contains__(self, item):
+        return item in self._keys
+
+    def __setitem__(self, key, value):
+        if key not in self._keys:
+            self._keys.append(key)
+        self._dict[key] = value
+
+    def __getitem__(self, key):
+        return self._dict[key]
+
+    def __repr__(self):
+        values = ['%s: %s' % (repr(k), repr(self[k])) for k in self.keys()]
+        return 'Requirements({%s})' % ', '.join(values)
+
+
+class DistAbstraction(object):
+    """Abstracts out the wheel vs non-wheel prepare_files logic.
+
+    The requirements for anything installable are as follows:
+     - we must be able to determine the requirement name
+       (or we can't correctly handle the non-upgrade case).
+ - we must be able to generate a list of run-time dependencies + without installing any additional packages (or we would + have to either burn time by doing temporary isolated installs + or alternatively violate pips 'don't start installing unless + all requirements are available' rule - neither of which are + desirable). + - for packages with setup requirements, we must also be able + to determine their requirements without installing additional + packages (for the same reason as run-time dependencies) + - we must be able to create a Distribution object exposing the + above metadata. + """ + + def __init__(self, req_to_install): + self.req_to_install = req_to_install + + def dist(self, finder): + """Return a setuptools Dist object.""" + raise NotImplementedError(self.dist) + + def prep_for_dist(self): + """Ensure that we can get a Dist for this requirement.""" + raise NotImplementedError(self.dist) + + +def make_abstract_dist(req_to_install): + """Factory to make an abstract dist object. + + Preconditions: Either an editable req with a source_dir, or satisfied_by or + a wheel link, or a non-editable req with a source_dir. + + :return: A concrete DistAbstraction. + """ + if req_to_install.editable: + return IsSDist(req_to_install) + elif req_to_install.link and req_to_install.link.is_wheel: + return IsWheel(req_to_install) + else: + return IsSDist(req_to_install) + + +class IsWheel(DistAbstraction): + + def dist(self, finder): + return list(pkg_resources.find_distributions( + self.req_to_install.source_dir))[0] + + def prep_for_dist(self): + # FIXME:https://github.com/pypa/pip/issues/1112 + pass + + +class IsSDist(DistAbstraction): + + def dist(self, finder): + dist = self.req_to_install.get_dist() + # FIXME: shouldn't be globally added: + if dist.has_metadata('dependency_links.txt'): + finder.add_dependency_links( + dist.get_metadata_lines('dependency_links.txt') + ) + return dist + + def prep_for_dist(self): + self.req_to_install.run_egg_info() + self.req_to_install.assert_source_matches_version() + + +class Installed(DistAbstraction): + + def dist(self, finder): + return self.req_to_install.satisfied_by + + def prep_for_dist(self): + pass + + +class RequirementSet(object): + + def __init__(self, build_dir, src_dir, download_dir, upgrade=False, + ignore_installed=False, as_egg=False, target_dir=None, + ignore_dependencies=False, force_reinstall=False, + use_user_site=False, session=None, pycompile=True, + isolated=False, wheel_download_dir=None, + wheel_cache=None, require_hashes=False): + """Create a RequirementSet. + + :param wheel_download_dir: Where still-packed .whl files should be + written to. If None they are written to the download_dir parameter. + Separate to download_dir to permit only keeping wheel archives for + pip wheel. + :param download_dir: Where still packed archives should be written to. + If None they are not saved, and are deleted immediately after + unpacking. + :param wheel_cache: The pip wheel cache, for passing to + InstallRequirement. + """ + if session is None: + raise TypeError( + "RequirementSet() missing 1 required keyword argument: " + "'session'" + ) + + self.build_dir = build_dir + self.src_dir = src_dir + # XXX: download_dir and wheel_download_dir overlap semantically and may + # be combined if we're willing to have non-wheel archives present in + # the wheelhouse output by 'pip wheel'. 
+ self.download_dir = download_dir + self.upgrade = upgrade + self.ignore_installed = ignore_installed + self.force_reinstall = force_reinstall + self.requirements = Requirements() + # Mapping of alias: real_name + self.requirement_aliases = {} + self.unnamed_requirements = [] + self.ignore_dependencies = ignore_dependencies + self.successfully_downloaded = [] + self.successfully_installed = [] + self.reqs_to_cleanup = [] + self.as_egg = as_egg + self.use_user_site = use_user_site + self.target_dir = target_dir # set from --target option + self.session = session + self.pycompile = pycompile + self.isolated = isolated + if wheel_download_dir: + wheel_download_dir = normalize_path(wheel_download_dir) + self.wheel_download_dir = wheel_download_dir + self._wheel_cache = wheel_cache + self.require_hashes = require_hashes + # Maps from install_req -> dependencies_of_install_req + self._dependencies = defaultdict(list) + + def __str__(self): + reqs = [req for req in self.requirements.values() + if not req.comes_from] + reqs.sort(key=lambda req: req.name.lower()) + return ' '.join([str(req.req) for req in reqs]) + + def __repr__(self): + reqs = [req for req in self.requirements.values()] + reqs.sort(key=lambda req: req.name.lower()) + reqs_str = ', '.join([str(req.req) for req in reqs]) + return ('<%s object; %d requirement(s): %s>' + % (self.__class__.__name__, len(reqs), reqs_str)) + + def add_requirement(self, install_req, parent_req_name=None): + """Add install_req as a requirement to install. + + :param parent_req_name: The name of the requirement that needed this + added. The name is used because when multiple unnamed requirements + resolve to the same name, we could otherwise end up with dependency + links that point outside the Requirements set. parent_req must + already be added. Note that None implies that this is a user + supplied requirement, vs an inferred one. + :return: Additional requirements to scan. That is either [] if + the requirement is not applicable, or [install_req] if the + requirement is applicable and has just been added. + """ + name = install_req.name + if not install_req.match_markers(): + logger.warning("Ignoring %s: markers %r don't match your " + "environment", install_req.name, + install_req.markers) + return [] + + install_req.as_egg = self.as_egg + install_req.use_user_site = self.use_user_site + install_req.target_dir = self.target_dir + install_req.pycompile = self.pycompile + if not name: + # url or path requirement w/o an egg fragment + self.unnamed_requirements.append(install_req) + return [install_req] + else: + try: + existing_req = self.get_requirement(name) + except KeyError: + existing_req = None + if (parent_req_name is None and existing_req and not + existing_req.constraint and + existing_req.extras == install_req.extras): + raise InstallationError( + 'Double requirement given: %s (already in %s, name=%r)' + % (install_req, existing_req, name)) + if not existing_req: + # Add requirement + self.requirements[name] = install_req + # FIXME: what about other normalizations? E.g., _ vs. -? + if name.lower() != name: + self.requirement_aliases[name.lower()] = name + result = [install_req] + else: + # Assume there's no need to scan, and that we've already + # encountered this for scanning. 
+ result = [] + if not install_req.constraint and existing_req.constraint: + if (install_req.link and not (existing_req.link and + install_req.link.path == existing_req.link.path)): + self.reqs_to_cleanup.append(install_req) + raise InstallationError( + "Could not satisfy constraints for '%s': " + "installation from path or url cannot be " + "constrained to a version" % name) + # If we're now installing a constraint, mark the existing + # object for real installation. + existing_req.constraint = False + existing_req.extras = tuple( + sorted(set(existing_req.extras).union( + set(install_req.extras)))) + logger.debug("Setting %s extras to: %s", + existing_req, existing_req.extras) + # And now we need to scan this. + result = [existing_req] + # Canonicalise to the already-added object for the backref + # check below. + install_req = existing_req + if parent_req_name: + parent_req = self.get_requirement(parent_req_name) + self._dependencies[parent_req].append(install_req) + return result + + def has_requirement(self, project_name): + name = project_name.lower() + if (name in self.requirements and + not self.requirements[name].constraint or + name in self.requirement_aliases and + not self.requirements[self.requirement_aliases[name]].constraint): + return True + return False + + @property + def has_requirements(self): + return list(req for req in self.requirements.values() if not + req.constraint) or self.unnamed_requirements + + @property + def is_download(self): + if self.download_dir: + self.download_dir = expanduser(self.download_dir) + if os.path.exists(self.download_dir): + return True + else: + logger.critical('Could not find download directory') + raise InstallationError( + "Could not find or access download directory '%s'" + % display_path(self.download_dir)) + return False + + def get_requirement(self, project_name): + for name in project_name, project_name.lower(): + if name in self.requirements: + return self.requirements[name] + if name in self.requirement_aliases: + return self.requirements[self.requirement_aliases[name]] + raise KeyError("No project with the name %r" % project_name) + + def uninstall(self, auto_confirm=False): + for req in self.requirements.values(): + if req.constraint: + continue + req.uninstall(auto_confirm=auto_confirm) + req.commit_uninstall() + + def prepare_files(self, finder): + """ + Prepare process. Create temp directories, download and/or unpack files. + """ + # make the wheelhouse + if self.wheel_download_dir: + ensure_dir(self.wheel_download_dir) + + # If any top-level requirement has a hash specified, enter + # hash-checking mode, which requires hashes from all. + root_reqs = self.unnamed_requirements + self.requirements.values() + require_hashes = (self.require_hashes or + any(req.has_hash_options for req in root_reqs)) + if require_hashes and self.as_egg: + raise InstallationError( + '--egg is not allowed with --require-hashes mode, since it ' + 'delegates dependency resolution to setuptools and could thus ' + 'result in installation of unhashed packages.') + + # Actually prepare the files, and collect any exceptions. Most hash + # exceptions cannot be checked ahead of time, because + # req.populate_link() needs to be called before we can make decisions + # based on link type. 
+ discovered_reqs = [] + hash_errors = HashErrors() + for req in chain(root_reqs, discovered_reqs): + try: + discovered_reqs.extend(self._prepare_file( + finder, + req, + require_hashes=require_hashes, + ignore_dependencies=self.ignore_dependencies)) + except HashError as exc: + exc.req = req + hash_errors.append(exc) + + if hash_errors: + raise hash_errors + + def _check_skip_installed(self, req_to_install, finder): + """Check if req_to_install should be skipped. + + This will check if the req is installed, and whether we should upgrade + or reinstall it, taking into account all the relevant user options. + + After calling this req_to_install will only have satisfied_by set to + None if the req_to_install is to be upgraded/reinstalled etc. Any + other value will be a dist recording the current thing installed that + satisfies the requirement. + + Note that for vcs urls and the like we can't assess skipping in this + routine - we simply identify that we need to pull the thing down, + then later on it is pulled down and introspected to assess upgrade/ + reinstalls etc. + + :return: A text reason for why it was skipped, or None. + """ + # Check whether to upgrade/reinstall this req or not. + req_to_install.check_if_exists() + if req_to_install.satisfied_by: + skip_reason = 'satisfied (use --upgrade to upgrade)' + if self.upgrade: + best_installed = False + # For link based requirements we have to pull the + # tree down and inspect to assess the version #, so + # its handled way down. + if not (self.force_reinstall or req_to_install.link): + try: + finder.find_requirement(req_to_install, self.upgrade) + except BestVersionAlreadyInstalled: + skip_reason = 'up-to-date' + best_installed = True + except DistributionNotFound: + # No distribution found, so we squash the + # error - it will be raised later when we + # re-try later to do the install. + # Why don't we just raise here? + pass + + if not best_installed: + # don't uninstall conflict if user install and + # conflict is not user install + if not (self.use_user_site and not + dist_in_usersite(req_to_install.satisfied_by)): + req_to_install.conflicts_with = \ + req_to_install.satisfied_by + req_to_install.satisfied_by = None + return skip_reason + else: + return None + + def _prepare_file(self, + finder, + req_to_install, + require_hashes=False, + ignore_dependencies=False): + """Prepare a single requirements file. + + :return: A list of additional InstallRequirements to also install. + """ + # Tell user what we are doing for this requirement: + # obtain (editable), skipping, processing (local url), collecting + # (remote url or package name) + if req_to_install.constraint or req_to_install.prepared: + return [] + + req_to_install.prepared = True + + # ###################### # + # # print log messages # # + # ###################### # + if req_to_install.editable: + logger.info('Obtaining %s', req_to_install) + else: + # satisfied_by is only evaluated by calling _check_skip_installed, + # so it must be None here. 
+ assert req_to_install.satisfied_by is None + if not self.ignore_installed: + skip_reason = self._check_skip_installed( + req_to_install, finder) + + if req_to_install.satisfied_by: + assert skip_reason is not None, ( + '_check_skip_installed returned None but ' + 'req_to_install.satisfied_by is set to %r' + % (req_to_install.satisfied_by,)) + logger.info( + 'Requirement already %s: %s', skip_reason, + req_to_install) + else: + if (req_to_install.link and + req_to_install.link.scheme == 'file'): + path = url_to_path(req_to_install.link.url) + logger.info('Processing %s', display_path(path)) + else: + logger.info('Collecting %s', req_to_install) + + with indent_log(): + # ################################ # + # # vcs update or unpack archive # # + # ################################ # + if req_to_install.editable: + if require_hashes: + raise InstallationError( + 'The editable requirement %s cannot be installed when ' + 'requiring hashes, because there is no single file to ' + 'hash.' % req_to_install) + req_to_install.ensure_has_source_dir(self.src_dir) + req_to_install.update_editable(not self.is_download) + abstract_dist = make_abstract_dist(req_to_install) + abstract_dist.prep_for_dist() + if self.is_download: + req_to_install.archive(self.download_dir) + elif req_to_install.satisfied_by: + if require_hashes: + logger.debug( + 'Since it is already installed, we are trusting this ' + 'package without checking its hash. To ensure a ' + 'completely repeatable environment, install into an ' + 'empty virtualenv.') + abstract_dist = Installed(req_to_install) + else: + # @@ if filesystem packages are not marked + # editable in a req, a non deterministic error + # occurs when the script attempts to unpack the + # build directory + req_to_install.ensure_has_source_dir(self.build_dir) + # If a checkout exists, it's unwise to keep going. version + # inconsistencies are logged later, but do not fail the + # installation. + # FIXME: this won't upgrade when there's an existing + # package unpacked in `req_to_install.source_dir` + if os.path.exists( + os.path.join(req_to_install.source_dir, 'setup.py')): + raise PreviousBuildDirError( + "pip can't proceed with requirements '%s' due to a" + " pre-existing build directory (%s). This is " + "likely due to a previous installation that failed" + ". pip is being responsible and not assuming it " + "can delete this. Please delete it and try again." + % (req_to_install, req_to_install.source_dir) + ) + req_to_install.populate_link( + finder, self.upgrade, require_hashes) + # We can't hit this spot and have populate_link return None. + # req_to_install.satisfied_by is None here (because we're + # guarded) and upgrade has no impact except when satisfied_by + # is not None. + # Then inside find_requirement existing_applicable -> False + # If no new versions are found, DistributionNotFound is raised, + # otherwise a result is guaranteed. + assert req_to_install.link + link = req_to_install.link + + # Now that we have the real link, we can tell what kind of + # requirements we have and raise some more informative errors + # than otherwise. (For example, we can raise VcsHashUnsupported + # for a VCS URL rather than HashMissing.) + if require_hashes: + # We could check these first 2 conditions inside + # unpack_url and save repetition of conditions, but then + # we would report less-useful error messages for + # unhashable requirements, complaining that there's no + # hash provided. 
+ if is_vcs_url(link): + raise VcsHashUnsupported() + elif is_file_url(link) and is_dir_url(link): + raise DirectoryUrlHashUnsupported() + if (not req_to_install.original_link and + not req_to_install.is_pinned): + # Unpinned packages are asking for trouble when a new + # version is uploaded. This isn't a security check, but + # it saves users a surprising hash mismatch in the + # future. + # + # file:/// URLs aren't pinnable, so don't complain + # about them not being pinned. + raise HashUnpinned() + hashes = req_to_install.hashes( + trust_internet=not require_hashes) + if require_hashes and not hashes: + # Known-good hashes are missing for this requirement, so + # shim it with a facade object that will provoke hash + # computation and then raise a HashMissing exception + # showing the user what the hash should be. + hashes = MissingHashes() + + try: + download_dir = self.download_dir + # We always delete unpacked sdists after pip ran. + autodelete_unpacked = True + if req_to_install.link.is_wheel \ + and self.wheel_download_dir: + # when doing 'pip wheel` we download wheels to a + # dedicated dir. + download_dir = self.wheel_download_dir + if req_to_install.link.is_wheel: + if download_dir: + # When downloading, we only unpack wheels to get + # metadata. + autodelete_unpacked = True + else: + # When installing a wheel, we use the unpacked + # wheel. + autodelete_unpacked = False + unpack_url( + req_to_install.link, req_to_install.source_dir, + download_dir, autodelete_unpacked, + session=self.session, hashes=hashes) + except requests.HTTPError as exc: + logger.critical( + 'Could not install requirement %s because ' + 'of error %s', + req_to_install, + exc, + ) + raise InstallationError( + 'Could not install requirement %s because ' + 'of HTTP error %s for URL %s' % + (req_to_install, exc, req_to_install.link) + ) + abstract_dist = make_abstract_dist(req_to_install) + abstract_dist.prep_for_dist() + if self.is_download: + # Make a .zip of the source_dir we already created. + if req_to_install.link.scheme in vcs.all_schemes: + req_to_install.archive(self.download_dir) + # req_to_install.req is only avail after unpack for URL + # pkgs repeat check_if_exists to uninstall-on-upgrade + # (#14) + if not self.ignore_installed: + req_to_install.check_if_exists() + if req_to_install.satisfied_by: + if self.upgrade or self.ignore_installed: + # don't uninstall conflict if user install and + # conflict is not user install + if not (self.use_user_site and not + dist_in_usersite( + req_to_install.satisfied_by)): + req_to_install.conflicts_with = \ + req_to_install.satisfied_by + req_to_install.satisfied_by = None + else: + logger.info( + 'Requirement already satisfied (use ' + '--upgrade to upgrade): %s', + req_to_install, + ) + + # ###################### # + # # parse dependencies # # + # ###################### # + dist = abstract_dist.dist(finder) + more_reqs = [] + + def add_req(subreq): + sub_install_req = InstallRequirement( + str(subreq), + req_to_install, + isolated=self.isolated, + wheel_cache=self._wheel_cache, + ) + more_reqs.extend(self.add_requirement( + sub_install_req, req_to_install.name)) + + # We add req_to_install before its dependencies, so that we + # can refer to it when adding dependencies. 
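# --- editor's illustrative sketch (not part of the pip diff) ---
# How the parent -> children mapping populated via add_req feeds the
# install order: _to_install (further down) walks it depth-first so
# dependencies are emitted before their dependents. Names are made up.
deps = {'flask': ['werkzeug', 'jinja2'], 'werkzeug': [], 'jinja2': []}
order, seen = [], set()
def schedule(name):
    if name in seen:
        return
    seen.add(name)
    for child in deps[name]:
        schedule(child)
    order.append(name)
schedule('flask')
assert order == ['werkzeug', 'jinja2', 'flask']
# --- end sketch ---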
+            if not self.has_requirement(req_to_install.name):
+                # 'unnamed' requirements will get added here
+                self.add_requirement(req_to_install, None)
+
+            if not ignore_dependencies:
+                if (req_to_install.extras):
+                    logger.debug(
+                        "Installing extra requirements: %r",
+                        ','.join(req_to_install.extras),
+                    )
+                missing_requested = sorted(
+                    set(req_to_install.extras) - set(dist.extras)
+                )
+                for missing in missing_requested:
+                    logger.warning(
+                        '%s does not provide the extra \'%s\'',
+                        dist, missing
+                    )
+
+                available_requested = sorted(
+                    set(dist.extras) & set(req_to_install.extras)
+                )
+                for subreq in dist.requires(available_requested):
+                    add_req(subreq)
+
+            # cleanup tmp src
+            self.reqs_to_cleanup.append(req_to_install)
+
+            if not req_to_install.editable and not req_to_install.satisfied_by:
+                # XXX: --no-install leads this to report 'Successfully
+                # downloaded' for only non-editable reqs, even though we took
+                # action on them.
+                self.successfully_downloaded.append(req_to_install)
+
+        return more_reqs
+
+    def cleanup_files(self):
+        """Clean up files, remove builds."""
+        logger.debug('Cleaning up...')
+        with indent_log():
+            for req in self.reqs_to_cleanup:
+                req.remove_temporary_source()
+
+    def _to_install(self):
+        """Create the installation order.
+
+        The installation order is topological - requirements are installed
+        before the requiring thing. We break cycles at an arbitrary point,
+        and make no other guarantees.
+        """
+        # The current implementation, which we may change at any point
+        # installs the user specified things in the order given, except when
+        # dependencies must come earlier to achieve topological order.
+        order = []
+        ordered_reqs = set()
+
+        def schedule(req):
+            if req.satisfied_by or req in ordered_reqs:
+                return
+            if req.constraint:
+                return
+            ordered_reqs.add(req)
+            for dep in self._dependencies[req]:
+                schedule(dep)
+            order.append(req)
+        for install_req in self.requirements.values():
+            schedule(install_req)
+        return order
+
+    def install(self, install_options, global_options=(), *args, **kwargs):
+        """
+        Install everything in this set (after having downloaded and unpacked
+        the packages)
+        """
+        to_install = self._to_install()
+
+        if to_install:
+            logger.info(
+                'Installing collected packages: %s',
+                ', '.join([req.name for req in to_install]),
+            )
+
+        with indent_log():
+            for requirement in to_install:
+                if requirement.conflicts_with:
+                    logger.info(
+                        'Found existing installation: %s',
+                        requirement.conflicts_with,
+                    )
+                    with indent_log():
+                        requirement.uninstall(auto_confirm=True)
+                try:
+                    requirement.install(
+                        install_options,
+                        global_options,
+                        *args,
+                        **kwargs
+                    )
+                except:
+                    # if install did not succeed, rollback previous uninstall
+                    if (requirement.conflicts_with and not
+                            requirement.install_succeeded):
+                        requirement.rollback_uninstall()
+                    raise
+                else:
+                    if (requirement.conflicts_with and
+                            requirement.install_succeeded):
+                        requirement.commit_uninstall()
+                requirement.remove_temporary_source()
+
+        self.successfully_installed = to_install
diff --git a/lib/python3.4/site-packages/pip/req/req_uninstall.py b/lib/python3.4/site-packages/pip/req/req_uninstall.py
new file mode 100644
index 0000000..5248430
--- /dev/null
+++ b/lib/python3.4/site-packages/pip/req/req_uninstall.py
@@ -0,0 +1,195 @@
+from __future__ import absolute_import
+
+import logging
+import os
+import tempfile
+
+from pip.compat import uses_pycache, WINDOWS, cache_from_source
+from pip.exceptions import UninstallationError
+from pip.utils import rmtree, ask, is_local, renames, normalize_path
+from pip.utils.logging import indent_log
+
+
+logger = logging.getLogger(__name__)
+
+
+class UninstallPathSet(object):
+    """A set of file paths to be removed in the uninstallation of a
+    requirement."""
+    def __init__(self, dist):
+        self.paths = set()
+        self._refuse = set()
+        self.pth = {}
+        self.dist = dist
+        self.save_dir = None
+        self._moved_paths = []
+
+    def _permitted(self, path):
+        """
+        Return True if the given path is one we are permitted to
+        remove/modify, False otherwise.
+
+        """
+        return is_local(path)
+
+    def add(self, path):
+        head, tail = os.path.split(path)
+
+        # we normalize the head to resolve parent directory symlinks, but not
+        # the tail, since we only want to uninstall symlinks, not their targets
+        path = os.path.join(normalize_path(head), os.path.normcase(tail))
+
+        if not os.path.exists(path):
+            return
+        if self._permitted(path):
+            self.paths.add(path)
+        else:
+            self._refuse.add(path)
+
+        # __pycache__ files can show up after 'installed-files.txt' is created,
+        # due to imports
+        if os.path.splitext(path)[1] == '.py' and uses_pycache:
+            self.add(cache_from_source(path))
+
+    def add_pth(self, pth_file, entry):
+        pth_file = normalize_path(pth_file)
+        if self._permitted(pth_file):
+            if pth_file not in self.pth:
+                self.pth[pth_file] = UninstallPthEntries(pth_file)
+            self.pth[pth_file].add(entry)
+        else:
+            self._refuse.add(pth_file)
+
+    def compact(self, paths):
+        """Compact a path set to contain the minimal number of paths
+        necessary to contain all paths in the set. If /a/path/ and
+        /a/path/to/a/file.txt are both in the set, leave only the
+        shorter path."""
+        short_paths = set()
+        for path in sorted(paths, key=len):
+            if not any([
+                    (path.startswith(shortpath) and
+                     path[len(shortpath.rstrip(os.path.sep))] == os.path.sep)
+                    for shortpath in short_paths]):
+                short_paths.add(path)
+        return short_paths
+
+    def _stash(self, path):
+        return os.path.join(
+            self.save_dir, os.path.splitdrive(path)[1].lstrip(os.path.sep))
+
+    def remove(self, auto_confirm=False):
+        """Remove paths in ``self.paths`` with confirmation (unless
+        ``auto_confirm`` is True)."""
+        if not self.paths:
+            logger.info(
+                "Can't uninstall '%s'. No files were found to uninstall.",
+                self.dist.project_name,
+            )
+            return
+        logger.info(
+            'Uninstalling %s-%s:',
+            self.dist.project_name, self.dist.version
+        )
+
+        with indent_log():
+            paths = sorted(self.compact(self.paths))
+
+            if auto_confirm:
+                response = 'y'
+            else:
+                for path in paths:
+                    logger.info(path)
+                response = ask('Proceed (y/n)? ', ('y', 'n'))
+            if self._refuse:
+                logger.info('Not removing or modifying (outside of prefix):')
+                for path in self.compact(self._refuse):
+                    logger.info(path)
+            if response == 'y':
+                self.save_dir = tempfile.mkdtemp(suffix='-uninstall',
+                                                 prefix='pip-')
+                for path in paths:
+                    new_path = self._stash(path)
+                    logger.debug('Removing file or directory %s', path)
+                    self._moved_paths.append(path)
+                    renames(path, new_path)
+                for pth in self.pth.values():
+                    pth.remove()
+                logger.info(
+                    'Successfully uninstalled %s-%s',
+                    self.dist.project_name, self.dist.version
+                )
+
+    def rollback(self):
+        """Rollback the changes previously made by remove()."""
+        if self.save_dir is None:
+            logger.error(
+                "Can't roll back %s; was not uninstalled",
+                self.dist.project_name,
+            )
+            return False
+        logger.info('Rolling back uninstall of %s', self.dist.project_name)
+        for path in self._moved_paths:
+            tmp_path = self._stash(path)
+            logger.debug('Replacing %s', path)
+            renames(tmp_path, path)
+        for pth in self.pth.values():
+            pth.rollback()
+
+    def commit(self):
+        """Remove temporary save dir: rollback will no longer be possible."""
+        if self.save_dir is not None:
+            rmtree(self.save_dir)
+            self.save_dir = None
+            self._moved_paths = []
+
+
+class UninstallPthEntries(object):
+    def __init__(self, pth_file):
+        if not os.path.isfile(pth_file):
+            raise UninstallationError(
+                "Cannot remove entries from nonexistent file %s" % pth_file
+            )
+        self.file = pth_file
+        self.entries = set()
+        self._saved_lines = None
+
+    def add(self, entry):
+        entry = os.path.normcase(entry)
+        # On Windows, os.path.normcase converts the entry to use
+        # backslashes. This is correct for entries that describe absolute
+        # paths outside of site-packages, but all the others use forward
+        # slashes.
+        if WINDOWS and not os.path.splitdrive(entry)[0]:
+            entry = entry.replace('\\', '/')
+        self.entries.add(entry)
+
+    def remove(self):
+        logger.debug('Removing pth entries from %s:', self.file)
+        with open(self.file, 'rb') as fh:
+            # windows uses '\r\n' with py3k, but uses '\n' with py2.x
+            lines = fh.readlines()
+            self._saved_lines = lines
+        if any(b'\r\n' in line for line in lines):
+            endline = '\r\n'
+        else:
+            endline = '\n'
+        for entry in self.entries:
+            try:
+                logger.debug('Removing entry: %s', entry)
+                lines.remove((entry + endline).encode("utf-8"))
+            except ValueError:
+                pass
+        with open(self.file, 'wb') as fh:
+            fh.writelines(lines)
+
+    def rollback(self):
+        if self._saved_lines is None:
+            logger.error(
+                'Cannot roll back changes to %s, none were made', self.file
+            )
+            return False
+        logger.debug('Rolling %s back to previous state', self.file)
+        with open(self.file, 'wb') as fh:
+            fh.writelines(self._saved_lines)
+        return True
diff --git a/lib/python3.4/site-packages/pip/status_codes.py b/lib/python3.4/site-packages/pip/status_codes.py
new file mode 100644
index 0000000..275360a
--- /dev/null
+++ b/lib/python3.4/site-packages/pip/status_codes.py
@@ -0,0 +1,8 @@
+from __future__ import absolute_import
+
+SUCCESS = 0
+ERROR = 1
+UNKNOWN_ERROR = 2
+VIRTUALENV_NOT_FOUND = 3
+PREVIOUS_BUILD_DIR_ERROR = 4
+NO_MATCHES_FOUND = 23
diff --git a/lib/python3.4/site-packages/pip/utils/__init__.py b/lib/python3.4/site-packages/pip/utils/__init__.py
new file mode 100644
index 0000000..7bf8de4
--- /dev/null
+++ b/lib/python3.4/site-packages/pip/utils/__init__.py
@@ -0,0 +1,852 @@
+from __future__ import absolute_import
+
+from collections import deque
+import contextlib
+import errno
+import locale
+# we have a submodule named 'logging' which would shadow this if we used the
+# regular name:
+import logging as std_logging
+import re
+import os
+import posixpath
+import shutil
+import stat
+import subprocess
+import sys
+import tarfile
+import zipfile
+
+from pip.exceptions import InstallationError
+from pip.compat import console_to_str, expanduser, stdlib_pkgs
+from pip.locations import (
+    site_packages, user_site, running_under_virtualenv, virtualenv_no_global,
+    write_delete_marker_file,
+)
+from pip._vendor import pkg_resources
+from pip._vendor.six.moves import input
+from pip._vendor.six import PY2
+from pip._vendor.retrying import retry
+
+if PY2:
+    from io import BytesIO as StringIO
+else:
+    from io import StringIO
+
+__all__ = ['rmtree', 'display_path', 'backup_dir',
+           'ask', 'splitext',
+           'format_size', 'is_installable_dir',
+           'is_svn_page', 'file_contents',
+           'split_leading_dir', 'has_leading_dir',
+           'normalize_path', 'canonicalize_name',
+           'renames', 'get_terminal_size', 'get_prog',
+           'unzip_file', 'untar_file', 'unpack_file', 'call_subprocess',
+           'captured_stdout', 'remove_tracebacks', 'ensure_dir',
+           'ARCHIVE_EXTENSIONS', 'SUPPORTED_EXTENSIONS',
+           'get_installed_version']
+
+
+logger = std_logging.getLogger(__name__)
+
+BZ2_EXTENSIONS = ('.tar.bz2', '.tbz')
+XZ_EXTENSIONS = ('.tar.xz', '.txz', '.tlz', '.tar.lz', '.tar.lzma')
+ZIP_EXTENSIONS = ('.zip', '.whl')
+TAR_EXTENSIONS = ('.tar.gz', '.tgz', '.tar')
+ARCHIVE_EXTENSIONS = (
+    ZIP_EXTENSIONS + BZ2_EXTENSIONS + TAR_EXTENSIONS + XZ_EXTENSIONS)
+SUPPORTED_EXTENSIONS = ZIP_EXTENSIONS + TAR_EXTENSIONS
+try:
+    import bz2  # noqa
+    SUPPORTED_EXTENSIONS += BZ2_EXTENSIONS
+except ImportError:
+    logger.debug('bz2 module is not available')
+
+try:
+    # Only for Python 3.3+
+    import lzma  # noqa
+    SUPPORTED_EXTENSIONS += XZ_EXTENSIONS
+except ImportError:
+    logger.debug('lzma module is not available')
+
+
+def import_or_raise(pkg_or_module_string, ExceptionType, *args, **kwargs):
+    try:
+        return __import__(pkg_or_module_string)
+    except ImportError:
+        raise ExceptionType(*args, **kwargs)
+
+
+def ensure_dir(path):
+    """os.path.makedirs without EEXIST."""
+    try:
+        os.makedirs(path)
+    except OSError as e:
+        if e.errno != errno.EEXIST:
+            raise
+
+
+def get_prog():
+    try:
+        if os.path.basename(sys.argv[0]) in ('__main__.py', '-c'):
+            return "%s -m pip" % sys.executable
+    except (AttributeError, TypeError, IndexError):
+        pass
+    return 'pip'
+
+
+# Retry every half second for up to 3 seconds
+@retry(stop_max_delay=3000, wait_fixed=500)
+def rmtree(dir, ignore_errors=False):
+    shutil.rmtree(dir, ignore_errors=ignore_errors,
+                  onerror=rmtree_errorhandler)
+
+
+def rmtree_errorhandler(func, path, exc_info):
+    """On Windows, the files in .svn are read-only, so when rmtree() tries to
+    remove them, an exception is thrown. We catch that here, remove the
+    read-only attribute, and hopefully continue without problems."""
+    # if file type currently read only
+    if os.stat(path).st_mode & stat.S_IREAD:
+        # convert to read/write
+        os.chmod(path, stat.S_IWRITE)
+        # use the original function to repeat the operation
+        func(path)
+        return
+    else:
+        raise
+
+
+def display_path(path):
+    """Gives the display value for a given path, making it relative to cwd
+    if possible."""
+    path = os.path.normcase(os.path.abspath(path))
+    if sys.version_info[0] == 2:
+        path = path.decode(sys.getfilesystemencoding(), 'replace')
+        path = path.encode(sys.getdefaultencoding(), 'replace')
+    if path.startswith(os.getcwd() + os.path.sep):
+        path = '.' + path[len(os.getcwd()):]
+    return path
+
+
+def backup_dir(dir, ext='.bak'):
+    """Figure out the name of a directory to back up the given dir to
+    (adding .bak, .bak2, etc)"""
+    n = 1
+    extension = ext
+    while os.path.exists(dir + extension):
+        n += 1
+        extension = ext + str(n)
+    return dir + extension
+
+
+def ask_path_exists(message, options):
+    for action in os.environ.get('PIP_EXISTS_ACTION', '').split():
+        if action in options:
+            return action
+    return ask(message, options)
+
+
+def ask(message, options):
+    """Ask the message interactively, with the given possible responses"""
+    while 1:
+        if os.environ.get('PIP_NO_INPUT'):
+            raise Exception(
+                'No input was expected ($PIP_NO_INPUT set); question: %s' %
+                message
+            )
+        response = input(message)
+        response = response.strip().lower()
+        if response not in options:
+            print(
+                'Your response (%r) was not one of the expected responses: '
+                '%s' % (response, ', '.join(options))
+            )
+        else:
+            return response
+
+
+def format_size(bytes):
+    if bytes > 1000 * 1000:
+        return '%.1fMB' % (bytes / 1000.0 / 1000)
+    elif bytes > 10 * 1000:
+        return '%ikB' % (bytes / 1000)
+    elif bytes > 1000:
+        return '%.1fkB' % (bytes / 1000.0)
+    else:
+        return '%ibytes' % bytes
+
+
+def is_installable_dir(path):
+    """Return True if `path` is a directory containing a setup.py file."""
+    if not os.path.isdir(path):
+        return False
+    setup_py = os.path.join(path, 'setup.py')
+    if os.path.isfile(setup_py):
+        return True
+    return False
+
+
+def is_svn_page(html):
+    """
+    Returns true if the page appears to be the index page of an svn repository
+    """
+    return (re.search(r'<title>[^<]*Revision \d+:', html) and
+            re.search(r'Powered by (?:<a[^>]*?>)?Subversion', html, re.I))
+
+
+def file_contents(filename):
+    with open(filename, 'rb') as fp:
+        return fp.read().decode('utf-8')
+
+
+def read_chunks(file, size=4096):
+    """Yield pieces of data from a file-like object until EOF."""
+    while True:
+        chunk = file.read(size)
+        if not chunk:
+            break
+        yield chunk
+
+
+def split_leading_dir(path):
+    path = path.lstrip('/').lstrip('\\')
+    if '/' in path and (('\\' in path and path.find('/') < path.find('\\')) or
+                        '\\' not in path):
+        return path.split('/', 1)
+    elif '\\' in path:
+        return path.split('\\', 1)
+    else:
+        return path, ''
+
+
+def has_leading_dir(paths):
+    """Returns true if all the paths have the same leading path name
+    (i.e., everything is in one subdirectory in an archive)"""
+    common_prefix = None
+    for path in paths:
+        prefix, rest = split_leading_dir(path)
+        if not prefix:
+            return False
+        elif common_prefix is None:
+            common_prefix = prefix
+        elif prefix != common_prefix:
+            return False
+    return True
+
+
+def normalize_path(path, resolve_symlinks=True):
+    """
+    Convert a path to its canonical, case-normalized, absolute version.
+
+    """
+    path = expanduser(path)
+    if resolve_symlinks:
+        path = os.path.realpath(path)
+    else:
+        path = os.path.abspath(path)
+    return os.path.normcase(path)
+
+
+def splitext(path):
+    """Like os.path.splitext, but take off .tar too"""
+    base, ext = posixpath.splitext(path)
+    if base.lower().endswith('.tar'):
+        ext = base[-4:] + ext
+        base = base[:-4]
+    return base, ext
+
+
+def renames(old, new):
+    """Like os.renames(), but handles renaming across devices."""
+    # Implementation borrowed from os.renames().
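# --- editor's illustrative sketch (not part of the pip diff) ---
# Why the body below uses shutil.move rather than os.rename: os.rename
# raises EXDEV when source and destination sit on different
# filesystems, while shutil.move falls back to copy-and-delete.
import errno
import os
import shutil
def move_across_devices(src, dst):
    try:
        os.rename(src, dst)
    except OSError as exc:
        if exc.errno != errno.EXDEV:
            raise
        shutil.move(src, dst)
# --- end sketch ---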
+ head, tail = os.path.split(new) + if head and tail and not os.path.exists(head): + os.makedirs(head) + + shutil.move(old, new) + + head, tail = os.path.split(old) + if head and tail: + try: + os.removedirs(head) + except OSError: + pass + + +def is_local(path): + """ + Return True if this is a path pip is allowed to modify. + + If we're in a virtualenv, sys.prefix points to the virtualenv's + prefix; only sys.prefix is considered local. + + If we're not in a virtualenv, in general we can modify anything. + However, if the OS vendor has configured distutils to install + somewhere other than sys.prefix (which could be a subdirectory of + sys.prefix, e.g. /usr/local), we consider sys.prefix itself nonlocal + and the domain of the OS vendor. (In other words, everything _other + than_ sys.prefix is considered local.) + + """ + + path = normalize_path(path) + prefix = normalize_path(sys.prefix) + + if running_under_virtualenv(): + return path.startswith(normalize_path(sys.prefix)) + else: + from pip.locations import distutils_scheme + if path.startswith(prefix): + for local_path in distutils_scheme("").values(): + if path.startswith(normalize_path(local_path)): + return True + return False + else: + return True + + +def dist_is_local(dist): + """ + Return True if given Distribution object is installed somewhere pip + is allowed to modify. + + """ + return is_local(dist_location(dist)) + + +def dist_in_usersite(dist): + """ + Return True if given Distribution is installed in user site. + """ + norm_path = normalize_path(dist_location(dist)) + return norm_path.startswith(normalize_path(user_site)) + + +def dist_in_site_packages(dist): + """ + Return True if given Distribution is installed in + distutils.sysconfig.get_python_lib(). + """ + return normalize_path( + dist_location(dist) + ).startswith(normalize_path(site_packages)) + + +def dist_is_editable(dist): + """Is distribution an editable install?""" + for path_item in sys.path: + egg_link = os.path.join(path_item, dist.project_name + '.egg-link') + if os.path.isfile(egg_link): + return True + return False + + +def get_installed_distributions(local_only=True, + skip=stdlib_pkgs, + include_editables=True, + editables_only=False, + user_only=False): + """ + Return a list of installed Distribution objects. + + If ``local_only`` is True (default), only return installations + local to the current virtualenv, if in a virtualenv. + + ``skip`` argument is an iterable of lower-case project names to + ignore; defaults to stdlib_pkgs + + If ``editables`` is False, don't report editables. + + If ``editables_only`` is True , only report editables. + + If ``user_only`` is True , only report installations in the user + site directory. + + """ + if local_only: + local_test = dist_is_local + else: + def local_test(d): + return True + + if include_editables: + def editable_test(d): + return True + else: + def editable_test(d): + return not dist_is_editable(d) + + if editables_only: + def editables_only_test(d): + return dist_is_editable(d) + else: + def editables_only_test(d): + return True + + if user_only: + user_test = dist_in_usersite + else: + def user_test(d): + return True + + return [d for d in pkg_resources.working_set + if local_test(d) and + d.key not in skip and + editable_test(d) and + editables_only_test(d) and + user_test(d) + ] + + +def egg_link_path(dist): + """ + Return the path for the .egg-link file if it exists, otherwise, None. 
+
+    There's 3 scenarios:
+    1) not in a virtualenv
+       try to find in site.USER_SITE, then site_packages
+    2) in a no-global virtualenv
+       try to find in site_packages
+    3) in a yes-global virtualenv
+       try to find in site_packages, then site.USER_SITE
+       (don't look in global location)
+
+    For #1 and #3, there could be odd cases, where there's an egg-link in 2
+    locations.
+
+    This method will just return the first one found.
+    """
+    sites = []
+    if running_under_virtualenv():
+        if virtualenv_no_global():
+            sites.append(site_packages)
+        else:
+            sites.append(site_packages)
+            if user_site:
+                sites.append(user_site)
+    else:
+        if user_site:
+            sites.append(user_site)
+        sites.append(site_packages)
+
+    for site in sites:
+        egglink = os.path.join(site, dist.project_name) + '.egg-link'
+        if os.path.isfile(egglink):
+            return egglink
+
+
+def dist_location(dist):
+    """
+    Get the site-packages location of this distribution. Generally
+    this is dist.location, except in the case of develop-installed
+    packages, where dist.location is the source code location, and we
+    want to know where the egg-link file is.
+
+    """
+    egg_link = egg_link_path(dist)
+    if egg_link:
+        return egg_link
+    return dist.location
+
+
+def get_terminal_size():
+    """Returns a tuple (x, y) representing the width(x) and the height(x)
+    in characters of the terminal window."""
+    def ioctl_GWINSZ(fd):
+        try:
+            import fcntl
+            import termios
+            import struct
+            cr = struct.unpack(
+                'hh',
+                fcntl.ioctl(fd, termios.TIOCGWINSZ, '1234')
+            )
+        except:
+            return None
+        if cr == (0, 0):
+            return None
+        return cr
+    cr = ioctl_GWINSZ(0) or ioctl_GWINSZ(1) or ioctl_GWINSZ(2)
+    if not cr:
+        try:
+            fd = os.open(os.ctermid(), os.O_RDONLY)
+            cr = ioctl_GWINSZ(fd)
+            os.close(fd)
+        except:
+            pass
+    if not cr:
+        cr = (os.environ.get('LINES', 25), os.environ.get('COLUMNS', 80))
+    return int(cr[1]), int(cr[0])
+
+
+def current_umask():
+    """Get the current umask which involves having to set it temporarily."""
+    mask = os.umask(0)
+    os.umask(mask)
+    return mask
+
+
+def unzip_file(filename, location, flatten=True):
+    """
+    Unzip the file (with path `filename`) to the destination `location`. All
+    files are written based on system defaults and umask (i.e. permissions are
+    not preserved), except that regular file members with any execute
+    permissions (user, group, or world) have "chmod +x" applied after being
+    written. Note that for windows, any execute changes using os.chmod are
+    no-ops per the python docs.
+    """
+    ensure_dir(location)
+    zipfp = open(filename, 'rb')
+    try:
+        zip = zipfile.ZipFile(zipfp, allowZip64=True)
+        leading = has_leading_dir(zip.namelist()) and flatten
+        for info in zip.infolist():
+            name = info.filename
+            data = zip.read(name)
+            fn = name
+            if leading:
+                fn = split_leading_dir(name)[1]
+            fn = os.path.join(location, fn)
+            dir = os.path.dirname(fn)
+            if fn.endswith('/') or fn.endswith('\\'):
+                # A directory
+                ensure_dir(fn)
+            else:
+                ensure_dir(dir)
+                fp = open(fn, 'wb')
+                try:
+                    fp.write(data)
+                finally:
+                    fp.close()
+                mode = info.external_attr >> 16
+                # if mode and regular file and any execute permissions for
+                # user/group/world?
+                if mode and stat.S_ISREG(mode) and mode & 0o111:
+                    # make dest file have execute for user/group/world
+                    # (chmod +x) no-op on windows per python docs
+                    os.chmod(fn, (0o777 - current_umask() | 0o111))
+    finally:
+        zipfp.close()
+
+
+def untar_file(filename, location):
+    """
+    Untar the file (with path `filename`) to the destination `location`.
+    All files are written based on system defaults and umask (i.e. permissions
+    are not preserved), except that regular file members with any execute
+    permissions (user, group, or world) have "chmod +x" applied after being
+    written. Note that for windows, any execute changes using os.chmod are
+    no-ops per the python docs.
+    """
+    ensure_dir(location)
+    if filename.lower().endswith('.gz') or filename.lower().endswith('.tgz'):
+        mode = 'r:gz'
+    elif filename.lower().endswith(BZ2_EXTENSIONS):
+        mode = 'r:bz2'
+    elif filename.lower().endswith(XZ_EXTENSIONS):
+        mode = 'r:xz'
+    elif filename.lower().endswith('.tar'):
+        mode = 'r'
+    else:
+        logger.warning(
+            'Cannot determine compression type for file %s', filename,
+        )
+        mode = 'r:*'
+    tar = tarfile.open(filename, mode)
+    try:
+        # note: python<=2.5 doesn't seem to know about pax headers, filter them
+        leading = has_leading_dir([
+            member.name for member in tar.getmembers()
+            if member.name != 'pax_global_header'
+        ])
+        for member in tar.getmembers():
+            fn = member.name
+            if fn == 'pax_global_header':
+                continue
+            if leading:
+                fn = split_leading_dir(fn)[1]
+            path = os.path.join(location, fn)
+            if member.isdir():
+                ensure_dir(path)
+            elif member.issym():
+                try:
+                    tar._extract_member(member, path)
+                except Exception as exc:
+                    # Some corrupt tar files seem to produce this
+                    # (specifically bad symlinks)
+                    logger.warning(
+                        'In the tar file %s the member %s is invalid: %s',
+                        filename, member.name, exc,
+                    )
+                    continue
+            else:
+                try:
+                    fp = tar.extractfile(member)
+                except (KeyError, AttributeError) as exc:
+                    # Some corrupt tar files seem to produce this
+                    # (specifically bad symlinks)
+                    logger.warning(
+                        'In the tar file %s the member %s is invalid: %s',
+                        filename, member.name, exc,
+                    )
+                    continue
+                ensure_dir(os.path.dirname(path))
+                with open(path, 'wb') as destfp:
+                    shutil.copyfileobj(fp, destfp)
+                fp.close()
+                # Update the timestamp (useful for cython compiled files)
+                tar.utime(member, path)
+                # member have any execute permissions for user/group/world?
+                if member.mode & 0o111:
+                    # make dest file have execute for user/group/world
+                    # no-op on windows per python docs
+                    os.chmod(path, (0o777 - current_umask() | 0o111))
+    finally:
+        tar.close()
+
+
+def unpack_file(filename, location, content_type, link):
+    filename = os.path.realpath(filename)
+    if (content_type == 'application/zip' or
+            filename.lower().endswith(ZIP_EXTENSIONS) or
+            zipfile.is_zipfile(filename)):
+        unzip_file(
+            filename,
+            location,
+            flatten=not filename.endswith('.whl')
+        )
+    elif (content_type == 'application/x-gzip' or
+            tarfile.is_tarfile(filename) or
+            filename.lower().endswith(
+                TAR_EXTENSIONS + BZ2_EXTENSIONS + XZ_EXTENSIONS)):
+        untar_file(filename, location)
+    elif (content_type and content_type.startswith('text/html') and
+            is_svn_page(file_contents(filename))):
+        # We don't really care about this
+        from pip.vcs.subversion import Subversion
+        Subversion('svn+' + link.url).unpack(location)
+    else:
+        # FIXME: handle?
+        # FIXME: magic signatures?
+        logger.critical(
+            'Cannot unpack file %s (downloaded from %s, content-type: %s); '
+            'cannot detect archive format',
+            filename, location, content_type,
+        )
+        raise InstallationError(
+            'Cannot determine archive format of %s' % location
+        )
+
+
+def remove_tracebacks(output):
+    pattern = (r'(?:\W+File "(?:.*)", line (?:.*)\W+(?:.*)\W+\^\W+)?'
+ r'Syntax(?:Error|Warning): (?:.*)') + output = re.sub(pattern, '', output) + if PY2: + return output + # compileall.compile_dir() prints different messages to stdout + # in Python 3 + return re.sub(r"\*\*\* Error compiling (?:.*)", '', output) + + +def call_subprocess(cmd, show_stdout=True, cwd=None, + on_returncode='raise', + command_level=std_logging.DEBUG, command_desc=None, + extra_environ=None, spinner=None): + if command_desc is None: + cmd_parts = [] + for part in cmd: + if ' ' in part or '\n' in part or '"' in part or "'" in part: + part = '"%s"' % part.replace('"', '\\"') + cmd_parts.append(part) + command_desc = ' '.join(cmd_parts) + logger.log(command_level, "Running command %s", command_desc) + env = os.environ.copy() + if extra_environ: + env.update(extra_environ) + try: + proc = subprocess.Popen( + cmd, stderr=subprocess.STDOUT, stdin=None, stdout=subprocess.PIPE, + cwd=cwd, env=env) + except Exception as exc: + logger.critical( + "Error %s while executing command %s", exc, command_desc, + ) + raise + all_output = [] + while True: + line = console_to_str(proc.stdout.readline()) + if not line: + break + line = line.rstrip() + all_output.append(line + '\n') + if show_stdout: + logger.debug(line) + if spinner is not None: + spinner.spin() + proc.wait() + if spinner is not None: + if proc.returncode: + spinner.finish("error") + else: + spinner.finish("done") + if proc.returncode: + if on_returncode == 'raise': + if all_output: + logger.info( + 'Complete output from command %s:', command_desc, + ) + logger.info( + ''.join(all_output) + + '\n----------------------------------------' + ) + raise InstallationError( + 'Command "%s" failed with error code %s in %s' + % (command_desc, proc.returncode, cwd)) + elif on_returncode == 'warn': + logger.warning( + 'Command "%s" had error code %s in %s', + command_desc, proc.returncode, cwd, + ) + elif on_returncode == 'ignore': + pass + else: + raise ValueError('Invalid value: on_returncode=%s' % + repr(on_returncode)) + if not show_stdout: + return remove_tracebacks(''.join(all_output)) + + +def read_text_file(filename): + """Return the contents of *filename*. + + Try to decode the file contents with utf-8, the preferred system encoding + (e.g., cp1252 on some Windows machines), and latin1, in that order. + Decoding a byte string with latin1 will never raise an error. In the worst + case, the returned string will contain some garbage characters. + + """ + with open(filename, 'rb') as fp: + data = fp.read() + + encodings = ['utf-8', locale.getpreferredencoding(False), 'latin1'] + for enc in encodings: + try: + data = data.decode(enc) + except UnicodeDecodeError: + continue + break + + assert type(data) != bytes # Latin1 should have worked. 
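+    # For example (illustrative): a stray 0xe9 byte fails utf-8 decoding
+    # but always decodes under latin1 (as 'é'), so the loop above is
+    # guaranteed to produce str and the assert above cannot fire.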
+ return data + + +def _make_build_dir(build_dir): + os.makedirs(build_dir) + write_delete_marker_file(build_dir) + + +class FakeFile(object): + """Wrap a list of lines in an object with readline() to make + ConfigParser happy.""" + def __init__(self, lines): + self._gen = (l for l in lines) + + def readline(self): + try: + try: + return next(self._gen) + except NameError: + return self._gen.next() + except StopIteration: + return '' + + def __iter__(self): + return self._gen + + +class StreamWrapper(StringIO): + + @classmethod + def from_stream(cls, orig_stream): + cls.orig_stream = orig_stream + return cls() + + # compileall.compile_dir() needs stdout.encoding to print to stdout + @property + def encoding(self): + return self.orig_stream.encoding + + +@contextlib.contextmanager +def captured_output(stream_name): + """Return a context manager used by captured_stdout/stdin/stderr + that temporarily replaces the sys stream *stream_name* with a StringIO. + + Taken from Lib/support/__init__.py in the CPython repo. + """ + orig_stdout = getattr(sys, stream_name) + setattr(sys, stream_name, StreamWrapper.from_stream(orig_stdout)) + try: + yield getattr(sys, stream_name) + finally: + setattr(sys, stream_name, orig_stdout) + + +def captured_stdout(): + """Capture the output of sys.stdout: + + with captured_stdout() as stdout: + print('hello') + self.assertEqual(stdout.getvalue(), 'hello\n') + + Taken from Lib/support/__init__.py in the CPython repo. + """ + return captured_output('stdout') + + +class cached_property(object): + """A property that is only computed once per instance and then replaces + itself with an ordinary attribute. Deleting the attribute resets the + property. + + Source: https://github.com/bottlepy/bottle/blob/0.11.5/bottle.py#L175 + """ + + def __init__(self, func): + self.__doc__ = getattr(func, '__doc__') + self.func = func + + def __get__(self, obj, cls): + if obj is None: + # We're being accessed from the class itself, not from an object + return self + value = obj.__dict__[self.func.__name__] = self.func(obj) + return value + + +def get_installed_version(dist_name): + """Get the installed version of dist_name avoiding pkg_resources cache""" + # Create a requirement that we'll look for inside of setuptools. + req = pkg_resources.Requirement.parse(dist_name) + + # We want to avoid having this cached, so we need to construct a new + # working set each time. + working_set = pkg_resources.WorkingSet() + + # Get the installed distribution from our working set + dist = working_set.find(req) + + # Check to see if we got an installed distribution or not, if we did + # we want to return it's version. + return dist.version if dist else None + + +def canonicalize_name(name): + """Convert an arbitrary string to a canonical name used for comparison""" + return pkg_resources.safe_name(name).lower() + + +def consume(iterator): + """Consume an iterable at C speed.""" + deque(iterator, maxlen=0) diff --git a/lib/python3.4/site-packages/pip/utils/appdirs.py b/lib/python3.4/site-packages/pip/utils/appdirs.py new file mode 100644 index 0000000..60ae76e --- /dev/null +++ b/lib/python3.4/site-packages/pip/utils/appdirs.py @@ -0,0 +1,224 @@ +""" +This code was taken from https://github.com/ActiveState/appdirs and modified +to suit our purposes. +""" +from __future__ import absolute_import + +import os +import sys + +from pip.compat import WINDOWS, expanduser + + +def user_cache_dir(appname): + r""" + Return full path to the user-specific cache dir for this application. 
+ + "appname" is the name of application. + + Typical user cache directories are: + Mac OS X: ~/Library/Caches/<AppName> + Unix: ~/.cache/<AppName> (XDG default) + Windows: C:\Users\<username>\AppData\Local\<AppName>\Cache + + On Windows the only suggestion in the MSDN docs is that local settings go + in the `CSIDL_LOCAL_APPDATA` directory. This is identical to the + non-roaming app data dir (the default returned by `user_data_dir`). Apps + typically put cache data somewhere *under* the given dir here. Some + examples: + ...\Mozilla\Firefox\Profiles\<ProfileName>\Cache + ...\Acme\SuperApp\Cache\1.0 + + OPINION: This function appends "Cache" to the `CSIDL_LOCAL_APPDATA` value. + """ + if WINDOWS: + # Get the base path + path = os.path.normpath(_get_win_folder("CSIDL_LOCAL_APPDATA")) + + # Add our app name and Cache directory to it + path = os.path.join(path, appname, "Cache") + elif sys.platform == "darwin": + # Get the base path + path = expanduser("~/Library/Caches") + + # Add our app name to it + path = os.path.join(path, appname) + else: + # Get the base path + path = os.getenv("XDG_CACHE_HOME", expanduser("~/.cache")) + + # Add our app name to it + path = os.path.join(path, appname) + + return path + + +def user_data_dir(appname, roaming=False): + """ + Return full path to the user-specific data dir for this application. + + "appname" is the name of application. + If None, just the system directory is returned. + "roaming" (boolean, default False) can be set True to use the Windows + roaming appdata directory. That means that for users on a Windows + network setup for roaming profiles, this user data will be + sync'd on login. See + <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx> + for a discussion of issues. + + Typical user data directories are: + Mac OS X: ~/Library/Application Support/<AppName> + Unix: ~/.local/share/<AppName> # or in + $XDG_DATA_HOME, if defined + Win XP (not roaming): C:\Documents and Settings\<username>\ ... + ...Application Data\<AppName> + Win XP (roaming): C:\Documents and Settings\<username>\Local ... + ...Settings\Application Data\<AppName> + Win 7 (not roaming): C:\\Users\<username>\AppData\Local\<AppName> + Win 7 (roaming): C:\\Users\<username>\AppData\Roaming\<AppName> + + For Unix, we follow the XDG spec and support $XDG_DATA_HOME. + That means, by default "~/.local/share/<AppName>". + """ + if WINDOWS: + const = roaming and "CSIDL_APPDATA" or "CSIDL_LOCAL_APPDATA" + path = os.path.join(os.path.normpath(_get_win_folder(const)), appname) + elif sys.platform == "darwin": + path = os.path.join( + expanduser('~/Library/Application Support/'), + appname, + ) + else: + path = os.path.join( + os.getenv('XDG_DATA_HOME', expanduser("~/.local/share")), + appname, + ) + + return path + + +def user_config_dir(appname, roaming=True): + """Return full path to the user-specific config dir for this application. + + "appname" is the name of application. + If None, just the system directory is returned. + "roaming" (boolean, default True) can be set False to not use the + Windows roaming appdata directory. That means that for users on a + Windows network setup for roaming profiles, this user data will be + sync'd on login. See + <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx> + for a discussion of issues. + + Typical user data directories are: + Mac OS X: same as user_data_dir + Unix: ~/.config/<AppName> + Win *: same as user_data_dir + + For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME. 
That means, by default "~/.config/<AppName>".
+    """
+    if WINDOWS:
+        path = user_data_dir(appname, roaming=roaming)
+    elif sys.platform == "darwin":
+        path = user_data_dir(appname)
+    else:
+        path = os.getenv('XDG_CONFIG_HOME', expanduser("~/.config"))
+        path = os.path.join(path, appname)
+
+    return path
+
+
+# for the discussion regarding site_config_dirs locations
+# see <https://github.com/pypa/pip/issues/1733>
+def site_config_dirs(appname):
+    """Return a list of potential user-shared config dirs for this application.
+
+    "appname" is the name of application.
+
+    Typical user config directories are:
+        Mac OS X:   /Library/Application Support/<AppName>/
+        Unix:       /etc or $XDG_CONFIG_DIRS[i]/<AppName>/ for each value in
+                    $XDG_CONFIG_DIRS
+        Win XP:     C:\Documents and Settings\All Users\Application ...
+                    ...Data\<AppName>\
+        Vista:      (Fail! "C:\ProgramData" is a hidden *system* directory
+                    on Vista.)
+        Win 7:      Hidden, but writeable on Win 7:
+                    C:\ProgramData\<AppName>\
+    """
+    if WINDOWS:
+        path = os.path.normpath(_get_win_folder("CSIDL_COMMON_APPDATA"))
+        pathlist = [os.path.join(path, appname)]
+    elif sys.platform == 'darwin':
+        pathlist = [os.path.join('/Library/Application Support', appname)]
+    else:
+        # try looking in $XDG_CONFIG_DIRS
+        xdg_config_dirs = os.getenv('XDG_CONFIG_DIRS', '/etc/xdg')
+        if xdg_config_dirs:
+            pathlist = [
+                os.path.join(expanduser(x), appname)
+                for x in xdg_config_dirs.split(os.pathsep)
+            ]
+        else:
+            pathlist = []
+
+        # always look in /etc directly as well
+        pathlist.append('/etc')
+
+    return pathlist
+
+
+# -- Windows support functions --
+
+def _get_win_folder_from_registry(csidl_name):
+    """
+    This is a fallback technique at best. I'm not sure if using the
+    registry for this guarantees us the correct answer for all CSIDL_*
+    names.
+    """
+    import _winreg
+
+    shell_folder_name = {
+        "CSIDL_APPDATA": "AppData",
+        "CSIDL_COMMON_APPDATA": "Common AppData",
+        "CSIDL_LOCAL_APPDATA": "Local AppData",
+    }[csidl_name]
+
+    key = _winreg.OpenKey(
+        _winreg.HKEY_CURRENT_USER,
+        r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders"
+    )
+    directory, _type = _winreg.QueryValueEx(key, shell_folder_name)
+    return directory
+
+
+def _get_win_folder_with_ctypes(csidl_name):
+    csidl_const = {
+        "CSIDL_APPDATA": 26,
+        "CSIDL_COMMON_APPDATA": 35,
+        "CSIDL_LOCAL_APPDATA": 28,
+    }[csidl_name]
+
+    buf = ctypes.create_unicode_buffer(1024)
+    ctypes.windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf)
+
+    # Downgrade to the short path name if it has high-bit chars. See
+    # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
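+    # ("High-bit" means ord(c) > 255 here, e.g. a profile path containing
+    # CJK characters; the 8.3 short path is plain ASCII.)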
has_high_char = False
+    for c in buf:
+        if ord(c) > 255:
+            has_high_char = True
+            break
+    if has_high_char:
+        buf2 = ctypes.create_unicode_buffer(1024)
+        if ctypes.windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024):
+            buf = buf2
+
+    return buf.value
+
+if WINDOWS:
+    try:
+        import ctypes
+        _get_win_folder = _get_win_folder_with_ctypes
+    except ImportError:
+        _get_win_folder = _get_win_folder_from_registry
diff --git a/lib/python3.4/site-packages/pip/utils/build.py b/lib/python3.4/site-packages/pip/utils/build.py
new file mode 100644
index 0000000..fc65cfa
--- /dev/null
+++ b/lib/python3.4/site-packages/pip/utils/build.py
@@ -0,0 +1,42 @@
+from __future__ import absolute_import
+
+import os.path
+import tempfile
+
+from pip.utils import rmtree
+
+
+class BuildDirectory(object):
+
+    def __init__(self, name=None, delete=None):
+        # If we were not given an explicit directory, and we were not given an
+        # explicit delete option, then we'll default to deleting.
+        if name is None and delete is None:
+            delete = True
+
+        if name is None:
+            # We realpath here because some systems have their default tmpdir
+            # symlinked to another directory.  This tends to confuse build
+            # scripts, so we canonicalize the path by traversing potential
+            # symlinks here.
+            name = os.path.realpath(tempfile.mkdtemp(prefix="pip-build-"))
+            # If we were not given an explicit directory, and we were not given
+            # an explicit delete option, then we'll default to deleting.
+            if delete is None:
+                delete = True
+
+        self.name = name
+        self.delete = delete
+
+    def __repr__(self):
+        return "<{} {!r}>".format(self.__class__.__name__, self.name)
+
+    def __enter__(self):
+        return self.name
+
+    def __exit__(self, exc, value, tb):
+        self.cleanup()
+
+    def cleanup(self):
+        if self.delete:
+            rmtree(self.name)
diff --git a/lib/python3.4/site-packages/pip/utils/deprecation.py b/lib/python3.4/site-packages/pip/utils/deprecation.py
new file mode 100644
index 0000000..d336cee
--- /dev/null
+++ b/lib/python3.4/site-packages/pip/utils/deprecation.py
@@ -0,0 +1,76 @@
+"""
+A module that implements tooling to enable easy warnings about deprecations.
+"""
+from __future__ import absolute_import
+
+import logging
+import warnings
+
+
+class PipDeprecationWarning(Warning):
+    pass
+
+
+class RemovedInPip9Warning(PipDeprecationWarning, DeprecationWarning):
+    pass
+
+
+class RemovedInPip10Warning(PipDeprecationWarning, PendingDeprecationWarning):
+    pass
+
+
+class Python26DeprecationWarning(
+    PipDeprecationWarning, PendingDeprecationWarning
+):
+    pass
+
+
+DEPRECATIONS = [
+    RemovedInPip9Warning, RemovedInPip10Warning, Python26DeprecationWarning
+]
+
+
+# Warnings <-> Logging Integration
+
+
+_warnings_showwarning = None
+
+
+def _showwarning(message, category, filename, lineno, file=None, line=None):
+    if file is not None:
+        if _warnings_showwarning is not None:
+            _warnings_showwarning(
+                message, category, filename, lineno, file, line,
+            )
+    else:
+        if issubclass(category, PipDeprecationWarning):
+            # We use a specially named logger which will handle all of the
+            # deprecation messages for pip.
+            logger = logging.getLogger("pip.deprecations")
+
+            # This is purposely using the % formatter here instead of letting
+            # the logging module handle the interpolation. This is because we
+            # want it to appear as if someone typed this entire message out.
+            log_message = "DEPRECATION: %s" % message
+
+            # Things that are DeprecationWarnings will be removed in the very
+            # next version of pip. We want these to be more obvious, so we
+            # use the ERROR logging level, while the PendingDeprecationWarnings
+            # still have at least 2 versions to go until they are removed and
+            # so can just be warnings.
+            if issubclass(category, DeprecationWarning):
+                logger.error(log_message)
+            else:
+                logger.warning(log_message)
+        else:
+            _warnings_showwarning(
+                message, category, filename, lineno, file, line,
+            )
+
+
+def install_warning_logger():
+    global _warnings_showwarning
+
+    if _warnings_showwarning is None:
+        _warnings_showwarning = warnings.showwarning
+        warnings.showwarning = _showwarning
diff --git a/lib/python3.4/site-packages/pip/utils/filesystem.py b/lib/python3.4/site-packages/pip/utils/filesystem.py
new file mode 100644
index 0000000..25ad516
--- /dev/null
+++ b/lib/python3.4/site-packages/pip/utils/filesystem.py
@@ -0,0 +1,28 @@
+import os
+import os.path
+
+from pip.compat import get_path_uid
+
+
+def check_path_owner(path):
+    # If we don't have a way to check the effective uid of this process, then
+    # we'll just assume that we own the directory.
+    if not hasattr(os, "geteuid"):
+        return True
+
+    previous = None
+    while path != previous:
+        if os.path.lexists(path):
+            # Check if path is writable by current user.
+            if os.geteuid() == 0:
+                # Special handling for root user in order to handle properly
+                # cases where users use sudo without -H flag.
+                try:
+                    path_uid = get_path_uid(path)
+                except OSError:
+                    return False
+                return path_uid == 0
+            else:
+                return os.access(path, os.W_OK)
+        else:
+            previous, path = path, os.path.dirname(path)
diff --git a/lib/python3.4/site-packages/pip/utils/hashes.py b/lib/python3.4/site-packages/pip/utils/hashes.py
new file mode 100644
index 0000000..9602970
--- /dev/null
+++ b/lib/python3.4/site-packages/pip/utils/hashes.py
@@ -0,0 +1,92 @@
+from __future__ import absolute_import
+
+import hashlib
+
+from pip.exceptions import HashMismatch, HashMissing, InstallationError
+from pip.utils import read_chunks
+from pip._vendor.six import iteritems, iterkeys, itervalues
+
+
+# The recommended hash algo of the moment. Change this whenever the state of
+# the art changes; it won't hurt backward compatibility.
+FAVORITE_HASH = 'sha256'
+
+
+# Names of hashlib algorithms allowed by the --hash option and ``pip hash``.
+# Currently, those are the ones at least as collision-resistant as sha256.
+STRONG_HASHES = ['sha256', 'sha384', 'sha512']
+
+
+class Hashes(object):
+    """A wrapper that builds multiple hashes at once and checks them against
+    known-good values
+
+    """
+    def __init__(self, hashes=None):
+        """
+        :param hashes: A dict of algorithm names pointing to lists of allowed
+            hex digests
+        """
+        self._allowed = {} if hashes is None else hashes
+
+    def check_against_chunks(self, chunks):
+        """Check good hashes against ones built from iterable of chunks of
+        data.
+
+        Raise HashMismatch if none match.
+
+        """
+        gots = {}
+        for hash_name in iterkeys(self._allowed):
+            try:
+                gots[hash_name] = hashlib.new(hash_name)
+            except (ValueError, TypeError):
+                raise InstallationError('Unknown hash name: %s' % hash_name)
+
+        for chunk in chunks:
+            for hash in itervalues(gots):
+                hash.update(chunk)
+
+        for hash_name, got in iteritems(gots):
+            if got.hexdigest() in self._allowed[hash_name]:
+                return
+        self._raise(gots)
+
+    def _raise(self, gots):
+        raise HashMismatch(self._allowed, gots)
+
+    def check_against_file(self, file):
+        """Check good hashes against a file-like object
+
+        Raise HashMismatch if none match.
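+
+        (Illustrative: ``Hashes({'sha256': [digest]}).check_against_file(fp)``
+        returns quietly on a match and raises HashMismatch otherwise.)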
+ + """ + return self.check_against_chunks(read_chunks(file)) + + def check_against_path(self, path): + with open(path, 'rb') as file: + return self.check_against_file(file) + + def __nonzero__(self): + """Return whether I know any known-good hashes.""" + return bool(self._allowed) + + def __bool__(self): + return self.__nonzero__() + + +class MissingHashes(Hashes): + """A workalike for Hashes used when we're missing a hash for a requirement + + It computes the actual hash of the requirement and raises a HashMissing + exception showing it to the user. + + """ + def __init__(self): + """Don't offer the ``hashes`` kwarg.""" + # Pass our favorite hash in to generate a "gotten hash". With the + # empty list, it will never match, so an error will always raise. + super(MissingHashes, self).__init__(hashes={FAVORITE_HASH: []}) + + def _raise(self, gots): + raise HashMissing(gots[FAVORITE_HASH].hexdigest()) diff --git a/lib/python3.4/site-packages/pip/utils/logging.py b/lib/python3.4/site-packages/pip/utils/logging.py new file mode 100644 index 0000000..1c1053a --- /dev/null +++ b/lib/python3.4/site-packages/pip/utils/logging.py @@ -0,0 +1,130 @@ +from __future__ import absolute_import + +import contextlib +import logging +import logging.handlers +import os + +try: + import threading +except ImportError: + import dummy_threading as threading + +from pip.compat import WINDOWS +from pip.utils import ensure_dir + +try: + from pip._vendor import colorama +# Lots of different errors can come from this, including SystemError and +# ImportError. +except Exception: + colorama = None + + +_log_state = threading.local() +_log_state.indentation = 0 + + +@contextlib.contextmanager +def indent_log(num=2): + """ + A context manager which will cause the log output to be indented for any + log messages emitted inside it. + """ + _log_state.indentation += num + try: + yield + finally: + _log_state.indentation -= num + + +def get_indentation(): + return getattr(_log_state, 'indentation', 0) + + +class IndentingFormatter(logging.Formatter): + + def format(self, record): + """ + Calls the standard formatter, but will indent all of the log messages + by our current indentation level. + """ + formatted = logging.Formatter.format(self, record) + formatted = "".join([ + (" " * get_indentation()) + line + for line in formatted.splitlines(True) + ]) + return formatted + + +def _color_wrap(*colors): + def wrapped(inp): + return "".join(list(colors) + [inp, colorama.Style.RESET_ALL]) + return wrapped + + +class ColorizedStreamHandler(logging.StreamHandler): + + # Don't build up a list of colors if we don't have colorama + if colorama: + COLORS = [ + # This needs to be in order from highest logging level to lowest. 
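+            # (format() below applies the first color whose level the
+            # record meets, which is why this ordering matters.)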
+            (logging.ERROR, _color_wrap(colorama.Fore.RED)),
+            (logging.WARNING, _color_wrap(colorama.Fore.YELLOW)),
+        ]
+    else:
+        COLORS = []
+
+    def __init__(self, stream=None):
+        logging.StreamHandler.__init__(self, stream)
+
+        if WINDOWS and colorama:
+            self.stream = colorama.AnsiToWin32(self.stream)
+
+    def should_color(self):
+        # Don't colorize things if we do not have colorama
+        if not colorama:
+            return False
+
+        real_stream = (
+            self.stream if not isinstance(self.stream, colorama.AnsiToWin32)
+            else self.stream.wrapped
+        )
+
+        # If the stream is a tty we should color it
+        if hasattr(real_stream, "isatty") and real_stream.isatty():
+            return True
+
+        # If we have an ANSI term we should color it
+        if os.environ.get("TERM") == "ANSI":
+            return True
+
+        # If anything else we should not color it
+        return False
+
+    def format(self, record):
+        msg = logging.StreamHandler.format(self, record)
+
+        if self.should_color():
+            for level, color in self.COLORS:
+                if record.levelno >= level:
+                    msg = color(msg)
+                    break
+
+        return msg
+
+
+class BetterRotatingFileHandler(logging.handlers.RotatingFileHandler):
+
+    def _open(self):
+        ensure_dir(os.path.dirname(self.baseFilename))
+        return logging.handlers.RotatingFileHandler._open(self)
+
+
+class MaxLevelFilter(logging.Filter):
+
+    def __init__(self, level):
+        self.level = level
+
+    def filter(self, record):
+        return record.levelno < self.level
diff --git a/lib/python3.4/site-packages/pip/utils/outdated.py b/lib/python3.4/site-packages/pip/utils/outdated.py
new file mode 100644
index 0000000..2164cc3
--- /dev/null
+++ b/lib/python3.4/site-packages/pip/utils/outdated.py
@@ -0,0 +1,162 @@
+from __future__ import absolute_import
+
+import datetime
+import json
+import logging
+import os.path
+import sys
+
+from pip._vendor import lockfile
+from pip._vendor.packaging import version as packaging_version
+
+from pip.compat import total_seconds, WINDOWS
+from pip.models import PyPI
+from pip.locations import USER_CACHE_DIR, running_under_virtualenv
+from pip.utils import ensure_dir, get_installed_version
+from pip.utils.filesystem import check_path_owner
+
+
+SELFCHECK_DATE_FMT = "%Y-%m-%dT%H:%M:%SZ"
+
+
+logger = logging.getLogger(__name__)
+
+
+class VirtualenvSelfCheckState(object):
+    def __init__(self):
+        self.statefile_path = os.path.join(sys.prefix, "pip-selfcheck.json")
+
+        # Load the existing state
+        try:
+            with open(self.statefile_path) as statefile:
+                self.state = json.load(statefile)
+        except (IOError, ValueError):
+            self.state = {}
+
+    def save(self, pypi_version, current_time):
+        # Attempt to write out our version check file
+        with open(self.statefile_path, "w") as statefile:
+            json.dump(
+                {
+                    "last_check": current_time.strftime(SELFCHECK_DATE_FMT),
+                    "pypi_version": pypi_version,
+                },
+                statefile,
+                sort_keys=True,
+                separators=(",", ":")
+            )
+
+
+class GlobalSelfCheckState(object):
+    def __init__(self):
+        self.statefile_path = os.path.join(USER_CACHE_DIR, "selfcheck.json")
+
+        # Load the existing state
+        try:
+            with open(self.statefile_path) as statefile:
+                self.state = json.load(statefile)[sys.prefix]
+        except (IOError, ValueError, KeyError):
+            self.state = {}
+
+    def save(self, pypi_version, current_time):
+        # Check to make sure that we own the directory
+        if not check_path_owner(os.path.dirname(self.statefile_path)):
+            return
+
+        # Now that we've ensured the directory is owned by this user, we'll go
+        # ahead and make sure that all our directories are created.
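+        # (Without the check_path_owner() guard above, running "sudo pip"
+        # without -H could leave a root-owned selfcheck file in the user's
+        # cache directory.)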
+ ensure_dir(os.path.dirname(self.statefile_path)) + + # Attempt to write out our version check file + with lockfile.LockFile(self.statefile_path): + if os.path.exists(self.statefile_path): + with open(self.statefile_path) as statefile: + state = json.load(statefile) + else: + state = {} + + state[sys.prefix] = { + "last_check": current_time.strftime(SELFCHECK_DATE_FMT), + "pypi_version": pypi_version, + } + + with open(self.statefile_path, "w") as statefile: + json.dump(state, statefile, sort_keys=True, + separators=(",", ":")) + + +def load_selfcheck_statefile(): + if running_under_virtualenv(): + return VirtualenvSelfCheckState() + else: + return GlobalSelfCheckState() + + +def pip_version_check(session): + """Check for an update for pip. + + Limit the frequency of checks to once per week. State is stored either in + the active virtualenv or in the user's USER_CACHE_DIR keyed off the prefix + of the pip script path. + """ + installed_version = get_installed_version("pip") + if installed_version is None: + return + + pip_version = packaging_version.parse(installed_version) + pypi_version = None + + try: + state = load_selfcheck_statefile() + + current_time = datetime.datetime.utcnow() + # Determine if we need to refresh the state + if "last_check" in state.state and "pypi_version" in state.state: + last_check = datetime.datetime.strptime( + state.state["last_check"], + SELFCHECK_DATE_FMT + ) + if total_seconds(current_time - last_check) < 7 * 24 * 60 * 60: + pypi_version = state.state["pypi_version"] + + # Refresh the version if we need to or just see if we need to warn + if pypi_version is None: + resp = session.get( + PyPI.pip_json_url, + headers={"Accept": "application/json"}, + ) + resp.raise_for_status() + pypi_version = [ + v for v in sorted( + list(resp.json()["releases"]), + key=packaging_version.parse, + ) + if not packaging_version.parse(v).is_prerelease + ][-1] + + # save that we've performed a check + state.save(pypi_version, current_time) + + remote_version = packaging_version.parse(pypi_version) + + # Determine if our pypi_version is older + if (pip_version < remote_version and + pip_version.base_version != remote_version.base_version): + # Advise "python -m pip" on Windows to avoid issues + # with overwriting pip.exe. 
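+            # (Windows locks a running pip.exe, so pip cannot overwrite
+            # itself; "python -m pip" keeps the executable file free.)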
+ if WINDOWS: + pip_cmd = "python -m pip" + else: + pip_cmd = "pip" + logger.warning( + "You are using pip version %s, however version %s is " + "available.\nYou should consider upgrading via the " + "'%s install --upgrade pip' command.", + pip_version, pypi_version, pip_cmd + ) + + except Exception: + logger.debug( + "There was an error checking the latest version of pip", + exc_info=True, + ) diff --git a/lib/python3.4/site-packages/pip/utils/setuptools_build.py b/lib/python3.4/site-packages/pip/utils/setuptools_build.py new file mode 100644 index 0000000..4c9095e --- /dev/null +++ b/lib/python3.4/site-packages/pip/utils/setuptools_build.py @@ -0,0 +1,6 @@ +# Shim to wrap setup.py invocation with setuptools +SETUPTOOLS_SHIM = ( + "import setuptools, tokenize;__file__=%r;" + "exec(compile(getattr(tokenize, 'open', open)(__file__).read()" + ".replace('\\r\\n', '\\n'), __file__, 'exec'))" +) diff --git a/lib/python3.4/site-packages/pip/utils/ui.py b/lib/python3.4/site-packages/pip/utils/ui.py new file mode 100644 index 0000000..973bc64 --- /dev/null +++ b/lib/python3.4/site-packages/pip/utils/ui.py @@ -0,0 +1,339 @@ +from __future__ import absolute_import +from __future__ import division + +import itertools +import sys +from signal import signal, SIGINT, default_int_handler +import time +import contextlib +import logging + +from pip.compat import WINDOWS +from pip.utils import format_size +from pip.utils.logging import get_indentation +from pip._vendor import six +from pip._vendor.progress.bar import Bar, IncrementalBar +from pip._vendor.progress.helpers import (WritelnMixin, + HIDE_CURSOR, SHOW_CURSOR) +from pip._vendor.progress.spinner import Spinner + +try: + from pip._vendor import colorama +# Lots of different errors can come from this, including SystemError and +# ImportError. +except Exception: + colorama = None + +logger = logging.getLogger(__name__) + + +def _select_progress_class(preferred, fallback): + encoding = getattr(preferred.file, "encoding", None) + + # If we don't know what encoding this file is in, then we'll just assume + # that it doesn't support unicode and use the ASCII bar. + if not encoding: + return fallback + + # Collect all of the possible characters we want to use with the preferred + # bar. + characters = [ + getattr(preferred, "empty_fill", six.text_type()), + getattr(preferred, "fill", six.text_type()), + ] + characters += list(getattr(preferred, "phases", [])) + + # Try to decode the characters we're using for the bar using the encoding + # of the given file, if this works then we'll assume that we can use the + # fancier bar and if not we'll fall back to the plaintext bar. + try: + six.text_type().join(characters).encode(encoding) + except UnicodeEncodeError: + return fallback + else: + return preferred + + +_BaseBar = _select_progress_class(IncrementalBar, Bar) + + +class InterruptibleMixin(object): + """ + Helper to ensure that self.finish() gets called on keyboard interrupt. + + This allows downloads to be interrupted without leaving temporary state + (like hidden cursors) behind. + + This class is similar to the progress library's existing SigIntMixin + helper, but as of version 1.2, that helper has the following problems: + + 1. It calls sys.exit(). + 2. It discards the existing SIGINT handler completely. + 3. 
It leaves its own handler in place even after an uninterrupted finish,
+       which will have unexpected delayed effects if the user triggers an
+       unrelated keyboard interrupt some time after a progress-displaying
+       download has already completed, for example.
+    """
+
+    def __init__(self, *args, **kwargs):
+        """
+        Save the original SIGINT handler for later.
+        """
+        super(InterruptibleMixin, self).__init__(*args, **kwargs)
+
+        self.original_handler = signal(SIGINT, self.handle_sigint)
+
+        # If signal() returns None, the previous handler was not installed from
+        # Python, and we cannot restore it. This probably should not happen,
+        # but if it does, we must restore something sensible instead, at least.
+        # The least bad option should be Python's default SIGINT handler, which
+        # just raises KeyboardInterrupt.
+        if self.original_handler is None:
+            self.original_handler = default_int_handler
+
+    def finish(self):
+        """
+        Restore the original SIGINT handler after finishing.
+
+        This should happen regardless of whether the progress display finishes
+        normally, or gets interrupted.
+        """
+        super(InterruptibleMixin, self).finish()
+        signal(SIGINT, self.original_handler)
+
+    def handle_sigint(self, signum, frame):
+        """
+        Call self.finish() before delegating to the original SIGINT handler.
+
+        This handler should only be in place while the progress display is
+        active.
+        """
+        self.finish()
+        self.original_handler(signum, frame)
+
+
+class DownloadProgressMixin(object):
+
+    def __init__(self, *args, **kwargs):
+        super(DownloadProgressMixin, self).__init__(*args, **kwargs)
+        self.message = (" " * (get_indentation() + 2)) + self.message
+
+    @property
+    def downloaded(self):
+        return format_size(self.index)
+
+    @property
+    def download_speed(self):
+        # Avoid zero division errors...
+        if self.avg == 0.0:
+            return "..."
+        return format_size(1 / self.avg) + "/s"
+
+    @property
+    def pretty_eta(self):
+        if self.eta:
+            return "eta %s" % self.eta_td
+        return ""
+
+    def iter(self, it, n=1):
+        for x in it:
+            yield x
+            self.next(n)
+        self.finish()
+
+
+class WindowsMixin(object):
+
+    def __init__(self, *args, **kwargs):
+        # The Windows terminal does not support the hide/show cursor ANSI codes
+        # even with colorama. So we'll ensure that hide_cursor is False on
+        # Windows.
+        # This call needs to go before the super() call, so that hide_cursor
+        # is set in time. The base progress bar class writes the "hide cursor"
+        # code to the terminal in its init, so if we don't set this soon
+        # enough, we get a "hide" with no corresponding "show"...
+        if WINDOWS and self.hide_cursor:
+            self.hide_cursor = False
+
+        super(WindowsMixin, self).__init__(*args, **kwargs)
+
+        # Check if we are running on Windows and we have the colorama module,
+        # if we do then wrap our file with it.
+        if WINDOWS and colorama:
+            self.file = colorama.AnsiToWin32(self.file)
+            # The progress code expects to be able to call self.file.isatty()
+            # but the colorama.AnsiToWin32() object doesn't have that, so we'll
+            # add it.
+            self.file.isatty = lambda: self.file.wrapped.isatty()
+            # The progress code expects to be able to call self.file.flush()
+            # but the colorama.AnsiToWin32() object doesn't have that, so we'll
+            # add it.
+ self.file.flush = lambda: self.file.wrapped.flush() + + +class DownloadProgressBar(WindowsMixin, InterruptibleMixin, + DownloadProgressMixin, _BaseBar): + + file = sys.stdout + message = "%(percent)d%%" + suffix = "%(downloaded)s %(download_speed)s %(pretty_eta)s" + + +class DownloadProgressSpinner(WindowsMixin, InterruptibleMixin, + DownloadProgressMixin, WritelnMixin, Spinner): + + file = sys.stdout + suffix = "%(downloaded)s %(download_speed)s" + + def next_phase(self): + if not hasattr(self, "_phaser"): + self._phaser = itertools.cycle(self.phases) + return next(self._phaser) + + def update(self): + message = self.message % self + phase = self.next_phase() + suffix = self.suffix % self + line = ''.join([ + message, + " " if message else "", + phase, + " " if suffix else "", + suffix, + ]) + + self.writeln(line) + + +################################################################ +# Generic "something is happening" spinners +# +# We don't even try using progress.spinner.Spinner here because it's actually +# simpler to reimplement from scratch than to coerce their code into doing +# what we need. +################################################################ + +@contextlib.contextmanager +def hidden_cursor(file): + # The Windows terminal does not support the hide/show cursor ANSI codes, + # even via colorama. So don't even try. + if WINDOWS: + yield + else: + file.write(HIDE_CURSOR) + try: + yield + finally: + file.write(SHOW_CURSOR) + + +class RateLimiter(object): + def __init__(self, min_update_interval_seconds): + self._min_update_interval_seconds = min_update_interval_seconds + self._last_update = 0 + + def ready(self): + now = time.time() + delta = now - self._last_update + return delta >= self._min_update_interval_seconds + + def reset(self): + self._last_update = time.time() + + +class InteractiveSpinner(object): + def __init__(self, message, file=None, spin_chars="-\\|/", + # Empirically, 8 updates/second looks nice + min_update_interval_seconds=0.125): + self._message = message + if file is None: + file = sys.stdout + self._file = file + self._rate_limiter = RateLimiter(min_update_interval_seconds) + self._finished = False + + self._spin_cycle = itertools.cycle(spin_chars) + + self._file.write(" " * get_indentation() + self._message + " ... ") + self._width = 0 + + def _write(self, status): + assert not self._finished + # Erase what we wrote before by backspacing to the beginning, writing + # spaces to overwrite the old text, and then backspacing again + backup = "\b" * self._width + self._file.write(backup + " " * self._width + backup) + # Now we have a blank slate to add our status + self._file.write(status) + self._width = len(status) + self._file.flush() + self._rate_limiter.reset() + + def spin(self): + if self._finished: + return + if not self._rate_limiter.ready(): + return + self._write(next(self._spin_cycle)) + + def finish(self, final_status): + if self._finished: + return + self._write(final_status) + self._file.write("\n") + self._file.flush() + self._finished = True + + +# Used for dumb terminals, non-interactive installs (no tty), etc. +# We still print updates occasionally (once every 60 seconds by default) to +# act as a keep-alive for systems like Travis-CI that take lack-of-output as +# an indication that a task has frozen. 
+class NonInteractiveSpinner(object): + def __init__(self, message, min_update_interval_seconds=60): + self._message = message + self._finished = False + self._rate_limiter = RateLimiter(min_update_interval_seconds) + self._update("started") + + def _update(self, status): + assert not self._finished + self._rate_limiter.reset() + logger.info("%s: %s", self._message, status) + + def spin(self): + if self._finished: + return + if not self._rate_limiter.ready(): + return + self._update("still running...") + + def finish(self, final_status): + if self._finished: + return + self._update("finished with status '%s'" % (final_status,)) + self._finished = True + + +@contextlib.contextmanager +def open_spinner(message): + # Interactive spinner goes directly to sys.stdout rather than being routed + # through the logging system, but it acts like it has level INFO, + # i.e. it's only displayed if we're at level INFO or better. + # Non-interactive spinner goes through the logging system, so it is always + # in sync with logging configuration. + if sys.stdout.isatty() and logger.getEffectiveLevel() <= logging.INFO: + spinner = InteractiveSpinner(message) + else: + spinner = NonInteractiveSpinner(message) + try: + with hidden_cursor(sys.stdout): + yield spinner + except KeyboardInterrupt: + spinner.finish("canceled") + raise + except Exception: + spinner.finish("error") + raise + else: + spinner.finish("done") diff --git a/lib/python3.4/site-packages/pip/vcs/__init__.py b/lib/python3.4/site-packages/pip/vcs/__init__.py new file mode 100644 index 0000000..9dc1c60 --- /dev/null +++ b/lib/python3.4/site-packages/pip/vcs/__init__.py @@ -0,0 +1,363 @@ +"""Handles all VCS (version control) support""" +from __future__ import absolute_import + +import errno +import logging +import os +import shutil + +from pip._vendor.six.moves.urllib import parse as urllib_parse + +from pip.exceptions import BadCommand +from pip.utils import (display_path, backup_dir, call_subprocess, + rmtree, ask_path_exists) + + +__all__ = ['vcs', 'get_src_requirement'] + + +logger = logging.getLogger(__name__) + + +class VcsSupport(object): + _registry = {} + schemes = ['ssh', 'git', 'hg', 'bzr', 'sftp', 'svn'] + + def __init__(self): + # Register more schemes with urlparse for various version control + # systems + urllib_parse.uses_netloc.extend(self.schemes) + # Python >= 2.7.4, 3.3 doesn't have uses_fragment + if getattr(urllib_parse, 'uses_fragment', None): + urllib_parse.uses_fragment.extend(self.schemes) + super(VcsSupport, self).__init__() + + def __iter__(self): + return self._registry.__iter__() + + @property + def backends(self): + return list(self._registry.values()) + + @property + def dirnames(self): + return [backend.dirname for backend in self.backends] + + @property + def all_schemes(self): + schemes = [] + for backend in self.backends: + schemes.extend(backend.schemes) + return schemes + + def register(self, cls): + if not hasattr(cls, 'name'): + logger.warning('Cannot register VCS %s', cls.__name__) + return + if cls.name not in self._registry: + self._registry[cls.name] = cls + logger.debug('Registered VCS backend: %s', cls.name) + + def unregister(self, cls=None, name=None): + if name in self._registry: + del self._registry[name] + elif cls in self._registry.values(): + del self._registry[cls.name] + else: + logger.warning('Cannot unregister because no class or name given') + + def get_backend_name(self, location): + """ + Return the name of the version control backend if found at given + location, e.g. 
vcs.get_backend_name('/path/to/vcs/checkout')
+        """
+        for vc_type in self._registry.values():
+            if vc_type.controls_location(location):
+                logger.debug('Determined that %s uses VCS: %s',
+                             location, vc_type.name)
+                return vc_type.name
+        return None
+
+    def get_backend(self, name):
+        name = name.lower()
+        if name in self._registry:
+            return self._registry[name]
+
+    def get_backend_from_location(self, location):
+        vc_type = self.get_backend_name(location)
+        if vc_type:
+            return self.get_backend(vc_type)
+        return None
+
+
+vcs = VcsSupport()
+
+
+class VersionControl(object):
+    name = ''
+    dirname = ''
+    # List of supported schemes for this Version Control
+    schemes = ()
+
+    def __init__(self, url=None, *args, **kwargs):
+        self.url = url
+        super(VersionControl, self).__init__(*args, **kwargs)
+
+    def _is_local_repository(self, repo):
+        """
+        posix absolute paths start with os.path.sep,
+        win32 ones start with a drive (like c:\\folder)
+        """
+        drive, tail = os.path.splitdrive(repo)
+        return repo.startswith(os.path.sep) or drive
+
+    # See issue #1083 for why this method was introduced:
+    # https://github.com/pypa/pip/issues/1083
+    def translate_egg_surname(self, surname):
+        # For example, Django has branches of the form "stable/1.7.x".
+        return surname.replace('/', '_')
+
+    def export(self, location):
+        """
+        Export the repository at the url to the destination location,
+        i.e. only download the files, without VCS information
+        """
+        raise NotImplementedError
+
+    def get_url_rev(self):
+        """
+        Returns the correct repository URL and revision by parsing the given
+        repository URL
+        """
+        error_message = (
+            "Sorry, '%s' is a malformed VCS url. "
+            "The format is <vcs>+<protocol>://<url>, "
+            "e.g. svn+http://myrepo/svn/MyApp#egg=MyApp"
+        )
+        assert '+' in self.url, error_message % self.url
+        url = self.url.split('+', 1)[1]
+        scheme, netloc, path, query, frag = urllib_parse.urlsplit(url)
+        rev = None
+        if '@' in path:
+            path, rev = path.rsplit('@', 1)
+        url = urllib_parse.urlunsplit((scheme, netloc, path, query, ''))
+        return url, rev
+
+    def get_info(self, location):
+        """
+        Returns (url, revision), where both are strings
+        """
+        assert not location.rstrip('/').endswith(self.dirname), \
+            'Bad directory: %s' % location
+        return self.get_url(location), self.get_revision(location)
+
+    def normalize_url(self, url):
+        """
+        Normalize a URL for comparison by unquoting it and removing any
+        trailing slash.
+        """
+        return urllib_parse.unquote(url).rstrip('/')
+
+    def compare_urls(self, url1, url2):
+        """
+        Compare two repo URLs for identity, ignoring incidental differences.
+        """
+        return (self.normalize_url(url1) == self.normalize_url(url2))
+
+    def obtain(self, dest):
+        """
+        Called when installing or updating an editable package, takes the
+        source path of the checkout.
+        """
+        raise NotImplementedError
+
+    def switch(self, dest, url, rev_options):
+        """
+        Switch the repo at ``dest`` to point to ``URL``.
+        """
+        raise NotImplementedError
+
+    def update(self, dest, rev_options):
+        """
+        Update an already-existing repo to the given ``rev_options``.
+        """
+        raise NotImplementedError
+
+    def check_version(self, dest, rev_options):
+        """
+        Return True if the version is identical to what exists and
+        doesn't need to be updated.
+        """
+        raise NotImplementedError
+
+    def check_destination(self, dest, url, rev_options, rev_display):
+        """
+        Prepare a location to receive a checkout/clone.
+
+        Return True if the location is ready for (and requires) a
+        checkout/clone, False otherwise.
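+
+        For example (illustrative): a nonexistent ``dest`` comes back True
+        (a fresh clone/checkout is needed), while an existing checkout of
+        the same URL at the wanted revision comes back False.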
+ """ + checkout = True + prompt = False + if os.path.exists(dest): + checkout = False + if os.path.exists(os.path.join(dest, self.dirname)): + existing_url = self.get_url(dest) + if self.compare_urls(existing_url, url): + logger.debug( + '%s in %s exists, and has correct URL (%s)', + self.repo_name.title(), + display_path(dest), + url, + ) + if not self.check_version(dest, rev_options): + logger.info( + 'Updating %s %s%s', + display_path(dest), + self.repo_name, + rev_display, + ) + self.update(dest, rev_options) + else: + logger.info( + 'Skipping because already up-to-date.') + else: + logger.warning( + '%s %s in %s exists with URL %s', + self.name, + self.repo_name, + display_path(dest), + existing_url, + ) + prompt = ('(s)witch, (i)gnore, (w)ipe, (b)ackup ', + ('s', 'i', 'w', 'b')) + else: + logger.warning( + 'Directory %s already exists, and is not a %s %s.', + dest, + self.name, + self.repo_name, + ) + prompt = ('(i)gnore, (w)ipe, (b)ackup ', ('i', 'w', 'b')) + if prompt: + logger.warning( + 'The plan is to install the %s repository %s', + self.name, + url, + ) + response = ask_path_exists('What to do? %s' % prompt[0], + prompt[1]) + + if response == 's': + logger.info( + 'Switching %s %s to %s%s', + self.repo_name, + display_path(dest), + url, + rev_display, + ) + self.switch(dest, url, rev_options) + elif response == 'i': + # do nothing + pass + elif response == 'w': + logger.warning('Deleting %s', display_path(dest)) + rmtree(dest) + checkout = True + elif response == 'b': + dest_dir = backup_dir(dest) + logger.warning( + 'Backing up %s to %s', display_path(dest), dest_dir, + ) + shutil.move(dest, dest_dir) + checkout = True + return checkout + + def unpack(self, location): + """ + Clean up current location and download the url repository + (and vcs infos) into location + """ + if os.path.exists(location): + rmtree(location) + self.obtain(location) + + def get_src_requirement(self, dist, location): + """ + Return a string representing the requirement needed to + redownload the files currently present in location, something + like: + {repository_url}@{revision}#egg={project_name}-{version_identifier} + """ + raise NotImplementedError + + def get_url(self, location): + """ + Return the url used at location + Used in get_info or check_destination + """ + raise NotImplementedError + + def get_revision(self, location): + """ + Return the current revision of the files at location + Used in get_info + """ + raise NotImplementedError + + def run_command(self, cmd, show_stdout=True, cwd=None, + on_returncode='raise', + command_level=logging.DEBUG, command_desc=None, + extra_environ=None, spinner=None): + """ + Run a VCS subcommand + This is simply a wrapper around call_subprocess that adds the VCS + command name, and checks that the VCS is available + """ + cmd = [self.name] + cmd + try: + return call_subprocess(cmd, show_stdout, cwd, + on_returncode, command_level, + command_desc, extra_environ, + spinner) + except OSError as e: + # errno.ENOENT = no such file or directory + # In other words, the VCS executable isn't available + if e.errno == errno.ENOENT: + raise BadCommand('Cannot find command %r' % self.name) + else: + raise # re-raise exception if a different error occurred + + @classmethod + def controls_location(cls, location): + """ + Check if a location is controlled by the vcs. + It is meant to be overridden to implement smarter detection + mechanisms for specific vcs. 
+ """ + logger.debug('Checking in %s for %s (%s)...', + location, cls.dirname, cls.name) + path = os.path.join(location, cls.dirname) + return os.path.exists(path) + + +def get_src_requirement(dist, location): + version_control = vcs.get_backend_from_location(location) + if version_control: + try: + return version_control().get_src_requirement(dist, + location) + except BadCommand: + logger.warning( + 'cannot determine version of editable source in %s ' + '(%s command not found in path)', + location, + version_control.name, + ) + return dist.as_requirement() + logger.warning( + 'cannot determine version of editable source in %s (is not SVN ' + 'checkout, Git clone, Mercurial clone or Bazaar branch)', + location, + ) + return dist.as_requirement() diff --git a/lib/python3.4/site-packages/pip/vcs/bazaar.py b/lib/python3.4/site-packages/pip/vcs/bazaar.py new file mode 100644 index 0000000..0f09584 --- /dev/null +++ b/lib/python3.4/site-packages/pip/vcs/bazaar.py @@ -0,0 +1,116 @@ +from __future__ import absolute_import + +import logging +import os +import tempfile + +# TODO: Get this into six.moves.urllib.parse +try: + from urllib import parse as urllib_parse +except ImportError: + import urlparse as urllib_parse + +from pip.utils import rmtree, display_path +from pip.vcs import vcs, VersionControl +from pip.download import path_to_url + + +logger = logging.getLogger(__name__) + + +class Bazaar(VersionControl): + name = 'bzr' + dirname = '.bzr' + repo_name = 'branch' + schemes = ( + 'bzr', 'bzr+http', 'bzr+https', 'bzr+ssh', 'bzr+sftp', 'bzr+ftp', + 'bzr+lp', + ) + + def __init__(self, url=None, *args, **kwargs): + super(Bazaar, self).__init__(url, *args, **kwargs) + # Python >= 2.7.4, 3.3 doesn't have uses_fragment or non_hierarchical + # Register lp but do not expose as a scheme to support bzr+lp. 
+ if getattr(urllib_parse, 'uses_fragment', None): + urllib_parse.uses_fragment.extend(['lp']) + urllib_parse.non_hierarchical.extend(['lp']) + + def export(self, location): + """ + Export the Bazaar repository at the url to the destination location + """ + temp_dir = tempfile.mkdtemp('-export', 'pip-') + self.unpack(temp_dir) + if os.path.exists(location): + # Remove the location to make sure Bazaar can export it correctly + rmtree(location) + try: + self.run_command(['export', location], cwd=temp_dir, + show_stdout=False) + finally: + rmtree(temp_dir) + + def switch(self, dest, url, rev_options): + self.run_command(['switch', url], cwd=dest) + + def update(self, dest, rev_options): + self.run_command(['pull', '-q'] + rev_options, cwd=dest) + + def obtain(self, dest): + url, rev = self.get_url_rev() + if rev: + rev_options = ['-r', rev] + rev_display = ' (to revision %s)' % rev + else: + rev_options = [] + rev_display = '' + if self.check_destination(dest, url, rev_options, rev_display): + logger.info( + 'Checking out %s%s to %s', + url, + rev_display, + display_path(dest), + ) + self.run_command(['branch', '-q'] + rev_options + [url, dest]) + + def get_url_rev(self): + # hotfix the URL scheme after removing bzr+ from bzr+ssh:// readd it + url, rev = super(Bazaar, self).get_url_rev() + if url.startswith('ssh://'): + url = 'bzr+' + url + return url, rev + + def get_url(self, location): + urls = self.run_command(['info'], show_stdout=False, cwd=location) + for line in urls.splitlines(): + line = line.strip() + for x in ('checkout of branch: ', + 'parent branch: '): + if line.startswith(x): + repo = line.split(x)[1] + if self._is_local_repository(repo): + return path_to_url(repo) + return repo + return None + + def get_revision(self, location): + revision = self.run_command( + ['revno'], show_stdout=False, cwd=location) + return revision.splitlines()[-1] + + def get_src_requirement(self, dist, location): + repo = self.get_url(location) + if not repo: + return None + if not repo.lower().startswith('bzr:'): + repo = 'bzr+' + repo + egg_project_name = dist.egg_name().split('-', 1)[0] + current_rev = self.get_revision(location) + return '%s@%s#egg=%s' % (repo, current_rev, egg_project_name) + + def check_version(self, dest, rev_options): + """Always assume the versions don't match""" + return False + + +vcs.register(Bazaar) diff --git a/lib/python3.4/site-packages/pip/vcs/git.py b/lib/python3.4/site-packages/pip/vcs/git.py new file mode 100644 index 0000000..baee7e8 --- /dev/null +++ b/lib/python3.4/site-packages/pip/vcs/git.py @@ -0,0 +1,274 @@ +from __future__ import absolute_import + +import logging +import tempfile +import os.path + +from pip.compat import samefile +from pip.exceptions import BadCommand +from pip._vendor.six.moves.urllib import parse as urllib_parse +from pip._vendor.six.moves.urllib import request as urllib_request + +from pip.utils import display_path, rmtree +from pip.vcs import vcs, VersionControl + + +urlsplit = urllib_parse.urlsplit +urlunsplit = urllib_parse.urlunsplit + + +logger = logging.getLogger(__name__) + + +class Git(VersionControl): + name = 'git' + dirname = '.git' + repo_name = 'clone' + schemes = ( + 'git', 'git+http', 'git+https', 'git+ssh', 'git+git', 'git+file', + ) + + def __init__(self, url=None, *args, **kwargs): + + # Works around an apparent Git bug + # (see http://article.gmane.org/gmane.comp.version-control.git/146500) + if url: + scheme, netloc, path, query, fragment = urlsplit(url) + if scheme.endswith('file'): + initial_slashes = 
path[:-len(path.lstrip('/'))] + newpath = ( + initial_slashes + + urllib_request.url2pathname(path) + .replace('\\', '/').lstrip('/') + ) + url = urlunsplit((scheme, netloc, newpath, query, fragment)) + after_plus = scheme.find('+') + 1 + url = scheme[:after_plus] + urlunsplit( + (scheme[after_plus:], netloc, newpath, query, fragment), + ) + + super(Git, self).__init__(url, *args, **kwargs) + + def export(self, location): + """Export the Git repository at the url to the destination location""" + temp_dir = tempfile.mkdtemp('-export', 'pip-') + self.unpack(temp_dir) + try: + if not location.endswith('/'): + location = location + '/' + self.run_command( + ['checkout-index', '-a', '-f', '--prefix', location], + show_stdout=False, cwd=temp_dir) + finally: + rmtree(temp_dir) + + def check_rev_options(self, rev, dest, rev_options): + """Check the revision options before checkout to compensate that tags + and branches may need origin/ as a prefix. + Returns the SHA1 of the branch or tag if found. + """ + revisions = self.get_short_refs(dest) + + origin_rev = 'origin/%s' % rev + if origin_rev in revisions: + # remote branch + return [revisions[origin_rev]] + elif rev in revisions: + # a local tag or branch name + return [revisions[rev]] + else: + logger.warning( + "Could not find a tag or branch '%s', assuming commit.", rev, + ) + return rev_options + + def check_version(self, dest, rev_options): + """ + Compare the current sha to the ref. ref may be a branch or tag name, + but current rev will always point to a sha. This means that a branch + or tag will never compare as True. So this ultimately only matches + against exact shas. + """ + return self.get_revision(dest).startswith(rev_options[0]) + + def switch(self, dest, url, rev_options): + self.run_command(['config', 'remote.origin.url', url], cwd=dest) + self.run_command(['checkout', '-q'] + rev_options, cwd=dest) + + self.update_submodules(dest) + + def update(self, dest, rev_options): + # First fetch changes from the default remote + self.run_command(['fetch', '-q'], cwd=dest) + # Then reset to wanted revision (maybe even origin/master) + if rev_options: + rev_options = self.check_rev_options( + rev_options[0], dest, rev_options, + ) + self.run_command(['reset', '--hard', '-q'] + rev_options, cwd=dest) + #: update submodules + self.update_submodules(dest) + + def obtain(self, dest): + url, rev = self.get_url_rev() + if rev: + rev_options = [rev] + rev_display = ' (to %s)' % rev + else: + rev_options = ['origin/master'] + rev_display = '' + if self.check_destination(dest, url, rev_options, rev_display): + logger.info( + 'Cloning %s%s to %s', url, rev_display, display_path(dest), + ) + self.run_command(['clone', '-q', url, dest]) + + if rev: + rev_options = self.check_rev_options(rev, dest, rev_options) + # Only do a checkout if rev_options differs from HEAD + if not self.check_version(dest, rev_options): + self.run_command( + ['checkout', '-q'] + rev_options, + cwd=dest, + ) + #: repo may contain submodules + self.update_submodules(dest) + + def get_url(self, location): + url = self.run_command( + ['config', 'remote.origin.url'], + show_stdout=False, cwd=location) + return url.strip() + + def get_revision(self, location): + current_rev = self.run_command( + ['rev-parse', 'HEAD'], show_stdout=False, cwd=location) + return current_rev.strip() + + def get_full_refs(self, location): + """Yields tuples of (commit, ref) for branches and tags""" + output = self.run_command(['show-ref'], + show_stdout=False, cwd=location) + for line in 
output.strip().splitlines():
+            commit, ref = line.split(' ', 1)
+            yield commit.strip(), ref.strip()
+
+    def is_ref_remote(self, ref):
+        return ref.startswith('refs/remotes/')
+
+    def is_ref_branch(self, ref):
+        return ref.startswith('refs/heads/')
+
+    def is_ref_tag(self, ref):
+        return ref.startswith('refs/tags/')
+
+    def is_ref_commit(self, ref):
+        """A ref is a commit sha if it is not anything else"""
+        return not any((
+            self.is_ref_remote(ref),
+            self.is_ref_branch(ref),
+            self.is_ref_tag(ref),
+        ))
+
+    # Should deprecate `get_refs` since it's ambiguous
+    def get_refs(self, location):
+        return self.get_short_refs(location)
+
+    def get_short_refs(self, location):
+        """Return a map of named refs (branches or tags) to commit hashes."""
+        rv = {}
+        for commit, ref in self.get_full_refs(location):
+            ref_name = None
+            if self.is_ref_remote(ref):
+                ref_name = ref[len('refs/remotes/'):]
+            elif self.is_ref_branch(ref):
+                ref_name = ref[len('refs/heads/'):]
+            elif self.is_ref_tag(ref):
+                ref_name = ref[len('refs/tags/'):]
+            if ref_name is not None:
+                rv[ref_name] = commit
+        return rv
+
+    def _get_subdirectory(self, location):
+        """Return the relative path of setup.py to the git repo root."""
+        # find the repo root
+        git_dir = self.run_command(['rev-parse', '--git-dir'],
+                                   show_stdout=False, cwd=location).strip()
+        if not os.path.isabs(git_dir):
+            git_dir = os.path.join(location, git_dir)
+        root_dir = os.path.join(git_dir, '..')
+        # find setup.py
+        orig_location = location
+        while not os.path.exists(os.path.join(location, 'setup.py')):
+            last_location = location
+            location = os.path.dirname(location)
+            if location == last_location:
+                # We've traversed up to the root of the filesystem without
+                # finding setup.py
+                logger.warning(
+                    "Could not find setup.py for directory %s (tried all "
+                    "parent directories)",
+                    orig_location,
+                )
+                return None
+        # relative path of setup.py to repo root
+        if samefile(root_dir, location):
+            return None
+        return os.path.relpath(location, root_dir)
+
+    def get_src_requirement(self, dist, location):
+        repo = self.get_url(location)
+        if not repo:
+            return None
+        if not repo.lower().startswith('git:'):
+            repo = 'git+' + repo
+        egg_project_name = dist.egg_name().split('-', 1)[0]
+        current_rev = self.get_revision(location)
+        req = '%s@%s#egg=%s' % (repo, current_rev, egg_project_name)
+        subdirectory = self._get_subdirectory(location)
+        if subdirectory:
+            req += '&subdirectory=' + subdirectory
+        return req
+
+    def get_url_rev(self):
+        """
+        Prefixes stub URLs like 'user@hostname:user/repo.git' with 'ssh://'.
+        That's required because although they use SSH, they sometimes don't
+        work with an ssh:// scheme (e.g. GitHub). But we need a scheme for
+        parsing. Hence we remove it again afterwards and return the URL as
+        a stub.
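+
+        Illustrative example (editorial sketch, not part of the original
+        patch): a stub URL like 'git+git@github.com:user/repo.git' becomes
+        'git+ssh://git@github.com:user/repo.git' so the parent class can
+        parse it; stripping 'ssh://' from the parsed result then yields
+        ('git@github.com:user/repo.git', None).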
+ """ + if '://' not in self.url: + assert 'file:' not in self.url + self.url = self.url.replace('git+', 'git+ssh://') + url, rev = super(Git, self).get_url_rev() + url = url.replace('ssh://', '') + else: + url, rev = super(Git, self).get_url_rev() + + return url, rev + + def update_submodules(self, location): + if not os.path.exists(os.path.join(location, '.gitmodules')): + return + self.run_command( + ['submodule', 'update', '--init', '--recursive', '-q'], + cwd=location, + ) + + @classmethod + def controls_location(cls, location): + if super(Git, cls).controls_location(location): + return True + try: + r = cls().run_command(['rev-parse'], + cwd=location, + show_stdout=False, + on_returncode='ignore') + return not r + except BadCommand: + logger.debug("could not determine if %s is under git control " + "because git is not available", location) + return False + + +vcs.register(Git) diff --git a/lib/python3.4/site-packages/pip/vcs/mercurial.py b/lib/python3.4/site-packages/pip/vcs/mercurial.py new file mode 100644 index 0000000..1aa83b9 --- /dev/null +++ b/lib/python3.4/site-packages/pip/vcs/mercurial.py @@ -0,0 +1,103 @@ +from __future__ import absolute_import + +import logging +import os +import tempfile + +from pip.utils import display_path, rmtree +from pip.vcs import vcs, VersionControl +from pip.download import path_to_url +from pip._vendor.six.moves import configparser + + +logger = logging.getLogger(__name__) + + +class Mercurial(VersionControl): + name = 'hg' + dirname = '.hg' + repo_name = 'clone' + schemes = ('hg', 'hg+http', 'hg+https', 'hg+ssh', 'hg+static-http') + + def export(self, location): + """Export the Hg repository at the url to the destination location""" + temp_dir = tempfile.mkdtemp('-export', 'pip-') + self.unpack(temp_dir) + try: + self.run_command( + ['archive', location], show_stdout=False, cwd=temp_dir) + finally: + rmtree(temp_dir) + + def switch(self, dest, url, rev_options): + repo_config = os.path.join(dest, self.dirname, 'hgrc') + config = configparser.SafeConfigParser() + try: + config.read(repo_config) + config.set('paths', 'default', url) + with open(repo_config, 'w') as config_file: + config.write(config_file) + except (OSError, configparser.NoSectionError) as exc: + logger.warning( + 'Could not switch Mercurial repository to %s: %s', url, exc, + ) + else: + self.run_command(['update', '-q'] + rev_options, cwd=dest) + + def update(self, dest, rev_options): + self.run_command(['pull', '-q'], cwd=dest) + self.run_command(['update', '-q'] + rev_options, cwd=dest) + + def obtain(self, dest): + url, rev = self.get_url_rev() + if rev: + rev_options = [rev] + rev_display = ' (to revision %s)' % rev + else: + rev_options = [] + rev_display = '' + if self.check_destination(dest, url, rev_options, rev_display): + logger.info( + 'Cloning hg %s%s to %s', + url, + rev_display, + display_path(dest), + ) + self.run_command(['clone', '--noupdate', '-q', url, dest]) + self.run_command(['update', '-q'] + rev_options, cwd=dest) + + def get_url(self, location): + url = self.run_command( + ['showconfig', 'paths.default'], + show_stdout=False, cwd=location).strip() + if self._is_local_repository(url): + url = path_to_url(url) + return url.strip() + + def get_revision(self, location): + current_revision = self.run_command( + ['parents', '--template={rev}'], + show_stdout=False, cwd=location).strip() + return current_revision + + def get_revision_hash(self, location): + current_rev_hash = self.run_command( + ['parents', '--template={node}'], + show_stdout=False, 
cwd=location).strip()
+        return current_rev_hash
+
+    def get_src_requirement(self, dist, location):
+        repo = self.get_url(location)
+        if not repo:
+            return None
+        if not repo.lower().startswith('hg:'):
+            repo = 'hg+' + repo
+        egg_project_name = dist.egg_name().split('-', 1)[0]
+        current_rev_hash = self.get_revision_hash(location)
+        return '%s@%s#egg=%s' % (repo, current_rev_hash, egg_project_name)
+
+    def check_version(self, dest, rev_options):
+        """Always assume the versions don't match"""
+        return False
+
+vcs.register(Mercurial)
diff --git a/lib/python3.4/site-packages/pip/vcs/subversion.py b/lib/python3.4/site-packages/pip/vcs/subversion.py
new file mode 100644
index 0000000..aa78fa6
--- /dev/null
+++ b/lib/python3.4/site-packages/pip/vcs/subversion.py
@@ -0,0 +1,249 @@
+from __future__ import absolute_import
+
+import logging
+import os
+import re
+
+from pip._vendor.six.moves.urllib import parse as urllib_parse
+
+from pip.index import Link
+from pip.utils import rmtree, display_path
+from pip.utils.logging import indent_log
+from pip.vcs import vcs, VersionControl
+
+_svn_xml_url_re = re.compile(r'url="([^"]+)"')
+_svn_rev_re = re.compile(r'committed-rev="(\d+)"')
+_svn_url_re = re.compile(r'URL: (.+)')
+_svn_revision_re = re.compile(r'Revision: (.+)')
+_svn_info_xml_rev_re = re.compile(r'\s*revision="(\d+)"')
+_svn_info_xml_url_re = re.compile(r'<url>(.*)</url>')
+
+
+logger = logging.getLogger(__name__)
+
+
+class Subversion(VersionControl):
+    name = 'svn'
+    dirname = '.svn'
+    repo_name = 'checkout'
+    schemes = ('svn', 'svn+ssh', 'svn+http', 'svn+https', 'svn+svn')
+
+    def get_info(self, location):
+        """Returns (url, revision), where both are strings"""
+        assert not location.rstrip('/').endswith(self.dirname), \
+            'Bad directory: %s' % location
+        output = self.run_command(
+            ['info', location],
+            show_stdout=False,
+            extra_environ={'LANG': 'C'},
+        )
+        match = _svn_url_re.search(output)
+        if not match:
+            logger.warning(
+                'Cannot determine URL of svn checkout %s',
+                display_path(location),
+            )
+            logger.debug('Output that cannot be parsed: \n%s', output)
+            return None, None
+        url = match.group(1).strip()
+        match = _svn_revision_re.search(output)
+        if not match:
+            logger.warning(
+                'Cannot determine revision of svn checkout %s',
+                display_path(location),
+            )
+            logger.debug('Output that cannot be parsed: \n%s', output)
+            return url, None
+        return url, match.group(1)
+
+    def export(self, location):
+        """Export the svn repository at the url to the destination location"""
+        url, rev = self.get_url_rev()
+        rev_options = get_rev_options(url, rev)
+        logger.info('Exporting svn repository %s to %s', url, location)
+        with indent_log():
+            if os.path.exists(location):
+                # Subversion doesn't like to check out over an existing
+                # directory; --force fixes this, but it was only added in
+                # svn 1.5
+                rmtree(location)
+            self.run_command(
+                ['export'] + rev_options + [url, location],
+                show_stdout=False)
+
+    def switch(self, dest, url, rev_options):
+        self.run_command(['switch'] + rev_options + [url, dest])
+
+    def update(self, dest, rev_options):
+        self.run_command(['update'] + rev_options + [dest])
+
+    def obtain(self, dest):
+        url, rev = self.get_url_rev()
+        rev_options = get_rev_options(url, rev)
+        if rev:
+            rev_display = ' (to revision %s)' % rev
+        else:
+            rev_display = ''
+        if self.check_destination(dest, url, rev_options, rev_display):
+            logger.info(
+                'Checking out %s%s to %s',
+                url,
+                rev_display,
+                display_path(dest),
+            )
+            self.run_command(['checkout', '-q'] + rev_options + [url, dest])
+
+    def
get_location(self, dist, dependency_links): + for url in dependency_links: + egg_fragment = Link(url).egg_fragment + if not egg_fragment: + continue + if '-' in egg_fragment: + # FIXME: will this work when a package has - in the name? + key = '-'.join(egg_fragment.split('-')[:-1]).lower() + else: + key = egg_fragment + if key == dist.key: + return url.split('#', 1)[0] + return None + + def get_revision(self, location): + """ + Return the maximum revision for all files under a given location + """ + # Note: taken from setuptools.command.egg_info + revision = 0 + + for base, dirs, files in os.walk(location): + if self.dirname not in dirs: + dirs[:] = [] + continue # no sense walking uncontrolled subdirs + dirs.remove(self.dirname) + entries_fn = os.path.join(base, self.dirname, 'entries') + if not os.path.exists(entries_fn): + # FIXME: should we warn? + continue + + dirurl, localrev = self._get_svn_url_rev(base) + + if base == location: + base_url = dirurl + '/' # save the root url + elif not dirurl or not dirurl.startswith(base_url): + dirs[:] = [] + continue # not part of the same svn tree, skip it + revision = max(revision, localrev) + return revision + + def get_url_rev(self): + # hotfix the URL scheme after removing svn+ from svn+ssh:// readd it + url, rev = super(Subversion, self).get_url_rev() + if url.startswith('ssh://'): + url = 'svn+' + url + return url, rev + + def get_url(self, location): + # In cases where the source is in a subdirectory, not alongside + # setup.py we have to look up in the location until we find a real + # setup.py + orig_location = location + while not os.path.exists(os.path.join(location, 'setup.py')): + last_location = location + location = os.path.dirname(location) + if location == last_location: + # We've traversed up to the root of the filesystem without + # finding setup.py + logger.warning( + "Could not find setup.py for directory %s (tried all " + "parent directories)", + orig_location, + ) + return None + + return self._get_svn_url_rev(location)[0] + + def _get_svn_url_rev(self, location): + from pip.exceptions import InstallationError + + entries_path = os.path.join(location, self.dirname, 'entries') + if os.path.exists(entries_path): + with open(entries_path) as f: + data = f.read() + else: # subversion >= 1.7 does not have the 'entries' file + data = '' + + if (data.startswith('8') or + data.startswith('9') or + data.startswith('10')): + data = list(map(str.splitlines, data.split('\n\x0c\n'))) + del data[0][0] # get rid of the '8' + url = data[0][3] + revs = [int(d[9]) for d in data if len(d) > 9 and d[9]] + [0] + elif data.startswith('<?xml'): + match = _svn_xml_url_re.search(data) + if not match: + raise ValueError('Badly formatted data: %r' % data) + url = match.group(1) # get repository URL + revs = [int(m.group(1)) for m in _svn_rev_re.finditer(data)] + [0] + else: + try: + # subversion >= 1.7 + xml = self.run_command( + ['info', '--xml', location], + show_stdout=False, + ) + url = _svn_info_xml_url_re.search(xml).group(1) + revs = [ + int(m.group(1)) for m in _svn_info_xml_rev_re.finditer(xml) + ] + except InstallationError: + url, revs = None, [] + + if revs: + rev = max(revs) + else: + rev = 0 + + return url, rev + + def get_src_requirement(self, dist, location): + repo = self.get_url(location) + if repo is None: + return None + # FIXME: why not project name? 
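+        # Editorial illustration (assumed naming, not part of the patch):
+        # for a distribution named "Foo-Bar" at version 1.0, dist.egg_name()
+        # is roughly "Foo_Bar-1.0-py3.4", so splitting on the first "-"
+        # keeps the filename-safe project name "Foo_Bar".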
+ egg_project_name = dist.egg_name().split('-', 1)[0] + rev = self.get_revision(location) + return 'svn+%s@%s#egg=%s' % (repo, rev, egg_project_name) + + def check_version(self, dest, rev_options): + """Always assume the versions don't match""" + return False + + +def get_rev_options(url, rev): + if rev: + rev_options = ['-r', rev] + else: + rev_options = [] + + r = urllib_parse.urlsplit(url) + if hasattr(r, 'username'): + # >= Python-2.5 + username, password = r.username, r.password + else: + netloc = r[1] + if '@' in netloc: + auth = netloc.split('@')[0] + if ':' in auth: + username, password = auth.split(':', 1) + else: + username, password = auth, None + else: + username, password = None, None + + if username: + rev_options += ['--username', username] + if password: + rev_options += ['--password', password] + return rev_options + + +vcs.register(Subversion) diff --git a/lib/python3.4/site-packages/pip/wheel.py b/lib/python3.4/site-packages/pip/wheel.py new file mode 100644 index 0000000..ffda773 --- /dev/null +++ b/lib/python3.4/site-packages/pip/wheel.py @@ -0,0 +1,854 @@ +""" +Support for installing and building the "wheel" binary package format. +""" +from __future__ import absolute_import + +import compileall +import csv +import errno +import functools +import hashlib +import logging +import os +import os.path +import re +import shutil +import stat +import sys +import tempfile +import warnings + +from base64 import urlsafe_b64encode +from email.parser import Parser + +from pip._vendor.six import StringIO + +import pip +from pip.compat import expanduser +from pip.download import path_to_url, unpack_url +from pip.exceptions import ( + InstallationError, InvalidWheelFilename, UnsupportedWheel) +from pip.locations import distutils_scheme, PIP_DELETE_MARKER_FILENAME +from pip import pep425tags +from pip.utils import ( + call_subprocess, ensure_dir, captured_stdout, rmtree, canonicalize_name, + read_chunks) +from pip.utils.ui import open_spinner +from pip.utils.logging import indent_log +from pip.utils.setuptools_build import SETUPTOOLS_SHIM +from pip._vendor.distlib.scripts import ScriptMaker +from pip._vendor import pkg_resources +from pip._vendor.six.moves import configparser + + +wheel_ext = '.whl' + +VERSION_COMPATIBLE = (1, 0) + + +logger = logging.getLogger(__name__) + + +class WheelCache(object): + """A cache of wheels for future installs.""" + + def __init__(self, cache_dir, format_control): + """Create a wheel cache. + + :param cache_dir: The root of the cache. + :param format_control: A pip.index.FormatControl object to limit + binaries being read from the cache. + """ + self._cache_dir = expanduser(cache_dir) if cache_dir else None + self._format_control = format_control + + def cached_wheel(self, link, package_name): + return cached_wheel( + self._cache_dir, link, self._format_control, package_name) + + +def _cache_for_link(cache_dir, link): + """ + Return a directory to store cached wheels in for link. + + Because there are M wheels for any one sdist, we provide a directory + to cache them in, and then consult that directory when looking up + cache hits. + + We only insert things into the cache if they have plausible version + numbers, so that we don't contaminate the cache with things that were not + unique. E.g. ./package might have dozens of installs done for it and build + a version of 0.0...and if we built and cached a wheel, we'd end up using + the same wheel even if the source has been edited. + + :param cache_dir: The cache_dir being used by pip. 
+ :param link: The link of the sdist for which this will cache wheels. + """ + + # We want to generate an url to use as our cache key, we don't want to just + # re-use the URL because it might have other items in the fragment and we + # don't care about those. + key_parts = [link.url_without_fragment] + if link.hash_name is not None and link.hash is not None: + key_parts.append("=".join([link.hash_name, link.hash])) + key_url = "#".join(key_parts) + + # Encode our key url with sha224, we'll use this because it has similar + # security properties to sha256, but with a shorter total output (and thus + # less secure). However the differences don't make a lot of difference for + # our use case here. + hashed = hashlib.sha224(key_url.encode()).hexdigest() + + # We want to nest the directories some to prevent having a ton of top level + # directories where we might run out of sub directories on some FS. + parts = [hashed[:2], hashed[2:4], hashed[4:6], hashed[6:]] + + # Inside of the base location for cached wheels, expand our parts and join + # them all together. + return os.path.join(cache_dir, "wheels", *parts) + + +def cached_wheel(cache_dir, link, format_control, package_name): + if not cache_dir: + return link + if not link: + return link + if link.is_wheel: + return link + if not link.is_artifact: + return link + if not package_name: + return link + canonical_name = canonicalize_name(package_name) + formats = pip.index.fmt_ctl_formats(format_control, canonical_name) + if "binary" not in formats: + return link + root = _cache_for_link(cache_dir, link) + try: + wheel_names = os.listdir(root) + except OSError as e: + if e.errno in (errno.ENOENT, errno.ENOTDIR): + return link + raise + candidates = [] + for wheel_name in wheel_names: + try: + wheel = Wheel(wheel_name) + except InvalidWheelFilename: + continue + if not wheel.supported(): + # Built for a different python/arch/etc + continue + candidates.append((wheel.support_index_min(), wheel_name)) + if not candidates: + return link + candidates.sort() + path = os.path.join(root, candidates[0][1]) + return pip.index.Link(path_to_url(path)) + + +def rehash(path, algo='sha256', blocksize=1 << 20): + """Return (hash, length) for path using hashlib.new(algo)""" + h = hashlib.new(algo) + length = 0 + with open(path, 'rb') as f: + for block in read_chunks(f, size=blocksize): + length += len(block) + h.update(block) + digest = 'sha256=' + urlsafe_b64encode( + h.digest() + ).decode('latin1').rstrip('=') + return (digest, length) + + +def open_for_csv(name, mode): + if sys.version_info[0] < 3: + nl = {} + bin = 'b' + else: + nl = {'newline': ''} + bin = '' + return open(name, mode + bin, **nl) + + +def fix_script(path): + """Replace #!python with #!/path/to/python + Return True if file was changed.""" + # XXX RECORD hashes will need to be updated + if os.path.isfile(path): + with open(path, 'rb') as script: + firstline = script.readline() + if not firstline.startswith(b'#!python'): + return False + exename = sys.executable.encode(sys.getfilesystemencoding()) + firstline = b'#!' + exename + os.linesep.encode("ascii") + rest = script.read() + with open(path, 'wb') as script: + script.write(firstline) + script.write(rest) + return True + +dist_info_re = re.compile(r"""^(?P<namever>(?P<name>.+?)(-(?P<ver>\d.+?))?) + \.dist-info$""", re.VERBOSE) + + +def root_is_purelib(name, wheeldir): + """ + Return True if the extracted wheel in wheeldir should go into purelib. 
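+
+    Illustrative example (editorial note): a wheel whose
+    foo-1.0.dist-info/WHEEL file contains the line
+    "Root-Is-Purelib: true" (matched case-insensitively below) is
+    installed into purelib rather than platlib.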
+ """ + name_folded = name.replace("-", "_") + for item in os.listdir(wheeldir): + match = dist_info_re.match(item) + if match and match.group('name') == name_folded: + with open(os.path.join(wheeldir, item, 'WHEEL')) as wheel: + for line in wheel: + line = line.lower().rstrip() + if line == "root-is-purelib: true": + return True + return False + + +def get_entrypoints(filename): + if not os.path.exists(filename): + return {}, {} + + # This is done because you can pass a string to entry_points wrappers which + # means that they may or may not be valid INI files. The attempt here is to + # strip leading and trailing whitespace in order to make them valid INI + # files. + with open(filename) as fp: + data = StringIO() + for line in fp: + data.write(line.strip()) + data.write("\n") + data.seek(0) + + cp = configparser.RawConfigParser() + cp.optionxform = lambda option: option + cp.readfp(data) + + console = {} + gui = {} + if cp.has_section('console_scripts'): + console = dict(cp.items('console_scripts')) + if cp.has_section('gui_scripts'): + gui = dict(cp.items('gui_scripts')) + return console, gui + + +def move_wheel_files(name, req, wheeldir, user=False, home=None, root=None, + pycompile=True, scheme=None, isolated=False, prefix=None): + """Install a wheel""" + + if not scheme: + scheme = distutils_scheme( + name, user=user, home=home, root=root, isolated=isolated, + prefix=prefix, + ) + + if root_is_purelib(name, wheeldir): + lib_dir = scheme['purelib'] + else: + lib_dir = scheme['platlib'] + + info_dir = [] + data_dirs = [] + source = wheeldir.rstrip(os.path.sep) + os.path.sep + + # Record details of the files moved + # installed = files copied from the wheel to the destination + # changed = files changed while installing (scripts #! line typically) + # generated = files newly generated during the install (script wrappers) + installed = {} + changed = set() + generated = [] + + # Compile all of the pyc files that we're going to be installing + if pycompile: + with captured_stdout() as stdout: + with warnings.catch_warnings(): + warnings.filterwarnings('ignore') + compileall.compile_dir(source, force=True, quiet=True) + logger.debug(stdout.getvalue()) + + def normpath(src, p): + return os.path.relpath(src, p).replace(os.path.sep, '/') + + def record_installed(srcfile, destfile, modified=False): + """Map archive RECORD paths to installation RECORD paths.""" + oldpath = normpath(srcfile, wheeldir) + newpath = normpath(destfile, lib_dir) + installed[oldpath] = newpath + if modified: + changed.add(destfile) + + def clobber(source, dest, is_base, fixer=None, filter=None): + ensure_dir(dest) # common for the 'include' path + + for dir, subdirs, files in os.walk(source): + basedir = dir[len(source):].lstrip(os.path.sep) + destdir = os.path.join(dest, basedir) + if is_base and basedir.split(os.path.sep, 1)[0].endswith('.data'): + continue + for s in subdirs: + destsubdir = os.path.join(dest, basedir, s) + if is_base and basedir == '' and destsubdir.endswith('.data'): + data_dirs.append(s) + continue + elif (is_base and + s.endswith('.dist-info') and + # is self.req.project_name case preserving? 
+ s.lower().startswith( + req.project_name.replace('-', '_').lower())): + assert not info_dir, 'Multiple .dist-info directories' + info_dir.append(destsubdir) + for f in files: + # Skip unwanted files + if filter and filter(f): + continue + srcfile = os.path.join(dir, f) + destfile = os.path.join(dest, basedir, f) + # directory creation is lazy and after the file filtering above + # to ensure we don't install empty dirs; empty dirs can't be + # uninstalled. + ensure_dir(destdir) + + # We use copyfile (not move, copy, or copy2) to be extra sure + # that we are not moving directories over (copyfile fails for + # directories) as well as to ensure that we are not copying + # over any metadata because we want more control over what + # metadata we actually copy over. + shutil.copyfile(srcfile, destfile) + + # Copy over the metadata for the file, currently this only + # includes the atime and mtime. + st = os.stat(srcfile) + if hasattr(os, "utime"): + os.utime(destfile, (st.st_atime, st.st_mtime)) + + # If our file is executable, then make our destination file + # executable. + if os.access(srcfile, os.X_OK): + st = os.stat(srcfile) + permissions = ( + st.st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH + ) + os.chmod(destfile, permissions) + + changed = False + if fixer: + changed = fixer(destfile) + record_installed(srcfile, destfile, changed) + + clobber(source, lib_dir, True) + + assert info_dir, "%s .dist-info directory not found" % req + + # Get the defined entry points + ep_file = os.path.join(info_dir[0], 'entry_points.txt') + console, gui = get_entrypoints(ep_file) + + def is_entrypoint_wrapper(name): + # EP, EP.exe and EP-script.py are scripts generated for + # entry point EP by setuptools + if name.lower().endswith('.exe'): + matchname = name[:-4] + elif name.lower().endswith('-script.py'): + matchname = name[:-10] + elif name.lower().endswith(".pya"): + matchname = name[:-4] + else: + matchname = name + # Ignore setuptools-generated scripts + return (matchname in console or matchname in gui) + + for datadir in data_dirs: + fixer = None + filter = None + for subdir in os.listdir(os.path.join(wheeldir, datadir)): + fixer = None + if subdir == 'scripts': + fixer = fix_script + filter = is_entrypoint_wrapper + source = os.path.join(wheeldir, datadir, subdir) + dest = scheme[subdir] + clobber(source, dest, False, fixer=fixer, filter=filter) + + maker = ScriptMaker(None, scheme['scripts']) + + # Ensure old scripts are overwritten. + # See https://github.com/pypa/pip/issues/1800 + maker.clobber = True + + # Ensure we don't generate any variants for scripts because this is almost + # never what somebody wants. + # See https://bitbucket.org/pypa/distlib/issue/35/ + maker.variants = set(('', )) + + # This is required because otherwise distlib creates scripts that are not + # executable. + # See https://bitbucket.org/pypa/distlib/issue/32/ + maker.set_mode = True + + # Simplify the script and fix the fact that the default script swallows + # every single stack trace. + # See https://bitbucket.org/pypa/distlib/issue/34/ + # See https://bitbucket.org/pypa/distlib/issue/33/ + def _get_script_text(entry): + if entry.suffix is None: + raise InstallationError( + "Invalid script entry point: %s for req: %s - A callable " + "suffix is required. Cf https://packaging.python.org/en/" + "latest/distributing.html#console-scripts for more " + "information." 
% (entry, req) + ) + return maker.script_template % { + "module": entry.prefix, + "import_name": entry.suffix.split(".")[0], + "func": entry.suffix, + } + + maker._get_script_text = _get_script_text + maker.script_template = """# -*- coding: utf-8 -*- +import re +import sys + +from %(module)s import %(import_name)s + +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(%(func)s()) +""" + + # Special case pip and setuptools to generate versioned wrappers + # + # The issue is that some projects (specifically, pip and setuptools) use + # code in setup.py to create "versioned" entry points - pip2.7 on Python + # 2.7, pip3.3 on Python 3.3, etc. But these entry points are baked into + # the wheel metadata at build time, and so if the wheel is installed with + # a *different* version of Python the entry points will be wrong. The + # correct fix for this is to enhance the metadata to be able to describe + # such versioned entry points, but that won't happen till Metadata 2.0 is + # available. + # In the meantime, projects using versioned entry points will either have + # incorrect versioned entry points, or they will not be able to distribute + # "universal" wheels (i.e., they will need a wheel per Python version). + # + # Because setuptools and pip are bundled with _ensurepip and virtualenv, + # we need to use universal wheels. So, as a stopgap until Metadata 2.0, we + # override the versioned entry points in the wheel and generate the + # correct ones. This code is purely a short-term measure until Metadat 2.0 + # is available. + # + # To add the level of hack in this section of code, in order to support + # ensurepip this code will look for an ``ENSUREPIP_OPTIONS`` environment + # variable which will control which version scripts get installed. + # + # ENSUREPIP_OPTIONS=altinstall + # - Only pipX.Y and easy_install-X.Y will be generated and installed + # ENSUREPIP_OPTIONS=install + # - pipX.Y, pipX, easy_install-X.Y will be generated and installed. Note + # that this option is technically if ENSUREPIP_OPTIONS is set and is + # not altinstall + # DEFAULT + # - The default behavior is to install pip, pipX, pipX.Y, easy_install + # and easy_install-X.Y. 
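+    # Worked example (editorial, assuming CPython 3.4): with
+    # ENSUREPIP_OPTIONS=install the code below skips the bare "pip"
+    # wrapper but generates "pip3" (sys.version[:1]) and "pip3.4"
+    # (sys.version[:3]); with ENSUREPIP_OPTIONS=altinstall only "pip3.4"
+    # is generated.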
+ pip_script = console.pop('pip', None) + if pip_script: + if "ENSUREPIP_OPTIONS" not in os.environ: + spec = 'pip = ' + pip_script + generated.extend(maker.make(spec)) + + if os.environ.get("ENSUREPIP_OPTIONS", "") != "altinstall": + spec = 'pip%s = %s' % (sys.version[:1], pip_script) + generated.extend(maker.make(spec)) + + spec = 'pip%s = %s' % (sys.version[:3], pip_script) + generated.extend(maker.make(spec)) + # Delete any other versioned pip entry points + pip_ep = [k for k in console if re.match(r'pip(\d(\.\d)?)?$', k)] + for k in pip_ep: + del console[k] + easy_install_script = console.pop('easy_install', None) + if easy_install_script: + if "ENSUREPIP_OPTIONS" not in os.environ: + spec = 'easy_install = ' + easy_install_script + generated.extend(maker.make(spec)) + + spec = 'easy_install-%s = %s' % (sys.version[:3], easy_install_script) + generated.extend(maker.make(spec)) + # Delete any other versioned easy_install entry points + easy_install_ep = [ + k for k in console if re.match(r'easy_install(-\d\.\d)?$', k) + ] + for k in easy_install_ep: + del console[k] + + # Generate the console and GUI entry points specified in the wheel + if len(console) > 0: + generated.extend( + maker.make_multiple(['%s = %s' % kv for kv in console.items()]) + ) + if len(gui) > 0: + generated.extend( + maker.make_multiple( + ['%s = %s' % kv for kv in gui.items()], + {'gui': True} + ) + ) + + # Record pip as the installer + installer = os.path.join(info_dir[0], 'INSTALLER') + temp_installer = os.path.join(info_dir[0], 'INSTALLER.pip') + with open(temp_installer, 'wb') as installer_file: + installer_file.write(b'pip\n') + shutil.move(temp_installer, installer) + generated.append(installer) + + # Record details of all files installed + record = os.path.join(info_dir[0], 'RECORD') + temp_record = os.path.join(info_dir[0], 'RECORD.pip') + with open_for_csv(record, 'r') as record_in: + with open_for_csv(temp_record, 'w+') as record_out: + reader = csv.reader(record_in) + writer = csv.writer(record_out) + for row in reader: + row[0] = installed.pop(row[0], row[0]) + if row[0] in changed: + row[1], row[2] = rehash(row[0]) + writer.writerow(row) + for f in generated: + h, l = rehash(f) + writer.writerow((f, h, l)) + for f in installed: + writer.writerow((installed[f], '', '')) + shutil.move(temp_record, record) + + +def _unique(fn): + @functools.wraps(fn) + def unique(*args, **kw): + seen = set() + for item in fn(*args, **kw): + if item not in seen: + seen.add(item) + yield item + return unique + + +# TODO: this goes somewhere besides the wheel module +@_unique +def uninstallation_paths(dist): + """ + Yield all the uninstallation paths for dist based on RECORD-without-.pyc + + Yield paths to all the files in RECORD. For each .py file in RECORD, add + the .pyc in the same directory. + + UninstallPathSet.add() takes care of the __pycache__ .pyc. + """ + from pip.utils import FakeFile # circular import + r = csv.reader(FakeFile(dist.get_metadata_lines('RECORD'))) + for row in r: + path = os.path.join(dist.location, row[0]) + yield path + if path.endswith('.py'): + dn, fn = os.path.split(path) + base = fn[:-3] + path = os.path.join(dn, base + '.pyc') + yield path + + +def wheel_version(source_dir): + """ + Return the Wheel-Version of an extracted wheel, if possible. + + Otherwise, return False if we couldn't parse / extract it. 
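+
+    Illustrative example (editorial note): a WHEEL metadata file
+    containing the line "Wheel-Version: 1.0" yields the tuple (1, 0).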
+ """ + try: + dist = [d for d in pkg_resources.find_on_path(None, source_dir)][0] + + wheel_data = dist.get_metadata('WHEEL') + wheel_data = Parser().parsestr(wheel_data) + + version = wheel_data['Wheel-Version'].strip() + version = tuple(map(int, version.split('.'))) + return version + except: + return False + + +def check_compatibility(version, name): + """ + Raises errors or warns if called with an incompatible Wheel-Version. + + Pip should refuse to install a Wheel-Version that's a major series + ahead of what it's compatible with (e.g 2.0 > 1.1); and warn when + installing a version only minor version ahead (e.g 1.2 > 1.1). + + version: a 2-tuple representing a Wheel-Version (Major, Minor) + name: name of wheel or package to raise exception about + + :raises UnsupportedWheel: when an incompatible Wheel-Version is given + """ + if not version: + raise UnsupportedWheel( + "%s is in an unsupported or invalid wheel" % name + ) + if version[0] > VERSION_COMPATIBLE[0]: + raise UnsupportedWheel( + "%s's Wheel-Version (%s) is not compatible with this version " + "of pip" % (name, '.'.join(map(str, version))) + ) + elif version > VERSION_COMPATIBLE: + logger.warning( + 'Installing from a newer Wheel-Version (%s)', + '.'.join(map(str, version)), + ) + + +class Wheel(object): + """A wheel file""" + + # TODO: maybe move the install code into this class + + wheel_file_re = re.compile( + r"""^(?P<namever>(?P<name>.+?)-(?P<ver>\d.*?)) + ((-(?P<build>\d.*?))?-(?P<pyver>.+?)-(?P<abi>.+?)-(?P<plat>.+?) + \.whl|\.dist-info)$""", + re.VERBOSE + ) + + def __init__(self, filename): + """ + :raises InvalidWheelFilename: when the filename is invalid for a wheel + """ + wheel_info = self.wheel_file_re.match(filename) + if not wheel_info: + raise InvalidWheelFilename( + "%s is not a valid wheel filename." % filename + ) + self.filename = filename + self.name = wheel_info.group('name').replace('_', '-') + # we'll assume "_" means "-" due to wheel naming scheme + # (https://github.com/pypa/pip/issues/1150) + self.version = wheel_info.group('ver').replace('_', '-') + self.pyversions = wheel_info.group('pyver').split('.') + self.abis = wheel_info.group('abi').split('.') + self.plats = wheel_info.group('plat').split('.') + + # All the tag combinations from this file + self.file_tags = set( + (x, y, z) for x in self.pyversions + for y in self.abis for z in self.plats + ) + + def support_index_min(self, tags=None): + """ + Return the lowest index that one of the wheel's file_tag combinations + achieves in the supported_tags list e.g. if there are 8 supported tags, + and one of the file tags is first in the list, then return 0. Returns + None is the wheel is not supported. 
+ """ + if tags is None: # for mock + tags = pep425tags.supported_tags + indexes = [tags.index(c) for c in self.file_tags if c in tags] + return min(indexes) if indexes else None + + def supported(self, tags=None): + """Is this wheel supported on this system?""" + if tags is None: # for mock + tags = pep425tags.supported_tags + return bool(set(tags).intersection(self.file_tags)) + + +class WheelBuilder(object): + """Build wheels from a RequirementSet.""" + + def __init__(self, requirement_set, finder, build_options=None, + global_options=None): + self.requirement_set = requirement_set + self.finder = finder + self._cache_root = requirement_set._wheel_cache._cache_dir + self._wheel_dir = requirement_set.wheel_download_dir + self.build_options = build_options or [] + self.global_options = global_options or [] + + def _build_one(self, req, output_dir, python_tag=None): + """Build one wheel. + + :return: The filename of the built wheel, or None if the build failed. + """ + tempd = tempfile.mkdtemp('pip-wheel-') + try: + if self.__build_one(req, tempd, python_tag=python_tag): + try: + wheel_name = os.listdir(tempd)[0] + wheel_path = os.path.join(output_dir, wheel_name) + shutil.move(os.path.join(tempd, wheel_name), wheel_path) + logger.info('Stored in directory: %s', output_dir) + return wheel_path + except: + pass + # Ignore return, we can't do anything else useful. + self._clean_one(req) + return None + finally: + rmtree(tempd) + + def _base_setup_args(self, req): + return [ + sys.executable, "-u", '-c', + SETUPTOOLS_SHIM % req.setup_py + ] + list(self.global_options) + + def __build_one(self, req, tempd, python_tag=None): + base_args = self._base_setup_args(req) + + spin_message = 'Running setup.py bdist_wheel for %s' % (req.name,) + with open_spinner(spin_message) as spinner: + logger.debug('Destination directory: %s', tempd) + wheel_args = base_args + ['bdist_wheel', '-d', tempd] \ + + self.build_options + + if python_tag is not None: + wheel_args += ["--python-tag", python_tag] + + try: + call_subprocess(wheel_args, cwd=req.source_dir, + show_stdout=False, spinner=spinner) + return True + except: + spinner.finish("error") + logger.error('Failed building wheel for %s', req.name) + return False + + def _clean_one(self, req): + base_args = self._base_setup_args(req) + + logger.info('Running setup.py clean for %s', req.name) + clean_args = base_args + ['clean', '--all'] + try: + call_subprocess(clean_args, cwd=req.source_dir, show_stdout=False) + return True + except: + logger.error('Failed cleaning build dir for %s', req.name) + return False + + def build(self, autobuilding=False): + """Build wheels. + + :param unpack: If True, replace the sdist we built from with the + newly built wheel, in preparation for installation. + :return: True if all the wheels built correctly. 
+ """ + assert self._wheel_dir or (autobuilding and self._cache_root) + # unpack sdists and constructs req set + self.requirement_set.prepare_files(self.finder) + + reqset = self.requirement_set.requirements.values() + + buildset = [] + for req in reqset: + if req.constraint: + continue + if req.is_wheel: + if not autobuilding: + logger.info( + 'Skipping %s, due to already being wheel.', req.name) + elif req.editable: + if not autobuilding: + logger.info( + 'Skipping bdist_wheel for %s, due to being editable', + req.name) + elif autobuilding and req.link and not req.link.is_artifact: + pass + elif autobuilding and not req.source_dir: + pass + else: + if autobuilding: + link = req.link + base, ext = link.splitext() + if pip.index.egg_info_matches(base, None, link) is None: + # Doesn't look like a package - don't autobuild a wheel + # because we'll have no way to lookup the result sanely + continue + if "binary" not in pip.index.fmt_ctl_formats( + self.finder.format_control, + canonicalize_name(req.name)): + logger.info( + "Skipping bdist_wheel for %s, due to binaries " + "being disabled for it.", req.name) + continue + buildset.append(req) + + if not buildset: + return True + + # Build the wheels. + logger.info( + 'Building wheels for collected packages: %s', + ', '.join([req.name for req in buildset]), + ) + with indent_log(): + build_success, build_failure = [], [] + for req in buildset: + python_tag = None + if autobuilding: + python_tag = pep425tags.implementation_tag + output_dir = _cache_for_link(self._cache_root, req.link) + try: + ensure_dir(output_dir) + except OSError as e: + logger.warn("Building wheel for %s failed: %s", + req.name, e) + build_failure.append(req) + continue + else: + output_dir = self._wheel_dir + wheel_file = self._build_one( + req, output_dir, + python_tag=python_tag, + ) + if wheel_file: + build_success.append(req) + if autobuilding: + # XXX: This is mildly duplicative with prepare_files, + # but not close enough to pull out to a single common + # method. + # The code below assumes temporary source dirs - + # prevent it doing bad things. + if req.source_dir and not os.path.exists(os.path.join( + req.source_dir, PIP_DELETE_MARKER_FILENAME)): + raise AssertionError( + "bad source dir - missing marker") + # Delete the source we built the wheel from + req.remove_temporary_source() + # set the build directory again - name is known from + # the work prepare_files did. + req.source_dir = req.build_location( + self.requirement_set.build_dir) + # Update the link for this. 
+ req.link = pip.index.Link( + path_to_url(wheel_file)) + assert req.link.is_wheel + # extract the wheel into the dir + unpack_url( + req.link, req.source_dir, None, False, + session=self.requirement_set.session) + else: + build_failure.append(req) + + # notify success/failure + if build_success: + logger.info( + 'Successfully built %s', + ' '.join([req.name for req in build_success]), + ) + if build_failure: + logger.info( + 'Failed to build %s', + ' '.join([req.name for req in build_failure]), + ) + # Return True if all builds were successful + return len(build_failure) == 0 diff --git a/lib/python3.4/site-packages/pkg_resources/__init__.py b/lib/python3.4/site-packages/pkg_resources/__init__.py index d09e0b6..d04cd34 100644 --- a/lib/python3.4/site-packages/pkg_resources/__init__.py +++ b/lib/python3.4/site-packages/pkg_resources/__init__.py @@ -45,21 +45,8 @@ except ImportError: # Python 3.2 compatibility import imp as _imp -PY3 = sys.version_info > (3,) -PY2 = not PY3 - -if PY3: - from urllib.parse import urlparse, urlunparse - -if PY2: - from urlparse import urlparse, urlunparse - -if PY3: - string_types = str, -else: - string_types = str, eval('unicode') - -iteritems = (lambda i: i.items()) if PY3 else lambda i: i.iteritems() +from pkg_resources.extern import six +from pkg_resources.extern.six.moves import urllib, map, filter # capture these to bypass sandboxing from os import utime @@ -73,10 +60,11 @@ except ImportError: from os import open as os_open from os.path import isdir, split -# Avoid try/except due to potential problems with delayed import mechanisms. -if sys.version_info >= (3, 3) and sys.implementation.name == "cpython": +try: import importlib.machinery as importlib_machinery -else: + # access attribute to force import under delayed import mechanisms. + importlib_machinery.__name__ +except ImportError: importlib_machinery = None try: @@ -84,15 +72,9 @@ try: except ImportError: pass -try: - import pkg_resources._vendor.packaging.version - import pkg_resources._vendor.packaging.specifiers - packaging = pkg_resources._vendor.packaging -except ImportError: - # fallback to naturally-installed version; allows system packagers to - # omit vendored packages. - import packaging.version - import packaging.specifiers +from pkg_resources.extern import packaging +__import__('pkg_resources.extern.packaging.version') +__import__('pkg_resources.extern.packaging.specifiers') if (3, 0) < sys.version_info < (3, 3): @@ -552,7 +534,7 @@ run_main = run_script def get_distribution(dist): """Return a current distribution object for a Requirement or string""" - if isinstance(dist, string_types): + if isinstance(dist, six.string_types): dist = Requirement.parse(dist) if isinstance(dist, Requirement): dist = get_provider(dist) @@ -771,7 +753,7 @@ class WorkingSet(object): will be called. 
""" if insert: - dist.insert_on(self.entries, entry) + dist.insert_on(self.entries, entry, replace=replace) if entry is None: entry = dist.location @@ -1192,22 +1174,23 @@ class ResourceManager: old_exc = sys.exc_info()[1] cache_path = self.extraction_path or get_default_cache() - err = ExtractionError("""Can't extract file(s) to egg cache + tmpl = textwrap.dedent(""" + Can't extract file(s) to egg cache -The following error occurred while trying to extract file(s) to the Python egg -cache: + The following error occurred while trying to extract file(s) to the Python egg + cache: - %s + {old_exc} -The Python egg cache directory is currently set to: + The Python egg cache directory is currently set to: - %s + {cache_path} -Perhaps your account does not have write access to this directory? You can -change the cache directory by setting the PYTHON_EGG_CACHE environment -variable to point to an accessible directory. -""" % (old_exc, cache_path) - ) + Perhaps your account does not have write access to this directory? You can + change the cache directory by setting the PYTHON_EGG_CACHE environment + variable to point to an accessible directory. + """).lstrip() + err = ExtractionError(tmpl.format(**locals())) err.manager = self err.cache_path = cache_path err.original_error = old_exc @@ -1534,7 +1517,7 @@ class MarkerEvaluation(object): """ return dict( (key.replace('.', '_'), value) - for key, value in env + for key, value in env.items() ) @classmethod @@ -1716,7 +1699,7 @@ class EggProvider(NullProvider): path = self.module_path old = None while path!=old: - if path.lower().endswith('.egg'): + if _is_unpacked_egg(path): self.egg_name = os.path.basename(path) self.egg_info = os.path.join(path, 'EGG-INFO') self.egg_root = path @@ -1743,10 +1726,13 @@ class DefaultProvider(EggProvider): with open(path, 'rb') as stream: return stream.read() -register_loader_type(type(None), DefaultProvider) + @classmethod + def _register(cls): + loader_cls = getattr(importlib_machinery, 'SourceFileLoader', + type(None)) + register_loader_type(loader_cls, cls) -if importlib_machinery is not None: - register_loader_type(importlib_machinery.SourceFileLoader, DefaultProvider) +DefaultProvider._register() class EmptyProvider(NullProvider): @@ -2014,11 +2000,11 @@ class FileMetadata(EmptyProvider): self.path = path def has_metadata(self, name): - return name=='PKG-INFO' + return name=='PKG-INFO' and os.path.isfile(self.path) def get_metadata(self, name): if name=='PKG-INFO': - with open(self.path,'rU') as f: + with io.open(self.path, encoding='utf-8') as f: metadata = f.read() return metadata raise KeyError("No metadata except PKG-INFO is available") @@ -2099,7 +2085,7 @@ def find_eggs_in_zip(importer, path_item, only=False): # don't yield nested distros return for subitem in metadata.resource_listdir('/'): - if subitem.endswith('.egg'): + if _is_unpacked_egg(subitem): subpath = os.path.join(path_item, subitem) for dist in find_eggs_in_zip(zipimport.zipimporter(subpath), subpath): yield dist @@ -2115,8 +2101,7 @@ def find_on_path(importer, path_item, only=False): path_item = _normalize_cached(path_item) if os.path.isdir(path_item) and os.access(path_item, os.R_OK): - if path_item.lower().endswith('.egg'): - # unpacked egg + if _is_unpacked_egg(path_item): yield Distribution.from_filename( path_item, metadata=PathMetadata( path_item, os.path.join(path_item,'EGG-INFO') @@ -2136,7 +2121,7 @@ def find_on_path(importer, path_item, only=False): yield Distribution.from_location( path_item, entry, metadata, 
precedence=DEVELOP_DIST ) - elif not only and lower.endswith('.egg'): + elif not only and _is_unpacked_egg(entry): dists = find_distributions(os.path.join(path_item, entry)) for dist in dists: yield dist @@ -2153,7 +2138,7 @@ def find_on_path(importer, path_item, only=False): break register_finder(pkgutil.ImpImporter, find_on_path) -if importlib_machinery is not None: +if hasattr(importlib_machinery, 'FileFinder'): register_finder(importlib_machinery.FileFinder, find_on_path) _declare_state('dict', _namespace_handlers={}) @@ -2199,11 +2184,28 @@ def _handle_ns(packageName, path_item): path = module.__path__ path.append(subpath) loader.load_module(packageName) - for path_item in path: - if path_item not in module.__path__: - module.__path__.append(path_item) + _rebuild_mod_path(path, packageName, module) return subpath + +def _rebuild_mod_path(orig_path, package_name, module): + """ + Rebuild module.__path__ ensuring that all entries are ordered + corresponding to their sys.path order + """ + sys_path = [_normalize_cached(p) for p in sys.path] + def position_in_sys_path(p): + """ + Return the ordinal of the path based on its position in sys.path + """ + parts = p.split(os.sep) + parts = parts[:-(package_name.count('.') + 1)] + return sys_path.index(_normalize_cached(os.sep.join(parts))) + + orig_path.sort(key=position_in_sys_path) + module.__path__[:] = [_normalize_cached(p) for p in orig_path] + + def declare_namespace(packageName): """Declare that package 'packageName' is a namespace package""" @@ -2262,7 +2264,7 @@ def file_ns_handler(importer, path_item, packageName, module): register_namespace_handler(pkgutil.ImpImporter, file_ns_handler) register_namespace_handler(zipimport.zipimporter, file_ns_handler) -if importlib_machinery is not None: +if hasattr(importlib_machinery, 'FileFinder'): register_namespace_handler(importlib_machinery.FileFinder, file_ns_handler) @@ -2283,6 +2285,14 @@ def _normalize_cached(filename, _cache={}): _cache[filename] = result = normalize_path(filename) return result +def _is_unpacked_egg(path): + """ + Determine if given path appears to be an unpacked egg. + """ + return ( + path.lower().endswith('.egg') + ) + def _set_parent_ns(packageName): parts = packageName.split('.') name = parts.pop() @@ -2293,7 +2303,7 @@ def _set_parent_ns(packageName): def yield_lines(strs): """Yield non-empty/non-comment lines of a string or sequence""" - if isinstance(strs, string_types): + if isinstance(strs, six.string_types): for s in strs.splitlines(): s = s.strip() # skip blank lines/comments @@ -2460,12 +2470,24 @@ class EntryPoint(object): def _remove_md5_fragment(location): if not location: return '' - parsed = urlparse(location) + parsed = urllib.parse.urlparse(location) if parsed[-1].startswith('md5='): - return urlunparse(parsed[:-1] + ('',)) + return urllib.parse.urlunparse(parsed[:-1] + ('',)) return location +def _version_from_file(lines): + """ + Given an iterable of lines from a Metadata file, return + the value of the Version field, if present, or None otherwise. 
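+
+    Illustrative example (editorial note): given the lines
+    ['Name: foo', 'Version: 1.2.3'] the function returns '1.2.3'
+    (normalized through safe_version).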
+ """ + is_version_line = lambda line: line.lower().startswith('version:') + version_lines = filter(is_version_line, lines) + line = next(iter(version_lines), '') + _, _, value = line.partition(':') + return safe_version(value.strip()) or None + + class Distribution(object): """Wrap an actual or potential sys.path entry w/metadata""" PKG_INFO = 'PKG-INFO' @@ -2483,21 +2505,24 @@ class Distribution(object): self._provider = metadata or empty_provider @classmethod - def from_location(cls, location, basename, metadata=None,**kw): + def from_location(cls, location, basename, metadata=None, **kw): project_name, version, py_version, platform = [None]*4 basename, ext = os.path.splitext(basename) if ext.lower() in _distributionImpl: - # .dist-info gets much metadata differently + cls = _distributionImpl[ext.lower()] + match = EGG_NAME(basename) if match: project_name, version, py_version, platform = match.group( - 'name','ver','pyver','plat' + 'name', 'ver', 'pyver', 'plat' ) - cls = _distributionImpl[ext.lower()] return cls( location, metadata, project_name=project_name, version=version, py_version=py_version, platform=platform, **kw - ) + )._reload_version() + + def _reload_version(self): + return self @property def hashcmp(self): @@ -2584,13 +2609,11 @@ class Distribution(object): try: return self._version except AttributeError: - for line in self._get_metadata(self.PKG_INFO): - if line.lower().startswith('version:'): - self._version = safe_version(line.split(':',1)[1].strip()) - return self._version - else: + version = _version_from_file(self._get_metadata(self.PKG_INFO)) + if version is None: tmpl = "Missing 'Version:' header and/or %s file" raise ValueError(tmpl % self.PKG_INFO, self) + return version @property def _dep_map(self): @@ -2635,7 +2658,7 @@ class Distribution(object): """Ensure distribution is importable on `path` (default=sys.path)""" if path is None: path = sys.path - self.insert_on(path) + self.insert_on(path, replace=True) if path is sys.path: fixup_namespace_packages(self.location) for pkg in self._get_metadata('namespace_packages.txt'): @@ -2712,7 +2735,7 @@ class Distribution(object): """Return the EntryPoint object for `group`+`name`, or ``None``""" return self.get_entry_map(group).get(name) - def insert_on(self, path, loc = None): + def insert_on(self, path, loc=None, replace=False): """Insert self.location in path before its nearest parent directory""" loc = loc or self.location @@ -2736,7 +2759,10 @@ class Distribution(object): else: if path is sys.path: self.check_version_conflict() - path.append(loc) + if replace: + path.insert(0, loc) + else: + path.append(loc) return # p is the spot where we found or inserted loc; now remove duplicates @@ -2795,6 +2821,26 @@ class Distribution(object): return [dep for dep in self._dep_map if dep] +class EggInfoDistribution(Distribution): + + def _reload_version(self): + """ + Packages installed by distutils (e.g. numpy or scipy), + which uses an old safe_version, and so + their version numbers can get mangled when + converted to filenames (e.g., 1.11.0.dev0+2329eae to + 1.11.0.dev0_2329eae). These distributions will not be + parsed properly + downstream by Distribution and safe_version, so + take an extra step and try to get the version number from + the metadata file itself instead of the filename. 
+ """ + md_version = _version_from_file(self._get_metadata(self.PKG_INFO)) + if md_version: + self._version = md_version + return self + + class DistInfoDistribution(Distribution): """Wrap an actual or potential sys.path entry w/metadata, .dist-info style""" PKG_INFO = 'METADATA' @@ -2860,7 +2906,7 @@ class DistInfoDistribution(Distribution): _distributionImpl = { '.egg': Distribution, - '.egg-info': Distribution, + '.egg-info': EggInfoDistribution, '.dist-info': DistInfoDistribution, } diff --git a/lib/python3.4/site-packages/pkg_resources/_vendor/six.py b/lib/python3.4/site-packages/pkg_resources/_vendor/six.py new file mode 100644 index 0000000..190c023 --- /dev/null +++ b/lib/python3.4/site-packages/pkg_resources/_vendor/six.py @@ -0,0 +1,868 @@ +"""Utilities for writing code that runs on Python 2 and 3""" + +# Copyright (c) 2010-2015 Benjamin Peterson +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +from __future__ import absolute_import + +import functools +import itertools +import operator +import sys +import types + +__author__ = "Benjamin Peterson <benjamin@python.org>" +__version__ = "1.10.0" + + +# Useful for very coarse version differentiation. +PY2 = sys.version_info[0] == 2 +PY3 = sys.version_info[0] == 3 +PY34 = sys.version_info[0:2] >= (3, 4) + +if PY3: + string_types = str, + integer_types = int, + class_types = type, + text_type = str + binary_type = bytes + + MAXSIZE = sys.maxsize +else: + string_types = basestring, + integer_types = (int, long) + class_types = (type, types.ClassType) + text_type = unicode + binary_type = str + + if sys.platform.startswith("java"): + # Jython always uses 32 bits. + MAXSIZE = int((1 << 31) - 1) + else: + # It's possible to have sizeof(long) != sizeof(Py_ssize_t). + class X(object): + + def __len__(self): + return 1 << 31 + try: + len(X()) + except OverflowError: + # 32-bit + MAXSIZE = int((1 << 31) - 1) + else: + # 64-bit + MAXSIZE = int((1 << 63) - 1) + del X + + +def _add_doc(func, doc): + """Add documentation to a function.""" + func.__doc__ = doc + + +def _import_module(name): + """Import module, returning the module after the last dot.""" + __import__(name) + return sys.modules[name] + + +class _LazyDescr(object): + + def __init__(self, name): + self.name = name + + def __get__(self, obj, tp): + result = self._resolve() + setattr(obj, self.name, result) # Invokes __set__. + try: + # This is a bit ugly, but it avoids running this again by + # removing this descriptor. 
+ delattr(obj.__class__, self.name) + except AttributeError: + pass + return result + + +class MovedModule(_LazyDescr): + + def __init__(self, name, old, new=None): + super(MovedModule, self).__init__(name) + if PY3: + if new is None: + new = name + self.mod = new + else: + self.mod = old + + def _resolve(self): + return _import_module(self.mod) + + def __getattr__(self, attr): + _module = self._resolve() + value = getattr(_module, attr) + setattr(self, attr, value) + return value + + +class _LazyModule(types.ModuleType): + + def __init__(self, name): + super(_LazyModule, self).__init__(name) + self.__doc__ = self.__class__.__doc__ + + def __dir__(self): + attrs = ["__doc__", "__name__"] + attrs += [attr.name for attr in self._moved_attributes] + return attrs + + # Subclasses should override this + _moved_attributes = [] + + +class MovedAttribute(_LazyDescr): + + def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None): + super(MovedAttribute, self).__init__(name) + if PY3: + if new_mod is None: + new_mod = name + self.mod = new_mod + if new_attr is None: + if old_attr is None: + new_attr = name + else: + new_attr = old_attr + self.attr = new_attr + else: + self.mod = old_mod + if old_attr is None: + old_attr = name + self.attr = old_attr + + def _resolve(self): + module = _import_module(self.mod) + return getattr(module, self.attr) + + +class _SixMetaPathImporter(object): + + """ + A meta path importer to import six.moves and its submodules. + + This class implements a PEP302 finder and loader. It should be compatible + with Python 2.5 and all existing versions of Python3 + """ + + def __init__(self, six_module_name): + self.name = six_module_name + self.known_modules = {} + + def _add_module(self, mod, *fullnames): + for fullname in fullnames: + self.known_modules[self.name + "." + fullname] = mod + + def _get_module(self, fullname): + return self.known_modules[self.name + "." + fullname] + + def find_module(self, fullname, path=None): + if fullname in self.known_modules: + return self + return None + + def __get_module(self, fullname): + try: + return self.known_modules[fullname] + except KeyError: + raise ImportError("This loader does not know module " + fullname) + + def load_module(self, fullname): + try: + # in case of a reload + return sys.modules[fullname] + except KeyError: + pass + mod = self.__get_module(fullname) + if isinstance(mod, MovedModule): + mod = mod._resolve() + else: + mod.__loader__ = self + sys.modules[fullname] = mod + return mod + + def is_package(self, fullname): + """ + Return true, if the named module is a package. 
+ + We need this method to get correct spec objects with + Python 3.4 (see PEP451) + """ + return hasattr(self.__get_module(fullname), "__path__") + + def get_code(self, fullname): + """Return None + + Required, if is_package is implemented""" + self.__get_module(fullname) # eventually raises ImportError + return None + get_source = get_code # same as get_code + +_importer = _SixMetaPathImporter(__name__) + + +class _MovedItems(_LazyModule): + + """Lazy loading of moved objects""" + __path__ = [] # mark as package + + +_moved_attributes = [ + MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"), + MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"), + MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"), + MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"), + MovedAttribute("intern", "__builtin__", "sys"), + MovedAttribute("map", "itertools", "builtins", "imap", "map"), + MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"), + MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"), + MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"), + MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"), + MovedAttribute("reduce", "__builtin__", "functools"), + MovedAttribute("shlex_quote", "pipes", "shlex", "quote"), + MovedAttribute("StringIO", "StringIO", "io"), + MovedAttribute("UserDict", "UserDict", "collections"), + MovedAttribute("UserList", "UserList", "collections"), + MovedAttribute("UserString", "UserString", "collections"), + MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"), + MovedAttribute("zip", "itertools", "builtins", "izip", "zip"), + MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"), + MovedModule("builtins", "__builtin__"), + MovedModule("configparser", "ConfigParser"), + MovedModule("copyreg", "copy_reg"), + MovedModule("dbm_gnu", "gdbm", "dbm.gnu"), + MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"), + MovedModule("http_cookiejar", "cookielib", "http.cookiejar"), + MovedModule("http_cookies", "Cookie", "http.cookies"), + MovedModule("html_entities", "htmlentitydefs", "html.entities"), + MovedModule("html_parser", "HTMLParser", "html.parser"), + MovedModule("http_client", "httplib", "http.client"), + MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"), + MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"), + MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"), + MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"), + MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"), + MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"), + MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"), + MovedModule("cPickle", "cPickle", "pickle"), + MovedModule("queue", "Queue"), + MovedModule("reprlib", "repr"), + MovedModule("socketserver", "SocketServer"), + MovedModule("_thread", "thread", "_thread"), + MovedModule("tkinter", "Tkinter"), + MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"), + MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"), + MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"), + MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"), + MovedModule("tkinter_tix", "Tix", "tkinter.tix"), + MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"), + 
MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"), + MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"), + MovedModule("tkinter_colorchooser", "tkColorChooser", + "tkinter.colorchooser"), + MovedModule("tkinter_commondialog", "tkCommonDialog", + "tkinter.commondialog"), + MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"), + MovedModule("tkinter_font", "tkFont", "tkinter.font"), + MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"), + MovedModule("tkinter_tksimpledialog", "tkSimpleDialog", + "tkinter.simpledialog"), + MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"), + MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"), + MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"), + MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"), + MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"), + MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"), +] +# Add windows specific modules. +if sys.platform == "win32": + _moved_attributes += [ + MovedModule("winreg", "_winreg"), + ] + +for attr in _moved_attributes: + setattr(_MovedItems, attr.name, attr) + if isinstance(attr, MovedModule): + _importer._add_module(attr, "moves." + attr.name) +del attr + +_MovedItems._moved_attributes = _moved_attributes + +moves = _MovedItems(__name__ + ".moves") +_importer._add_module(moves, "moves") + + +class Module_six_moves_urllib_parse(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_parse""" + + +_urllib_parse_moved_attributes = [ + MovedAttribute("ParseResult", "urlparse", "urllib.parse"), + MovedAttribute("SplitResult", "urlparse", "urllib.parse"), + MovedAttribute("parse_qs", "urlparse", "urllib.parse"), + MovedAttribute("parse_qsl", "urlparse", "urllib.parse"), + MovedAttribute("urldefrag", "urlparse", "urllib.parse"), + MovedAttribute("urljoin", "urlparse", "urllib.parse"), + MovedAttribute("urlparse", "urlparse", "urllib.parse"), + MovedAttribute("urlsplit", "urlparse", "urllib.parse"), + MovedAttribute("urlunparse", "urlparse", "urllib.parse"), + MovedAttribute("urlunsplit", "urlparse", "urllib.parse"), + MovedAttribute("quote", "urllib", "urllib.parse"), + MovedAttribute("quote_plus", "urllib", "urllib.parse"), + MovedAttribute("unquote", "urllib", "urllib.parse"), + MovedAttribute("unquote_plus", "urllib", "urllib.parse"), + MovedAttribute("urlencode", "urllib", "urllib.parse"), + MovedAttribute("splitquery", "urllib", "urllib.parse"), + MovedAttribute("splittag", "urllib", "urllib.parse"), + MovedAttribute("splituser", "urllib", "urllib.parse"), + MovedAttribute("uses_fragment", "urlparse", "urllib.parse"), + MovedAttribute("uses_netloc", "urlparse", "urllib.parse"), + MovedAttribute("uses_params", "urlparse", "urllib.parse"), + MovedAttribute("uses_query", "urlparse", "urllib.parse"), + MovedAttribute("uses_relative", "urlparse", "urllib.parse"), +] +for attr in _urllib_parse_moved_attributes: + setattr(Module_six_moves_urllib_parse, attr.name, attr) +del attr + +Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes + +_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"), + "moves.urllib_parse", "moves.urllib.parse") + + +class Module_six_moves_urllib_error(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_error""" + + +_urllib_error_moved_attributes = [ + MovedAttribute("URLError", "urllib2", "urllib.error"), + 
MovedAttribute("HTTPError", "urllib2", "urllib.error"), + MovedAttribute("ContentTooShortError", "urllib", "urllib.error"), +] +for attr in _urllib_error_moved_attributes: + setattr(Module_six_moves_urllib_error, attr.name, attr) +del attr + +Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes + +_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"), + "moves.urllib_error", "moves.urllib.error") + + +class Module_six_moves_urllib_request(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_request""" + + +_urllib_request_moved_attributes = [ + MovedAttribute("urlopen", "urllib2", "urllib.request"), + MovedAttribute("install_opener", "urllib2", "urllib.request"), + MovedAttribute("build_opener", "urllib2", "urllib.request"), + MovedAttribute("pathname2url", "urllib", "urllib.request"), + MovedAttribute("url2pathname", "urllib", "urllib.request"), + MovedAttribute("getproxies", "urllib", "urllib.request"), + MovedAttribute("Request", "urllib2", "urllib.request"), + MovedAttribute("OpenerDirector", "urllib2", "urllib.request"), + MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"), + MovedAttribute("ProxyHandler", "urllib2", "urllib.request"), + MovedAttribute("BaseHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"), + MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"), + MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"), + MovedAttribute("FileHandler", "urllib2", "urllib.request"), + MovedAttribute("FTPHandler", "urllib2", "urllib.request"), + MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"), + MovedAttribute("UnknownHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"), + MovedAttribute("urlretrieve", "urllib", "urllib.request"), + MovedAttribute("urlcleanup", "urllib", "urllib.request"), + MovedAttribute("URLopener", "urllib", "urllib.request"), + MovedAttribute("FancyURLopener", "urllib", "urllib.request"), + MovedAttribute("proxy_bypass", "urllib", "urllib.request"), +] +for attr in _urllib_request_moved_attributes: + setattr(Module_six_moves_urllib_request, attr.name, attr) +del attr + +Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes + +_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"), + "moves.urllib_request", "moves.urllib.request") + + +class Module_six_moves_urllib_response(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_response""" + + +_urllib_response_moved_attributes = [ + MovedAttribute("addbase", "urllib", "urllib.response"), + MovedAttribute("addclosehook", "urllib", "urllib.response"), + MovedAttribute("addinfo", "urllib", "urllib.response"), + MovedAttribute("addinfourl", "urllib", 
"urllib.response"), +] +for attr in _urllib_response_moved_attributes: + setattr(Module_six_moves_urllib_response, attr.name, attr) +del attr + +Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes + +_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"), + "moves.urllib_response", "moves.urllib.response") + + +class Module_six_moves_urllib_robotparser(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_robotparser""" + + +_urllib_robotparser_moved_attributes = [ + MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"), +] +for attr in _urllib_robotparser_moved_attributes: + setattr(Module_six_moves_urllib_robotparser, attr.name, attr) +del attr + +Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes + +_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"), + "moves.urllib_robotparser", "moves.urllib.robotparser") + + +class Module_six_moves_urllib(types.ModuleType): + + """Create a six.moves.urllib namespace that resembles the Python 3 namespace""" + __path__ = [] # mark as package + parse = _importer._get_module("moves.urllib_parse") + error = _importer._get_module("moves.urllib_error") + request = _importer._get_module("moves.urllib_request") + response = _importer._get_module("moves.urllib_response") + robotparser = _importer._get_module("moves.urllib_robotparser") + + def __dir__(self): + return ['parse', 'error', 'request', 'response', 'robotparser'] + +_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"), + "moves.urllib") + + +def add_move(move): + """Add an item to six.moves.""" + setattr(_MovedItems, move.name, move) + + +def remove_move(name): + """Remove item from six.moves.""" + try: + delattr(_MovedItems, name) + except AttributeError: + try: + del moves.__dict__[name] + except KeyError: + raise AttributeError("no such move, %r" % (name,)) + + +if PY3: + _meth_func = "__func__" + _meth_self = "__self__" + + _func_closure = "__closure__" + _func_code = "__code__" + _func_defaults = "__defaults__" + _func_globals = "__globals__" +else: + _meth_func = "im_func" + _meth_self = "im_self" + + _func_closure = "func_closure" + _func_code = "func_code" + _func_defaults = "func_defaults" + _func_globals = "func_globals" + + +try: + advance_iterator = next +except NameError: + def advance_iterator(it): + return it.next() +next = advance_iterator + + +try: + callable = callable +except NameError: + def callable(obj): + return any("__call__" in klass.__dict__ for klass in type(obj).__mro__) + + +if PY3: + def get_unbound_function(unbound): + return unbound + + create_bound_method = types.MethodType + + def create_unbound_method(func, cls): + return func + + Iterator = object +else: + def get_unbound_function(unbound): + return unbound.im_func + + def create_bound_method(func, obj): + return types.MethodType(func, obj, obj.__class__) + + def create_unbound_method(func, cls): + return types.MethodType(func, None, cls) + + class Iterator(object): + + def next(self): + return type(self).__next__(self) + + callable = callable +_add_doc(get_unbound_function, + """Get the function out of a possibly unbound function""") + + +get_method_function = operator.attrgetter(_meth_func) +get_method_self = operator.attrgetter(_meth_self) +get_function_closure = operator.attrgetter(_func_closure) +get_function_code = operator.attrgetter(_func_code) +get_function_defaults = 
operator.attrgetter(_func_defaults) +get_function_globals = operator.attrgetter(_func_globals) + + +if PY3: + def iterkeys(d, **kw): + return iter(d.keys(**kw)) + + def itervalues(d, **kw): + return iter(d.values(**kw)) + + def iteritems(d, **kw): + return iter(d.items(**kw)) + + def iterlists(d, **kw): + return iter(d.lists(**kw)) + + viewkeys = operator.methodcaller("keys") + + viewvalues = operator.methodcaller("values") + + viewitems = operator.methodcaller("items") +else: + def iterkeys(d, **kw): + return d.iterkeys(**kw) + + def itervalues(d, **kw): + return d.itervalues(**kw) + + def iteritems(d, **kw): + return d.iteritems(**kw) + + def iterlists(d, **kw): + return d.iterlists(**kw) + + viewkeys = operator.methodcaller("viewkeys") + + viewvalues = operator.methodcaller("viewvalues") + + viewitems = operator.methodcaller("viewitems") + +_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.") +_add_doc(itervalues, "Return an iterator over the values of a dictionary.") +_add_doc(iteritems, + "Return an iterator over the (key, value) pairs of a dictionary.") +_add_doc(iterlists, + "Return an iterator over the (key, [values]) pairs of a dictionary.") + + +if PY3: + def b(s): + return s.encode("latin-1") + + def u(s): + return s + unichr = chr + import struct + int2byte = struct.Struct(">B").pack + del struct + byte2int = operator.itemgetter(0) + indexbytes = operator.getitem + iterbytes = iter + import io + StringIO = io.StringIO + BytesIO = io.BytesIO + _assertCountEqual = "assertCountEqual" + if sys.version_info[1] <= 1: + _assertRaisesRegex = "assertRaisesRegexp" + _assertRegex = "assertRegexpMatches" + else: + _assertRaisesRegex = "assertRaisesRegex" + _assertRegex = "assertRegex" +else: + def b(s): + return s + # Workaround for standalone backslash + + def u(s): + return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape") + unichr = unichr + int2byte = chr + + def byte2int(bs): + return ord(bs[0]) + + def indexbytes(buf, i): + return ord(buf[i]) + iterbytes = functools.partial(itertools.imap, ord) + import StringIO + StringIO = BytesIO = StringIO.StringIO + _assertCountEqual = "assertItemsEqual" + _assertRaisesRegex = "assertRaisesRegexp" + _assertRegex = "assertRegexpMatches" +_add_doc(b, """Byte literal""") +_add_doc(u, """Text literal""") + + +def assertCountEqual(self, *args, **kwargs): + return getattr(self, _assertCountEqual)(*args, **kwargs) + + +def assertRaisesRegex(self, *args, **kwargs): + return getattr(self, _assertRaisesRegex)(*args, **kwargs) + + +def assertRegex(self, *args, **kwargs): + return getattr(self, _assertRegex)(*args, **kwargs) + + +if PY3: + exec_ = getattr(moves.builtins, "exec") + + def reraise(tp, value, tb=None): + if value is None: + value = tp() + if value.__traceback__ is not tb: + raise value.with_traceback(tb) + raise value + +else: + def exec_(_code_, _globs_=None, _locs_=None): + """Execute code in a namespace.""" + if _globs_ is None: + frame = sys._getframe(1) + _globs_ = frame.f_globals + if _locs_ is None: + _locs_ = frame.f_locals + del frame + elif _locs_ is None: + _locs_ = _globs_ + exec("""exec _code_ in _globs_, _locs_""") + + exec_("""def reraise(tp, value, tb=None): + raise tp, value, tb +""") + + +if sys.version_info[:2] == (3, 2): + exec_("""def raise_from(value, from_value): + if from_value is None: + raise value + raise value from from_value +""") +elif sys.version_info[:2] > (3, 2): + exec_("""def raise_from(value, from_value): + raise value from from_value +""") +else: + def raise_from(value, from_value): + 
raise value + + +print_ = getattr(moves.builtins, "print", None) +if print_ is None: + def print_(*args, **kwargs): + """The new-style print function for Python 2.4 and 2.5.""" + fp = kwargs.pop("file", sys.stdout) + if fp is None: + return + + def write(data): + if not isinstance(data, basestring): + data = str(data) + # If the file has an encoding, encode unicode with it. + if (isinstance(fp, file) and + isinstance(data, unicode) and + fp.encoding is not None): + errors = getattr(fp, "errors", None) + if errors is None: + errors = "strict" + data = data.encode(fp.encoding, errors) + fp.write(data) + want_unicode = False + sep = kwargs.pop("sep", None) + if sep is not None: + if isinstance(sep, unicode): + want_unicode = True + elif not isinstance(sep, str): + raise TypeError("sep must be None or a string") + end = kwargs.pop("end", None) + if end is not None: + if isinstance(end, unicode): + want_unicode = True + elif not isinstance(end, str): + raise TypeError("end must be None or a string") + if kwargs: + raise TypeError("invalid keyword arguments to print()") + if not want_unicode: + for arg in args: + if isinstance(arg, unicode): + want_unicode = True + break + if want_unicode: + newline = unicode("\n") + space = unicode(" ") + else: + newline = "\n" + space = " " + if sep is None: + sep = space + if end is None: + end = newline + for i, arg in enumerate(args): + if i: + write(sep) + write(arg) + write(end) +if sys.version_info[:2] < (3, 3): + _print = print_ + + def print_(*args, **kwargs): + fp = kwargs.get("file", sys.stdout) + flush = kwargs.pop("flush", False) + _print(*args, **kwargs) + if flush and fp is not None: + fp.flush() + +_add_doc(reraise, """Reraise an exception.""") + +if sys.version_info[0:2] < (3, 4): + def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS, + updated=functools.WRAPPER_UPDATES): + def wrapper(f): + f = functools.wraps(wrapped, assigned, updated)(f) + f.__wrapped__ = wrapped + return f + return wrapper +else: + wraps = functools.wraps + + +def with_metaclass(meta, *bases): + """Create a base class with a metaclass.""" + # This requires a bit of explanation: the basic idea is to make a dummy + # metaclass for one level of class instantiation that replaces itself with + # the actual metaclass. + class metaclass(meta): + + def __new__(cls, name, this_bases, d): + return meta(name, bases, d) + return type.__new__(metaclass, 'temporary_class', (), {}) + + +def add_metaclass(metaclass): + """Class decorator for creating a class with a metaclass.""" + def wrapper(cls): + orig_vars = cls.__dict__.copy() + slots = orig_vars.get('__slots__') + if slots is not None: + if isinstance(slots, str): + slots = [slots] + for slots_var in slots: + orig_vars.pop(slots_var) + orig_vars.pop('__dict__', None) + orig_vars.pop('__weakref__', None) + return metaclass(cls.__name__, cls.__bases__, orig_vars) + return wrapper + + +def python_2_unicode_compatible(klass): + """ + A decorator that defines __unicode__ and __str__ methods under Python 2. + Under Python 3 it does nothing. + + To support Python 2 and 3 with a single code base, define a __str__ method + returning text and apply this decorator to the class. + """ + if PY2: + if '__str__' not in klass.__dict__: + raise ValueError("@python_2_unicode_compatible cannot be applied " + "to %s because it doesn't define __str__()." % + klass.__name__) + klass.__unicode__ = klass.__str__ + klass.__str__ = lambda self: self.__unicode__().encode('utf-8') + return klass + + +# Complete the moves implementation. 
+# This code is at the end of this module to speed up module loading. +# Turn this module into a package. +__path__ = [] # required for PEP 302 and PEP 451 +__package__ = __name__ # see PEP 366 @ReservedAssignment +if globals().get("__spec__") is not None: + __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable +# Remove other six meta path importers, since they cause problems. This can +# happen if six is removed from sys.modules and then reloaded. (Setuptools does +# this for some reason.) +if sys.meta_path: + for i, importer in enumerate(sys.meta_path): + # Here's some real nastiness: Another "instance" of the six module might + # be floating around. Therefore, we can't use isinstance() to check for + # the six meta path importer, since the other six instance will have + # inserted an importer with different class. + if (type(importer).__name__ == "_SixMetaPathImporter" and + importer.name == __name__): + del sys.meta_path[i] + break + del i, importer +# Finally, add the importer to the meta path import hook. +sys.meta_path.append(_importer) diff --git a/lib/python3.4/site-packages/pkg_resources/extern/__init__.py b/lib/python3.4/site-packages/pkg_resources/extern/__init__.py new file mode 100644 index 0000000..317f4b8 --- /dev/null +++ b/lib/python3.4/site-packages/pkg_resources/extern/__init__.py @@ -0,0 +1,71 @@ +import sys + + +class VendorImporter: + """ + A PEP 302 meta path importer for finding optionally-vendored + or otherwise naturally-installed packages from root_name. + """ + def __init__(self, root_name, vendored_names=(), vendor_pkg=None): + self.root_name = root_name + self.vendored_names = set(vendored_names) + self.vendor_pkg = vendor_pkg or root_name.replace('extern', '_vendor') + + @property + def search_path(self): + """ + Search first the vendor package then as a natural package. + """ + yield self.vendor_pkg + '.' + yield '' + + def find_module(self, fullname, path=None): + """ + Return self when fullname starts with root_name and the + target module is one vendored through this importer. + """ + root, base, target = fullname.partition(self.root_name + '.') + if root: + return + if not any(map(target.startswith, self.vendored_names)): + return + return self + + def load_module(self, fullname): + """ + Iterate over the search path to locate and load fullname. + """ + root, base, target = fullname.partition(self.root_name + '.') + for prefix in self.search_path: + try: + extant = prefix + target + __import__(extant) + mod = sys.modules[extant] + sys.modules[fullname] = mod + # mysterious hack: + # Remove the reference to the extant package/module + # on later Python versions to cause relative imports + # in the vendor package to resolve the same modules + # as those going through this importer. + if sys.version_info > (3, 3): + del sys.modules[extant] + return mod + except ImportError: + pass + else: + raise ImportError( + "The '{target}' package is required; " + "normally this is bundled with this package so if you get " + "this warning, consult the packager of your " + "distribution.".format(**locals()) + ) + + def install(self): + """ + Install this importer into sys.meta_path if not already present. 
+ """ + if self not in sys.meta_path: + sys.meta_path.append(self) + +names = 'packaging', 'six' +VendorImporter(__name__, names).install() diff --git a/lib/python3.4/site-packages/setuptools-18.5.dist-info/metadata.json b/lib/python3.4/site-packages/setuptools-18.5.dist-info/metadata.json deleted file mode 100644 index 5cb9549..0000000 --- a/lib/python3.4/site-packages/setuptools-18.5.dist-info/metadata.json +++ /dev/null @@ -1 +0,0 @@ -{"generator": "bdist_wheel (0.26.0)", "summary": "Easily download, build, install, upgrade, and uninstall Python packages", "classifiers": ["Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "License :: OSI Approved :: Python Software Foundation License", "License :: OSI Approved :: Zope Public License", "Operating System :: OS Independent", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Topic :: Software Development :: Libraries :: Python Modules", "Topic :: System :: Archiving :: Packaging", "Topic :: System :: Systems Administration", "Topic :: Utilities"], "extensions": {"python.details": {"project_urls": {"Home": "https://bitbucket.org/pypa/setuptools"}, "contacts": [{"email": "distutils-sig@python.org", "name": "Python Packaging Authority", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}}, "python.exports": {"console_scripts": {"easy_install": "setuptools.command.easy_install:main", "easy_install-3.5": "setuptools.command.easy_install:main"}, "distutils.commands": {"alias": "setuptools.command.alias:alias", "bdist_egg": "setuptools.command.bdist_egg:bdist_egg", "bdist_rpm": "setuptools.command.bdist_rpm:bdist_rpm", "bdist_wininst": "setuptools.command.bdist_wininst:bdist_wininst", "build_ext": "setuptools.command.build_ext:build_ext", "build_py": "setuptools.command.build_py:build_py", "develop": "setuptools.command.develop:develop", "easy_install": "setuptools.command.easy_install:easy_install", "egg_info": "setuptools.command.egg_info:egg_info", "install": "setuptools.command.install:install", "install_egg_info": "setuptools.command.install_egg_info:install_egg_info", "install_lib": "setuptools.command.install_lib:install_lib", "install_scripts": "setuptools.command.install_scripts:install_scripts", "register": "setuptools.command.register:register", "rotate": "setuptools.command.rotate:rotate", "saveopts": "setuptools.command.saveopts:saveopts", "sdist": "setuptools.command.sdist:sdist", "setopt": "setuptools.command.setopt:setopt", "test": "setuptools.command.test:test", "upload_docs": "setuptools.command.upload_docs:upload_docs"}, "distutils.setup_keywords": {"convert_2to3_doctests": "setuptools.dist:assert_string_list", "dependency_links": "setuptools.dist:assert_string_list", "eager_resources": "setuptools.dist:assert_string_list", "entry_points": "setuptools.dist:check_entry_points", "exclude_package_data": "setuptools.dist:check_package_data", "extras_require": "setuptools.dist:check_extras", "include_package_data": "setuptools.dist:assert_bool", "install_requires": "setuptools.dist:check_requirements", "namespace_packages": "setuptools.dist:check_nsp", "package_data": "setuptools.dist:check_package_data", "packages": "setuptools.dist:check_packages", "setup_requires": "setuptools.dist:check_requirements", "test_loader": "setuptools.dist:check_importable", "test_runner": 
"setuptools.dist:check_importable", "test_suite": "setuptools.dist:check_test_suite", "tests_require": "setuptools.dist:check_requirements", "use_2to3": "setuptools.dist:assert_bool", "use_2to3_exclude_fixers": "setuptools.dist:assert_string_list", "use_2to3_fixers": "setuptools.dist:assert_string_list", "zip_safe": "setuptools.dist:assert_bool"}, "egg_info.writers": {"PKG-INFO": "setuptools.command.egg_info:write_pkg_info", "dependency_links.txt": "setuptools.command.egg_info:overwrite_arg", "depends.txt": "setuptools.command.egg_info:warn_depends_obsolete", "eager_resources.txt": "setuptools.command.egg_info:overwrite_arg", "entry_points.txt": "setuptools.command.egg_info:write_entries", "namespace_packages.txt": "setuptools.command.egg_info:overwrite_arg", "requires.txt": "setuptools.command.egg_info:write_requirements", "top_level.txt": "setuptools.command.egg_info:write_toplevel_names"}, "setuptools.installation": {"eggsecutable": "setuptools.command.easy_install:bootstrap"}}, "python.commands": {"wrap_console": {"easy_install": "setuptools.command.easy_install:main", "easy_install-3.5": "setuptools.command.easy_install:main"}}}, "keywords": ["CPAN", "PyPI", "distutils", "eggs", "package", "management"], "license": "PSF or ZPL", "metadata_version": "2.0", "name": "setuptools", "extras": ["certs", "ssl"], "run_requires": [{"requires": ["certifi (==2015.04.28)"], "extra": "certs"}, {"requires": ["wincertstore (==0.2)"], "extra": "ssl", "environment": "sys_platform=='win32'"}], "version": "18.5", "test_requires": [{"requires": ["pytest", "setuptools[ssl]"]}]} \ No newline at end of file diff --git a/lib/python3.4/site-packages/setuptools-18.5.dist-info/DESCRIPTION.rst b/lib/python3.4/site-packages/setuptools-20.1.1.dist-info/DESCRIPTION.rst similarity index 100% rename from lib/python3.4/site-packages/setuptools-18.5.dist-info/DESCRIPTION.rst rename to lib/python3.4/site-packages/setuptools-20.1.1.dist-info/DESCRIPTION.rst diff --git a/lib/python3.4/site-packages/setuptools-20.1.1.dist-info/INSTALLER b/lib/python3.4/site-packages/setuptools-20.1.1.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/lib/python3.4/site-packages/setuptools-20.1.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/lib/python3.4/site-packages/setuptools-18.5.dist-info/METADATA b/lib/python3.4/site-packages/setuptools-20.1.1.dist-info/METADATA similarity index 98% rename from lib/python3.4/site-packages/setuptools-18.5.dist-info/METADATA rename to lib/python3.4/site-packages/setuptools-20.1.1.dist-info/METADATA index 20fa41e..6faaab5 100644 --- a/lib/python3.4/site-packages/setuptools-18.5.dist-info/METADATA +++ b/lib/python3.4/site-packages/setuptools-20.1.1.dist-info/METADATA @@ -1,17 +1,16 @@ Metadata-Version: 2.0 Name: setuptools -Version: 18.5 +Version: 20.1.1 Summary: Easily download, build, install, upgrade, and uninstall Python packages Home-page: https://bitbucket.org/pypa/setuptools Author: Python Packaging Authority Author-email: distutils-sig@python.org -License: PSF or ZPL +License: UNKNOWN Keywords: CPAN PyPI distutils eggs package management Platform: UNKNOWN Classifier: Development Status :: 5 - Production/Stable Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: Python Software Foundation License -Classifier: License :: OSI Approved :: Zope Public License +Classifier: License :: OSI Approved :: MIT License Classifier: Operating System :: OS Independent Classifier: Programming Language :: Python :: 2.6 Classifier: Programming 
Language :: Python :: 2.7 @@ -24,7 +23,7 @@ Classifier: Topic :: System :: Archiving :: Packaging Classifier: Topic :: System :: Systems Administration Classifier: Topic :: Utilities Provides-Extra: certs -Requires-Dist: certifi (==2015.04.28); extra == 'certs' +Requires-Dist: certifi (==2015.11.20); extra == 'certs' Provides-Extra: ssl Requires-Dist: wincertstore (==0.2); sys_platform=='win32' and extra == 'ssl' diff --git a/lib/python3.4/site-packages/setuptools-18.5.dist-info/RECORD b/lib/python3.4/site-packages/setuptools-20.1.1.dist-info/RECORD similarity index 57% rename from lib/python3.4/site-packages/setuptools-18.5.dist-info/RECORD rename to lib/python3.4/site-packages/setuptools-20.1.1.dist-info/RECORD index cbb0d41..f5cf85d 100644 --- a/lib/python3.4/site-packages/setuptools-18.5.dist-info/RECORD +++ b/lib/python3.4/site-packages/setuptools-20.1.1.dist-info/RECORD @@ -1,124 +1,133 @@ easy_install.py,sha256=MDC9vt5AxDsXX5qcKlBz2TnW6Tpuv_AobnfhCJ9X3PM,126 _markerlib/__init__.py,sha256=GSmhZqvAitLJHhSgtqqusfq2nJ_ClP3oy3Lm0uZLIsU,552 _markerlib/markers.py,sha256=YuFp0-osufFIoqnzG3L0Z2fDCx4Vln3VUDeXJ2DA_1I,3979 -pkg_resources/__init__.py,sha256=HXzgG0K1DI6hcI-gkMYoq6hPsoERSezP32FMP0Hxt00,106833 +pkg_resources/__init__.py,sha256=bucu_98c11mzrGldEJeqxArn14F7ZmURsb-8CaNSbVo,108616 pkg_resources/_vendor/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pkg_resources/_vendor/six.py,sha256=A6hdJZVjI3t_geebZ9BzUvwRrIXo0lfwzQlM2LcKyas,30098 pkg_resources/_vendor/packaging/__about__.py,sha256=YzdrW-1lWmyCBDyrcNkZbJo4tiDWXpoiqPjfyCYMzIE,1073 pkg_resources/_vendor/packaging/__init__.py,sha256=2V8n-eEpSgBuXlV8hlMmhU7ZklpsrrusWMZNp2gC4Hs,906 pkg_resources/_vendor/packaging/_compat.py,sha256=wofog8iYo_zudt_10i6JiXKHDs5GhCuXC09hCuSJiv4,1253 pkg_resources/_vendor/packaging/_structures.py,sha256=93YvgrEE2HgFp8AdXy0pwCRVnZeutRHO_-puJ7T0cPw,1809 pkg_resources/_vendor/packaging/specifiers.py,sha256=UV9T01_kKloA8PSeMI3HTYBSJ_4KLs00yLvrlciZ3yU,28079 pkg_resources/_vendor/packaging/version.py,sha256=dEGrWZJZ6sef1xMxSfDCego2hS3Q86by0hUIFVk-AGc,11949 -setuptools/__init__.py,sha256=3k29_xXPwjFtkdBbuqaQ-VA3_Mdqyq4ADDIwfsc9ISo,5424 +pkg_resources/extern/__init__.py,sha256=azKvXDutMVFe3c641wdiwndjtku92Bl3_iGVAIMKnsM,2461 +setuptools/__init__.py,sha256=WEGb6BRGN2dz3eJTbNRUfInUAhb6_OZJyYAndPGJm6w,5440 setuptools/archive_util.py,sha256=N30WE5ZQjkytzhAodAXw4FkK-9J5AP1ChrClHnZthOA,6609 setuptools/cli-32.exe,sha256=dfEuovMNnA2HLa3jRfMPVi5tk4R7alCbpTvuxtCyw0Y,65536 setuptools/cli-64.exe,sha256=KLABu5pyrnokJCv6skjXZ6GsXeyYHGcqOUT3oHI3Xpo,74752 setuptools/cli-arm-32.exe,sha256=0pFNIi2SmY2gdY91Y4LRhj1wuBsnv5cG1fus3iBJv40,69120 setuptools/cli.exe,sha256=dfEuovMNnA2HLa3jRfMPVi5tk4R7alCbpTvuxtCyw0Y,65536 -setuptools/compat.py,sha256=-Hl58PuLOEHUDM3-qsgmk50qNiyLLF3RgBgJ-eGGZG0,2094 -setuptools/depends.py,sha256=gMRnrqQSr_Yp_wf09O88vKSQah1YjjEi5PsDNezM2Hs,6370 -setuptools/dist.py,sha256=alEPOcofbGQSfkVQk6h0yEGNyKiQyCvNQa5YmnUm6wU,35320 -setuptools/extension.py,sha256=nQ9GFTKxRIwmE1W7t1ZSBmuPAUJK_gVRYOCwxA1L38U,1649 +setuptools/depends.py,sha256=WyJIhjIX7D5-JpGSnMAPHEoDcVPQxaO0405keTQT6jM,6418 +setuptools/dist.py,sha256=txOleyyt2xCSTkUjCGW4MYZB8a1xsbC8MulDhSnoivQ,35701 +setuptools/extension.py,sha256=YvsyGHWVWzhNOXMHU239FR14wxw2WwdMLLzWsRP6_IY,1694 setuptools/gui-32.exe,sha256=XBr0bHMA6Hpz2s9s9Bzjl-PwXfa9nH4ie0rFn4V2kWA,65536 setuptools/gui-64.exe,sha256=aYKMhX1IJLn4ULHgWX0sE0yREUt6B3TEHf_jOw6yNyE,75264 setuptools/gui-arm-32.exe,sha256=R5gRWLkY7wvO_CVGxoi7LZVTv0h-DKsKScy6fkbp4XI,69120 
setuptools/gui.exe,sha256=XBr0bHMA6Hpz2s9s9Bzjl-PwXfa9nH4ie0rFn4V2kWA,65536 +setuptools/launch.py,sha256=hP3qZxDNu5Hf9C-VAkEP4IC_YYfR1XfxMTj6EguxxCg,730 setuptools/lib2to3_ex.py,sha256=6jPF9sJuHiz0cyg4cwIBLl2VMAxcl3GYSZwWAOuJplU,1998 setuptools/msvc9_support.py,sha256=fo2vjb-dna1SEuHezQCTuelCo6XFBv5cqaI56ABJ1vw,2187 -setuptools/package_index.py,sha256=aavWGle1RtmelMjeTOxjFoGpEdGdGY4hfRnLHUkbD0c,38760 -setuptools/py26compat.py,sha256=ggKS8_aZWWHHS792vF3uXp5vmUkGNk3vjLreLTHr_-Q,431 +setuptools/package_index.py,sha256=T6tZGPHApup6Gl3kz1sCLtY7kmMUXLBKweSAORYS2Qc,39490 +setuptools/py26compat.py,sha256=1Vvuf-hj5bTM3OAXv6vgJQImulne12ann053caOgikU,481 setuptools/py27compat.py,sha256=CGj-jZcFgHUkrEdLvArkxHj96tAaMbG2-yJtUVU7QVI,306 -setuptools/py31compat.py,sha256=O3X_wdWrvXTifeSFbRaCMuc23cDhMHJn7QlITb5cQ8E,1637 -setuptools/sandbox.py,sha256=Gg5UfpsC5xsg_1x68pWRXG4t3nRWNpTk7m13lrnMTKw,13925 +setuptools/py31compat.py,sha256=cqYSVBd2pxvKl75185z40htfEr6EKC29KvSBiSoqHOA,1636 +setuptools/sandbox.py,sha256=tuMRu_8R0_w6Qer9VqDiOTqKy1qr_GjHi-2QAg7TMz0,14210 setuptools/script (dev).tmpl,sha256=f7MR17dTkzaqkCMSVseyOCMVrPVSMdmTQsaB8cZzfuI,201 setuptools/script.tmpl,sha256=WGTt5piezO27c-Dbx6l5Q4T3Ff20A5z7872hv3aAhYY,138 setuptools/site-patch.py,sha256=K-0-cAx36mX_PG-qPZwosG9ZLCliRjquKQ4nHiJvvzg,2389 -setuptools/ssl_support.py,sha256=FASqXlRCmXAi6LUWLUIo0u14MpJqHBgkOc5KPHSRrtI,8044 -setuptools/unicode_utils.py,sha256=gvhAHRj1LELCz-1MP3rfXGi__O1CAm5aksO9Njd2lpU,981 +setuptools/ssl_support.py,sha256=tAFeeyFPVle_GgarPkNrdfnCJgP9PyN_QYGXTgypoyc,8119 +setuptools/unicode_utils.py,sha256=8zVyrL_MFc6P5AmErs21rr7z-3N1pZ_NkOcDC7BPElU,995 setuptools/utils.py,sha256=08Z7mt-9mvrx-XvmS5EyKoRn2lxNTlgFsUwBU3Eq9JQ,293 -setuptools/version.py,sha256=puctVnBYQ-XKHe7xBHqCJwzHsPjWBWn-FQt0kRDJjEg,21 +setuptools/version.py,sha256=E3F8rAlTgCNpmTTY2YGy4T_1iQn3gKsePB7TVIcObu0,23 setuptools/windows_support.py,sha256=5GrfqSP2-dLGJoZTq2g6dCKkyQxxa2n5IQiXlJCoYEE,714 -setuptools/command/__init__.py,sha256=gQMXoLa0TtUtmUZY0ptSouWWA5kcTArWyDQ6QwkjoVQ,554 -setuptools/command/alias.py,sha256=1sLQxZcNh6dDQpDmm4G7UGGTol83nY1NTPmNBbm2siI,2381 -setuptools/command/bdist_egg.py,sha256=3eblnHDm1t8Hwh8K5z1QaWOVkxUvxQc40KV_YZVHNFs,17184 +setuptools/command/__init__.py,sha256=1AM3hv_zCixE7kTXA-onWfK_2KF8GC8fUw3WSxzi5Fg,564 +setuptools/command/alias.py,sha256=KjpE0sz_SDIHv3fpZcIQK-sCkJz-SrC6Gmug6b9Nkc8,2426 +setuptools/command/bdist_egg.py,sha256=Km4CsGbevhvej6kKEfvTYxfkPoQijUyXmImNifrO4Tg,17184 setuptools/command/bdist_rpm.py,sha256=B7l0TnzCGb-0nLlm6rS00jWLkojASwVmdhW2w5Qz_Ak,1508 setuptools/command/bdist_wininst.py,sha256=_6dz3lpB1tY200LxKPLM7qgwTCceOMgaWFF-jW2-pm0,637 setuptools/command/build_ext.py,sha256=pkQ8xp3YPVGGLkGv-SvfxC_GqFpboph1AFEoMFOgQMo,11964 -setuptools/command/build_py.py,sha256=uTgiBroMgyV-Lq4Kt42PLWQknM9G8c8_6TiDv4H5_Sw,7915 -setuptools/command/develop.py,sha256=uyRwABU1JnhQhZO9rS8-nenkzLwKKJt2P7WPnsXrHd4,6610 -setuptools/command/easy_install.py,sha256=_gRt2BDjiJpHuDPJzOFOTThWjspKy7NYIV_Br_PmyB0,87190 -setuptools/command/egg_info.py,sha256=7AEcwMZQ5zl48_Cu_srTxCUqlJBokW10jRlFHZof2fs,16852 +setuptools/command/build_py.py,sha256=HvJ88JuougDccaowYlfMV12kYtd0GLahg2DR2vQRqL4,7983 +setuptools/command/develop.py,sha256=VxSYbpM2jQqtRBn5klIjPVBo3sWKNZMlSbHHiRLUlZo,7383 +setuptools/command/easy_install.py,sha256=w3rYTMOEAxMPNMQ6A3RWgaFNL6y1Ms2TFrgHqAxMrlE,86096 +setuptools/command/egg_info.py,sha256=0_8eI8hgLAlGt8Xk5kiodY_d9lxG6_RSescJISKBJgA,16890 setuptools/command/install.py,sha256=QwaFiZRU3ytIHoPh8uJ9EqV3Fu9C4ca4B7UGAo95tws,4685 
-setuptools/command/install_egg_info.py,sha256=KXNB8O34-rK-cZZZr2fnT8kqNktDmTfUA88X2Iln66c,4001 -setuptools/command/install_lib.py,sha256=ntpy-9xiFHfDmXmX_Lfp7nMchw7FpgyP66H7reixI_Y,3771 +setuptools/command/install_egg_info.py,sha256=8J_cH4VbOJv-9Wey8Ijw5SnNI7YS_CA2IKYX569mP5Q,4035 +setuptools/command/install_lib.py,sha256=rWcysInRJHVzgneY41EKW3kW3-dR2q2CvyPzul5ASAk,3839 setuptools/command/install_scripts.py,sha256=vX2JC6v7l090N7CrTfihWBklNbPvfNKAY2LRtukM9XE,2231 setuptools/command/launcher manifest.xml,sha256=xlLbjWrB01tKC0-hlVkOKkiSPbzMml2eOPtJ_ucCnbE,628 setuptools/command/register.py,sha256=bHlMm1qmBbSdahTOT8w6UhA-EgeQIz7p6cD-qOauaiI,270 -setuptools/command/rotate.py,sha256=Qm7SOa32L9XG5b_C7_SSYvKM5rqFXroeQ6w8GXIsY2o,2038 +setuptools/command/rotate.py,sha256=QGZS2t4CmBl7t79KQijNCjRMU50lu3nRhu4FXWB5LIE,2038 setuptools/command/saveopts.py,sha256=za7QCBcQimKKriWcoCcbhxPjUz30gSB74zuTL47xpP4,658 -setuptools/command/sdist.py,sha256=rMT2qS0u4GYJtL4IXiYG-ElEa111wqzQVHpv9uE1L5w,7079 -setuptools/command/setopt.py,sha256=Z3_kav60D2XHZjM0cRhGo7wbBYo7nr4U_U-wMMbpmu8,5080 -setuptools/command/test.py,sha256=yJEniqTzav6R6vimRG3tb7l233rGDSAmFafXIHe9UzU,6562 -setuptools/command/upload_docs.py,sha256=di-XRGtxW5TSFYR6nK9XZj3I5JIU4V00SOFRhptdOGc,6782 -setuptools-18.5.dist-info/DESCRIPTION.rst,sha256=MDsJej8DPV2OKpAKpu74g-2xksRd-uGTeZn4W7D1dnI,9940 -setuptools-18.5.dist-info/METADATA,sha256=AnJr1ZA0xypJknGm_uX312zkByzCrA6_ZyjZDI0d408,11256 -setuptools-18.5.dist-info/RECORD,, -setuptools-18.5.dist-info/WHEEL,sha256=GrqQvamwgBV4nLoJe0vhYRSWzWsx7xjlt74FT0SWYfE,110 -setuptools-18.5.dist-info/dependency_links.txt,sha256=g1tkmtmOY1n1KRGVLZKBtbJf0CCZ2Jil8uGvMfQRJNE,226 -setuptools-18.5.dist-info/entry_points.txt,sha256=xrrbAWSD2o_blM5eb2oXvmCTvfdcjUMunUT4T8C-AAs,2793 -setuptools-18.5.dist-info/metadata.json,sha256=kuSZ-oLefqfph4-5BoCXPJKfOx98Dz9L4EgOJ08ZZl0,4678 -setuptools-18.5.dist-info/top_level.txt,sha256=7780fzudMJkykiTcIrAQ8m8Lll6kot3EEePye3VJgEE,49 -setuptools-18.5.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1 -/srv/openmedialibrary/platform/Linux_x86_64/home/.local/bin/easy_install,sha256=7h7lc5DCAhnE08UwEm49wGymGDGdOcrkRdncTKYmXIQ,233 -/srv/openmedialibrary/platform/Linux_x86_64/home/.local/bin/easy_install-3.4,sha256=7h7lc5DCAhnE08UwEm49wGymGDGdOcrkRdncTKYmXIQ,233 -setuptools/__pycache__/package_index.cpython-34.pyc,, -setuptools/__pycache__/msvc9_support.cpython-34.pyc,, -setuptools/command/__pycache__/install.cpython-34.pyc,, -pkg_resources/_vendor/packaging/__pycache__/specifiers.cpython-34.pyc,, -setuptools/__pycache__/depends.cpython-34.pyc,, -setuptools/__pycache__/py26compat.cpython-34.pyc,, -setuptools/__pycache__/ssl_support.cpython-34.pyc,, -pkg_resources/_vendor/packaging/__pycache__/__init__.cpython-34.pyc,, -setuptools/__pycache__/archive_util.cpython-34.pyc,, -setuptools/command/__pycache__/bdist_egg.cpython-34.pyc,, -__pycache__/easy_install.cpython-34.pyc,, -setuptools/__pycache__/compat.cpython-34.pyc,, -pkg_resources/_vendor/packaging/__pycache__/_structures.cpython-34.pyc,, -setuptools/__pycache__/__init__.cpython-34.pyc,, -setuptools/command/__pycache__/egg_info.cpython-34.pyc,, -pkg_resources/_vendor/__pycache__/__init__.cpython-34.pyc,, -setuptools/__pycache__/windows_support.cpython-34.pyc,, -setuptools/command/__pycache__/build_ext.cpython-34.pyc,, -setuptools/command/__pycache__/install_scripts.cpython-34.pyc,, -setuptools/__pycache__/unicode_utils.cpython-34.pyc,, -setuptools/command/__pycache__/register.cpython-34.pyc,, 
-setuptools/command/__pycache__/install_egg_info.cpython-34.pyc,, -setuptools/command/__pycache__/build_py.cpython-34.pyc,, -setuptools/command/__pycache__/setopt.cpython-34.pyc,, -setuptools/__pycache__/utils.cpython-34.pyc,, -pkg_resources/_vendor/packaging/__pycache__/version.cpython-34.pyc,, -setuptools/command/__pycache__/saveopts.cpython-34.pyc,, -setuptools/__pycache__/py27compat.cpython-34.pyc,, -pkg_resources/__pycache__/__init__.cpython-34.pyc,, -setuptools/command/__pycache__/rotate.cpython-34.pyc,, -setuptools/__pycache__/extension.cpython-34.pyc,, -setuptools/command/__pycache__/bdist_wininst.cpython-34.pyc,, -setuptools/command/__pycache__/test.cpython-34.pyc,, -setuptools/command/__pycache__/upload_docs.cpython-34.pyc,, -_markerlib/__pycache__/__init__.cpython-34.pyc,, -setuptools/command/__pycache__/sdist.cpython-34.pyc,, -_markerlib/__pycache__/markers.cpython-34.pyc,, -setuptools/__pycache__/site-patch.cpython-34.pyc,, -pkg_resources/_vendor/packaging/__pycache__/_compat.cpython-34.pyc,, -setuptools/__pycache__/py31compat.cpython-34.pyc,, -setuptools/__pycache__/sandbox.cpython-34.pyc,, -pkg_resources/_vendor/packaging/__pycache__/__about__.cpython-34.pyc,, -setuptools/__pycache__/dist.cpython-34.pyc,, -setuptools/__pycache__/lib2to3_ex.cpython-34.pyc,, -setuptools/command/__pycache__/bdist_rpm.cpython-34.pyc,, -setuptools/__pycache__/version.cpython-34.pyc,, +setuptools/command/sdist.py,sha256=kQetnPMw6ao3nurWGJZgS4HkOH4AknzMOSvqbVA6jGA,7050 +setuptools/command/setopt.py,sha256=cygJaJWJmiVhR0e_Uh_0_fWyCxMJIqK-Bu6K0LyYUtU,5086 +setuptools/command/test.py,sha256=N2f5RwxkjwU3YQzFYHtzHr636-pdX9XJDuPg5Y92kSo,6888 +setuptools/command/upload.py,sha256=OjAryq4ZoARZiaTN_MpuG1X8Pu9CJNCKmmbMg-gab5I,649 +setuptools/command/upload_docs.py,sha256=htXpASci5gKP0RIrGZRRmbll7RnTRuwvKWZkYsBlDMM,6815 +setuptools/extern/__init__.py,sha256=mTrrj4yLMdFeEwwnqKnSuvZM5RM-HPZ1iXLgaYDlB9o,132 +setuptools-20.1.1.dist-info/DESCRIPTION.rst,sha256=MDsJej8DPV2OKpAKpu74g-2xksRd-uGTeZn4W7D1dnI,9940 +setuptools-20.1.1.dist-info/METADATA,sha256=mXcb68OjP3H_wq0JMNyeX3nYGlTPDi74cndXOowhZps,11173 +setuptools-20.1.1.dist-info/RECORD,, +setuptools-20.1.1.dist-info/WHEEL,sha256=GrqQvamwgBV4nLoJe0vhYRSWzWsx7xjlt74FT0SWYfE,110 +setuptools-20.1.1.dist-info/dependency_links.txt,sha256=oUNXJEArClXFiSSvfFwUKY8TYjeIXhuFfCpXn5K0DCE,226 +setuptools-20.1.1.dist-info/entry_points.txt,sha256=revbaRBbkZ2b1B-hZlAXo_18J9GjdYHgA4DoW8wdTOU,2835 +setuptools-20.1.1.dist-info/metadata.json,sha256=NVNudLLjxdP9rz5vqMErRBQTdpX1tTEbp9EhBRSCQ8U,4636 +setuptools-20.1.1.dist-info/top_level.txt,sha256=7780fzudMJkykiTcIrAQ8m8Lll6kot3EEePye3VJgEE,49 +setuptools-20.1.1.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1 +/var/lib/lxc/openmedialibrary/rootfs/srv/client/platform_linux64/p34/bin/easy_install,sha256=LopKneUXvnG_dqCSZe6amF-Bf4WEh1ny0uo28w2_nQ0,299 +/var/lib/lxc/openmedialibrary/rootfs/srv/client/platform_linux64/p34/bin/easy_install-3.4,sha256=LopKneUXvnG_dqCSZe6amF-Bf4WEh1ny0uo28w2_nQ0,299 +/var/lib/lxc/openmedialibrary/rootfs/srv/client/platform_linux64/p34/lib/python3.4/site-packages/setuptools-20.1.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 setuptools/command/__pycache__/install_lib.cpython-34.pyc,, -setuptools/command/__pycache__/develop.cpython-34.pyc,, -setuptools/command/__pycache__/__init__.cpython-34.pyc,, +setuptools/__pycache__/unicode_utils.cpython-34.pyc,, +setuptools/__pycache__/version.cpython-34.pyc,, +setuptools/command/__pycache__/test.cpython-34.pyc,, 
+setuptools/command/__pycache__/upload.cpython-34.pyc,, +setuptools/command/__pycache__/rotate.cpython-34.pyc,, +setuptools/__pycache__/dist.cpython-34.pyc,, +pkg_resources/_vendor/__pycache__/__init__.cpython-34.pyc,, +pkg_resources/_vendor/packaging/__pycache__/__about__.cpython-34.pyc,, +setuptools/command/__pycache__/egg_info.cpython-34.pyc,, +setuptools/__pycache__/site-patch.cpython-34.pyc,, +setuptools/command/__pycache__/saveopts.cpython-34.pyc,, +setuptools/command/__pycache__/bdist_wininst.cpython-34.pyc,, +setuptools/__pycache__/py27compat.cpython-34.pyc,, +setuptools/__pycache__/archive_util.cpython-34.pyc,, +setuptools/command/__pycache__/register.cpython-34.pyc,, +setuptools/command/__pycache__/install.cpython-34.pyc,, +pkg_resources/_vendor/packaging/__pycache__/version.cpython-34.pyc,, +setuptools/__pycache__/utils.cpython-34.pyc,, +pkg_resources/_vendor/packaging/__pycache__/__init__.cpython-34.pyc,, +pkg_resources/_vendor/__pycache__/six.cpython-34.pyc,, setuptools/command/__pycache__/easy_install.cpython-34.pyc,, +setuptools/command/__pycache__/install_egg_info.cpython-34.pyc,, +setuptools/command/__pycache__/bdist_rpm.cpython-34.pyc,, +setuptools/command/__pycache__/setopt.cpython-34.pyc,, +pkg_resources/_vendor/packaging/__pycache__/specifiers.cpython-34.pyc,, +_markerlib/__pycache__/__init__.cpython-34.pyc,, +pkg_resources/_vendor/packaging/__pycache__/_compat.cpython-34.pyc,, setuptools/command/__pycache__/alias.cpython-34.pyc,, +setuptools/__pycache__/launch.cpython-34.pyc,, +setuptools/__pycache__/py31compat.cpython-34.pyc,, +_markerlib/__pycache__/markers.cpython-34.pyc,, +setuptools/__pycache__/lib2to3_ex.cpython-34.pyc,, +setuptools/command/__pycache__/build_ext.cpython-34.pyc,, +setuptools/__pycache__/sandbox.cpython-34.pyc,, +pkg_resources/__pycache__/__init__.cpython-34.pyc,, +setuptools/command/__pycache__/bdist_egg.cpython-34.pyc,, +setuptools/extern/__pycache__/__init__.cpython-34.pyc,, +setuptools/__pycache__/extension.cpython-34.pyc,, +setuptools/command/__pycache__/develop.cpython-34.pyc,, +__pycache__/easy_install.cpython-34.pyc,, +setuptools/__pycache__/package_index.cpython-34.pyc,, +setuptools/command/__pycache__/__init__.cpython-34.pyc,, +setuptools/__pycache__/ssl_support.cpython-34.pyc,, +setuptools/command/__pycache__/install_scripts.cpython-34.pyc,, +setuptools/__pycache__/py26compat.cpython-34.pyc,, +setuptools/__pycache__/depends.cpython-34.pyc,, +setuptools/command/__pycache__/sdist.cpython-34.pyc,, +pkg_resources/extern/__pycache__/__init__.cpython-34.pyc,, +setuptools/__pycache__/windows_support.cpython-34.pyc,, +setuptools/__pycache__/__init__.cpython-34.pyc,, +setuptools/__pycache__/msvc9_support.cpython-34.pyc,, +setuptools/command/__pycache__/build_py.cpython-34.pyc,, +setuptools/command/__pycache__/upload_docs.cpython-34.pyc,, +pkg_resources/_vendor/packaging/__pycache__/_structures.cpython-34.pyc,, diff --git a/lib/python3.4/site-packages/setuptools-20.1.1.dist-info/WHEEL b/lib/python3.4/site-packages/setuptools-20.1.1.dist-info/WHEEL new file mode 100644 index 0000000..0de529b --- /dev/null +++ b/lib/python3.4/site-packages/setuptools-20.1.1.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.26.0) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/lib/python3.4/site-packages/setuptools-18.5.dist-info/dependency_links.txt b/lib/python3.4/site-packages/setuptools-20.1.1.dist-info/dependency_links.txt similarity index 50% rename from 
lib/python3.4/site-packages/setuptools-18.5.dist-info/dependency_links.txt rename to lib/python3.4/site-packages/setuptools-20.1.1.dist-info/dependency_links.txt index b0477ad..47d1e81 100644 --- a/lib/python3.4/site-packages/setuptools-18.5.dist-info/dependency_links.txt +++ b/lib/python3.4/site-packages/setuptools-20.1.1.dist-info/dependency_links.txt @@ -1,2 +1,2 @@ -https://pypi.python.org/packages/source/c/certifi/certifi-2015.04.28.tar.gz#md5=12c7c3a063b2ff97a0f8291d8de41e8c +https://pypi.python.org/packages/source/c/certifi/certifi-2015.11.20.tar.gz#md5=25134646672c695c1ff1593c2dd75d08 https://pypi.python.org/packages/source/w/wincertstore/wincertstore-0.2.zip#md5=ae728f2f007185648d0c7a8679b361e2 diff --git a/lib/python3.4/site-packages/setuptools-18.5.dist-info/entry_points.txt b/lib/python3.4/site-packages/setuptools-20.1.1.dist-info/entry_points.txt similarity index 98% rename from lib/python3.4/site-packages/setuptools-18.5.dist-info/entry_points.txt rename to lib/python3.4/site-packages/setuptools-20.1.1.dist-info/entry_points.txt index ef17af1..924fed8 100644 --- a/lib/python3.4/site-packages/setuptools-18.5.dist-info/entry_points.txt +++ b/lib/python3.4/site-packages/setuptools-20.1.1.dist-info/entry_points.txt @@ -22,6 +22,7 @@ saveopts = setuptools.command.saveopts:saveopts sdist = setuptools.command.sdist:sdist setopt = setuptools.command.setopt:setopt test = setuptools.command.test:test +upload = setuptools.command.upload:upload upload_docs = setuptools.command.upload_docs:upload_docs [distutils.setup_keywords] diff --git a/lib/python3.4/site-packages/setuptools-20.1.1.dist-info/metadata.json b/lib/python3.4/site-packages/setuptools-20.1.1.dist-info/metadata.json new file mode 100644 index 0000000..385a951 --- /dev/null +++ b/lib/python3.4/site-packages/setuptools-20.1.1.dist-info/metadata.json @@ -0,0 +1 @@ +{"generator": "bdist_wheel (0.26.0)", "summary": "Easily download, build, install, upgrade, and uninstall Python packages", "classifiers": ["Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Topic :: Software Development :: Libraries :: Python Modules", "Topic :: System :: Archiving :: Packaging", "Topic :: System :: Systems Administration", "Topic :: Utilities"], "extensions": {"python.details": {"project_urls": {"Home": "https://bitbucket.org/pypa/setuptools"}, "contacts": [{"email": "distutils-sig@python.org", "name": "Python Packaging Authority", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}}, "python.exports": {"console_scripts": {"easy_install": "setuptools.command.easy_install:main", "easy_install-3.5": "setuptools.command.easy_install:main"}, "distutils.commands": {"alias": "setuptools.command.alias:alias", "bdist_egg": "setuptools.command.bdist_egg:bdist_egg", "bdist_rpm": "setuptools.command.bdist_rpm:bdist_rpm", "bdist_wininst": "setuptools.command.bdist_wininst:bdist_wininst", "build_ext": "setuptools.command.build_ext:build_ext", "build_py": "setuptools.command.build_py:build_py", "develop": "setuptools.command.develop:develop", "easy_install": "setuptools.command.easy_install:easy_install", "egg_info": "setuptools.command.egg_info:egg_info", "install": 
"setuptools.command.install:install", "install_egg_info": "setuptools.command.install_egg_info:install_egg_info", "install_lib": "setuptools.command.install_lib:install_lib", "install_scripts": "setuptools.command.install_scripts:install_scripts", "register": "setuptools.command.register:register", "rotate": "setuptools.command.rotate:rotate", "saveopts": "setuptools.command.saveopts:saveopts", "sdist": "setuptools.command.sdist:sdist", "setopt": "setuptools.command.setopt:setopt", "test": "setuptools.command.test:test", "upload": "setuptools.command.upload:upload", "upload_docs": "setuptools.command.upload_docs:upload_docs"}, "distutils.setup_keywords": {"convert_2to3_doctests": "setuptools.dist:assert_string_list", "dependency_links": "setuptools.dist:assert_string_list", "eager_resources": "setuptools.dist:assert_string_list", "entry_points": "setuptools.dist:check_entry_points", "exclude_package_data": "setuptools.dist:check_package_data", "extras_require": "setuptools.dist:check_extras", "include_package_data": "setuptools.dist:assert_bool", "install_requires": "setuptools.dist:check_requirements", "namespace_packages": "setuptools.dist:check_nsp", "package_data": "setuptools.dist:check_package_data", "packages": "setuptools.dist:check_packages", "setup_requires": "setuptools.dist:check_requirements", "test_loader": "setuptools.dist:check_importable", "test_runner": "setuptools.dist:check_importable", "test_suite": "setuptools.dist:check_test_suite", "tests_require": "setuptools.dist:check_requirements", "use_2to3": "setuptools.dist:assert_bool", "use_2to3_exclude_fixers": "setuptools.dist:assert_string_list", "use_2to3_fixers": "setuptools.dist:assert_string_list", "zip_safe": "setuptools.dist:assert_bool"}, "egg_info.writers": {"PKG-INFO": "setuptools.command.egg_info:write_pkg_info", "dependency_links.txt": "setuptools.command.egg_info:overwrite_arg", "depends.txt": "setuptools.command.egg_info:warn_depends_obsolete", "eager_resources.txt": "setuptools.command.egg_info:overwrite_arg", "entry_points.txt": "setuptools.command.egg_info:write_entries", "namespace_packages.txt": "setuptools.command.egg_info:overwrite_arg", "requires.txt": "setuptools.command.egg_info:write_requirements", "top_level.txt": "setuptools.command.egg_info:write_toplevel_names"}, "setuptools.installation": {"eggsecutable": "setuptools.command.easy_install:bootstrap"}}, "python.commands": {"wrap_console": {"easy_install": "setuptools.command.easy_install:main", "easy_install-3.5": "setuptools.command.easy_install:main"}}}, "keywords": ["CPAN", "PyPI", "distutils", "eggs", "package", "management"], "metadata_version": "2.0", "name": "setuptools", "extras": ["certs", "ssl"], "run_requires": [{"requires": ["certifi (==2015.11.20)"], "extra": "certs"}, {"requires": ["wincertstore (==0.2)"], "extra": "ssl", "environment": "sys_platform=='win32'"}], "version": "20.1.1", "test_requires": [{"requires": ["pytest (>=2.8)", "setuptools[ssl]"]}]} \ No newline at end of file diff --git a/lib/python3.4/site-packages/setuptools-18.5.dist-info/top_level.txt b/lib/python3.4/site-packages/setuptools-20.1.1.dist-info/top_level.txt similarity index 100% rename from lib/python3.4/site-packages/setuptools-18.5.dist-info/top_level.txt rename to lib/python3.4/site-packages/setuptools-20.1.1.dist-info/top_level.txt diff --git a/lib/python3.4/site-packages/setuptools-18.5.dist-info/zip-safe b/lib/python3.4/site-packages/setuptools-20.1.1.dist-info/zip-safe similarity index 100% rename from 
lib/python3.4/site-packages/setuptools-18.5.dist-info/zip-safe rename to lib/python3.4/site-packages/setuptools-20.1.1.dist-info/zip-safe diff --git a/lib/python3.4/site-packages/setuptools/__init__.py b/lib/python3.4/site-packages/setuptools/__init__.py index 712ec08..67b57e4 100644 --- a/lib/python3.4/site-packages/setuptools/__init__.py +++ b/lib/python3.4/site-packages/setuptools/__init__.py @@ -8,11 +8,12 @@ from distutils.core import Command as _Command from distutils.util import convert_path from fnmatch import fnmatchcase +from setuptools.extern.six.moves import filterfalse, map + import setuptools.version from setuptools.extension import Extension from setuptools.dist import Distribution, Feature, _get_unpatched from setuptools.depends import Require -from setuptools.compat import filterfalse __all__ = [ 'setup', 'Distribution', 'Feature', 'Command', 'Extension', 'Require', diff --git a/lib/python3.4/site-packages/setuptools/command/__init__.py b/lib/python3.4/site-packages/setuptools/command/__init__.py index f6dbc39..3fb2f6d 100644 --- a/lib/python3.4/site-packages/setuptools/command/__init__.py +++ b/lib/python3.4/site-packages/setuptools/command/__init__.py @@ -2,7 +2,7 @@ __all__ = [ 'alias', 'bdist_egg', 'bdist_rpm', 'build_ext', 'build_py', 'develop', 'easy_install', 'egg_info', 'install', 'install_lib', 'rotate', 'saveopts', 'sdist', 'setopt', 'test', 'install_egg_info', 'install_scripts', - 'register', 'bdist_wininst', 'upload_docs', + 'register', 'bdist_wininst', 'upload_docs', 'upload', ] from distutils.command.bdist import bdist diff --git a/lib/python3.4/site-packages/setuptools/command/alias.py b/lib/python3.4/site-packages/setuptools/command/alias.py index 452a924..4532b1c 100644 --- a/lib/python3.4/site-packages/setuptools/command/alias.py +++ b/lib/python3.4/site-packages/setuptools/command/alias.py @@ -1,5 +1,7 @@ from distutils.errors import DistutilsOptionError +from setuptools.extern.six.moves import map + from setuptools.command.setopt import edit_config, option_base, config_file diff --git a/lib/python3.4/site-packages/setuptools/command/bdist_egg.py b/lib/python3.4/site-packages/setuptools/command/bdist_egg.py index 87dce88..9cebd7f 100644 --- a/lib/python3.4/site-packages/setuptools/command/bdist_egg.py +++ b/lib/python3.4/site-packages/setuptools/command/bdist_egg.py @@ -11,9 +11,10 @@ import os import marshal import textwrap +from setuptools.extern import six + from pkg_resources import get_build_platform, Distribution, ensure_directory from pkg_resources import EntryPoint -from setuptools.compat import basestring from setuptools.extension import Library from setuptools import Command @@ -413,7 +414,7 @@ def iter_symbols(code): for name in code.co_names: yield name for const in code.co_consts: - if isinstance(const, basestring): + if isinstance(const, six.string_types): yield const elif isinstance(const, CodeType): for name in iter_symbols(const): diff --git a/lib/python3.4/site-packages/setuptools/command/build_py.py b/lib/python3.4/site-packages/setuptools/command/build_py.py index a873d54..8623c77 100644 --- a/lib/python3.4/site-packages/setuptools/command/build_py.py +++ b/lib/python3.4/site-packages/setuptools/command/build_py.py @@ -2,9 +2,14 @@ from glob import glob from distutils.util import convert_path import distutils.command.build_py as orig import os -import sys import fnmatch import textwrap +import io +import distutils.errors +import collections +import itertools + +from setuptools.extern.six.moves import map try: from 
setuptools.lib2to3_ex import Mixin2to3 @@ -55,9 +60,10 @@ class build_py(orig.build_py, Mixin2to3): self.byte_compile(orig.build_py.get_outputs(self, include_bytecode=0)) def __getattr__(self, attr): - if attr == 'data_files': # lazily compute data files - self.data_files = files = self._get_data_files() - return files + "lazily compute data files" + if attr == 'data_files': + self.data_files = self._get_data_files() + return self.data_files return orig.build_py.__getattr__(self, attr) def build_module(self, module, module_file, package): @@ -70,23 +76,21 @@ class build_py(orig.build_py, Mixin2to3): def _get_data_files(self): """Generate list of '(package,src_dir,build_dir,filenames)' tuples""" self.analyze_manifest() - data = [] - for package in self.packages or (): - # Locate package source directory - src_dir = self.get_package_dir(package) + return list(map(self._get_pkg_data_files, self.packages or ())) - # Compute package build directory - build_dir = os.path.join(*([self.build_lib] + package.split('.'))) + def _get_pkg_data_files(self, package): + # Locate package source directory + src_dir = self.get_package_dir(package) - # Length of path to strip from found files - plen = len(src_dir) + 1 + # Compute package build directory + build_dir = os.path.join(*([self.build_lib] + package.split('.'))) - # Strip directory from globbed filenames - filenames = [ - file[plen:] for file in self.find_data_files(package, src_dir) - ] - data.append((package, src_dir, build_dir, filenames)) - return data + # Strip directory from globbed filenames + filenames = [ + os.path.relpath(file, src_dir) + for file in self.find_data_files(package, src_dir) + ] + return package, src_dir, build_dir, filenames def find_data_files(self, package, src_dir): """Return filenames for package's data files in 'src_dir'""" @@ -157,17 +161,15 @@ class build_py(orig.build_py, Mixin2to3): else: return init_py - f = open(init_py, 'rbU') - if 'declare_namespace'.encode() not in f.read(): - from distutils.errors import DistutilsError - - raise DistutilsError( + with io.open(init_py, 'rb') as f: + contents = f.read() + if b'declare_namespace' not in contents: + raise distutils.errors.DistutilsError( "Namespace package problem: %s is a namespace package, but " "its\n__init__.py does not call declare_namespace()! 
Please " 'fix it.\n(See the setuptools manual under ' '"Namespace Packages" for details.)\n"' % (package,) ) - f.close() return init_py def initialize_options(self): @@ -182,20 +184,25 @@ class build_py(orig.build_py, Mixin2to3): def exclude_data_files(self, package, src_dir, files): """Filter filenames for package's data files in 'src_dir'""" - globs = (self.exclude_package_data.get('', []) - + self.exclude_package_data.get(package, [])) - bad = [] - for pattern in globs: - bad.extend( - fnmatch.filter( - files, os.path.join(src_dir, convert_path(pattern)) - ) + globs = ( + self.exclude_package_data.get('', []) + + self.exclude_package_data.get(package, []) + ) + bad = set( + item + for pattern in globs + for item in fnmatch.filter( + files, + os.path.join(src_dir, convert_path(pattern)), ) - bad = dict.fromkeys(bad) - seen = {} + ) + seen = collections.defaultdict(itertools.count) return [ - f for f in files if f not in bad - and f not in seen and seen.setdefault(f, 1) # ditch dupes + fn + for fn in files + if fn not in bad + # ditch dupes + and not next(seen[fn]) ] diff --git a/lib/python3.4/site-packages/setuptools/command/develop.py b/lib/python3.4/site-packages/setuptools/command/develop.py index 368b64f..11b5df1 100644 --- a/lib/python3.4/site-packages/setuptools/command/develop.py +++ b/lib/python3.4/site-packages/setuptools/command/develop.py @@ -3,10 +3,12 @@ from distutils import log from distutils.errors import DistutilsError, DistutilsOptionError import os import glob +import io + +from setuptools.extern import six from pkg_resources import Distribution, PathMetadata, normalize_path from setuptools.command.easy_install import easy_install -from setuptools.compat import PY3 import setuptools @@ -53,8 +55,8 @@ class develop(easy_install): # pick up setup-dir .egg files only: no .egg-info self.package_index.scan(glob.glob('*.egg')) - self.egg_link = os.path.join(self.install_dir, ei.egg_name + - '.egg-link') + egg_link_fn = ei.egg_name + '.egg-link' + self.egg_link = os.path.join(self.install_dir, egg_link_fn) self.egg_base = ei.egg_base if self.egg_path is None: self.egg_path = os.path.abspath(ei.egg_base) @@ -86,7 +88,7 @@ class develop(easy_install): " installation directory", p, normalize_path(os.curdir)) def install_for_development(self): - if PY3 and getattr(self.distribution, 'use_2to3', False): + if six.PY3 and getattr(self.distribution, 'use_2to3', False): # If we run 2to3 we can not do this inplace: # Ensure metadata is up-to-date @@ -124,9 +126,8 @@ class develop(easy_install): # create an .egg-link in the installation dir, pointing to our egg log.info("Creating %s (link to %s)", self.egg_link, self.egg_base) if not self.dry_run: - f = open(self.egg_link, "w") - f.write(self.egg_path + "\n" + self.setup_path) - f.close() + with open(self.egg_link, "w") as f: + f.write(self.egg_path + "\n" + self.setup_path) # postprocess the installed distro, fixing up .pth, installing scripts, # and handling requirements self.process_distribution(None, self.dist, not self.no_deps) @@ -163,7 +164,33 @@ class develop(easy_install): for script_name in self.distribution.scripts or []: script_path = os.path.abspath(convert_path(script_name)) script_name = os.path.basename(script_path) - f = open(script_path, 'rU') - script_text = f.read() - f.close() + with io.open(script_path) as strm: + script_text = strm.read() self.install_script(dist, script_name, script_text, script_path) + + def install_wrapper_scripts(self, dist): + dist = VersionlessRequirement(dist) + return 
easy_install.install_wrapper_scripts(self, dist) + + +class VersionlessRequirement(object): + """ + Adapt a pkg_resources.Distribution to simply return the project + name as the 'requirement' so that scripts will work across + multiple versions. + + >>> dist = Distribution(project_name='foo', version='1.0') + >>> str(dist.as_requirement()) + 'foo==1.0' + >>> adapted_dist = VersionlessRequirement(dist) + >>> str(adapted_dist.as_requirement()) + 'foo' + """ + def __init__(self, dist): + self.__dist = dist + + def __getattr__(self, name): + return getattr(self.__dist, name) + + def as_requirement(self): + return self.project_name diff --git a/lib/python3.4/site-packages/setuptools/command/easy_install.py b/lib/python3.4/site-packages/setuptools/command/easy_install.py index 45d180b..4605617 100644 --- a/lib/python3.4/site-packages/setuptools/command/easy_install.py +++ b/lib/python3.4/site-packages/setuptools/command/easy_install.py @@ -20,6 +20,7 @@ from distutils.errors import DistutilsArgError, DistutilsOptionError, \ from distutils.command.install import INSTALL_SCHEMES, SCHEME_KEYS from distutils import log, dir_util from distutils.command.build_scripts import first_line_re +from distutils.spawn import find_executable import sys import os import zipimport @@ -39,6 +40,9 @@ import subprocess import shlex import io +from setuptools.extern import six +from setuptools.extern.six.moves import configparser, map + from setuptools import Command from setuptools.sandbox import run_setup from setuptools.py31compat import get_path, get_config_vars @@ -47,8 +51,6 @@ from setuptools.archive_util import unpack_archive from setuptools.package_index import PackageIndex from setuptools.package_index import URL_SCHEME from setuptools.command import bdist_egg, egg_info -from setuptools.compat import (iteritems, maxsize, basestring, unicode, - reraise, PY2, PY3) from pkg_resources import ( yield_lines, normalize_path, resource_string, ensure_directory, get_distribution, find_distributions, Environment, Requirement, @@ -81,13 +83,13 @@ def samefile(p1, p2): return norm_p1 == norm_p2 -if PY2: +if six.PY2: def _to_ascii(s): return s def isascii(s): try: - unicode(s, 'ascii') + six.text_type(s, 'ascii') return True except UnicodeError: return False @@ -319,7 +321,7 @@ class easy_install(Command): self.local_index = Environment(self.shadow_path + sys.path) if self.find_links is not None: - if isinstance(self.find_links, basestring): + if isinstance(self.find_links, six.string_types): self.find_links = self.find_links.split() else: self.find_links = [] @@ -412,7 +414,7 @@ class easy_install(Command): try: pid = os.getpid() except: - pid = random.randint(0, maxsize) + pid = random.randint(0, sys.maxsize) return os.path.join(self.install_dir, "test-easy-install-%s" % pid) def warn_deprecated_options(self): @@ -760,9 +762,10 @@ class easy_install(Command): return dst def install_wrapper_scripts(self, dist): - if not self.exclude_scripts: - for args in ScriptWriter.best().get_args(dist): - self.write_script(*args) + if self.exclude_scripts: + return + for args in ScriptWriter.best().get_args(dist): + self.write_script(*args) def install_script(self, dist, script_name, script_text, dev_path=None): """Generate a legacy script wrapper and install it""" @@ -770,8 +773,8 @@ class easy_install(Command): is_script = is_python_script(script_text, script_name) if is_script: - script_text = (ScriptWriter.get_header(script_text) + - self._load_template(dev_path) % locals()) + body = self._load_template(dev_path) % locals() + 
script_text = ScriptWriter.get_header(script_text) + body self.write_script(script_name, _to_ascii(script_text), 'b') @staticmethod @@ -803,9 +806,8 @@ class easy_install(Command): ensure_directory(target) if os.path.exists(target): os.unlink(target) - f = open(target, "w" + mode) - f.write(contents) - f.close() + with open(target, "w" + mode) as f: + f.write(contents) chmod(target, 0o777 - mask) def install_eggs(self, spec, dist_filename, tmpdir): @@ -1244,7 +1246,7 @@ class easy_install(Command): f = open(sitepy, 'rb') current = f.read() # we want str, not bytes - if PY3: + if six.PY3: current = current.decode() f.close() @@ -1270,7 +1272,7 @@ class easy_install(Command): if not self.user: return home = convert_path(os.path.expanduser("~")) - for name, path in iteritems(self.config_vars): + for name, path in six.iteritems(self.config_vars): if path.startswith(home) and not os.path.isdir(path): self.debug_print("os.makedirs('%s', 0o700)" % path) os.makedirs(path, 0o700) @@ -1401,7 +1403,7 @@ def expand_paths(inputs): def extract_wininst_cfg(dist_filename): """Extract configuration data from a bdist_wininst .exe - Returns a ConfigParser.RawConfigParser, or None + Returns a configparser.RawConfigParser, or None """ f = open(dist_filename, 'rb') try: @@ -1414,15 +1416,12 @@ def extract_wininst_cfg(dist_filename): return None f.seek(prepended - 12) - from setuptools.compat import StringIO, ConfigParser - import struct - tag, cfglen, bmlen = struct.unpack("<iii", f.read(12)) if tag not in (0x1234567A, 0x1234567B): return None # not a valid tag f.seek(prepended - (12 + cfglen)) - cfg = ConfigParser.RawConfigParser( + cfg = configparser.RawConfigParser( {'version': '', 'target_version': ''}) try: part = f.read(cfglen) @@ -1431,8 +1430,8 @@ def extract_wininst_cfg(dist_filename): # Now the config is in bytes, but for RawConfigParser, it should # be text, so decode it. config = config.decode(sys.getfilesystemencoding()) - cfg.readfp(StringIO(config)) - except ConfigParser.Error: + cfg.readfp(six.StringIO(config)) + except configparser.Error: return None if not cfg.has_section('metadata') or not cfg.has_section('Setup'): return None @@ -1466,7 +1465,7 @@ def get_exe_prefixes(exe_filename): continue if parts[0].upper() in ('PURELIB', 'PLATLIB'): contents = z.read(name) - if PY3: + if six.PY3: contents = contents.decode() for pth in yield_lines(contents): pth = pth.strip().replace('\\', '/') @@ -1642,7 +1641,7 @@ def auto_chmod(func, arg, exc): chmod(arg, stat.S_IWRITE) return func(arg) et, ev, _ = sys.exc_info() - reraise(et, (ev[0], ev[1] + (" %s %s" % (func, arg)))) + six.reraise(et, (ev[0], ev[1] + (" %s %s" % (func, arg)))) def update_dist_caches(dist_path, fix_zipimporter_caches): @@ -1877,17 +1876,6 @@ def chmod(path, mode): log.debug("chmod failed: %s", e) -def fix_jython_executable(executable, options): - warnings.warn("Use JythonCommandSpec", DeprecationWarning, stacklevel=2) - - if not JythonCommandSpec.relevant(): - return executable - - cmd = CommandSpec.best().from_param(executable) - cmd.install_options(options) - return cmd.as_header().lstrip('#!').rstrip('\n') - - class CommandSpec(list): """ A command spec for a #! header, specified as a list of arguments akin to @@ -1902,7 +1890,7 @@ class CommandSpec(list): """ Choose the best CommandSpec class based on environmental conditions. 
""" - return cls if not JythonCommandSpec.relevant() else JythonCommandSpec + return cls @classmethod def _sys_executable(cls): @@ -1969,36 +1957,6 @@ class WindowsCommandSpec(CommandSpec): split_args = dict(posix=False) -class JythonCommandSpec(CommandSpec): - @classmethod - def relevant(cls): - return ( - sys.platform.startswith('java') - and - __import__('java').lang.System.getProperty('os.name') != 'Linux' - ) - - def as_header(self): - """ - Workaround Jython's sys.executable being a .sh (an invalid - shebang line interpreter) - """ - if not is_sh(self[0]): - return super(JythonCommandSpec, self).as_header() - - if self.options: - # Can't apply the workaround, leave it broken - log.warn( - "WARNING: Unable to adapt shebang line for Jython," - " the following script is NOT executable\n" - " see http://bugs.jython.org/issue1112 for" - " more information.") - return super(JythonCommandSpec, self).as_header() - - items = ['/usr/bin/env'] + self + list(self.options) - return self._render(items) - - class ScriptWriter(object): """ Encapsulates behavior around writing entry point scripts for console and @@ -2075,7 +2033,10 @@ class ScriptWriter(object): """ Select the best ScriptWriter for this environment. """ - return WindowsScriptWriter.best() if sys.platform == 'win32' else cls + if sys.platform == 'win32' or (os.name == 'java' and os._name == 'nt'): + return WindowsScriptWriter.best() + else: + return cls @classmethod def _get_script_args(cls, type_, name, header, script_text): @@ -2125,8 +2086,8 @@ class WindowsScriptWriter(ScriptWriter): blockers = [name + x for x in old] yield name + ext, header + script_text, 't', blockers - @staticmethod - def _adjust_header(type_, orig_header): + @classmethod + def _adjust_header(cls, type_, orig_header): """ Make sure 'pythonw' is used for gui and and 'python' is used for console (regardless of what sys.executable is). @@ -2137,11 +2098,19 @@ class WindowsScriptWriter(ScriptWriter): pattern, repl = repl, pattern pattern_ob = re.compile(re.escape(pattern), re.IGNORECASE) new_header = pattern_ob.sub(string=orig_header, repl=repl) + return new_header if cls._use_header(new_header) else orig_header + + @staticmethod + def _use_header(new_header): + """ + Should _adjust_header use the replaced header? + + On non-windows systems, always use. On + Windows systems, only use the replaced header if it resolves + to an executable on the system. 
+ """ clean_header = new_header[2:-1].strip('"') - if sys.platform == 'win32' and not os.path.exists(clean_header): - # the adjusted version doesn't exist, so return the original - return orig_header - return new_header + return sys.platform != 'win32' or find_executable(clean_header) class WindowsExecutableLauncherWriter(WindowsScriptWriter): @@ -2200,7 +2169,7 @@ def get_win_launcher(type): def load_launcher_manifest(name): manifest = pkg_resources.resource_string(__name__, 'launcher manifest.xml') - if PY2: + if six.PY2: return manifest % vars() else: return manifest.decode('utf-8') % vars() diff --git a/lib/python3.4/site-packages/setuptools/command/egg_info.py b/lib/python3.4/site-packages/setuptools/command/egg_info.py index 50f3d5c..d1bd9b0 100644 --- a/lib/python3.4/site-packages/setuptools/command/egg_info.py +++ b/lib/python3.4/site-packages/setuptools/command/egg_info.py @@ -10,22 +10,30 @@ import distutils.filelist import os import re import sys +import io +import warnings +import time + +from setuptools.extern import six +from setuptools.extern.six.moves import map + +from setuptools import Command +from setuptools.command.sdist import sdist +from setuptools.command.sdist import walk_revctrl +from setuptools.command.setopt import edit_config +from setuptools.command import bdist_egg +from pkg_resources import ( + parse_requirements, safe_name, parse_version, + safe_version, yield_lines, EntryPoint, iter_entry_points, to_filename) +import setuptools.unicode_utils as unicode_utils + +from pkg_resources.extern import packaging try: from setuptools_svn import svn_utils except ImportError: pass -from setuptools import Command -from setuptools.command.sdist import sdist -from setuptools.compat import basestring, PY3, StringIO -from setuptools.command.sdist import walk_revctrl -from pkg_resources import ( - parse_requirements, safe_name, parse_version, - safe_version, yield_lines, EntryPoint, iter_entry_points, to_filename) -import setuptools.unicode_utils as unicode_utils - -from pkg_resources import packaging class egg_info(Command): description = "create a distribution's .egg-info directory" @@ -58,8 +66,6 @@ class egg_info(Command): self.vtags = None def save_version_info(self, filename): - from setuptools.command.setopt import edit_config - values = dict( egg_info=dict( tag_svn_revision=0, @@ -143,7 +149,7 @@ class egg_info(Command): to the file. 
""" log.info("writing %s to %s", what, filename) - if PY3: + if six.PY3: data = data.encode("utf-8") if not self.dry_run: f = open(filename, 'wb') @@ -184,12 +190,8 @@ class egg_info(Command): if self.tag_build: version += self.tag_build if self.tag_svn_revision: - rev = self.get_svn_revision() - if rev: # is 0 if it's not an svn working copy - version += '-r%s' % rev + version += '-r%s' % self.get_svn_revision() if self.tag_date: - import time - version += time.strftime("-%Y%m%d") return version @@ -390,7 +392,6 @@ def write_pkg_info(cmd, basename, filename): metadata.name, metadata.version = oldname, oldver safe = getattr(cmd.distribution, 'zip_safe', None) - from setuptools.command import bdist_egg bdist_egg.write_safety_flag(cmd.egg_info, safe) @@ -412,7 +413,7 @@ def _write_requirements(stream, reqs): def write_requirements(cmd, basename, filename): dist = cmd.distribution - data = StringIO() + data = six.StringIO() _write_requirements(data, dist.install_requires) extras_require = dist.extras_require or {} for extra in sorted(extras_require): @@ -452,12 +453,12 @@ def write_arg(cmd, basename, filename, force=False): def write_entries(cmd, basename, filename): ep = cmd.distribution.entry_points - if isinstance(ep, basestring) or ep is None: + if isinstance(ep, six.string_types) or ep is None: data = ep elif ep is not None: data = [] for section, contents in sorted(ep.items()): - if not isinstance(contents, basestring): + if not isinstance(contents, six.string_types): contents = EntryPoint.parse_group(section, contents) contents = '\n'.join(sorted(map(str, contents.values()))) data.append('[%s]\n%s\n\n' % (section, contents)) @@ -467,14 +468,15 @@ def write_entries(cmd, basename, filename): def get_pkg_info_revision(): - # See if we can get a -r### off of PKG-INFO, in case this is an sdist of - # a subversion revision - # + """ + Get a -r### off of PKG-INFO Version in case this is an sdist of + a subversion revision. 
+ """ + warnings.warn("get_pkg_info_revision is deprecated.", DeprecationWarning) if os.path.exists('PKG-INFO'): - f = open('PKG-INFO', 'rU') - for line in f: - match = re.match(r"Version:.*-r(\d+)\s*$", line) - if match: - return int(match.group(1)) - f.close() + with io.open('PKG-INFO') as f: + for line in f: + match = re.match(r"Version:.*-r(\d+)\s*$", line) + if match: + return int(match.group(1)) return 0 diff --git a/lib/python3.4/site-packages/setuptools/command/install_egg_info.py b/lib/python3.4/site-packages/setuptools/command/install_egg_info.py index fd0f118..60b615d 100644 --- a/lib/python3.4/site-packages/setuptools/command/install_egg_info.py +++ b/lib/python3.4/site-packages/setuptools/command/install_egg_info.py @@ -1,6 +1,8 @@ from distutils import log, dir_util import os +from setuptools.extern.six.moves import map + from setuptools import Command from setuptools.archive_util import unpack_archive import pkg_resources @@ -27,7 +29,7 @@ class install_egg_info(Command): ).egg_name() + '.egg-info' self.source = ei_cmd.egg_info self.target = os.path.join(self.install_dir, basename) - self.outputs = [self.target] + self.outputs = [] def run(self): self.run_command('egg_info') diff --git a/lib/python3.4/site-packages/setuptools/command/install_lib.py b/lib/python3.4/site-packages/setuptools/command/install_lib.py index 9b77222..78fe689 100644 --- a/lib/python3.4/site-packages/setuptools/command/install_lib.py +++ b/lib/python3.4/site-packages/setuptools/command/install_lib.py @@ -79,6 +79,8 @@ class install_lib(orig.install_lib): base = os.path.join('__pycache__', '__init__.' + imp.get_tag()) yield base + '.pyc' yield base + '.pyo' + yield base + '.opt-1.pyc' + yield base + '.opt-2.pyc' def copy_tree( self, infile, outfile, diff --git a/lib/python3.4/site-packages/setuptools/command/rotate.py b/lib/python3.4/site-packages/setuptools/command/rotate.py index 1b07362..804f962 100644 --- a/lib/python3.4/site-packages/setuptools/command/rotate.py +++ b/lib/python3.4/site-packages/setuptools/command/rotate.py @@ -3,8 +3,9 @@ from distutils import log from distutils.errors import DistutilsOptionError import os +from setuptools.extern import six + from setuptools import Command -from setuptools.compat import basestring class rotate(Command): @@ -36,7 +37,7 @@ class rotate(Command): self.keep = int(self.keep) except ValueError: raise DistutilsOptionError("--keep must be an integer") - if isinstance(self.match, basestring): + if isinstance(self.match, six.string_types): self.match = [ convert_path(p.strip()) for p in self.match.split(',') ] diff --git a/lib/python3.4/site-packages/setuptools/command/sdist.py b/lib/python3.4/site-packages/setuptools/command/sdist.py index 851a177..6640d4e 100644 --- a/lib/python3.4/site-packages/setuptools/command/sdist.py +++ b/lib/python3.4/site-packages/setuptools/command/sdist.py @@ -3,8 +3,10 @@ from distutils import log import distutils.command.sdist as orig import os import sys +import io + +from setuptools.extern import six -from setuptools.compat import PY3 from setuptools.utils import cs_path_exists import pkg_resources @@ -166,11 +168,8 @@ class sdist(orig.sdist): if not os.path.isfile(self.manifest): return False - fp = open(self.manifest, 'rbU') - try: + with io.open(self.manifest, 'rb') as fp: first_line = fp.readline() - finally: - fp.close() return (first_line != '# file GENERATED by distutils, do NOT edit\n'.encode()) @@ -183,7 +182,7 @@ class sdist(orig.sdist): manifest = open(self.manifest, 'rbU') for line in manifest: # The manifest 
must contain UTF-8. See #303. - if PY3: + if six.PY3: try: line = line.decode('UTF-8') except UnicodeDecodeError: diff --git a/lib/python3.4/site-packages/setuptools/command/setopt.py b/lib/python3.4/site-packages/setuptools/command/setopt.py index a04d603..7f332be 100644 --- a/lib/python3.4/site-packages/setuptools/command/setopt.py +++ b/lib/python3.4/site-packages/setuptools/command/setopt.py @@ -4,6 +4,8 @@ from distutils.errors import DistutilsOptionError import distutils import os +from setuptools.extern.six.moves import configparser + from setuptools import Command @@ -37,10 +39,8 @@ def edit_config(filename, settings, dry_run=False): while a dictionary lists settings to be changed or deleted in that section. A setting of ``None`` means to delete that setting. """ - from setuptools.compat import ConfigParser - log.debug("Reading configuration from %s", filename) - opts = ConfigParser.RawConfigParser() + opts = configparser.RawConfigParser() opts.read([filename]) for section, options in settings.items(): if options is None: diff --git a/lib/python3.4/site-packages/setuptools/command/test.py b/lib/python3.4/site-packages/setuptools/command/test.py index 160e21c..371e913 100644 --- a/lib/python3.4/site-packages/setuptools/command/test.py +++ b/lib/python3.4/site-packages/setuptools/command/test.py @@ -2,11 +2,13 @@ from distutils.errors import DistutilsOptionError from unittest import TestLoader import sys +from setuptools.extern import six +from setuptools.extern.six.moves import map + from pkg_resources import (resource_listdir, resource_exists, normalize_path, working_set, _namespace_packages, add_activation_listener, require, EntryPoint) from setuptools import Command -from setuptools.compat import PY3 from setuptools.py31compat import unittest_main @@ -41,6 +43,17 @@ class ScanningLoader(TestLoader): return tests[0] # don't create a nested suite for only one return +# adapted from jaraco.classes.properties:NonDataProperty +class NonDataProperty(object): + def __init__(self, fget): + self.fget = fget + + def __get__(self, obj, objtype=None): + if obj is None: + return self + return self.fget(obj) + + class test(Command): """Command to run unit tests after in-place build""" @@ -78,7 +91,7 @@ class test(Command): if self.test_runner is None: self.test_runner = getattr(self.distribution, 'test_runner', None) - @property + @NonDataProperty def test_args(self): return list(self._test_args()) @@ -89,7 +102,7 @@ class test(Command): yield self.test_suite def with_project_on_sys_path(self, func): - with_2to3 = PY3 and getattr(self.distribution, 'use_2to3', False) + with_2to3 = six.PY3 and getattr(self.distribution, 'use_2to3', False) if with_2to3: # If we run 2to3 we can not do this inplace: @@ -149,7 +162,7 @@ class test(Command): # Purge modules under test from sys.modules. The test loader will # re-import them from the build location. Required when 2to3 is used # with namespace packages. 
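# NonDataProperty above (adapted from jaraco.classes) defines __get__ but not
# __set__, making it a non-data descriptor: an attribute set on the instance
# shadows it. That is why test_args switches from @property to
# @NonDataProperty -- a value assigned on the command object overrides the
# computed default instead of raising AttributeError. A self-contained sketch
# of the behavior (Demo is hypothetical):

class NonDataProperty(object):
    def __init__(self, fget):
        self.fget = fget

    def __get__(self, obj, objtype=None):
        if obj is None:
            return self
        return self.fget(obj)

class Demo(object):
    @NonDataProperty
    def test_args(self):
        return ['default-suite']

d = Demo()
assert d.test_args == ['default-suite']  # computed by the descriptor
d.test_args = ['custom']                 # would raise under @property
assert d.test_args == ['custom']         # instance dict shadows descriptor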
- if PY3 and getattr(self.distribution, 'use_2to3', False): + if six.PY3 and getattr(self.distribution, 'use_2to3', False): module = self.test_suite.split('.')[0] if module in _namespace_packages: del_modules = [] diff --git a/lib/python3.4/site-packages/setuptools/command/upload.py b/lib/python3.4/site-packages/setuptools/command/upload.py new file mode 100644 index 0000000..08c20ba --- /dev/null +++ b/lib/python3.4/site-packages/setuptools/command/upload.py @@ -0,0 +1,23 @@ +from distutils.command import upload as orig + + +class upload(orig.upload): + """ + Override default upload behavior to look up password + in the keyring if available. + """ + + def finalize_options(self): + orig.upload.finalize_options(self) + self.password or self._load_password_from_keyring() + + def _load_password_from_keyring(self): + """ + Attempt to load password from keyring. Suppress Exceptions. + """ + try: + keyring = __import__('keyring') + self.password = keyring.get_password(self.repository, + self.username) + except Exception: + pass diff --git a/lib/python3.4/site-packages/setuptools/command/upload_docs.py b/lib/python3.4/site-packages/setuptools/command/upload_docs.py index 001ee93..f887b47 100644 --- a/lib/python3.4/site-packages/setuptools/command/upload_docs.py +++ b/lib/python3.4/site-packages/setuptools/command/upload_docs.py @@ -8,25 +8,26 @@ PyPI's pythonhosted.org). from base64 import standard_b64encode from distutils import log from distutils.errors import DistutilsOptionError -from distutils.command.upload import upload import os import socket import zipfile import tempfile -import sys import shutil -from setuptools.compat import httplib, urlparse, unicode, iteritems, PY3 +from setuptools.extern import six +from setuptools.extern.six.moves import http_client, urllib + from pkg_resources import iter_entry_points +from .upload import upload -errors = 'surrogateescape' if PY3 else 'strict' +errors = 'surrogateescape' if six.PY3 else 'strict' # This is not just a replacement for byte literals # but works as a general purpose encoder def b(s, encoding='utf-8'): - if isinstance(s, unicode): + if isinstance(s, six.text_type): return s.encode(encoding, errors) return s @@ -113,7 +114,7 @@ class upload_docs(upload): # set up the authentication credentials = b(self.username + ':' + self.password) credentials = standard_b64encode(credentials) - if PY3: + if six.PY3: credentials = credentials.decode('ascii') auth = "Basic " + credentials @@ -122,7 +123,7 @@ class upload_docs(upload): sep_boundary = b('\n--') + b(boundary) end_boundary = sep_boundary + b('--') body = [] - for key, values in iteritems(data): + for key, values in six.iteritems(data): title = '\nContent-Disposition: form-data; name="%s"' % key # handle multiple entries for the same name if not isinstance(values, list): @@ -150,12 +151,12 @@ class upload_docs(upload): # We can't use urllib2 since we need to send the Basic # auth right with the first request schema, netloc, url, params, query, fragments = \ - urlparse(self.repository) + urllib.parse.urlparse(self.repository) assert not params and not query and not fragments if schema == 'http': - conn = httplib.HTTPConnection(netloc) + conn = http_client.HTTPConnection(netloc) elif schema == 'https': - conn = httplib.HTTPSConnection(netloc) + conn = http_client.HTTPSConnection(netloc) else: raise AssertionError("unsupported schema " + schema) diff --git a/lib/python3.4/site-packages/setuptools/compat.py b/lib/python3.4/site-packages/setuptools/compat.py deleted file mode 100644 index 
73e6e4a..0000000 --- a/lib/python3.4/site-packages/setuptools/compat.py +++ /dev/null @@ -1,66 +0,0 @@ -import sys -import itertools - -PY3 = sys.version_info >= (3,) -PY2 = not PY3 - -if PY2: - basestring = basestring - import __builtin__ as builtins - import ConfigParser - from StringIO import StringIO - BytesIO = StringIO - func_code = lambda o: o.func_code - func_globals = lambda o: o.func_globals - im_func = lambda o: o.im_func - from htmlentitydefs import name2codepoint - import httplib - from BaseHTTPServer import HTTPServer - from SimpleHTTPServer import SimpleHTTPRequestHandler - from BaseHTTPServer import BaseHTTPRequestHandler - iteritems = lambda o: o.iteritems() - long_type = long - maxsize = sys.maxint - unichr = unichr - unicode = unicode - bytes = str - from urllib import url2pathname, splittag, pathname2url - import urllib2 - from urllib2 import urlopen, HTTPError, URLError, unquote, splituser - from urlparse import urlparse, urlunparse, urljoin, urlsplit, urlunsplit - filterfalse = itertools.ifilterfalse - - exec("""def reraise(tp, value, tb=None): - raise tp, value, tb""") - -if PY3: - basestring = str - import builtins - import configparser as ConfigParser - from io import StringIO, BytesIO - func_code = lambda o: o.__code__ - func_globals = lambda o: o.__globals__ - im_func = lambda o: o.__func__ - from html.entities import name2codepoint - import http.client as httplib - from http.server import HTTPServer, SimpleHTTPRequestHandler - from http.server import BaseHTTPRequestHandler - iteritems = lambda o: o.items() - long_type = int - maxsize = sys.maxsize - unichr = chr - unicode = str - bytes = bytes - from urllib.error import HTTPError, URLError - import urllib.request as urllib2 - from urllib.request import urlopen, url2pathname, pathname2url - from urllib.parse import ( - urlparse, urlunparse, unquote, splituser, urljoin, urlsplit, - urlunsplit, splittag, - ) - filterfalse = itertools.filterfalse - - def reraise(tp, value, tb=None): - if value.__traceback__ is not tb: - raise value.with_traceback(tb) - raise value diff --git a/lib/python3.4/site-packages/setuptools/depends.py b/lib/python3.4/site-packages/setuptools/depends.py index e87ef3f..9f7c9a3 100644 --- a/lib/python3.4/site-packages/setuptools/depends.py +++ b/lib/python3.4/site-packages/setuptools/depends.py @@ -3,7 +3,8 @@ import imp import marshal from imp import PKG_DIRECTORY, PY_COMPILED, PY_SOURCE, PY_FROZEN from distutils.version import StrictVersion -from setuptools import compat + +from setuptools.extern import six __all__ = [ 'Require', 'find_module', 'get_module_constant', 'extract_constant' @@ -99,7 +100,8 @@ def _iter_code(code): ptr += 3 if op==EXTENDED_ARG: - extended_arg = arg * compat.long_type(65536) + long_type = six.integer_types[-1] + extended_arg = arg * long_type(65536) continue else: diff --git a/lib/python3.4/site-packages/setuptools/dist.py b/lib/python3.4/site-packages/setuptools/dist.py index d7ad465..7785541 100644 --- a/lib/python3.4/site-packages/setuptools/dist.py +++ b/lib/python3.4/site-packages/setuptools/dist.py @@ -13,13 +13,14 @@ from distutils.core import Distribution as _Distribution from distutils.errors import (DistutilsOptionError, DistutilsPlatformError, DistutilsSetupError) +from setuptools.extern import six +from setuptools.extern.six.moves import map +from pkg_resources.extern import packaging + from setuptools.depends import Require -from setuptools.compat import basestring, PY2 from setuptools import windows_support import pkg_resources -packaging = 
pkg_resources.packaging - def _get_unpatched(cls): """Protect against re-patching the distutils if reloaded @@ -138,7 +139,7 @@ def check_entry_points(dist, attr, value): raise DistutilsSetupError(e) def check_test_suite(dist, attr, value): - if not isinstance(value,basestring): + if not isinstance(value, six.string_types): raise DistutilsSetupError("test_suite must be a string") def check_package_data(dist, attr, value): @@ -160,7 +161,7 @@ def check_packages(dist, attr, value): for pkgname in value: if not re.match(r'\w+(\.\w+)*', pkgname): distutils.log.warn( - "WARNING: %r not a valid package name; please use only" + "WARNING: %r not a valid package name; please use only " ".-separated package names in setup.py", pkgname ) @@ -267,8 +268,7 @@ class Distribution(_Distribution): if attrs and 'setup_requires' in attrs: self.fetch_build_eggs(attrs['setup_requires']) for ep in pkg_resources.iter_entry_points('distutils.setup_keywords'): - if not hasattr(self,ep.name): - setattr(self,ep.name,None) + vars(self).setdefault(ep.name, None) _Distribution.__init__(self,attrs) if isinstance(self.metadata.version, numbers.Number): # Some people apparently take "version number" too literally :) @@ -440,6 +440,14 @@ class Distribution(_Distribution): self.cmdclass[ep.name] = cmdclass return _Distribution.print_commands(self) + def get_command_list(self): + for ep in pkg_resources.iter_entry_points('distutils.commands'): + if ep.name not in self.cmdclass: + # don't require extras as the commands won't be invoked + cmdclass = ep.resolve() + self.cmdclass[ep.name] = cmdclass + return _Distribution.get_command_list(self) + def _set_feature(self,name,status): """Set feature's inclusion status""" setattr(self,self._feature_attrname(name),status) @@ -675,7 +683,7 @@ class Distribution(_Distribution): """ import sys - if PY2 or self.help_commands: + if six.PY2 or self.help_commands: return _Distribution.handle_display_options(self, option_order) # Stdout may be StringIO (e.g. in tests) @@ -818,7 +826,7 @@ class Feature: if not self.available: raise DistutilsPlatformError( - self.description+" is required," + self.description+" is required, " "but is not available on this platform" ) diff --git a/lib/python3.4/site-packages/setuptools/extension.py b/lib/python3.4/site-packages/setuptools/extension.py index 35eb7c7..d10609b 100644 --- a/lib/python3.4/site-packages/setuptools/extension.py +++ b/lib/python3.4/site-packages/setuptools/extension.py @@ -5,6 +5,8 @@ import distutils.core import distutils.errors import distutils.extension +from setuptools.extern.six.moves import map + from .dist import _get_unpatched from . import msvc9_support diff --git a/lib/python3.4/site-packages/setuptools/extern/__init__.py b/lib/python3.4/site-packages/setuptools/extern/__init__.py new file mode 100644 index 0000000..6859aa5 --- /dev/null +++ b/lib/python3.4/site-packages/setuptools/extern/__init__.py @@ -0,0 +1,5 @@ +from pkg_resources.extern import VendorImporter + + +names = 'six', +VendorImporter(__name__, names, 'pkg_resources._vendor').install() diff --git a/lib/python3.4/site-packages/setuptools/launch.py b/lib/python3.4/site-packages/setuptools/launch.py new file mode 100644 index 0000000..06e15e1 --- /dev/null +++ b/lib/python3.4/site-packages/setuptools/launch.py @@ -0,0 +1,35 @@ +""" +Launch the Python script on the command line after +setuptools is bootstrapped via import. 
+""" + +# Note that setuptools gets imported implicitly by the +# invocation of this script using python -m setuptools.launch + +import tokenize +import sys + + +def run(): + """ + Run the script in sys.argv[1] as if it had + been invoked naturally. + """ + __builtins__ + script_name = sys.argv[1] + namespace = dict( + __file__ = script_name, + __name__ = '__main__', + __doc__ = None, + ) + sys.argv[:] = sys.argv[1:] + + open_ = getattr(tokenize, 'open', open) + script = open_(script_name).read() + norm_script = script.replace('\\r\\n', '\\n') + code = compile(norm_script, script_name, 'exec') + exec(code, namespace) + + +if __name__ == '__main__': + run() diff --git a/lib/python3.4/site-packages/setuptools/package_index.py b/lib/python3.4/site-packages/setuptools/package_index.py index cabf103..c53343e 100644 --- a/lib/python3.4/site-packages/setuptools/package_index.py +++ b/lib/python3.4/site-packages/setuptools/package_index.py @@ -6,8 +6,17 @@ import shutil import socket import base64 import hashlib +import itertools from functools import wraps +try: + from urllib.parse import splituser +except ImportError: + from urllib2 import splituser + +from setuptools.extern import six +from setuptools.extern.six.moves import urllib, http_client, configparser, map + from pkg_resources import ( CHECKOUT_DIST, Distribution, BINARY_DIST, normalize_path, SOURCE_DIST, require, Environment, find_distributions, safe_name, safe_version, @@ -16,12 +25,6 @@ from pkg_resources import ( from setuptools import ssl_support from distutils import log from distutils.errors import DistutilsError -from setuptools.compat import (urllib2, httplib, StringIO, HTTPError, - urlparse, urlunparse, unquote, splituser, - url2pathname, name2codepoint, - unichr, urljoin, urlsplit, urlunsplit, - ConfigParser) -from setuptools.compat import filterfalse from fnmatch import translate from setuptools.py26compat import strip_fragment from setuptools.py27compat import get_all_headers @@ -68,10 +71,11 @@ def parse_bdist_wininst(name): def egg_info_for_url(url): - scheme, server, path, parameters, query, fragment = urlparse(url) - base = unquote(path.split('/')[-1]) + parts = urllib.parse.urlparse(url) + scheme, server, path, parameters, query, fragment = parts + base = urllib.parse.unquote(path.split('/')[-1]) if server=='sourceforge.net' and base=='download': # XXX Yuck - base = unquote(path.split('/')[-2]) + base = urllib.parse.unquote(path.split('/')[-2]) if '#' in base: base, fragment = base.split('#',1) return base,fragment @@ -157,7 +161,7 @@ def unique_everseen(iterable, key=None): seen = set() seen_add = seen.add if key is None: - for element in filterfalse(seen.__contains__, iterable): + for element in six.moves.filterfalse(seen.__contains__, iterable): seen_add(element) yield element else: @@ -189,14 +193,14 @@ def find_external_links(url, page): rels = set(map(str.strip, rel.lower().split(','))) if 'homepage' in rels or 'download' in rels: for match in HREF.finditer(tag): - yield urljoin(url, htmldecode(match.group(1))) + yield urllib.parse.urljoin(url, htmldecode(match.group(1))) for tag in ("<th>Home Page", "<th>Download URL"): pos = page.find(tag) if pos!=-1: match = HREF.search(page,pos) if match: - yield urljoin(url, htmldecode(match.group(1))) + yield urllib.parse.urljoin(url, htmldecode(match.group(1))) user_agent = "Python-urllib/%s setuptools/%s" % ( sys.version[:3], require('setuptools')[0].version @@ -239,7 +243,7 @@ class HashChecker(ContentChecker): @classmethod def from_url(cls, url): "Construct a (possibly 
null) ContentChecker from a URL" - fragment = urlparse(url)[-1] + fragment = urllib.parse.urlparse(url)[-1] if not fragment: return ContentChecker() match = cls.pattern.search(fragment) @@ -274,7 +278,7 @@ class PackageIndex(Environment): self.to_scan = [] if verify_ssl and ssl_support.is_available and (ca_bundle or ssl_support.find_ca_bundle()): self.opener = ssl_support.opener_for(ca_bundle) - else: self.opener = urllib2.urlopen + else: self.opener = urllib.request.urlopen def process_url(self, url, retrieve=False): """Evaluate a URL as a possible download, and maybe retrieve it""" @@ -311,7 +315,7 @@ class PackageIndex(Environment): base = f.url # handle redirects page = f.read() if not isinstance(page, str): # We are in Python 3 and got bytes. We want str. - if isinstance(f, HTTPError): + if isinstance(f, urllib.error.HTTPError): # Errors have no charset, assume latin1: charset = 'latin-1' else: @@ -319,7 +323,7 @@ class PackageIndex(Environment): page = page.decode(charset, "ignore") f.close() for match in HREF.finditer(page): - link = urljoin(base, htmldecode(match.group(1))) + link = urllib.parse.urljoin(base, htmldecode(match.group(1))) self.process_url(link) if url.startswith(self.index_url) and getattr(f,'code',None)!=404: page = self.process_index(url, page) @@ -342,7 +346,7 @@ class PackageIndex(Environment): def url_ok(self, url, fatal=False): s = URL_SCHEME(url) - if (s and s.group(1).lower()=='file') or self.allows(urlparse(url)[1]): + if (s and s.group(1).lower()=='file') or self.allows(urllib.parse.urlparse(url)[1]): return True msg = ("\nNote: Bypassing %s (disallowed host; see " "http://bit.ly/1dg9ijs for details).\n") @@ -352,20 +356,30 @@ class PackageIndex(Environment): self.warn(msg, url) def scan_egg_links(self, search_path): - for item in search_path: - if os.path.isdir(item): - for entry in os.listdir(item): - if entry.endswith('.egg-link'): - self.scan_egg_link(item, entry) + dirs = filter(os.path.isdir, search_path) + egg_links = ( + (path, entry) + for path in dirs + for entry in os.listdir(path) + if entry.endswith('.egg-link') + ) + list(itertools.starmap(self.scan_egg_link, egg_links)) def scan_egg_link(self, path, entry): - lines = [_f for _f in map(str.strip, - open(os.path.join(path, entry))) if _f] - if len(lines)==2: - for dist in find_distributions(os.path.join(path, lines[0])): - dist.location = os.path.join(path, *lines) - dist.precedence = SOURCE_DIST - self.add(dist) + with open(os.path.join(path, entry)) as raw_lines: + # filter non-empty lines + lines = list(filter(None, map(str.strip, raw_lines))) + + if len(lines) != 2: + # format is not recognized; punt + return + + egg_path, setup_path = lines + + for dist in find_distributions(os.path.join(path, egg_path)): + dist.location = os.path.join(path, *lines) + dist.precedence = SOURCE_DIST + self.add(dist) def process_index(self,url,page): """Process the contents of a PyPI page""" @@ -373,7 +387,7 @@ class PackageIndex(Environment): # Process a URL to see if it's for a package page if link.startswith(self.index_url): parts = list(map( - unquote, link[len(self.index_url):].split('/') + urllib.parse.unquote, link[len(self.index_url):].split('/') )) if len(parts)==2 and '#' not in parts[1]: # it's a package page, sanitize and index it @@ -386,7 +400,7 @@ class PackageIndex(Environment): # process an index page into the package-page index for match in HREF.finditer(page): try: - scan(urljoin(url, htmldecode(match.group(1)))) + scan(urllib.parse.urljoin(url, htmldecode(match.group(1)))) except 
ValueError: pass @@ -662,7 +676,7 @@ class PackageIndex(Environment): try: checker = HashChecker.from_url(url) fp = self.open_url(strip_fragment(url)) - if isinstance(fp, HTTPError): + if isinstance(fp, urllib.error.HTTPError): raise DistutilsError( "Can't download %s: %s %s" % (url, fp.code,fp.msg) ) @@ -698,21 +712,21 @@ class PackageIndex(Environment): return local_open(url) try: return open_with_auth(url, self.opener) - except (ValueError, httplib.InvalidURL) as v: + except (ValueError, http_client.InvalidURL) as v: msg = ' '.join([str(arg) for arg in v.args]) if warning: self.warn(warning, msg) else: raise DistutilsError('%s %s' % (url, msg)) - except urllib2.HTTPError as v: + except urllib.error.HTTPError as v: return v - except urllib2.URLError as v: + except urllib.error.URLError as v: if warning: self.warn(warning, v.reason) else: raise DistutilsError("Download error for %s: %s" % (url, v.reason)) - except httplib.BadStatusLine as v: + except http_client.BadStatusLine as v: if warning: self.warn(warning, v.line) else: @@ -721,7 +735,7 @@ class PackageIndex(Environment): 'down, %s' % (url, v.line) ) - except httplib.HTTPException as v: + except http_client.HTTPException as v: if warning: self.warn(warning, v) else: @@ -752,7 +766,7 @@ class PackageIndex(Environment): elif scheme.startswith('hg+'): return self._download_hg(url, filename) elif scheme=='file': - return url2pathname(urlparse(url)[2]) + return urllib.request.url2pathname(urllib.parse.urlparse(url)[2]) else: self.url_ok(url, True) # raises error if not allowed return self._attempt_download(url, filename) @@ -786,7 +800,7 @@ class PackageIndex(Environment): url = url.split('#',1)[0] # remove any fragment for svn's sake creds = '' if url.lower().startswith('svn:') and '@' in url: - scheme, netloc, path, p, q, f = urlparse(url) + scheme, netloc, path, p, q, f = urllib.parse.urlparse(url) if not netloc and path.startswith('//') and '/' in path[2:]: netloc, path = path[2:].split('/',1) auth, host = splituser(netloc) @@ -797,14 +811,15 @@ class PackageIndex(Environment): else: creds = " --username="+auth netloc = host - url = urlunparse((scheme, netloc, url, p, q, f)) + parts = scheme, netloc, url, p, q, f + url = urllib.parse.urlunparse(parts) self.info("Doing subversion checkout from %s to %s", url, filename) os.system("svn checkout%s -q %s %s" % (creds, url, filename)) return filename @staticmethod def _vcs_split_rev_from_url(url, pop_prefix=False): - scheme, netloc, path, query, frag = urlsplit(url) + scheme, netloc, path, query, frag = urllib.parse.urlsplit(url) scheme = scheme.split('+', 1)[-1] @@ -816,7 +831,7 @@ class PackageIndex(Environment): path, rev = path.rsplit('@', 1) # Also, discard fragment - url = urlunsplit((scheme, netloc, path, query, '')) + url = urllib.parse.urlunsplit((scheme, netloc, path, query, '')) return url, rev @@ -868,7 +883,7 @@ entity_sub = re.compile(r'&(#(\d+|x[\da-fA-F]+)|[\w.:-]+);?').sub def uchr(c): if not isinstance(c, int): return c - if c>255: return unichr(c) + if c>255: return six.unichr(c) return chr(c) def decode_entity(match): @@ -878,7 +893,7 @@ def decode_entity(match): elif what.startswith('#'): what = int(what[1:]) else: - what = name2codepoint.get(what, match.group(0)) + what = six.moves.html_entities.name2codepoint.get(what, match.group(0)) return uchr(what) def htmldecode(text): @@ -909,7 +924,7 @@ def _encode_auth(auth): >>> chr(10) in str(_encode_auth(long_auth)) False """ - auth_s = unquote(auth) + auth_s = urllib.parse.unquote(auth) # convert to bytes auth_bytes = 
auth_s.encode() # use the legacy interface for Python 2.3 support @@ -934,14 +949,14 @@ class Credential(object): def __str__(self): return '%(username)s:%(password)s' % vars(self) -class PyPIConfig(ConfigParser.ConfigParser): +class PyPIConfig(configparser.RawConfigParser): def __init__(self): """ Load from ~/.pypirc """ defaults = dict.fromkeys(['username', 'password', 'repository'], '') - ConfigParser.ConfigParser.__init__(self, defaults) + configparser.RawConfigParser.__init__(self, defaults) rc = os.path.join(os.path.expanduser('~'), '.pypirc') if os.path.exists(rc): @@ -973,15 +988,15 @@ class PyPIConfig(ConfigParser.ConfigParser): return cred -def open_with_auth(url, opener=urllib2.urlopen): +def open_with_auth(url, opener=urllib.request.urlopen): """Open a urllib2 request, handling HTTP authentication""" - scheme, netloc, path, params, query, frag = urlparse(url) + scheme, netloc, path, params, query, frag = urllib.parse.urlparse(url) # Double scheme does not raise on Mac OS X as revealed by a # failing test. We would expect "nonnumeric port". Refs #20. if netloc.endswith(':'): - raise httplib.InvalidURL("nonnumeric port: ''") + raise http_client.InvalidURL("nonnumeric port: ''") if scheme in ('http', 'https'): auth, host = splituser(netloc) @@ -997,11 +1012,12 @@ def open_with_auth(url, opener=urllib2.urlopen): if auth: auth = "Basic " + _encode_auth(auth) - new_url = urlunparse((scheme,host,path,params,query,frag)) - request = urllib2.Request(new_url) + parts = scheme, host, path, params, query, frag + new_url = urllib.parse.urlunparse(parts) + request = urllib.request.Request(new_url) request.add_header("Authorization", auth) else: - request = urllib2.Request(url) + request = urllib.request.Request(url) request.add_header('User-Agent', user_agent) fp = opener(request) @@ -1009,9 +1025,10 @@ def open_with_auth(url, opener=urllib2.urlopen): if auth: # Put authentication info back into request URL if same host, # so that links found on the page will work - s2, h2, path2, param2, query2, frag2 = urlparse(fp.url) + s2, h2, path2, param2, query2, frag2 = urllib.parse.urlparse(fp.url) if s2==scheme and h2==host: - fp.url = urlunparse((s2,netloc,path2,param2,query2,frag2)) + parts = s2, netloc, path2, param2, query2, frag2 + fp.url = urllib.parse.urlunparse(parts) return fp @@ -1024,26 +1041,29 @@ def fix_sf_url(url): def local_open(url): """Read a local path, with special support for directories""" - scheme, server, path, param, query, frag = urlparse(url) - filename = url2pathname(path) + scheme, server, path, param, query, frag = urllib.parse.urlparse(url) + filename = urllib.request.url2pathname(path) if os.path.isfile(filename): - return urllib2.urlopen(url) + return urllib.request.urlopen(url) elif path.endswith('/') and os.path.isdir(filename): files = [] for f in os.listdir(filename): - if f=='index.html': - with open(os.path.join(filename,f),'r') as fp: + filepath = os.path.join(filename, f) + if f == 'index.html': + with open(filepath, 'r') as fp: body = fp.read() break - elif os.path.isdir(os.path.join(filename,f)): - f+='/' - files.append("<a href=%r>%s</a>" % (f,f)) + elif os.path.isdir(filepath): + f += '/' + files.append('<a href="{name}">{name}</a>'.format(name=f)) else: - body = ("<html><head><title>%s" % url) + \ - "%s" % '\n'.join(files) + tmpl = ("{url}" + "{files}") + body = tmpl.format(url=url, files='\n'.join(files)) status, message = 200, "OK" else: status, message, body = 404, "Path not found", "Not found" headers = {'content-type': 'text/html'} - return 
HTTPError(url, status, message, headers, StringIO(body)) + body_stream = six.StringIO(body) + return urllib.error.HTTPError(url, status, message, headers, body_stream) diff --git a/lib/python3.4/site-packages/setuptools/py26compat.py b/lib/python3.4/site-packages/setuptools/py26compat.py index 738b0cc..e52bd85 100644 --- a/lib/python3.4/site-packages/setuptools/py26compat.py +++ b/lib/python3.4/site-packages/setuptools/py26compat.py @@ -4,7 +4,10 @@ Compatibility Support for Python 2.6 and earlier import sys -from setuptools.compat import splittag +try: + from urllib.parse import splittag +except ImportError: + from urllib import splittag def strip_fragment(url): """ diff --git a/lib/python3.4/site-packages/setuptools/py31compat.py b/lib/python3.4/site-packages/setuptools/py31compat.py index c487ac0..8fe6dd9 100644 --- a/lib/python3.4/site-packages/setuptools/py31compat.py +++ b/lib/python3.4/site-packages/setuptools/py31compat.py @@ -20,7 +20,7 @@ except ImportError: import shutil import tempfile class TemporaryDirectory(object): - """" + """ Very simple temporary directory context manager. Will try to delete afterward, but will also ignore OS and similar errors on deletion. diff --git a/lib/python3.4/site-packages/setuptools/sandbox.py b/lib/python3.4/site-packages/setuptools/sandbox.py index 213cebf..23e296b 100644 --- a/lib/python3.4/site-packages/setuptools/sandbox.py +++ b/lib/python3.4/site-packages/setuptools/sandbox.py @@ -8,6 +8,9 @@ import re import contextlib import pickle +from setuptools.extern import six +from setuptools.extern.six.moves import builtins, map + import pkg_resources if sys.platform.startswith('java'): @@ -22,9 +25,6 @@ _open = open from distutils.errors import DistutilsError from pkg_resources import working_set -from setuptools import compat -from setuptools.compat import builtins - __all__ = [ "AbstractSandbox", "DirectorySandbox", "SandboxViolation", "run_setup", ] @@ -98,8 +98,8 @@ class UnpickleableException(Exception): """ An exception representing another Exception that could not be pickled. """ - @classmethod - def dump(cls, type, exc): + @staticmethod + def dump(type, exc): """ Always return a dumped (pickled) type and exc. If exc can't be pickled, wrap it in UnpickleableException first. 
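# dump() above guarantees the sandbox can always hand an exception back to
# the parent frame: if the original exception will not pickle, it is swapped
# for a picklable wrapper carrying its repr(). A self-contained sketch of
# that round trip (Unpickleable and dump_exc are illustrative stand-ins for
# UnpickleableException and its dump method):

import pickle

class Unpickleable(Exception):
    pass

def dump_exc(tp, exc):
    try:
        return pickle.dumps(tp), pickle.dumps(exc)
    except Exception:
        # repr() always pickles, so fall back to a wrapper built from it
        return dump_exc(Unpickleable, Unpickleable(repr(exc)))

class Hostile(Exception):
    def __reduce__(self):  # deliberately breaks pickling
        raise TypeError('cannot pickle this')

saved = dump_exc(Hostile, Hostile('boom'))
tp, exc = map(pickle.loads, saved)
assert tp is Unpickleable and 'boom' in str(exc)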
@@ -107,6 +107,8 @@ class UnpickleableException(Exception): try: return pickle.dumps(type), pickle.dumps(exc) except Exception: + # get UnpickleableException inside the sandbox + from setuptools.sandbox import UnpickleableException as cls return cls.dump(cls, cls(repr(exc))) @@ -136,7 +138,7 @@ class ExceptionSaver: return type, exc = map(pickle.loads, self._saved) - compat.reraise(type, exc, self._tb) + six.reraise(type, exc, self._tb) @contextlib.contextmanager @@ -205,8 +207,12 @@ def _needs_hiding(mod_name): True >>> _needs_hiding('distutils') True + >>> _needs_hiding('os') + False + >>> _needs_hiding('Cython') + True """ - pattern = re.compile('(setuptools|pkg_resources|distutils)(\.|$)') + pattern = re.compile('(setuptools|pkg_resources|distutils|Cython)(\.|$)') return bool(pattern.match(mod_name)) @@ -382,6 +388,7 @@ class DirectorySandbox(AbstractSandbox): AbstractSandbox.__init__(self) def _violation(self, operation, *args, **kw): + from setuptools.sandbox import SandboxViolation raise SandboxViolation(operation, args, kw) if _file: diff --git a/lib/python3.4/site-packages/setuptools/ssl_support.py b/lib/python3.4/site-packages/setuptools/ssl_support.py index cc7db06..657197c 100644 --- a/lib/python3.4/site-packages/setuptools/ssl_support.py +++ b/lib/python3.4/site-packages/setuptools/ssl_support.py @@ -3,9 +3,10 @@ import socket import atexit import re +from setuptools.extern.six.moves import urllib, http_client, map + import pkg_resources from pkg_resources import ResolutionError, ExtractionError -from setuptools.compat import urllib2 try: import ssl @@ -24,20 +25,15 @@ cert_paths = """ /usr/local/share/certs/ca-root.crt /etc/ssl/cert.pem /System/Library/OpenSSL/certs/cert.pem +/usr/local/share/certs/ca-root-nss.crt """.strip().split() -HTTPSHandler = HTTPSConnection = object - -for what, where in ( - ('HTTPSHandler', ['urllib2','urllib.request']), - ('HTTPSConnection', ['httplib', 'http.client']), -): - for module in where: - try: - exec("from %s import %s" % (module, what)) - except ImportError: - pass +try: + HTTPSHandler = urllib.request.HTTPSHandler + HTTPSConnection = http_client.HTTPSConnection +except AttributeError: + HTTPSHandler = HTTPSConnection = object is_available = ssl is not None and object not in (HTTPSHandler, HTTPSConnection) @@ -198,7 +194,7 @@ class VerifyingHTTPSConn(HTTPSConnection): def opener_for(ca_bundle=None): """Get a urlopen() replacement that uses ca_bundle for verification""" - return urllib2.build_opener( + return urllib.request.build_opener( VerifyingHTTPSHandler(ca_bundle or find_ca_bundle()) ).open @@ -223,6 +219,12 @@ def get_win_certfile(): self.addcerts(certs) atexit.register(self.close) + def close(self): + try: + super(MyCertFile, self).close() + except OSError: + pass + _wincerts = MyCertFile(stores=['CA', 'ROOT']) return _wincerts.name diff --git a/lib/python3.4/site-packages/setuptools/unicode_utils.py b/lib/python3.4/site-packages/setuptools/unicode_utils.py index d2de941..ffab3e2 100644 --- a/lib/python3.4/site-packages/setuptools/unicode_utils.py +++ b/lib/python3.4/site-packages/setuptools/unicode_utils.py @@ -1,11 +1,11 @@ import unicodedata import sys -from setuptools.compat import unicode as decoded_string +from setuptools.extern import six # HFS Plus uses decomposed UTF-8 def decompose(path): - if isinstance(path, decoded_string): + if isinstance(path, six.text_type): return unicodedata.normalize('NFD', path) try: path = path.decode('utf-8') @@ -22,11 +22,13 @@ def filesys_decode(path): NONE when no expected encoding works 
""" - fs_enc = sys.getfilesystemencoding() - if isinstance(path, decoded_string): + if isinstance(path, six.text_type): return path - for enc in (fs_enc, "utf-8"): + fs_enc = sys.getfilesystemencoding() or 'utf-8' + candidates = fs_enc, 'utf-8' + + for enc in candidates: try: return path.decode(enc) except UnicodeDecodeError: diff --git a/lib/python3.4/site-packages/setuptools/version.py b/lib/python3.4/site-packages/setuptools/version.py index 1be1f7e..4494728 100644 --- a/lib/python3.4/site-packages/setuptools/version.py +++ b/lib/python3.4/site-packages/setuptools/version.py @@ -1 +1 @@ -__version__ = '18.5' +__version__ = '20.1.1' diff --git a/lib/python3.4/site-packages/six-1.10.0.dist-info/INSTALLER b/lib/python3.4/site-packages/six-1.10.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/lib/python3.4/site-packages/six-1.10.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/lib/python3.4/site-packages/six-1.10.0.dist-info/RECORD b/lib/python3.4/site-packages/six-1.10.0.dist-info/RECORD index 6350c4e..70c35b3 100644 --- a/lib/python3.4/site-packages/six-1.10.0.dist-info/RECORD +++ b/lib/python3.4/site-packages/six-1.10.0.dist-info/RECORD @@ -5,4 +5,5 @@ six-1.10.0.dist-info/RECORD,, six-1.10.0.dist-info/WHEEL,sha256=GrqQvamwgBV4nLoJe0vhYRSWzWsx7xjlt74FT0SWYfE,110 six-1.10.0.dist-info/metadata.json,sha256=jtOeeTBubYDChl_5Ql5ZPlKoHgg6rdqRIjOz1e5Ek2U,658 six-1.10.0.dist-info/top_level.txt,sha256=_iVH_iYEtEXnD8nYGQYpYFUvkUW9sEO1GYbkeKSAais,4 +/var/lib/lxc/openmedialibrary/rootfs/srv/client/platform_linux64/p34/lib/python3.4/site-packages/six-1.10.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 __pycache__/six.cpython-34.pyc,, diff --git a/lib/python3.4/site-packages/sqlalchemy/__init__.py b/lib/python3.4/site-packages/sqlalchemy/__init__.py index 2815b1f..3008120 100644 --- a/lib/python3.4/site-packages/sqlalchemy/__init__.py +++ b/lib/python3.4/site-packages/sqlalchemy/__init__.py @@ -1,5 +1,5 @@ # sqlalchemy/__init__.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -15,6 +15,7 @@ from .sql import ( case, cast, collate, + column, delete, desc, distinct, @@ -24,6 +25,7 @@ from .sql import ( extract, false, func, + funcfilter, insert, intersect, intersect_all, @@ -39,6 +41,7 @@ from .sql import ( over, select, subquery, + table, text, true, tuple_, @@ -117,7 +120,7 @@ from .schema import ( from .inspection import inspect from .engine import create_engine, engine_from_config -__version__ = '0.9.7' +__version__ = '1.0.12' def __go(lcls): diff --git a/lib/python3.4/site-packages/sqlalchemy/connectors/__init__.py b/lib/python3.4/site-packages/sqlalchemy/connectors/__init__.py index 9253a21..d72c390 100644 --- a/lib/python3.4/site-packages/sqlalchemy/connectors/__init__.py +++ b/lib/python3.4/site-packages/sqlalchemy/connectors/__init__.py @@ -1,5 +1,5 @@ # connectors/__init__.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/python3.4/site-packages/sqlalchemy/connectors/mxodbc.py b/lib/python3.4/site-packages/sqlalchemy/connectors/mxodbc.py index 851dc11..9fc0ce6 100644 --- a/lib/python3.4/site-packages/sqlalchemy/connectors/mxodbc.py +++ b/lib/python3.4/site-packages/sqlalchemy/connectors/mxodbc.py @@ -1,5 +1,5 @@ # 
connectors/mxodbc.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/python3.4/site-packages/sqlalchemy/connectors/mysqldb.py b/lib/python3.4/site-packages/sqlalchemy/connectors/mysqldb.py deleted file mode 100644 index f936825..0000000 --- a/lib/python3.4/site-packages/sqlalchemy/connectors/mysqldb.py +++ /dev/null @@ -1,145 +0,0 @@ -# connectors/mysqldb.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors -# -# -# This module is part of SQLAlchemy and is released under -# the MIT License: http://www.opensource.org/licenses/mit-license.php - -"""Define behaviors common to MySQLdb dialects. - -Currently includes MySQL and Drizzle. - -""" - -from . import Connector -from ..engine import base as engine_base, default -from ..sql import operators as sql_operators -from .. import exc, log, schema, sql, types as sqltypes, util, processors -import re - - -# the subclassing of Connector by all classes -# here is not strictly necessary - - -class MySQLDBExecutionContext(Connector): - - @property - def rowcount(self): - if hasattr(self, '_rowcount'): - return self._rowcount - else: - return self.cursor.rowcount - - -class MySQLDBCompiler(Connector): - def visit_mod_binary(self, binary, operator, **kw): - return self.process(binary.left, **kw) + " %% " + \ - self.process(binary.right, **kw) - - def post_process_text(self, text): - return text.replace('%', '%%') - - -class MySQLDBIdentifierPreparer(Connector): - - def _escape_identifier(self, value): - value = value.replace(self.escape_quote, self.escape_to_quote) - return value.replace("%", "%%") - - -class MySQLDBConnector(Connector): - driver = 'mysqldb' - supports_unicode_statements = False - supports_sane_rowcount = True - supports_sane_multi_rowcount = True - - supports_native_decimal = True - - default_paramstyle = 'format' - - @classmethod - def dbapi(cls): - # is overridden when pymysql is used - return __import__('MySQLdb') - - - def do_executemany(self, cursor, statement, parameters, context=None): - rowcount = cursor.executemany(statement, parameters) - if context is not None: - context._rowcount = rowcount - - def create_connect_args(self, url): - opts = url.translate_connect_args(database='db', username='user', - password='passwd') - opts.update(url.query) - - util.coerce_kw_type(opts, 'compress', bool) - util.coerce_kw_type(opts, 'connect_timeout', int) - util.coerce_kw_type(opts, 'read_timeout', int) - util.coerce_kw_type(opts, 'client_flag', int) - util.coerce_kw_type(opts, 'local_infile', int) - # Note: using either of the below will cause all strings to be returned - # as Unicode, both in raw SQL operations and with column types like - # String and MSString. - util.coerce_kw_type(opts, 'use_unicode', bool) - util.coerce_kw_type(opts, 'charset', str) - - # Rich values 'cursorclass' and 'conv' are not supported via - # query string. - - ssl = {} - keys = ['ssl_ca', 'ssl_key', 'ssl_cert', 'ssl_capath', 'ssl_cipher'] - for key in keys: - if key in opts: - ssl[key[4:]] = opts[key] - util.coerce_kw_type(ssl, key[4:], str) - del opts[key] - if ssl: - opts['ssl'] = ssl - - # FOUND_ROWS must be set in CLIENT_FLAGS to enable - # supports_sane_rowcount. 
- client_flag = opts.get('client_flag', 0) - if self.dbapi is not None: - try: - CLIENT_FLAGS = __import__( - self.dbapi.__name__ + '.constants.CLIENT' - ).constants.CLIENT - client_flag |= CLIENT_FLAGS.FOUND_ROWS - except (AttributeError, ImportError): - self.supports_sane_rowcount = False - opts['client_flag'] = client_flag - return [[], opts] - - def _get_server_version_info(self, connection): - dbapi_con = connection.connection - version = [] - r = re.compile('[.\-]') - for n in r.split(dbapi_con.get_server_info()): - try: - version.append(int(n)) - except ValueError: - version.append(n) - return tuple(version) - - def _extract_error_code(self, exception): - return exception.args[0] - - def _detect_charset(self, connection): - """Sniff out the character set in use for connection results.""" - - try: - # note: the SQL here would be - # "SHOW VARIABLES LIKE 'character_set%%'" - cset_name = connection.connection.character_set_name - except AttributeError: - util.warn( - "No 'character_set_name' can be detected with " - "this MySQL-Python version; " - "please upgrade to a recent version of MySQL-Python. " - "Assuming latin1.") - return 'latin1' - else: - return cset_name() - diff --git a/lib/python3.4/site-packages/sqlalchemy/connectors/pyodbc.py b/lib/python3.4/site-packages/sqlalchemy/connectors/pyodbc.py index ef72c80..68bbcc4 100644 --- a/lib/python3.4/site-packages/sqlalchemy/connectors/pyodbc.py +++ b/lib/python3.4/site-packages/sqlalchemy/connectors/pyodbc.py @@ -1,5 +1,5 @@ # connectors/pyodbc.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -26,7 +26,7 @@ class PyODBCConnector(Connector): supports_native_decimal = True default_paramstyle = 'named' - # for non-DSN connections, this should + # for non-DSN connections, this *may* be used to # hold the desired driver name pyodbc_driver_name = None @@ -75,10 +75,21 @@ class PyODBCConnector(Connector): if 'port' in keys and 'port' not in query: port = ',%d' % int(keys.pop('port')) - connectors = ["DRIVER={%s}" % - keys.pop('driver', self.pyodbc_driver_name), - 'Server=%s%s' % (keys.pop('host', ''), port), - 'Database=%s' % keys.pop('database', '')] + connectors = [] + driver = keys.pop('driver', self.pyodbc_driver_name) + if driver is None: + util.warn( + "No driver name specified; " + "this is expected by PyODBC when using " + "DSN-less connections") + else: + connectors.append("DRIVER={%s}" % driver) + + connectors.extend( + [ + 'Server=%s%s' % (keys.pop('host', ''), port), + 'Database=%s' % keys.pop('database', '') + ]) user = keys.pop("user", None) if user: diff --git a/lib/python3.4/site-packages/sqlalchemy/connectors/zxJDBC.py b/lib/python3.4/site-packages/sqlalchemy/connectors/zxJDBC.py index c0af742..e7b2dc9 100644 --- a/lib/python3.4/site-packages/sqlalchemy/connectors/zxJDBC.py +++ b/lib/python3.4/site-packages/sqlalchemy/connectors/zxJDBC.py @@ -1,5 +1,5 @@ # connectors/zxJDBC.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/python3.4/site-packages/sqlalchemy/cprocessors.cpython-34m.so b/lib/python3.4/site-packages/sqlalchemy/cprocessors.cpython-34m.so index 32a79b0..414727a 100755 Binary files a/lib/python3.4/site-packages/sqlalchemy/cprocessors.cpython-34m.so and 
b/lib/python3.4/site-packages/sqlalchemy/cprocessors.cpython-34m.so differ diff --git a/lib/python3.4/site-packages/sqlalchemy/cprocessors.cpython-35m-x86_64-linux-gnu.so b/lib/python3.4/site-packages/sqlalchemy/cprocessors.cpython-35m-x86_64-linux-gnu.so new file mode 100755 index 0000000..edcd4f3 Binary files /dev/null and b/lib/python3.4/site-packages/sqlalchemy/cprocessors.cpython-35m-x86_64-linux-gnu.so differ diff --git a/lib/python3.4/site-packages/sqlalchemy/cresultproxy.cpython-34m.so b/lib/python3.4/site-packages/sqlalchemy/cresultproxy.cpython-34m.so index 69e233f..dabde30 100755 Binary files a/lib/python3.4/site-packages/sqlalchemy/cresultproxy.cpython-34m.so and b/lib/python3.4/site-packages/sqlalchemy/cresultproxy.cpython-34m.so differ diff --git a/lib/python3.4/site-packages/sqlalchemy/cresultproxy.cpython-35m-x86_64-linux-gnu.so b/lib/python3.4/site-packages/sqlalchemy/cresultproxy.cpython-35m-x86_64-linux-gnu.so new file mode 100755 index 0000000..5b32295 Binary files /dev/null and b/lib/python3.4/site-packages/sqlalchemy/cresultproxy.cpython-35m-x86_64-linux-gnu.so differ diff --git a/lib/python3.4/site-packages/sqlalchemy/cutils.cpython-34m.so b/lib/python3.4/site-packages/sqlalchemy/cutils.cpython-34m.so index 34d2939..429e17c 100755 Binary files a/lib/python3.4/site-packages/sqlalchemy/cutils.cpython-34m.so and b/lib/python3.4/site-packages/sqlalchemy/cutils.cpython-34m.so differ diff --git a/lib/python3.4/site-packages/sqlalchemy/cutils.cpython-35m-x86_64-linux-gnu.so b/lib/python3.4/site-packages/sqlalchemy/cutils.cpython-35m-x86_64-linux-gnu.so new file mode 100755 index 0000000..362e2b4 Binary files /dev/null and b/lib/python3.4/site-packages/sqlalchemy/cutils.cpython-35m-x86_64-linux-gnu.so differ diff --git a/lib/python3.4/site-packages/sqlalchemy/databases/__init__.py b/lib/python3.4/site-packages/sqlalchemy/databases/__init__.py index 19a7ad6..0bfc937 100644 --- a/lib/python3.4/site-packages/sqlalchemy/databases/__init__.py +++ b/lib/python3.4/site-packages/sqlalchemy/databases/__init__.py @@ -1,5 +1,5 @@ # databases/__init__.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -13,7 +13,6 @@ from ..dialects.sqlite import base as sqlite from ..dialects.postgresql import base as postgresql postgres = postgresql from ..dialects.mysql import base as mysql -from ..dialects.drizzle import base as drizzle from ..dialects.oracle import base as oracle from ..dialects.firebird import base as firebird from ..dialects.mssql import base as mssql @@ -21,7 +20,6 @@ from ..dialects.sybase import base as sybase __all__ = ( - 'drizzle', 'firebird', 'mssql', 'mysql', diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/__init__.py b/lib/python3.4/site-packages/sqlalchemy/dialects/__init__.py index f847a61..5653f5b 100644 --- a/lib/python3.4/site-packages/sqlalchemy/dialects/__init__.py +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/__init__.py @@ -1,12 +1,11 @@ # dialects/__init__.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php __all__ = ( - 'drizzle', 'firebird', 'mssql', 'mysql', diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/drizzle/__init__.py 
b/lib/python3.4/site-packages/sqlalchemy/dialects/drizzle/__init__.py deleted file mode 100644 index 1392b8e..0000000 --- a/lib/python3.4/site-packages/sqlalchemy/dialects/drizzle/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -from sqlalchemy.dialects.drizzle import base, mysqldb - -base.dialect = mysqldb.dialect - -from sqlalchemy.dialects.drizzle.base import \ - BIGINT, BINARY, BLOB, \ - BOOLEAN, CHAR, DATE, \ - DATETIME, DECIMAL, DOUBLE, \ - ENUM, FLOAT, INTEGER, \ - NUMERIC, REAL, TEXT, \ - TIME, TIMESTAMP, VARBINARY, \ - VARCHAR, dialect - -__all__ = ( - 'BIGINT', 'BINARY', 'BLOB', - 'BOOLEAN', 'CHAR', 'DATE', - 'DATETIME', 'DECIMAL', 'DOUBLE', - 'ENUM', 'FLOAT', 'INTEGER', - 'NUMERIC', 'REAL', 'TEXT', - 'TIME', 'TIMESTAMP', 'VARBINARY', - 'VARCHAR', 'dialect' -) diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/drizzle/base.py b/lib/python3.4/site-packages/sqlalchemy/dialects/drizzle/base.py deleted file mode 100644 index ea4e9bf..0000000 --- a/lib/python3.4/site-packages/sqlalchemy/dialects/drizzle/base.py +++ /dev/null @@ -1,499 +0,0 @@ -# drizzle/base.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors -# -# Copyright (C) 2010-2011 Monty Taylor -# -# This module is part of SQLAlchemy and is released under -# the MIT License: http://www.opensource.org/licenses/mit-license.php - - -""" - -.. dialect:: drizzle - :name: Drizzle - -Drizzle is a variant of MySQL. Unlike MySQL, Drizzle's default storage engine -is InnoDB (transactions, foreign-keys) rather than MyISAM. For more -`Notable Differences `_, visit -the `Drizzle Documentation `_. - -The SQLAlchemy Drizzle dialect leans heavily on the MySQL dialect, so much of -the :doc:`SQLAlchemy MySQL ` documentation is also relevant. - - -""" - -from sqlalchemy import exc -from sqlalchemy import log -from sqlalchemy import types as sqltypes -from sqlalchemy.engine import reflection -from sqlalchemy.dialects.mysql import base as mysql_dialect -from sqlalchemy.types import DATE, DATETIME, BOOLEAN, TIME, \ - BLOB, BINARY, VARBINARY - - -class _NumericType(object): - """Base for Drizzle numeric types.""" - - def __init__(self, **kw): - super(_NumericType, self).__init__(**kw) - - -class _FloatType(_NumericType, sqltypes.Float): - def __init__(self, precision=None, scale=None, asdecimal=True, **kw): - if isinstance(self, (REAL, DOUBLE)) and \ - ( - (precision is None and scale is not None) or - (precision is not None and scale is None) - ): - raise exc.ArgumentError( - "You must specify both precision and scale or omit " - "both altogether.") - - super(_FloatType, self).__init__(precision=precision, - asdecimal=asdecimal, **kw) - self.scale = scale - - -class _StringType(mysql_dialect._StringType): - """Base for Drizzle string types.""" - - def __init__(self, collation=None, binary=False, **kw): - kw['national'] = False - super(_StringType, self).__init__(collation=collation, binary=binary, - **kw) - - -class NUMERIC(_NumericType, sqltypes.NUMERIC): - """Drizzle NUMERIC type.""" - - __visit_name__ = 'NUMERIC' - - def __init__(self, precision=None, scale=None, asdecimal=True, **kw): - """Construct a NUMERIC. - - :param precision: Total digits in this number. If scale and precision - are both None, values are stored to limits allowed by the server. - - :param scale: The number of digits after the decimal point. 
- - """ - - super(NUMERIC, self).__init__(precision=precision, scale=scale, - asdecimal=asdecimal, **kw) - - -class DECIMAL(_NumericType, sqltypes.DECIMAL): - """Drizzle DECIMAL type.""" - - __visit_name__ = 'DECIMAL' - - def __init__(self, precision=None, scale=None, asdecimal=True, **kw): - """Construct a DECIMAL. - - :param precision: Total digits in this number. If scale and precision - are both None, values are stored to limits allowed by the server. - - :param scale: The number of digits after the decimal point. - - """ - super(DECIMAL, self).__init__(precision=precision, scale=scale, - asdecimal=asdecimal, **kw) - - -class DOUBLE(_FloatType): - """Drizzle DOUBLE type.""" - - __visit_name__ = 'DOUBLE' - - def __init__(self, precision=None, scale=None, asdecimal=True, **kw): - """Construct a DOUBLE. - - :param precision: Total digits in this number. If scale and precision - are both None, values are stored to limits allowed by the server. - - :param scale: The number of digits after the decimal point. - - """ - - super(DOUBLE, self).__init__(precision=precision, scale=scale, - asdecimal=asdecimal, **kw) - - -class REAL(_FloatType, sqltypes.REAL): - """Drizzle REAL type.""" - - __visit_name__ = 'REAL' - - def __init__(self, precision=None, scale=None, asdecimal=True, **kw): - """Construct a REAL. - - :param precision: Total digits in this number. If scale and precision - are both None, values are stored to limits allowed by the server. - - :param scale: The number of digits after the decimal point. - - """ - - super(REAL, self).__init__(precision=precision, scale=scale, - asdecimal=asdecimal, **kw) - - -class FLOAT(_FloatType, sqltypes.FLOAT): - """Drizzle FLOAT type.""" - - __visit_name__ = 'FLOAT' - - def __init__(self, precision=None, scale=None, asdecimal=False, **kw): - """Construct a FLOAT. - - :param precision: Total digits in this number. If scale and precision - are both None, values are stored to limits allowed by the server. - - :param scale: The number of digits after the decimal point. - - """ - - super(FLOAT, self).__init__(precision=precision, scale=scale, - asdecimal=asdecimal, **kw) - - def bind_processor(self, dialect): - return None - - -class INTEGER(sqltypes.INTEGER): - """Drizzle INTEGER type.""" - - __visit_name__ = 'INTEGER' - - def __init__(self, **kw): - """Construct an INTEGER.""" - - super(INTEGER, self).__init__(**kw) - - -class BIGINT(sqltypes.BIGINT): - """Drizzle BIGINTEGER type.""" - - __visit_name__ = 'BIGINT' - - def __init__(self, **kw): - """Construct a BIGINTEGER.""" - - super(BIGINT, self).__init__(**kw) - - -class TIME(mysql_dialect.TIME): - """Drizzle TIME type.""" - - -class TIMESTAMP(sqltypes.TIMESTAMP): - """Drizzle TIMESTAMP type.""" - - __visit_name__ = 'TIMESTAMP' - - -class TEXT(_StringType, sqltypes.TEXT): - """Drizzle TEXT type, for text up to 2^16 characters.""" - - __visit_name__ = 'TEXT' - - def __init__(self, length=None, **kw): - """Construct a TEXT. - - :param length: Optional, if provided the server may optimize storage - by substituting the smallest TEXT type sufficient to store - ``length`` characters. - - :param collation: Optional, a column-level collation for this string - value. Takes precedence to 'binary' short-hand. - - :param binary: Defaults to False: short-hand, pick the binary - collation type that matches the column's character set. Generates - BINARY in schema. This does not affect the type of data stored, - only the collation of character data. 
- - """ - - super(TEXT, self).__init__(length=length, **kw) - - -class VARCHAR(_StringType, sqltypes.VARCHAR): - """Drizzle VARCHAR type, for variable-length character data.""" - - __visit_name__ = 'VARCHAR' - - def __init__(self, length=None, **kwargs): - """Construct a VARCHAR. - - :param collation: Optional, a column-level collation for this string - value. Takes precedence to 'binary' short-hand. - - :param binary: Defaults to False: short-hand, pick the binary - collation type that matches the column's character set. Generates - BINARY in schema. This does not affect the type of data stored, - only the collation of character data. - - """ - - super(VARCHAR, self).__init__(length=length, **kwargs) - - -class CHAR(_StringType, sqltypes.CHAR): - """Drizzle CHAR type, for fixed-length character data.""" - - __visit_name__ = 'CHAR' - - def __init__(self, length=None, **kwargs): - """Construct a CHAR. - - :param length: Maximum data length, in characters. - - :param binary: Optional, use the default binary collation for the - national character set. This does not affect the type of data - stored, use a BINARY type for binary data. - - :param collation: Optional, request a particular collation. Must be - compatible with the national character set. - - """ - - super(CHAR, self).__init__(length=length, **kwargs) - - -class ENUM(mysql_dialect.ENUM): - """Drizzle ENUM type.""" - - def __init__(self, *enums, **kw): - """Construct an ENUM. - - Example: - - Column('myenum', ENUM("foo", "bar", "baz")) - - :param enums: The range of valid values for this ENUM. Values will be - quoted when generating the schema according to the quoting flag (see - below). - - :param strict: Defaults to False: ensure that a given value is in this - ENUM's range of permissible values when inserting or updating rows. - Note that Drizzle will not raise a fatal error if you attempt to - store an out of range value- an alternate value will be stored - instead. - (See Drizzle ENUM documentation.) - - :param collation: Optional, a column-level collation for this string - value. Takes precedence to 'binary' short-hand. - - :param binary: Defaults to False: short-hand, pick the binary - collation type that matches the column's character set. Generates - BINARY in schema. This does not affect the type of data stored, - only the collation of character data. - - :param quoting: Defaults to 'auto': automatically determine enum value - quoting. If all enum values are surrounded by the same quoting - character, then use 'quoted' mode. Otherwise, use 'unquoted' mode. - - 'quoted': values in enums are already quoted, they will be used - directly when generating the schema - this usage is deprecated. - - 'unquoted': values in enums are not quoted, they will be escaped and - surrounded by single quotes when generating the schema. - - Previous versions of this type always required manually quoted - values to be supplied; future versions will always quote the string - literals for you. This is a transitional option. 
- - """ - - super(ENUM, self).__init__(*enums, **kw) - - -class _DrizzleBoolean(sqltypes.Boolean): - def get_dbapi_type(self, dbapi): - return dbapi.NUMERIC - - -colspecs = { - sqltypes.Numeric: NUMERIC, - sqltypes.Float: FLOAT, - sqltypes.Time: TIME, - sqltypes.Enum: ENUM, - sqltypes.Boolean: _DrizzleBoolean, -} - - -# All the types we have in Drizzle -ischema_names = { - 'BIGINT': BIGINT, - 'BINARY': BINARY, - 'BLOB': BLOB, - 'BOOLEAN': BOOLEAN, - 'CHAR': CHAR, - 'DATE': DATE, - 'DATETIME': DATETIME, - 'DECIMAL': DECIMAL, - 'DOUBLE': DOUBLE, - 'ENUM': ENUM, - 'FLOAT': FLOAT, - 'INT': INTEGER, - 'INTEGER': INTEGER, - 'NUMERIC': NUMERIC, - 'TEXT': TEXT, - 'TIME': TIME, - 'TIMESTAMP': TIMESTAMP, - 'VARBINARY': VARBINARY, - 'VARCHAR': VARCHAR, -} - - -class DrizzleCompiler(mysql_dialect.MySQLCompiler): - - def visit_typeclause(self, typeclause): - type_ = typeclause.type.dialect_impl(self.dialect) - if isinstance(type_, sqltypes.Integer): - return 'INTEGER' - else: - return super(DrizzleCompiler, self).visit_typeclause(typeclause) - - def visit_cast(self, cast, **kwargs): - type_ = self.process(cast.typeclause) - if type_ is None: - return self.process(cast.clause) - - return 'CAST(%s AS %s)' % (self.process(cast.clause), type_) - - -class DrizzleDDLCompiler(mysql_dialect.MySQLDDLCompiler): - pass - - -class DrizzleTypeCompiler(mysql_dialect.MySQLTypeCompiler): - def _extend_numeric(self, type_, spec): - return spec - - def _extend_string(self, type_, defaults, spec): - """Extend a string-type declaration with standard SQL - COLLATE annotations and Drizzle specific extensions. - - """ - - def attr(name): - return getattr(type_, name, defaults.get(name)) - - if attr('collation'): - collation = 'COLLATE %s' % type_.collation - elif attr('binary'): - collation = 'BINARY' - else: - collation = None - - return ' '.join([c for c in (spec, collation) - if c is not None]) - - def visit_NCHAR(self, type): - raise NotImplementedError("Drizzle does not support NCHAR") - - def visit_NVARCHAR(self, type): - raise NotImplementedError("Drizzle does not support NVARCHAR") - - def visit_FLOAT(self, type_): - if type_.scale is not None and type_.precision is not None: - return "FLOAT(%s, %s)" % (type_.precision, type_.scale) - else: - return "FLOAT" - - def visit_BOOLEAN(self, type_): - return "BOOLEAN" - - def visit_BLOB(self, type_): - return "BLOB" - - -class DrizzleExecutionContext(mysql_dialect.MySQLExecutionContext): - pass - - -class DrizzleIdentifierPreparer(mysql_dialect.MySQLIdentifierPreparer): - pass - - -@log.class_logger -class DrizzleDialect(mysql_dialect.MySQLDialect): - """Details of the Drizzle dialect. - - Not used directly in application code. 
- """ - - name = 'drizzle' - - _supports_cast = True - supports_sequences = False - supports_native_boolean = True - supports_views = False - - default_paramstyle = 'format' - colspecs = colspecs - - statement_compiler = DrizzleCompiler - ddl_compiler = DrizzleDDLCompiler - type_compiler = DrizzleTypeCompiler - ischema_names = ischema_names - preparer = DrizzleIdentifierPreparer - - def on_connect(self): - """Force autocommit - Drizzle Bug#707842 doesn't set this properly""" - - def connect(conn): - conn.autocommit(False) - return connect - - @reflection.cache - def get_table_names(self, connection, schema=None, **kw): - """Return a Unicode SHOW TABLES from a given schema.""" - - if schema is not None: - current_schema = schema - else: - current_schema = self.default_schema_name - - charset = 'utf8' - rp = connection.execute("SHOW TABLES FROM %s" % - self.identifier_preparer.quote_identifier(current_schema)) - return [row[0] for row in self._compat_fetchall(rp, charset=charset)] - - @reflection.cache - def get_view_names(self, connection, schema=None, **kw): - raise NotImplementedError - - def _detect_casing(self, connection): - """Sniff out identifier case sensitivity. - - Cached per-connection. This value can not change without a server - restart. - """ - - return 0 - - def _detect_collations(self, connection): - """Pull the active COLLATIONS list from the server. - - Cached per-connection. - """ - - collations = {} - charset = self._connection_charset - rs = connection.execute( - 'SELECT CHARACTER_SET_NAME, COLLATION_NAME FROM' - ' data_dictionary.COLLATIONS') - for row in self._compat_fetchall(rs, charset): - collations[row[0]] = row[1] - return collations - - def _detect_ansiquotes(self, connection): - """Detect and adjust for the ANSI_QUOTES sql mode.""" - - self._server_ansiquotes = False - self._backslash_escapes = False - - diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/drizzle/mysqldb.py b/lib/python3.4/site-packages/sqlalchemy/dialects/drizzle/mysqldb.py deleted file mode 100644 index 7d91cc3..0000000 --- a/lib/python3.4/site-packages/sqlalchemy/dialects/drizzle/mysqldb.py +++ /dev/null @@ -1,48 +0,0 @@ -""" -.. 
dialect:: drizzle+mysqldb - :name: MySQL-Python - :dbapi: mysqldb - :connectstring: drizzle+mysqldb://:@[:]/ - :url: http://sourceforge.net/projects/mysql-python - - -""" - -from sqlalchemy.dialects.drizzle.base import ( - DrizzleDialect, - DrizzleExecutionContext, - DrizzleCompiler, - DrizzleIdentifierPreparer) -from sqlalchemy.connectors.mysqldb import ( - MySQLDBExecutionContext, - MySQLDBCompiler, - MySQLDBIdentifierPreparer, - MySQLDBConnector) - - -class DrizzleExecutionContext_mysqldb(MySQLDBExecutionContext, - DrizzleExecutionContext): - pass - - -class DrizzleCompiler_mysqldb(MySQLDBCompiler, DrizzleCompiler): - pass - - -class DrizzleIdentifierPreparer_mysqldb(MySQLDBIdentifierPreparer, - DrizzleIdentifierPreparer): - pass - - -class DrizzleDialect_mysqldb(MySQLDBConnector, DrizzleDialect): - execution_ctx_cls = DrizzleExecutionContext_mysqldb - statement_compiler = DrizzleCompiler_mysqldb - preparer = DrizzleIdentifierPreparer_mysqldb - - def _detect_charset(self, connection): - """Sniff out the character set in use for connection results.""" - - return 'utf8' - - -dialect = DrizzleDialect_mysqldb diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/firebird/__init__.py b/lib/python3.4/site-packages/sqlalchemy/dialects/firebird/__init__.py index 9e8a882..f27bdc0 100644 --- a/lib/python3.4/site-packages/sqlalchemy/dialects/firebird/__init__.py +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/firebird/__init__.py @@ -1,5 +1,5 @@ # firebird/__init__.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/firebird/base.py b/lib/python3.4/site-packages/sqlalchemy/dialects/firebird/base.py index bcdb6d9..4dbf382 100644 --- a/lib/python3.4/site-packages/sqlalchemy/dialects/firebird/base.py +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/firebird/base.py @@ -1,5 +1,5 @@ # firebird/base.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -78,7 +78,6 @@ from sqlalchemy.sql import expression from sqlalchemy.engine import base, default, reflection from sqlalchemy.sql import compiler - from sqlalchemy.types import (BIGINT, BLOB, DATE, FLOAT, INTEGER, NUMERIC, SMALLINT, TEXT, TIME, TIMESTAMP, Integer) @@ -181,16 +180,16 @@ ischema_names = { # _FBDate, etc. 
as bind/result functionality is required) class FBTypeCompiler(compiler.GenericTypeCompiler): - def visit_boolean(self, type_): - return self.visit_SMALLINT(type_) + def visit_boolean(self, type_, **kw): + return self.visit_SMALLINT(type_, **kw) - def visit_datetime(self, type_): - return self.visit_TIMESTAMP(type_) + def visit_datetime(self, type_, **kw): + return self.visit_TIMESTAMP(type_, **kw) - def visit_TEXT(self, type_): + def visit_TEXT(self, type_, **kw): return "BLOB SUB_TYPE 1" - def visit_BLOB(self, type_): + def visit_BLOB(self, type_, **kw): return "BLOB SUB_TYPE 0" def _extend_string(self, type_, basic): @@ -200,16 +199,16 @@ class FBTypeCompiler(compiler.GenericTypeCompiler): else: return '%s CHARACTER SET %s' % (basic, charset) - def visit_CHAR(self, type_): - basic = super(FBTypeCompiler, self).visit_CHAR(type_) + def visit_CHAR(self, type_, **kw): + basic = super(FBTypeCompiler, self).visit_CHAR(type_, **kw) return self._extend_string(type_, basic) - def visit_VARCHAR(self, type_): + def visit_VARCHAR(self, type_, **kw): if not type_.length: raise exc.CompileError( "VARCHAR requires a length on dialect %s" % self.dialect.name) - basic = super(FBTypeCompiler, self).visit_VARCHAR(type_) + basic = super(FBTypeCompiler, self).visit_VARCHAR(type_, **kw) return self._extend_string(type_, basic) @@ -294,22 +293,22 @@ class FBCompiler(sql.compiler.SQLCompiler): def visit_sequence(self, seq): return "gen_id(%s, 1)" % self.preparer.format_sequence(seq) - def get_select_precolumns(self, select): + def get_select_precolumns(self, select, **kw): """Called when building a ``SELECT`` statement, position is just before column list Firebird puts the limit and offset right after the ``SELECT``... """ result = "" - if select._limit: - result += "FIRST %s " % self.process(sql.literal(select._limit)) - if select._offset: - result += "SKIP %s " % self.process(sql.literal(select._offset)) + if select._limit_clause is not None: + result += "FIRST %s " % self.process(select._limit_clause, **kw) + if select._offset_clause is not None: + result += "SKIP %s " % self.process(select._offset_clause, **kw) if select._distinct: result += "DISTINCT " return result - def limit_clause(self, select): + def limit_clause(self, select, **kw): """Already taken care of in the `get_select_precolumns` method.""" return "" diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/firebird/fdb.py b/lib/python3.4/site-packages/sqlalchemy/dialects/firebird/fdb.py index ddffc80..aff8cff 100644 --- a/lib/python3.4/site-packages/sqlalchemy/dialects/firebird/fdb.py +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/firebird/fdb.py @@ -1,5 +1,5 @@ # firebird/fdb.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/firebird/kinterbasdb.py b/lib/python3.4/site-packages/sqlalchemy/dialects/firebird/kinterbasdb.py index 6bd7887..3df9f73 100644 --- a/lib/python3.4/site-packages/sqlalchemy/dialects/firebird/kinterbasdb.py +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/firebird/kinterbasdb.py @@ -1,5 +1,5 @@ # firebird/kinterbasdb.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/mssql/__init__.py 
b/lib/python3.4/site-packages/sqlalchemy/dialects/mssql/__init__.py index d004776..8c9e858 100644 --- a/lib/python3.4/site-packages/sqlalchemy/dialects/mssql/__init__.py +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/mssql/__init__.py @@ -1,5 +1,5 @@ # mssql/__init__.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/mssql/adodbapi.py b/lib/python3.4/site-packages/sqlalchemy/dialects/mssql/adodbapi.py index e9927f8..60fa25d 100644 --- a/lib/python3.4/site-packages/sqlalchemy/dialects/mssql/adodbapi.py +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/mssql/adodbapi.py @@ -1,5 +1,5 @@ # mssql/adodbapi.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/mssql/base.py b/lib/python3.4/site-packages/sqlalchemy/dialects/mssql/base.py index 3d10904..36a8a93 100644 --- a/lib/python3.4/site-packages/sqlalchemy/dialects/mssql/base.py +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/mssql/base.py @@ -1,5 +1,5 @@ # mssql/base.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -166,6 +166,55 @@ how SQLAlchemy handles this: This is an auxilliary use case suitable for testing and bulk insert scenarios. +.. _legacy_schema_rendering: + +Rendering of SQL statements that include schema qualifiers +--------------------------------------------------------- + +When using :class:`.Table` metadata that includes a "schema" qualifier, +such as:: + + account_table = Table( + 'account', metadata, + Column('id', Integer, primary_key=True), + Column('info', String(100)), + schema="customer_schema" + ) + +The SQL Server dialect has a long-standing behavior that it will attempt +to turn a schema-qualified table name into an alias, such as:: + + >>> eng = create_engine("mssql+pymssql://mydsn") + >>> print(account_table.select().compile(eng)) + SELECT account_1.id, account_1.info + FROM customer_schema.account AS account_1 + +This behavior is legacy, does not function correctly for many forms +of SQL statements, and will be disabled by default in the 1.1 series +of SQLAlchemy. As of 1.0.5, the above statement will produce the following +warning:: + + SAWarning: legacy_schema_aliasing flag is defaulted to True; + some schema-qualified queries may not function correctly. + Consider setting this flag to False for modern SQL Server versions; + this flag will default to False in version 1.1 + +This warning encourages the :class:`.Engine` to be created as follows:: + + >>> eng = create_engine("mssql+pymssql://mydsn", legacy_schema_aliasing=False) + +Where the above SELECT statement will produce:: + + >>> print(account_table.select().compile(eng)) + SELECT customer_schema.account.id, customer_schema.account.info + FROM customer_schema.account + +The warning will not emit if the ``legacy_schema_aliasing`` flag is set +to either True or False. + +.. versionadded:: 1.0.5 - Added the ``legacy_schema_aliasing`` flag to disable + the SQL Server dialect's legacy behavior with schema-qualified table + names. This flag will default to False in version 1.1. 
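The two rendering modes described above can be compared without a live database, since statement compilation happens client-side; a minimal sketch, assuming the pymssql DBAPI is importable (the ``mydsn`` URL is illustrative and is never actually connected to)::

    from sqlalchemy import Column, Integer, MetaData, String, Table, create_engine

    metadata = MetaData()
    account_table = Table(
        'account', metadata,
        Column('id', Integer, primary_key=True),
        Column('info', String(100)),
        schema="customer_schema"
    )

    # legacy mode: the schema-qualified table is wrapped in an alias
    legacy = create_engine("mssql+pymssql://mydsn", legacy_schema_aliasing=True)
    print(account_table.select().compile(legacy))
    # SELECT account_1.id, account_1.info
    # FROM customer_schema.account AS account_1

    # modern mode: the schema qualifier is rendered as-is
    modern = create_engine("mssql+pymssql://mydsn", legacy_schema_aliasing=False)
    print(account_table.select().compile(modern))
    # SELECT customer_schema.account.id, customer_schema.account.info
    # FROM customer_schema.account

Passing the flag explicitly in either direction also suppresses the SAWarning shown above.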
Collation Support ----------------- @@ -187,7 +236,7 @@ CREATE TABLE statement for this column will yield:: LIMIT/OFFSET Support -------------------- -MSSQL has no support for the LIMIT or OFFSET keysowrds. LIMIT is +MSSQL has no support for the LIMIT or OFFSET keywords. LIMIT is supported directly through the ``TOP`` Transact SQL keyword:: select.limit @@ -226,6 +275,53 @@ The DATE and TIME types are not available for MSSQL 2005 and previous - if a server version below 2008 is detected, DDL for these types will be issued as DATETIME. +.. _mssql_large_type_deprecation: + +Large Text/Binary Type Deprecation +---------------------------------- + +Per `SQL Server 2012/2014 Documentation `_, +the ``NTEXT``, ``TEXT`` and ``IMAGE`` datatypes are to be removed from SQL Server +in a future release. SQLAlchemy normally relates these types to the +:class:`.UnicodeText`, :class:`.Text` and :class:`.LargeBinary` datatypes. + +In order to accommodate this change, a new flag ``deprecate_large_types`` +is added to the dialect, which will be automatically set based on detection +of the server version in use, if not otherwise set by the user. The +behavior of this flag is as follows: + +* When this flag is ``True``, the :class:`.UnicodeText`, :class:`.Text` and + :class:`.LargeBinary` datatypes, when used to render DDL, will render the + types ``NVARCHAR(max)``, ``VARCHAR(max)``, and ``VARBINARY(max)``, + respectively. This is a new behavior as of the addition of this flag. + +* When this flag is ``False``, the :class:`.UnicodeText`, :class:`.Text` and + :class:`.LargeBinary` datatypes, when used to render DDL, will render the + types ``NTEXT``, ``TEXT``, and ``IMAGE``, + respectively. This is the long-standing behavior of these types. + +* The flag begins with the value ``None``, before a database connection is + established. If the dialect is used to render DDL without the flag being + set, it is interpreted the same as ``False``. + +* On first connection, the dialect detects if SQL Server version 2012 or greater + is in use; if the flag is still at ``None``, it sets it to ``True`` or + ``False`` based on whether 2012 or greater is detected. + +* The flag can be set to either ``True`` or ``False`` when the dialect + is created, typically via :func:`.create_engine`:: + + eng = create_engine("mssql+pymssql://user:pass@host/db", + deprecate_large_types=True) + +* Complete control over whether the "old" or "new" types are rendered is + available in all SQLAlchemy versions by using the UPPERCASE type objects + instead: :class:`.NVARCHAR`, :class:`.VARCHAR`, :class:`.types.VARBINARY`, + :class:`.TEXT`, :class:`.mssql.NTEXT`, :class:`.mssql.IMAGE` will always remain + fixed and always output exactly that type. + +.. versionadded:: 1.0.0 + .. _mssql_indexes: Clustered Index Support @@ -367,19 +463,20 @@ import operator import re from ... import sql, schema as sa_schema, exc, util -from ...sql import compiler, expression, \ - util as sql_util, cast +from ...sql import compiler, expression, util as sql_util from ... import engine from ...engine import reflection, default from ... import types as sqltypes from ...types import INTEGER, BIGINT, SMALLINT, DECIMAL, NUMERIC, \ FLOAT, TIMESTAMP, DATETIME, DATE, BINARY,\ - VARBINARY, TEXT, VARCHAR, NVARCHAR, CHAR, NCHAR + TEXT, VARCHAR, NVARCHAR, CHAR, NCHAR from ...util import update_wrapper from . 
import information_schema as ischema +# http://sqlserverbuilds.blogspot.com/ +MS_2012_VERSION = (11,) MS_2008_VERSION = (10,) MS_2005_VERSION = (9,) MS_2000_VERSION = (8,) @@ -451,9 +548,13 @@ class _MSDate(sqltypes.Date): if isinstance(value, datetime.datetime): return value.date() elif isinstance(value, util.string_types): + m = self._reg.match(value) + if not m: + raise ValueError( + "could not parse %r as a date value" % (value, )) return datetime.date(*[ int(x or 0) - for x in self._reg.match(value).groups() + for x in m.groups() ]) else: return value @@ -485,9 +586,13 @@ class TIME(sqltypes.TIME): if isinstance(value, datetime.datetime): return value.time() elif isinstance(value, util.string_types): + m = self._reg.match(value) + if not m: + raise ValueError( + "could not parse %r as a time value" % (value, )) return datetime.time(*[ int(x or 0) - for x in self._reg.match(value).groups()]) + for x in m.groups()]) else: return value return process @@ -545,6 +650,26 @@ class NTEXT(sqltypes.UnicodeText): __visit_name__ = 'NTEXT' +class VARBINARY(sqltypes.VARBINARY, sqltypes.LargeBinary): + """The MSSQL VARBINARY type. + + This type extends both :class:`.types.VARBINARY` and + :class:`.types.LargeBinary`. In "deprecate_large_types" mode, + the :class:`.types.LargeBinary` type will produce ``VARBINARY(max)`` + on SQL Server. + + .. versionadded:: 1.0.0 + + .. seealso:: + + :ref:`mssql_large_type_deprecation` + + + + """ + __visit_name__ = 'VARBINARY' + + class IMAGE(sqltypes.LargeBinary): __visit_name__ = 'IMAGE' @@ -626,7 +751,6 @@ ischema_names = { class MSTypeCompiler(compiler.GenericTypeCompiler): - def _extend(self, spec, type_, length=None): """Extend a string-type declaration with standard SQL COLLATE annotations. @@ -647,103 +771,115 @@ class MSTypeCompiler(compiler.GenericTypeCompiler): return ' '.join([c for c in (spec, collation) if c is not None]) - def visit_FLOAT(self, type_): + def visit_FLOAT(self, type_, **kw): precision = getattr(type_, 'precision', None) if precision is None: return "FLOAT" else: return "FLOAT(%(precision)s)" % {'precision': precision} - def visit_TINYINT(self, type_): + def visit_TINYINT(self, type_, **kw): return "TINYINT" - def visit_DATETIMEOFFSET(self, type_): - if type_.precision: + def visit_DATETIMEOFFSET(self, type_, **kw): + if type_.precision is not None: return "DATETIMEOFFSET(%s)" % type_.precision else: return "DATETIMEOFFSET" - def visit_TIME(self, type_): + def visit_TIME(self, type_, **kw): precision = getattr(type_, 'precision', None) - if precision: + if precision is not None: return "TIME(%s)" % precision else: return "TIME" - def visit_DATETIME2(self, type_): + def visit_DATETIME2(self, type_, **kw): precision = getattr(type_, 'precision', None) - if precision: + if precision is not None: return "DATETIME2(%s)" % precision else: return "DATETIME2" - def visit_SMALLDATETIME(self, type_): + def visit_SMALLDATETIME(self, type_, **kw): return "SMALLDATETIME" - def visit_unicode(self, type_): - return self.visit_NVARCHAR(type_) + def visit_unicode(self, type_, **kw): + return self.visit_NVARCHAR(type_, **kw) - def visit_unicode_text(self, type_): - return self.visit_NTEXT(type_) + def visit_text(self, type_, **kw): + if self.dialect.deprecate_large_types: + return self.visit_VARCHAR(type_, **kw) + else: + return self.visit_TEXT(type_, **kw) - def visit_NTEXT(self, type_): + def visit_unicode_text(self, type_, **kw): + if self.dialect.deprecate_large_types: + return self.visit_NVARCHAR(type_, **kw) + else: + return self.visit_NTEXT(type_, 
**kw) + + def visit_NTEXT(self, type_, **kw): return self._extend("NTEXT", type_) - def visit_TEXT(self, type_): + def visit_TEXT(self, type_, **kw): return self._extend("TEXT", type_) - def visit_VARCHAR(self, type_): + def visit_VARCHAR(self, type_, **kw): return self._extend("VARCHAR", type_, length=type_.length or 'max') - def visit_CHAR(self, type_): + def visit_CHAR(self, type_, **kw): return self._extend("CHAR", type_) - def visit_NCHAR(self, type_): + def visit_NCHAR(self, type_, **kw): return self._extend("NCHAR", type_) - def visit_NVARCHAR(self, type_): + def visit_NVARCHAR(self, type_, **kw): return self._extend("NVARCHAR", type_, length=type_.length or 'max') - def visit_date(self, type_): + def visit_date(self, type_, **kw): if self.dialect.server_version_info < MS_2008_VERSION: - return self.visit_DATETIME(type_) + return self.visit_DATETIME(type_, **kw) else: - return self.visit_DATE(type_) + return self.visit_DATE(type_, **kw) - def visit_time(self, type_): + def visit_time(self, type_, **kw): if self.dialect.server_version_info < MS_2008_VERSION: - return self.visit_DATETIME(type_) + return self.visit_DATETIME(type_, **kw) else: - return self.visit_TIME(type_) + return self.visit_TIME(type_, **kw) - def visit_large_binary(self, type_): - return self.visit_IMAGE(type_) + def visit_large_binary(self, type_, **kw): + if self.dialect.deprecate_large_types: + return self.visit_VARBINARY(type_, **kw) + else: + return self.visit_IMAGE(type_, **kw) - def visit_IMAGE(self, type_): + def visit_IMAGE(self, type_, **kw): return "IMAGE" - def visit_VARBINARY(self, type_): + def visit_VARBINARY(self, type_, **kw): return self._extend( "VARBINARY", type_, length=type_.length or 'max') - def visit_boolean(self, type_): + def visit_boolean(self, type_, **kw): return self.visit_BIT(type_) - def visit_BIT(self, type_): + def visit_BIT(self, type_, **kw): return "BIT" - def visit_MONEY(self, type_): + def visit_MONEY(self, type_, **kw): return "MONEY" - def visit_SMALLMONEY(self, type_): + def visit_SMALLMONEY(self, type_, **kw): return 'SMALLMONEY' - def visit_UNIQUEIDENTIFIER(self, type_): + def visit_UNIQUEIDENTIFIER(self, type_, **kw): return "UNIQUEIDENTIFIER" - def visit_SQL_VARIANT(self, type_): + def visit_SQL_VARIANT(self, type_, **kw): return 'SQL_VARIANT' @@ -846,7 +982,7 @@ class MSExecutionContext(default.DefaultExecutionContext): "SET IDENTITY_INSERT %s OFF" % self.dialect.identifier_preparer. 
format_table( self.compiled.statement.table))) - except: + except Exception: pass def get_result_proxy(self): @@ -872,6 +1008,15 @@ class MSSQLCompiler(compiler.SQLCompiler): self.tablealiases = {} super(MSSQLCompiler, self).__init__(*args, **kwargs) + def _with_legacy_schema_aliasing(fn): + def decorate(self, *arg, **kw): + if self.dialect.legacy_schema_aliasing: + return fn(self, *arg, **kw) + else: + super_ = getattr(super(MSSQLCompiler, self), fn.__name__) + return super_(*arg, **kw) + return decorate + def visit_now_func(self, fn, **kw): return "CURRENT_TIMESTAMP" @@ -900,19 +1045,24 @@ class MSSQLCompiler(compiler.SQLCompiler): self.process(binary.left, **kw), self.process(binary.right, **kw)) - def get_select_precolumns(self, select): - """ MS-SQL puts TOP, its version of LIMIT, here """ - if select._distinct or select._limit is not None: - s = select._distinct and "DISTINCT " or "" + def get_select_precolumns(self, select, **kw): + """ MS-SQL puts TOP, its version of LIMIT, here """ + s = "" + if select._distinct: + s += "DISTINCT " + + if select._simple_int_limit and not select._offset: # ODBC drivers and possibly others # don't support bind params in the SELECT clause on SQL Server. # so have to use literal here. - if select._limit is not None: - if not select._offset: - s += "TOP %d " % select._limit + s += "TOP %d " % select._limit + + if s: return s - return compiler.SQLCompiler.get_select_precolumns(self, select) + else: + return compiler.SQLCompiler.get_select_precolumns( + self, select, **kw) def get_from_hint_text(self, table, text): return text @@ -920,7 +1070,7 @@ class MSSQLCompiler(compiler.SQLCompiler): def get_crud_hint_text(self, table, text): return text - def limit_clause(self, select): + def limit_clause(self, select, **kw): # Limit in mssql is after the select keyword return "" @@ -929,39 +1079,48 @@ class MSSQLCompiler(compiler.SQLCompiler): so tries to wrap it in a subquery with ``row_number()`` criterion. """ - if select._offset and not getattr(select, '_mssql_visit', None): + if ( + ( + not select._simple_int_limit and + select._limit_clause is not None + ) or ( + select._offset_clause is not None and + not select._simple_int_offset or select._offset + ) + ) and not getattr(select, '_mssql_visit', None): + + # to use ROW_NUMBER(), an ORDER BY is required.
if not select._order_by_clause.clauses: raise exc.CompileError('MSSQL requires an order_by when ' - 'using an offset.') - _offset = select._offset - _limit = select._limit + 'using an OFFSET or a non-simple ' + 'LIMIT clause') + _order_by_clauses = select._order_by_clause.clauses + limit_clause = select._limit_clause + offset_clause = select._offset_clause + kwargs['select_wraps_for'] = select select = select._generate() select._mssql_visit = True select = select.column( sql.func.ROW_NUMBER().over(order_by=_order_by_clauses) - .label("mssql_rn") - ).order_by(None).alias() + .label("mssql_rn")).order_by(None).alias() mssql_rn = sql.column('mssql_rn') limitselect = sql.select([c for c in select.c if c.key != 'mssql_rn']) - limitselect.append_whereclause(mssql_rn > _offset) - if _limit is not None: - limitselect.append_whereclause(mssql_rn <= (_limit + _offset)) - return self.process(limitselect, iswrapper=True, **kwargs) + if offset_clause is not None: + limitselect.append_whereclause(mssql_rn > offset_clause) + if limit_clause is not None: + limitselect.append_whereclause( + mssql_rn <= (limit_clause + offset_clause)) + else: + limitselect.append_whereclause( + mssql_rn <= (limit_clause)) + return self.process(limitselect, **kwargs) else: return compiler.SQLCompiler.visit_select(self, select, **kwargs) - def _schema_aliased_table(self, table): - if getattr(table, 'schema', None) is not None: - if table not in self.tablealiases: - self.tablealiases[table] = table.alias() - return self.tablealiases[table] - else: - return None - + @_with_legacy_schema_aliasing def visit_table(self, table, mssql_aliased=False, iscrud=False, **kwargs): if mssql_aliased is table or iscrud: return super(MSSQLCompiler, self).visit_table(table, **kwargs) @@ -973,25 +1132,14 @@ class MSSQLCompiler(compiler.SQLCompiler): else: return super(MSSQLCompiler, self).visit_table(table, **kwargs) - def visit_alias(self, alias, **kwargs): + @_with_legacy_schema_aliasing + def visit_alias(self, alias, **kw): # translate for schema-qualified table aliases - kwargs['mssql_aliased'] = alias.original - return super(MSSQLCompiler, self).visit_alias(alias, **kwargs) + kw['mssql_aliased'] = alias.original + return super(MSSQLCompiler, self).visit_alias(alias, **kw) - def visit_extract(self, extract, **kw): - field = self.extract_map.get(extract.field, extract.field) - return 'DATEPART("%s", %s)' % \ - (field, self.process(extract.expr, **kw)) - - def visit_savepoint(self, savepoint_stmt): - return "SAVE TRANSACTION %s" % \ - self.preparer.format_savepoint(savepoint_stmt) - - def visit_rollback_to_savepoint(self, savepoint_stmt): - return ("ROLLBACK TRANSACTION %s" - % self.preparer.format_savepoint(savepoint_stmt)) - - def visit_column(self, column, add_to_result_map=None, **kwargs): + @_with_legacy_schema_aliasing + def visit_column(self, column, add_to_result_map=None, **kw): if column.table is not None and \ (not self.isupdate and not self.isdelete) or \ self.is_subquery(): @@ -1009,10 +1157,40 @@ class MSSQLCompiler(compiler.SQLCompiler): ) return super(MSSQLCompiler, self).\ - visit_column(converted, **kwargs) + visit_column(converted, **kw) return super(MSSQLCompiler, self).visit_column( - column, add_to_result_map=add_to_result_map, **kwargs) + column, add_to_result_map=add_to_result_map, **kw) + + def _schema_aliased_table(self, table): + if getattr(table, 'schema', None) is not None: + if self.dialect._warn_schema_aliasing and \ + table.schema.lower() != 'information_schema': + util.warn( + "legacy_schema_aliasing flag is 
defaulted to True; " + "some schema-qualified queries may not function " + "correctly. Consider setting this flag to False for " + "modern SQL Server versions; this flag will default to " + "False in version 1.1") + + if table not in self.tablealiases: + self.tablealiases[table] = table.alias() + return self.tablealiases[table] + else: + return None + + def visit_extract(self, extract, **kw): + field = self.extract_map.get(extract.field, extract.field) + return 'DATEPART(%s, %s)' % \ + (field, self.process(extract.expr, **kw)) + + def visit_savepoint(self, savepoint_stmt): + return "SAVE TRANSACTION %s" % \ + self.preparer.format_savepoint(savepoint_stmt) + + def visit_rollback_to_savepoint(self, savepoint_stmt): + return ("ROLLBACK TRANSACTION %s" + % self.preparer.format_savepoint(savepoint_stmt)) def visit_binary(self, binary, **kwargs): """Move bind parameters to the right-hand side of an operator, where @@ -1141,8 +1319,11 @@ class MSSQLStrictCompiler(MSSQLCompiler): class MSDDLCompiler(compiler.DDLCompiler): def get_column_specification(self, column, **kwargs): - colspec = (self.preparer.format_column(column) + " " - + self.dialect.type_compiler.process(column.type)) + colspec = ( + self.preparer.format_column(column) + " " + + self.dialect.type_compiler.process( + column.type, type_expression=column) + ) if column.nullable is not None: if not column.nullable or column.primary_key or \ @@ -1321,6 +1502,10 @@ class MSDialect(default.DefaultDialect): sqltypes.Time: TIME, } + engine_config_types = default.DefaultDialect.engine_config_types.union([ + ('legacy_schema_aliasing', util.asbool), + ]) + ischema_names = ischema_names supports_native_boolean = False @@ -1351,13 +1536,24 @@ class MSDialect(default.DefaultDialect): query_timeout=None, use_scope_identity=True, max_identifier_length=None, - schema_name="dbo", **opts): + schema_name="dbo", + deprecate_large_types=None, + legacy_schema_aliasing=None, **opts): self.query_timeout = int(query_timeout or 0) self.schema_name = schema_name self.use_scope_identity = use_scope_identity self.max_identifier_length = int(max_identifier_length or 0) or \ self.max_identifier_length + self.deprecate_large_types = deprecate_large_types + + if legacy_schema_aliasing is None: + self.legacy_schema_aliasing = True + self._warn_schema_aliasing = True + else: + self.legacy_schema_aliasing = legacy_schema_aliasing + self._warn_schema_aliasing = False + super(MSDialect, self).__init__(**opts) def do_savepoint(self, connection, name): @@ -1371,21 +1567,31 @@ class MSDialect(default.DefaultDialect): def initialize(self, connection): super(MSDialect, self).initialize(connection) + self._setup_version_attributes() + + def _setup_version_attributes(self): if self.server_version_info[0] not in list(range(8, 17)): # FreeTDS with version 4.2 seems to report here # a number like "95.10.255". Don't know what # that is. So emit warning. + # Use TDS Version 7.0 through 7.3, per the MS information here: + # https://msdn.microsoft.com/en-us/library/dd339982.aspx + # and FreeTDS information here (7.3 highest supported version): + # http://www.freetds.org/userguide/choosingtdsprotocol.htm util.warn( "Unrecognized server version info '%s'. Version specific " "behaviors may not function properly. If using ODBC " - "with FreeTDS, ensure server version 7.0 or 8.0, not 4.2, " - "is configured in the FreeTDS configuration." % + "with FreeTDS, ensure TDS_VERSION 7.0 through 7.3, not " + "4.2, is configured in the FreeTDS configuration." 
% ".".join(str(x) for x in self.server_version_info)) if self.server_version_info >= MS_2005_VERSION and \ 'implicit_returning' not in self.__dict__: self.implicit_returning = True if self.server_version_info >= MS_2008_VERSION: self.supports_multivalues_insert = True + if self.deprecate_large_types is None: + self.deprecate_large_types = \ + self.server_version_info >= MS_2012_VERSION def _get_default_schema_name(self, connection): if self.server_version_info < MS_2005_VERSION: @@ -1573,12 +1779,11 @@ class MSDialect(default.DefaultDialect): if coltype in (MSString, MSChar, MSNVarchar, MSNChar, MSText, MSNText, MSBinary, MSVarBinary, sqltypes.LargeBinary): + if charlen == -1: + charlen = 'max' kwargs['length'] = charlen if collation: kwargs['collation'] = collation - if coltype == MSText or \ - (coltype in (MSString, MSNVarchar) and charlen == -1): - kwargs.pop('length') if coltype is None: util.warn( diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/mssql/information_schema.py b/lib/python3.4/site-packages/sqlalchemy/dialects/mssql/information_schema.py index 371a1ed..e2c0a46 100644 --- a/lib/python3.4/site-packages/sqlalchemy/dialects/mssql/information_schema.py +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/mssql/information_schema.py @@ -1,5 +1,5 @@ # mssql/information_schema.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/mssql/mxodbc.py b/lib/python3.4/site-packages/sqlalchemy/dialects/mssql/mxodbc.py index ffe38d8..5e20ed1 100644 --- a/lib/python3.4/site-packages/sqlalchemy/dialects/mssql/mxodbc.py +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/mssql/mxodbc.py @@ -1,5 +1,5 @@ # mssql/mxodbc.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/mssql/pymssql.py b/lib/python3.4/site-packages/sqlalchemy/dialects/mssql/pymssql.py index 8f76336..e3a4db8 100644 --- a/lib/python3.4/site-packages/sqlalchemy/dialects/mssql/pymssql.py +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/mssql/pymssql.py @@ -1,5 +1,5 @@ # mssql/pymssql.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -46,11 +46,12 @@ class MSDialect_pymssql(MSDialect): @classmethod def dbapi(cls): module = __import__('pymssql') - # pymmsql doesn't have a Binary method. we use string - # TODO: monkeypatching here is less than ideal - module.Binary = lambda x: x if hasattr(x, 'decode') else str(x) - + # pymmsql < 2.1.1 doesn't have a Binary method. we use string client_ver = tuple(int(x) for x in module.__version__.split(".")) + if client_ver < (2, 1, 1): + # TODO: monkeypatching here is less than ideal + module.Binary = lambda x: x if hasattr(x, 'decode') else str(x) + if client_ver < (1, ): util.warn("The pymssql dialect expects at least " "the 1.0 series of the pymssql DBAPI.") @@ -63,7 +64,7 @@ class MSDialect_pymssql(MSDialect): def _get_server_version_info(self, connection): vers = connection.scalar("select @@version") m = re.match( - r"Microsoft SQL Server.*? - (\d+).(\d+).(\d+).(\d+)", vers) + r"Microsoft .*? 
- (\d+).(\d+).(\d+).(\d+)", vers) if m: return tuple(int(x) for x in m.group(1, 2, 3, 4)) else: return None @@ -84,7 +85,8 @@ class MSDialect_pymssql(MSDialect): "message 20003", # connection timeout "Error 10054", "Not connected to any MS SQL server", - "Connection is closed" + "Connection is closed", + "message 20006", # Write to the server failed ): if msg in str(e): return True diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/mssql/pyodbc.py b/lib/python3.4/site-packages/sqlalchemy/dialects/mssql/pyodbc.py index 1c75fe1..c938368 100644 --- a/lib/python3.4/site-packages/sqlalchemy/dialects/mssql/pyodbc.py +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/mssql/pyodbc.py @@ -1,5 +1,5 @@ # mssql/pyodbc.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -12,74 +12,57 @@ :connectstring: mssql+pyodbc://:@ :url: http://pypi.python.org/pypi/pyodbc/ -Additional Connection Examples ------------------------------- +Connecting to PyODBC +-------------------- -Examples of pyodbc connection string URLs: +The URL is translated to a PyODBC connection string, as +detailed in `ConnectionStrings `_. -* ``mssql+pyodbc://mydsn`` - connects using the specified DSN named ``mydsn``. - The connection string that is created will appear like:: +DSN Connections +^^^^^^^^^^^^^^^ - dsn=mydsn;Trusted_Connection=Yes +A DSN-based connection is **preferred** overall when using ODBC. A +basic DSN-based connection looks like:: -* ``mssql+pyodbc://user:pass@mydsn`` - connects using the DSN named - ``mydsn`` passing in the ``UID`` and ``PWD`` information. The - connection string that is created will appear like:: + engine = create_engine("mssql+pyodbc://user:pass@mydsn") + +The URL above will pass the following connection string to PyODBC:: dsn=mydsn;UID=user;PWD=pass -* ``mssql+pyodbc://user:pass@mydsn/?LANGUAGE=us_english`` - connects - using the DSN named ``mydsn`` passing in the ``UID`` and ``PWD`` - information, plus the additional connection configuration option - ``LANGUAGE``. The connection string that is created will appear - like:: +If the username and password are omitted, the DSN form will also add +the ``Trusted_Connection=yes`` directive to the ODBC string. - dsn=mydsn;UID=user;PWD=pass;LANGUAGE=us_english +Hostname Connections +^^^^^^^^^^^^^^^^^^^^ -* ``mssql+pyodbc://user:pass@host/db`` - connects using a connection - that would appear like:: +Hostname-based connections are **not preferred**; however, they are supported. +The ODBC driver name must be explicitly specified:: - DRIVER={SQL Server};Server=host;Database=db;UID=user;PWD=pass + engine = create_engine("mssql+pyodbc://scott:tiger@myhost:port/databasename?driver=SQL+Server+Native+Client+10.0") -* ``mssql+pyodbc://user:pass@host:123/db`` - connects using a connection - string which includes the port - information using the comma syntax. This will create the following - connection string:: +.. versionchanged:: 1.0.0 Hostname-based PyODBC connections now require the + SQL Server driver name to be specified explicitly. SQLAlchemy cannot + choose an optimal default here as it varies based on platform + and installed drivers. - DRIVER={SQL Server};Server=host,123;Database=db;UID=user;PWD=pass +Other keywords interpreted by the PyODBC dialect to be passed to +``pyodbc.connect()`` in both the DSN and hostname cases include: ``odbc_autotranslate``, ``ansi``, ``unicode_results``, ``autocommit``.
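+
+For example, a URL passing the ``autocommit`` flag alongside the driver name
+might look like the following (an illustrative sketch only; the host, port,
+and database names here are placeholders)::
+
+    engine = create_engine(
+        "mssql+pyodbc://scott:tiger@myhost:1433/mydb"
+        "?driver=SQL+Server+Native+Client+10.0&autocommit=true"
+    )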
-* ``mssql+pyodbc://user:pass@host/db?port=123`` - connects using a connection - string that includes the port - information as a separate ``port`` keyword. This will create the - following connection string:: +Pass through exact Pyodbc string +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - DRIVER={SQL Server};Server=host;Database=db;UID=user;PWD=pass;port=123 +A PyODBC connection string can also be sent exactly as specified in +`ConnectionStrings `_ +into the driver using the parameter ``odbc_connect``. The delimiters must be URL escaped, however, +as illustrated below using ``urllib.quote_plus``:: -* ``mssql+pyodbc://user:pass@host/db?driver=MyDriver`` - connects using a - connection string that includes a custom ODBC driver name. This will create - the following connection string:: + import urllib + params = urllib.quote_plus("DRIVER={SQL Server Native Client 10.0};SERVER=dagger;DATABASE=test;UID=user;PWD=password") - DRIVER={MyDriver};Server=host;Database=db;UID=user;PWD=pass + engine = create_engine("mssql+pyodbc:///?odbc_connect=%s" % params) -If you require a connection string that is outside the options -presented above, use the ``odbc_connect`` keyword to pass in a -urlencoded connection string. What gets passed in will be urldecoded -and passed directly. - -For example:: - - mssql+pyodbc:///?odbc_connect=dsn%3Dmydsn%3BDatabase%3Ddb - -would create the following connection string:: - - dsn=mydsn;Database=db - -Encoding your connection string can be easily accomplished through -the python shell. For example:: - - >>> import urllib - >>> urllib.quote_plus('dsn=mydsn;Database=db') - 'dsn%3Dmydsn%3BDatabase%3Ddb' Unicode Binds ------------- @@ -112,7 +95,7 @@ for unix + PyODBC. """ -from .base import MSExecutionContext, MSDialect +from .base import MSExecutionContext, MSDialect, VARBINARY from ...connectors.pyodbc import PyODBCConnector from ... import types as sqltypes, util import decimal @@ -191,6 +174,22 @@ class _MSFloat_pyodbc(_ms_numeric_pyodbc, sqltypes.Float): pass +class _VARBINARY_pyodbc(VARBINARY): + def bind_processor(self, dialect): + if dialect.dbapi is None: + return None + + DBAPIBinary = dialect.dbapi.Binary + + def process(value): + if value is not None: + return DBAPIBinary(value) + else: + # pyodbc-specific + return dialect.dbapi.BinaryNull + return process + + class MSExecutionContext_pyodbc(MSExecutionContext): _embedded_scope_identity = False @@ -243,13 +242,13 @@ class MSDialect_pyodbc(PyODBCConnector, MSDialect): execution_ctx_cls = MSExecutionContext_pyodbc - pyodbc_driver_name = 'SQL Server' - colspecs = util.update_copy( MSDialect.colspecs, { sqltypes.Numeric: _MSNumeric_pyodbc, - sqltypes.Float: _MSFloat_pyodbc + sqltypes.Float: _MSFloat_pyodbc, + VARBINARY: _VARBINARY_pyodbc, + sqltypes.LargeBinary: _VARBINARY_pyodbc, } ) diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/mssql/zxjdbc.py b/lib/python3.4/site-packages/sqlalchemy/dialects/mssql/zxjdbc.py index b23a010..0bf68c2 100644 --- a/lib/python3.4/site-packages/sqlalchemy/dialects/mssql/zxjdbc.py +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/mssql/zxjdbc.py @@ -1,5 +1,5 @@ # mssql/zxjdbc.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -13,6 +13,8 @@ [?key=value&key=value...] :driverurl: http://jtds.sourceforge.net/ + .. note:: Jython is not supported by current versions of SQLAlchemy. The + zxjdbc dialect should be considered experimental.
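+
+A minimal URL sketch for this dialect (the host, port, and database names
+here are hypothetical placeholders; the jTDS JDBC driver must be installed
+separately)::
+
+    engine = create_engine("mssql+zxjdbc://scott:tiger@myhost:1433/mydb")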
""" from ...connectors.zxJDBC import ZxJDBCConnector diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/mysql/__init__.py b/lib/python3.4/site-packages/sqlalchemy/dialects/mysql/__init__.py index 498603c..fabd932 100644 --- a/lib/python3.4/site-packages/sqlalchemy/dialects/mysql/__init__.py +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/mysql/__init__.py @@ -1,5 +1,5 @@ # mysql/__init__.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/mysql/base.py b/lib/python3.4/site-packages/sqlalchemy/dialects/mysql/base.py index 6c5633f..3f9c599 100644 --- a/lib/python3.4/site-packages/sqlalchemy/dialects/mysql/base.py +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/mysql/base.py @@ -1,5 +1,5 @@ # mysql/base.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -32,6 +32,11 @@ the ``pool_recycle`` option which controls the maximum age of any connection:: engine = create_engine('mysql+mysqldb://...', pool_recycle=3600) +.. seealso:: + + :ref:`pool_setting_recycle` - full description of the pool recycle feature. + + .. _mysql_storage_engines: CREATE TABLE arguments including Storage Engines @@ -101,10 +106,12 @@ all lower case both within SQLAlchemy as well as on the MySQL database itself, especially if database reflection features are to be used. +.. _mysql_isolation_level: + Transaction Isolation Level --------------------------- -:func:`.create_engine` accepts an ``isolation_level`` +:func:`.create_engine` accepts an :paramref:`.create_engine.isolation_level` parameter which results in the command ``SET SESSION TRANSACTION ISOLATION LEVEL `` being invoked for every new connection. Valid values for this parameter are @@ -144,6 +151,90 @@ multi-column key for some storage engines:: Column('id', Integer, primary_key=True) ) +.. _mysql_unicode: + +Unicode +------- + +Charset Selection +~~~~~~~~~~~~~~~~~ + +Most MySQL DBAPIs offer the option to set the client character set for +a connection. This is typically delivered using the ``charset`` parameter +in the URL, such as:: + + e = create_engine("mysql+pymysql://scott:tiger@localhost/\ +test?charset=utf8") + +This charset is the **client character set** for the connection. Some +MySQL DBAPIs will default this to a value such as ``latin1``, and some +will make use of the ``default-character-set`` setting in the ``my.cnf`` +file as well. Documentation for the DBAPI in use should be consulted +for specific behavior. + +The encoding used for Unicode has traditionally been ``'utf8'``. However, +for MySQL versions 5.5.3 on forward, a new MySQL-specific encoding +``'utf8mb4'`` has been introduced. The rationale for this new encoding +is due to the fact that MySQL's utf-8 encoding only supports +codepoints up to three bytes instead of four. Therefore, +when communicating with a MySQL database +that includes codepoints more than three bytes in size, +this new charset is preferred, if supported by both the database as well +as the client DBAPI, as in:: + + e = create_engine("mysql+pymysql://scott:tiger@localhost/\ +test?charset=utf8mb4") + +At the moment, up-to-date versions of MySQLdb and PyMySQL support the +``utf8mb4`` charset. 
Other DBAPIs such as MySQL-Connector and OurSQL +may **not** support it yet. + +In order to use ``utf8mb4`` encoding, changes to +the MySQL schema and/or server configuration may be required. + +.. seealso:: + + `The utf8mb4 Character Set \ +`_ - \ +in the MySQL documentation + +Unicode Encoding / Decoding +~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +All modern MySQL DBAPIs offer the service of handling the encoding and +decoding of unicode data between the Python application space and the database. +As this was not always the case, SQLAlchemy also includes a comprehensive system +for performing the encode/decode task. As only one of these systems +should be in use at a time, SQLAlchemy has long included functionality +to detect automatically upon first connection whether or not the DBAPI is +handling unicode. + +Whether or not the MySQL DBAPI will handle encoding can usually be configured +using a DBAPI flag ``use_unicode``, which is known to be supported at least +by MySQLdb, PyMySQL, and MySQL-Connector. Setting this value to ``0`` +in the "connect args" or query string will have the effect of disabling the +DBAPI's handling of unicode, such that it instead will return data of the +``str`` type or ``bytes`` type, with data in the configured charset:: + + # connect while disabling the DBAPI's unicode encoding/decoding + e = create_engine("mysql+mysqldb://scott:tiger@localhost/test?charset=utf8&use_unicode=0") + +Current recommendations for modern DBAPIs are as follows: + +* It is generally safe to leave the ``use_unicode`` flag set at + its default; that is, don't use it at all. +* Under Python 3, the ``use_unicode=0`` flag should **never be used**. + SQLAlchemy under Python 3 generally assumes the DBAPI receives and returns + string values as Python 3 strings, which are inherently unicode objects. +* Under Python 2 with MySQLdb, the ``use_unicode=0`` flag will **offer + superior performance**, as MySQLdb's unicode converters, under Python 2 only, + have been observed to perform unusually slowly compared to SQLAlchemy's + fast C-based encoders/decoders. + +In short: don't specify ``use_unicode`` *at all*, with the possible +exception of ``use_unicode=0`` on MySQLdb with Python 2 **only** for a +potential performance gain. + Ansi Quoting Style ------------------ @@ -188,15 +279,13 @@ SQLAlchemy standardizes the DBAPI ``cursor.rowcount`` attribute to be the usual definition of "number of rows matched by an UPDATE or DELETE" statement. This is in contradiction to the default setting on most MySQL DBAPI drivers, which is "number of rows actually modified/deleted". For this reason, the -SQLAlchemy MySQL dialects always set the ``constants.CLIENT.FOUND_ROWS`` flag, -or whatever is equivalent for the DBAPI in use, on connect, unless the flag -value is overridden using DBAPI-specific options -(such as ``client_flag`` for the MySQL-Python driver, ``found_rows`` for the -OurSQL driver). +SQLAlchemy MySQL dialects always add the ``constants.CLIENT.FOUND_ROWS`` +flag, or whatever is equivalent for the target dialect, upon connection. +This setting is currently hardcoded. -See also: +.. seealso:: -:attr:`.ResultProxy.rowcount` + :attr:`.ResultProxy.rowcount` CAST Support @@ -341,6 +430,110 @@ reflection will not include foreign keys. For these tables, you may supply a :ref:`mysql_storage_engines` ..
_mysql_unique_constraints: + +MySQL Unique Constraints and Reflection +--------------------------------------- + +SQLAlchemy supports both the :class:`.Index` construct with the +flag ``unique=True``, indicating a UNIQUE index, as well as the +:class:`.UniqueConstraint` construct, representing a UNIQUE constraint. +Both objects/syntaxes are supported by MySQL when emitting DDL to create +these constraints. However, MySQL does not have a unique constraint +construct that is separate from a unique index; that is, the "UNIQUE" +constraint on MySQL is equivalent to creating a "UNIQUE INDEX". + +When reflecting these constructs, the :meth:`.Inspector.get_indexes` +and the :meth:`.Inspector.get_unique_constraints` methods will **both** +return an entry for a UNIQUE index in MySQL. However, when performing +full table reflection using ``Table(..., autoload=True)``, +the :class:`.UniqueConstraint` construct is +**not** part of the fully reflected :class:`.Table` construct under any +circumstances; this construct is always represented by a :class:`.Index` +with the ``unique=True`` setting present in the :attr:`.Table.indexes` +collection. + + +.. _mysql_timestamp_null: + +TIMESTAMP Columns and NULL +-------------------------- + +MySQL historically enforces that a column which specifies the +TIMESTAMP datatype implicitly includes a default value of +CURRENT_TIMESTAMP, even though this is not stated, and additionally +sets the column as NOT NULL, the opposite behavior vs. that of all +other datatypes:: + + mysql> CREATE TABLE ts_test ( + -> a INTEGER, + -> b INTEGER NOT NULL, + -> c TIMESTAMP, + -> d TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + -> e TIMESTAMP NULL); + Query OK, 0 rows affected (0.03 sec) + + mysql> SHOW CREATE TABLE ts_test; + +---------+----------------------------------------------------- + | Table | Create Table + +---------+----------------------------------------------------- + | ts_test | CREATE TABLE `ts_test` ( + `a` int(11) DEFAULT NULL, + `b` int(11) NOT NULL, + `c` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, + `d` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, + `e` timestamp NULL DEFAULT NULL + ) ENGINE=MyISAM DEFAULT CHARSET=latin1 + +Above, we see that an INTEGER column defaults to NULL, unless it is specified +with NOT NULL. But when the column is of type TIMESTAMP, an implicit +default of CURRENT_TIMESTAMP is generated which also coerces the column +to be a NOT NULL, even though we did not specify it as such. + +This behavior of MySQL can be changed on the MySQL side using the +`explicit_defaults_for_timestamp +`_ configuration flag introduced in +MySQL 5.6. With this server setting enabled, TIMESTAMP columns behave like +any other datatype on the MySQL side with regards to defaults and nullability. + +However, to accommodate the vast majority of MySQL databases that do not +specify this new flag, SQLAlchemy emits the "NULL" specifier explicitly with +any TIMESTAMP column that does not specify ``nullable=False``. In order +to accommodate newer databases that specify ``explicit_defaults_for_timestamp``, +SQLAlchemy also emits NOT NULL for TIMESTAMP columns that do specify +``nullable=False``. 
The following example illustrates:: + + from sqlalchemy import MetaData, Integer, Table, Column, text + from sqlalchemy.dialects.mysql import TIMESTAMP + + m = MetaData() + t = Table('ts_test', m, + Column('a', Integer), + Column('b', Integer, nullable=False), + Column('c', TIMESTAMP), + Column('d', TIMESTAMP, nullable=False) + ) + + + from sqlalchemy import create_engine + e = create_engine("mysql://scott:tiger@localhost/test", echo=True) + m.create_all(e) + +output:: + + CREATE TABLE ts_test ( + a INTEGER, + b INTEGER NOT NULL, + c TIMESTAMP NULL, + d TIMESTAMP NOT NULL + ) + +.. versionchanged:: 1.0.0 - SQLAlchemy now renders NULL or NOT NULL in all + cases for TIMESTAMP columns, to accommodate + ``explicit_defaults_for_timestamp``. Prior to this version, it would + not render "NOT NULL" for a TIMESTAMP column that is ``nullable=False``. + """ import datetime @@ -409,6 +602,8 @@ RESERVED_WORDS = set( 'get', 'io_after_gtids', 'io_before_gtids', 'master_bind', 'one_shot', 'partition', 'sql_after_gtids', 'sql_before_gtids', # 5.6 + 'generated', 'optimizer_costs', 'stored', 'virtual', # 5.7 + ]) AUTOCOMMIT_RE = re.compile( @@ -420,7 +615,6 @@ SET_RE = re.compile( class _NumericType(object): - """Base for MySQL numeric types. This is the base both for NUMERIC as well as INTEGER, hence @@ -439,7 +633,6 @@ class _NumericType(object): class _FloatType(_NumericType, sqltypes.Float): - def __init__(self, precision=None, scale=None, asdecimal=True, **kw): if isinstance(self, (REAL, DOUBLE)) and \ ( @@ -460,7 +653,6 @@ class _FloatType(_NumericType, sqltypes.Float): class _IntegerType(_NumericType, sqltypes.Integer): - def __init__(self, display_width=None, **kw): self.display_width = display_width super(_IntegerType, self).__init__(**kw) @@ -472,7 +664,6 @@ class _IntegerType(_NumericType, sqltypes.Integer): class _StringType(sqltypes.String): - """Base for MySQL string types.""" def __init__(self, charset=None, collation=None, @@ -494,8 +685,15 @@ class _StringType(sqltypes.String): to_inspect=[_StringType, sqltypes.String]) -class NUMERIC(_NumericType, sqltypes.NUMERIC): +class _MatchType(sqltypes.Float, sqltypes.MatchType): + def __init__(self, **kw): + # TODO: float arguments?
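+        # MySQL returns a floating point relevance score from a MATCH
+        # expression rather than a boolean, so Float is mixed in here
+        # to supply the numeric result handling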
+ sqltypes.Float.__init__(self) + sqltypes.MatchType.__init__(self) + + +class NUMERIC(_NumericType, sqltypes.NUMERIC): """MySQL NUMERIC type.""" __visit_name__ = 'NUMERIC' @@ -521,7 +719,6 @@ class NUMERIC(_NumericType, sqltypes.NUMERIC): class DECIMAL(_NumericType, sqltypes.DECIMAL): - """MySQL DECIMAL type.""" __visit_name__ = 'DECIMAL' @@ -547,7 +744,6 @@ class DECIMAL(_NumericType, sqltypes.DECIMAL): class DOUBLE(_FloatType): - """MySQL DOUBLE type.""" __visit_name__ = 'DOUBLE' @@ -581,7 +777,6 @@ class DOUBLE(_FloatType): class REAL(_FloatType, sqltypes.REAL): - """MySQL REAL type.""" __visit_name__ = 'REAL' @@ -615,7 +810,6 @@ class REAL(_FloatType, sqltypes.REAL): class FLOAT(_FloatType, sqltypes.FLOAT): - """MySQL FLOAT type.""" __visit_name__ = 'FLOAT' @@ -644,7 +838,6 @@ class FLOAT(_FloatType, sqltypes.FLOAT): class INTEGER(_IntegerType, sqltypes.INTEGER): - """MySQL INTEGER type.""" __visit_name__ = 'INTEGER' @@ -666,7 +859,6 @@ class INTEGER(_IntegerType, sqltypes.INTEGER): class BIGINT(_IntegerType, sqltypes.BIGINT): - """MySQL BIGINTEGER type.""" __visit_name__ = 'BIGINT' @@ -688,7 +880,6 @@ class BIGINT(_IntegerType, sqltypes.BIGINT): class MEDIUMINT(_IntegerType): - """MySQL MEDIUMINTEGER type.""" __visit_name__ = 'MEDIUMINT' @@ -710,7 +901,6 @@ class MEDIUMINT(_IntegerType): class TINYINT(_IntegerType): - """MySQL TINYINT type.""" __visit_name__ = 'TINYINT' @@ -732,7 +922,6 @@ class TINYINT(_IntegerType): class SMALLINT(_IntegerType, sqltypes.SMALLINT): - """MySQL SMALLINTEGER type.""" __visit_name__ = 'SMALLINT' @@ -754,7 +943,6 @@ class SMALLINT(_IntegerType, sqltypes.SMALLINT): class BIT(sqltypes.TypeEngine): - """MySQL BIT type. This type is for MySQL 5.0.3 or greater for MyISAM, and 5.0.5 or greater @@ -784,7 +972,9 @@ class BIT(sqltypes.TypeEngine): def process(value): if value is not None: v = 0 - for i in map(ord, value): + for i in value: + if not isinstance(i, int): + i = ord(i) # convert byte to int on Python 2 v = v << 8 | i return v return value @@ -792,7 +982,6 @@ class BIT(sqltypes.TypeEngine): class TIME(sqltypes.TIME): - """MySQL TIME type. """ __visit_name__ = 'TIME' @@ -838,7 +1027,6 @@ class TIME(sqltypes.TIME): class TIMESTAMP(sqltypes.TIMESTAMP): - """MySQL TIMESTAMP type. """ @@ -869,7 +1057,6 @@ class TIMESTAMP(sqltypes.TIMESTAMP): class DATETIME(sqltypes.DATETIME): - """MySQL DATETIME type. 
""" @@ -900,7 +1087,6 @@ class DATETIME(sqltypes.DATETIME): class YEAR(sqltypes.TypeEngine): - """MySQL YEAR type, for single byte storage of years 1901-2155.""" __visit_name__ = 'YEAR' @@ -910,7 +1096,6 @@ class YEAR(sqltypes.TypeEngine): class TEXT(_StringType, sqltypes.TEXT): - """MySQL TEXT type, for text up to 2^16 characters.""" __visit_name__ = 'TEXT' @@ -947,7 +1132,6 @@ class TEXT(_StringType, sqltypes.TEXT): class TINYTEXT(_StringType): - """MySQL TINYTEXT type, for text up to 2^8 characters.""" __visit_name__ = 'TINYTEXT' @@ -980,7 +1164,6 @@ class TINYTEXT(_StringType): class MEDIUMTEXT(_StringType): - """MySQL MEDIUMTEXT type, for text up to 2^24 characters.""" __visit_name__ = 'MEDIUMTEXT' @@ -1013,7 +1196,6 @@ class MEDIUMTEXT(_StringType): class LONGTEXT(_StringType): - """MySQL LONGTEXT type, for text up to 2^32 characters.""" __visit_name__ = 'LONGTEXT' @@ -1046,7 +1228,6 @@ class LONGTEXT(_StringType): class VARCHAR(_StringType, sqltypes.VARCHAR): - """MySQL VARCHAR type, for variable-length character data.""" __visit_name__ = 'VARCHAR' @@ -1079,7 +1260,6 @@ class VARCHAR(_StringType, sqltypes.VARCHAR): class CHAR(_StringType, sqltypes.CHAR): - """MySQL CHAR type, for fixed-length character data.""" __visit_name__ = 'CHAR' @@ -1121,7 +1301,6 @@ class CHAR(_StringType, sqltypes.CHAR): class NVARCHAR(_StringType, sqltypes.NVARCHAR): - """MySQL NVARCHAR type. For variable-length character data in the server's configured national @@ -1148,7 +1327,6 @@ class NVARCHAR(_StringType, sqltypes.NVARCHAR): class NCHAR(_StringType, sqltypes.NCHAR): - """MySQL NCHAR type. For fixed-length character data in the server's configured national @@ -1175,28 +1353,24 @@ class NCHAR(_StringType, sqltypes.NCHAR): class TINYBLOB(sqltypes._Binary): - """MySQL TINYBLOB type, for binary data up to 2^8 bytes.""" __visit_name__ = 'TINYBLOB' class MEDIUMBLOB(sqltypes._Binary): - """MySQL MEDIUMBLOB type, for binary data up to 2^24 bytes.""" __visit_name__ = 'MEDIUMBLOB' class LONGBLOB(sqltypes._Binary): - """MySQL LONGBLOB type, for binary data up to 2^32 bytes.""" __visit_name__ = 'LONGBLOB' class _EnumeratedValues(_StringType): - def _init_values(self, values, kw): self.quoting = kw.pop('quoting', 'auto') @@ -1240,7 +1414,6 @@ class _EnumeratedValues(_StringType): class ENUM(sqltypes.Enum, _EnumeratedValues): - """MySQL ENUM type.""" __visit_name__ = 'ENUM' @@ -1302,6 +1475,7 @@ class ENUM(sqltypes.Enum, _EnumeratedValues): kw.pop('quote', None) kw.pop('native_enum', None) kw.pop('inherit_schema', None) + kw.pop('_create_events', None) _StringType.__init__(self, length=length, **kw) sqltypes.Enum.__init__(self, *values) @@ -1329,7 +1503,6 @@ class ENUM(sqltypes.Enum, _EnumeratedValues): class SET(_EnumeratedValues): - """MySQL SET type.""" __visit_name__ = 'SET' @@ -1341,32 +1514,28 @@ class SET(_EnumeratedValues): Column('myset', SET("foo", "bar", "baz")) - :param values: The range of valid values for this SET. Values will be - quoted when generating the schema according to the quoting flag (see - below). - .. versionchanged:: 0.9.0 quoting is applied automatically to - :class:`.mysql.SET` in the same way as for :class:`.mysql.ENUM`. + The list of potential values is required in the case that this + set will be used to generate DDL for a table, or if the + :paramref:`.SET.retrieve_as_bitwise` flag is set to True. - :param charset: Optional, a column-level character set for this string - value. Takes precedence to 'ascii' or 'unicode' short-hand. 
+ :param values: The range of valid values for this SET. - :param collation: Optional, a column-level collation for this string - value. Takes precedence to 'binary' short-hand. + :param convert_unicode: Same flag as that of + :paramref:`.String.convert_unicode`. - :param ascii: Defaults to False: short-hand for the ``latin1`` - character set, generates ASCII in schema. + :param collation: same as that of :paramref:`.String.collation` - :param unicode: Defaults to False: short-hand for the ``ucs2`` - character set, generates UNICODE in schema. + :param charset: same as that of :paramref:`.VARCHAR.charset`. - :param binary: Defaults to False: short-hand, pick the binary - collation type that matches the column's character set. Generates - BINARY in schema. This does not affect the type of data stored, - only the collation of character data. + :param ascii: same as that of :paramref:`.VARCHAR.ascii`. - :param quoting: Defaults to 'auto': automatically determine enum value - quoting. If all enum values are surrounded by the same quoting + :param unicode: same as that of :paramref:`.VARCHAR.unicode`. + + :param binary: same as that of :paramref:`.VARCHAR.binary`. + + :param quoting: Defaults to 'auto': automatically determine set value + quoting. If all values are surrounded by the same quoting character, then use 'quoted' mode. Otherwise, use 'unquoted' mode. 'quoted': values in enums are already quoted, they will be used @@ -1381,50 +1550,117 @@ class SET(_EnumeratedValues): .. versionadded:: 0.9.0 + :param retrieve_as_bitwise: if True, the data for the set type will be + persisted and selected using an integer value, where a set is coerced + into a bitwise mask for persistence. MySQL allows this mode which + has the advantage of being able to store values unambiguously, + such as the blank string ``''``. The datatype will appear + as the expression ``col + 0`` in a SELECT statement, so that the + value is coerced into an integer value in result sets. + This flag is required if one wishes + to persist a set that can store the blank string ``''`` as a value. + + .. warning:: + + When using :paramref:`.mysql.SET.retrieve_as_bitwise`, it is + essential that the list of set values is expressed in the + **exact same order** as exists on the MySQL database. + + .. versionadded:: 1.0.0 + + """ + self.retrieve_as_bitwise = kw.pop('retrieve_as_bitwise', False) values, length = self._init_values(values, kw) self.values = tuple(values) - + if not self.retrieve_as_bitwise and '' in values: + raise exc.ArgumentError( + "Can't use the blank value '' in a SET without " + "setting retrieve_as_bitwise=True") + if self.retrieve_as_bitwise: + self._bitmap = dict( + (value, 2 ** idx) + for idx, value in enumerate(self.values) + ) + self._bitmap.update( + (2 ** idx, value) + for idx, value in enumerate(self.values) + ) kw.setdefault('length', length) super(SET, self).__init__(**kw) + def column_expression(self, colexpr): + if self.retrieve_as_bitwise: + return colexpr + 0 + else: + return colexpr + def result_processor(self, dialect, coltype): - def process(value): - # The good news: - # No ',' quoting issues- commas aren't allowed in SET values - # The bad news: - # Plenty of driver inconsistencies here. 
- if isinstance(value, set): - # ..some versions convert '' to an empty set - if not value: - value.add('') - return value - # ...and some versions return strings - if value is not None: - return set(value.split(',')) - else: - return value + if self.retrieve_as_bitwise: + def process(value): + if value is not None: + value = int(value) + + return set( + util.map_bits(self._bitmap.__getitem__, value) + ) + else: + return None + else: + super_convert = super(SET, self).result_processor(dialect, coltype) + + def process(value): + if isinstance(value, util.string_types): + # MySQLdb returns a string, let's parse + if super_convert: + value = super_convert(value) + return set(re.findall(r'[^,]+', value)) + else: + # mysql-connector-python does a naive + # split(",") which throws in an empty string + if value is not None: + value.discard('') + return value return process def bind_processor(self, dialect): super_convert = super(SET, self).bind_processor(dialect) + if self.retrieve_as_bitwise: + def process(value): + if value is None: + return None + elif isinstance(value, util.int_types + util.string_types): + if super_convert: + return super_convert(value) + else: + return value + else: + int_value = 0 + for v in value: + int_value |= self._bitmap[v] + return int_value + else: - def process(value): - if value is None or isinstance( - value, util.int_types + util.string_types): - pass - else: - if None in value: - value = set(value) - value.remove(None) - value.add('') - value = ','.join(value) - if super_convert: - return super_convert(value) - else: - return value + def process(value): + # accept strings and int (actually bitflag) values directly + if value is not None and not isinstance( + value, util.int_types + util.string_types): + value = ",".join(value) + + if super_convert: + return super_convert(value) + else: + return value return process + def adapt(self, impltype, **kw): + kw['retrieve_as_bitwise'] = self.retrieve_as_bitwise + return util.constructor_copy( + self, impltype, + *self.values, + **kw + ) + # old names MSTime = TIME MSSet = SET @@ -1465,6 +1701,7 @@ colspecs = { sqltypes.Float: FLOAT, sqltypes.Time: TIME, sqltypes.Enum: ENUM, + sqltypes.MatchType: _MatchType } # Everything 3.23 through 5.1 excepting OpenGIS types. @@ -1540,9 +1777,12 @@ class MySQLCompiler(compiler.SQLCompiler): def get_from_hint_text(self, table, text): return text - def visit_typeclause(self, typeclause): - type_ = typeclause.type.dialect_impl(self.dialect) - if isinstance(type_, sqltypes.Integer): + def visit_typeclause(self, typeclause, type_=None): + if type_ is None: + type_ = typeclause.type.dialect_impl(self.dialect) + if isinstance(type_, sqltypes.TypeDecorator): + return self.visit_typeclause(typeclause, type_.impl) + elif isinstance(type_, sqltypes.Integer): if getattr(type_, 'unsigned', False): return 'UNSIGNED INTEGER' else: @@ -1567,10 +1807,17 @@ class MySQLCompiler(compiler.SQLCompiler): def visit_cast(self, cast, **kwargs): # No cast until 4, no decimals until 5. if not self.dialect._supports_cast: + util.warn( + "Current MySQL version does not support " + "CAST; the CAST will be skipped.") return self.process(cast.clause.self_group()) type_ = self.process(cast.typeclause) if type_ is None: + util.warn( + "Datatype %s does not support CAST on MySQL; " + "the CAST will be skipped." 
% + self.dialect.type_compiler.process(cast.typeclause.type)) return self.process(cast.clause.self_group()) return 'CAST(%s AS %s)' % (self.process(cast.clause), type_) @@ -1581,7 +1828,15 @@ class MySQLCompiler(compiler.SQLCompiler): value = value.replace('\\', '\\\\') return value - def get_select_precolumns(self, select): + # override native_boolean=False behavior here, as + # MySQL still supports native boolean + def visit_true(self, element, **kw): + return "true" + + def visit_false(self, element, **kw): + return "false" + + def get_select_precolumns(self, select, **kw): """Add special MySQL keywords in place of DISTINCT. .. note:: @@ -1606,13 +1861,13 @@ class MySQLCompiler(compiler.SQLCompiler): " ON ", self.process(join.onclause, **kwargs))) - def for_update_clause(self, select): + def for_update_clause(self, select, **kw): if select._for_update_arg.read: return " LOCK IN SHARE MODE" else: return " FOR UPDATE" - def limit_clause(self, select): + def limit_clause(self, select, **kw): # MySQL supports: # LIMIT # LIMIT , @@ -1621,15 +1876,16 @@ class MySQLCompiler(compiler.SQLCompiler): # The latter is more readable for offsets but we're stuck with the # former until we can refine dialects by server revision. - limit, offset = select._limit, select._offset + limit_clause, offset_clause = select._limit_clause, \ + select._offset_clause - if (limit, offset) == (None, None): + if limit_clause is None and offset_clause is None: return '' - elif offset is not None: + elif offset_clause is not None: # As suggested by the MySQL docs, need to apply an # artificial limit if one wasn't provided # http://dev.mysql.com/doc/refman/5.0/en/select.html - if limit is None: + if limit_clause is None: # hardwire the upper limit. Currently # needed by OurSQL with Python 3 # (https://bugs.launchpad.net/oursql/+bug/686232), @@ -1637,15 +1893,15 @@ class MySQLCompiler(compiler.SQLCompiler): # bound as part of MySQL's "syntax" for OFFSET with # no LIMIT return ' \n LIMIT %s, %s' % ( - self.process(sql.literal(offset)), + self.process(offset_clause, **kw), "18446744073709551615") else: return ' \n LIMIT %s, %s' % ( - self.process(sql.literal(offset)), - self.process(sql.literal(limit))) + self.process(offset_clause, **kw), + self.process(limit_clause, **kw)) else: # No offset provided, so just use the limit - return ' \n LIMIT %s' % (self.process(sql.literal(limit)),) + return ' \n LIMIT %s' % (self.process(limit_clause, **kw),) def update_limit_clause(self, update_stmt): limit = update_stmt.kwargs.get('%s_limit' % self.dialect.name, None) @@ -1670,11 +1926,10 @@ class MySQLCompiler(compiler.SQLCompiler): # creation of foreign key constraints fails." class MySQLDDLCompiler(compiler.DDLCompiler): - - def create_table_constraints(self, table): + def create_table_constraints(self, table, **kw): """Get table constraints.""" constraint_string = super( - MySQLDDLCompiler, self).create_table_constraints(table) + MySQLDDLCompiler, self).create_table_constraints(table, **kw) # why self.dialect.name and not 'mysql'? 
because of drizzle is_innodb = 'engine' in table.dialect_options[self.dialect.name] and \ @@ -1700,22 +1955,28 @@ class MySQLDDLCompiler(compiler.DDLCompiler): def get_column_specification(self, column, **kw): """Builds column DDL.""" - colspec = [self.preparer.format_column(column), - self.dialect.type_compiler.process(column.type) - ] + colspec = [ + self.preparer.format_column(column), + self.dialect.type_compiler.process( + column.type, type_expression=column) + ] + + is_timestamp = isinstance(column.type, sqltypes.TIMESTAMP) + + if not column.nullable: + colspec.append('NOT NULL') + + # see: http://docs.sqlalchemy.org/en/latest/dialects/ + # mysql.html#mysql_timestamp_null + elif column.nullable and is_timestamp: + colspec.append('NULL') default = self.get_column_default_string(column) if default is not None: colspec.append('DEFAULT ' + default) - is_timestamp = isinstance(column.type, sqltypes.TIMESTAMP) - if not column.nullable and not is_timestamp: - colspec.append('NOT NULL') - - elif column.nullable and is_timestamp and default is None: - colspec.append('NULL') - - if column is column.table._autoincrement_column and \ + if column.table is not None \ + and column is column.table._autoincrement_column and \ column.server_default is None: colspec.append('AUTO_INCREMENT') @@ -1851,7 +2112,6 @@ class MySQLDDLCompiler(compiler.DDLCompiler): class MySQLTypeCompiler(compiler.GenericTypeCompiler): - def _extend_numeric(self, type_, spec): "Extend a numeric-type declaration with MySQL specific extensions." @@ -1899,7 +2159,7 @@ class MySQLTypeCompiler(compiler.GenericTypeCompiler): def _mysql_type(self, type_): return isinstance(type_, (_StringType, _NumericType)) - def visit_NUMERIC(self, type_): + def visit_NUMERIC(self, type_, **kw): if type_.precision is None: return self._extend_numeric(type_, "NUMERIC") elif type_.scale is None: @@ -1912,7 +2172,7 @@ class MySQLTypeCompiler(compiler.GenericTypeCompiler): {'precision': type_.precision, 'scale': type_.scale}) - def visit_DECIMAL(self, type_): + def visit_DECIMAL(self, type_, **kw): if type_.precision is None: return self._extend_numeric(type_, "DECIMAL") elif type_.scale is None: @@ -1925,7 +2185,7 @@ class MySQLTypeCompiler(compiler.GenericTypeCompiler): {'precision': type_.precision, 'scale': type_.scale}) - def visit_DOUBLE(self, type_): + def visit_DOUBLE(self, type_, **kw): if type_.precision is not None and type_.scale is not None: return self._extend_numeric(type_, "DOUBLE(%(precision)s, %(scale)s)" % @@ -1934,7 +2194,7 @@ class MySQLTypeCompiler(compiler.GenericTypeCompiler): else: return self._extend_numeric(type_, 'DOUBLE') - def visit_REAL(self, type_): + def visit_REAL(self, type_, **kw): if type_.precision is not None and type_.scale is not None: return self._extend_numeric(type_, "REAL(%(precision)s, %(scale)s)" % @@ -1943,7 +2203,7 @@ class MySQLTypeCompiler(compiler.GenericTypeCompiler): else: return self._extend_numeric(type_, 'REAL') - def visit_FLOAT(self, type_): + def visit_FLOAT(self, type_, **kw): if self._mysql_type(type_) and \ type_.scale is not None and \ type_.precision is not None: @@ -1955,7 +2215,7 @@ class MySQLTypeCompiler(compiler.GenericTypeCompiler): else: return self._extend_numeric(type_, "FLOAT") - def visit_INTEGER(self, type_): + def visit_INTEGER(self, type_, **kw): if self._mysql_type(type_) and type_.display_width is not None: return self._extend_numeric( type_, "INTEGER(%(display_width)s)" % @@ -1963,7 +2223,7 @@ class MySQLTypeCompiler(compiler.GenericTypeCompiler): else: return 
self._extend_numeric(type_, "INTEGER") - def visit_BIGINT(self, type_): + def visit_BIGINT(self, type_, **kw): if self._mysql_type(type_) and type_.display_width is not None: return self._extend_numeric( type_, "BIGINT(%(display_width)s)" % @@ -1971,7 +2231,7 @@ class MySQLTypeCompiler(compiler.GenericTypeCompiler): else: return self._extend_numeric(type_, "BIGINT") - def visit_MEDIUMINT(self, type_): + def visit_MEDIUMINT(self, type_, **kw): if self._mysql_type(type_) and type_.display_width is not None: return self._extend_numeric( type_, "MEDIUMINT(%(display_width)s)" % @@ -1979,14 +2239,14 @@ class MySQLTypeCompiler(compiler.GenericTypeCompiler): else: return self._extend_numeric(type_, "MEDIUMINT") - def visit_TINYINT(self, type_): + def visit_TINYINT(self, type_, **kw): if self._mysql_type(type_) and type_.display_width is not None: return self._extend_numeric(type_, "TINYINT(%s)" % type_.display_width) else: return self._extend_numeric(type_, "TINYINT") - def visit_SMALLINT(self, type_): + def visit_SMALLINT(self, type_, **kw): if self._mysql_type(type_) and type_.display_width is not None: return self._extend_numeric(type_, "SMALLINT(%(display_width)s)" % @@ -1995,55 +2255,55 @@ class MySQLTypeCompiler(compiler.GenericTypeCompiler): else: return self._extend_numeric(type_, "SMALLINT") - def visit_BIT(self, type_): + def visit_BIT(self, type_, **kw): if type_.length is not None: return "BIT(%s)" % type_.length else: return "BIT" - def visit_DATETIME(self, type_): + def visit_DATETIME(self, type_, **kw): if getattr(type_, 'fsp', None): return "DATETIME(%d)" % type_.fsp else: return "DATETIME" - def visit_DATE(self, type_): + def visit_DATE(self, type_, **kw): return "DATE" - def visit_TIME(self, type_): + def visit_TIME(self, type_, **kw): if getattr(type_, 'fsp', None): return "TIME(%d)" % type_.fsp else: return "TIME" - def visit_TIMESTAMP(self, type_): + def visit_TIMESTAMP(self, type_, **kw): if getattr(type_, 'fsp', None): return "TIMESTAMP(%d)" % type_.fsp else: return "TIMESTAMP" - def visit_YEAR(self, type_): + def visit_YEAR(self, type_, **kw): if type_.display_width is None: return "YEAR" else: return "YEAR(%s)" % type_.display_width - def visit_TEXT(self, type_): + def visit_TEXT(self, type_, **kw): if type_.length: return self._extend_string(type_, {}, "TEXT(%d)" % type_.length) else: return self._extend_string(type_, {}, "TEXT") - def visit_TINYTEXT(self, type_): + def visit_TINYTEXT(self, type_, **kw): return self._extend_string(type_, {}, "TINYTEXT") - def visit_MEDIUMTEXT(self, type_): + def visit_MEDIUMTEXT(self, type_, **kw): return self._extend_string(type_, {}, "MEDIUMTEXT") - def visit_LONGTEXT(self, type_): + def visit_LONGTEXT(self, type_, **kw): return self._extend_string(type_, {}, "LONGTEXT") - def visit_VARCHAR(self, type_): + def visit_VARCHAR(self, type_, **kw): if type_.length: return self._extend_string( type_, {}, "VARCHAR(%d)" % type_.length) @@ -2052,14 +2312,14 @@ class MySQLTypeCompiler(compiler.GenericTypeCompiler): "VARCHAR requires a length on dialect %s" % self.dialect.name) - def visit_CHAR(self, type_): + def visit_CHAR(self, type_, **kw): if type_.length: return self._extend_string(type_, {}, "CHAR(%(length)s)" % {'length': type_.length}) else: return self._extend_string(type_, {}, "CHAR") - def visit_NVARCHAR(self, type_): + def visit_NVARCHAR(self, type_, **kw): # We'll actually generate the equiv. "NATIONAL VARCHAR" instead # of "NVARCHAR". 
if type_.length: @@ -2071,7 +2331,7 @@ class MySQLTypeCompiler(compiler.GenericTypeCompiler): "NVARCHAR requires a length on dialect %s" % self.dialect.name) - def visit_NCHAR(self, type_): + def visit_NCHAR(self, type_, **kw): # We'll actually generate the equiv. # "NATIONAL CHAR" instead of "NCHAR". if type_.length: @@ -2081,31 +2341,31 @@ class MySQLTypeCompiler(compiler.GenericTypeCompiler): else: return self._extend_string(type_, {'national': True}, "CHAR") - def visit_VARBINARY(self, type_): + def visit_VARBINARY(self, type_, **kw): return "VARBINARY(%d)" % type_.length - def visit_large_binary(self, type_): + def visit_large_binary(self, type_, **kw): return self.visit_BLOB(type_) - def visit_enum(self, type_): + def visit_enum(self, type_, **kw): if not type_.native_enum: return super(MySQLTypeCompiler, self).visit_enum(type_) else: return self._visit_enumerated_values("ENUM", type_, type_.enums) - def visit_BLOB(self, type_): + def visit_BLOB(self, type_, **kw): if type_.length: return "BLOB(%d)" % type_.length else: return "BLOB" - def visit_TINYBLOB(self, type_): + def visit_TINYBLOB(self, type_, **kw): return "TINYBLOB" - def visit_MEDIUMBLOB(self, type_): + def visit_MEDIUMBLOB(self, type_, **kw): return "MEDIUMBLOB" - def visit_LONGBLOB(self, type_): + def visit_LONGBLOB(self, type_, **kw): return "LONGBLOB" def _visit_enumerated_values(self, name, type_, enumerated_values): @@ -2116,15 +2376,15 @@ class MySQLTypeCompiler(compiler.GenericTypeCompiler): name, ",".join(quoted_enums)) ) - def visit_ENUM(self, type_): + def visit_ENUM(self, type_, **kw): return self._visit_enumerated_values("ENUM", type_, type_._enumerated_values) - def visit_SET(self, type_): + def visit_SET(self, type_, **kw): return self._visit_enumerated_values("SET", type_, type_._enumerated_values) - def visit_BOOLEAN(self, type): + def visit_BOOLEAN(self, type, **kw): return "BOOL" @@ -2151,7 +2411,6 @@ class MySQLIdentifierPreparer(compiler.IdentifierPreparer): @log.class_logger class MySQLDialect(default.DefaultDialect): - """Details of the MySQL dialect. Not used directly in application code. """ @@ -2159,6 +2418,10 @@ class MySQLDialect(default.DefaultDialect): name = 'mysql' supports_alter = True + # MySQL has no true "boolean" type; we + # allow for the "true" and "false" keywords, however + supports_native_boolean = False + # identifiers are 64, however aliases can be 255... max_identifier_length = 255 max_index_name_length = 64 @@ -2249,7 +2512,7 @@ class MySQLDialect(default.DefaultDialect): # basic operations via autocommit fail. 
try: dbapi_connection.commit() - except: + except Exception: if self.server_version_info < (3, 23, 15): args = sys.exc_info()[1].args if args and args[0] == 1064: @@ -2261,7 +2524,7 @@ class MySQLDialect(default.DefaultDialect): try: dbapi_connection.rollback() - except: + except Exception: if self.server_version_info < (3, 23, 15): args = sys.exc_info()[1].args if args and args[0] == 1064: @@ -2345,7 +2608,8 @@ class MySQLDialect(default.DefaultDialect): rs = None try: try: - rs = connection.execute(st) + rs = connection.execution_options( + skip_user_error_events=True).execute(st) have = rs.fetchone() is not None rs.close() return have @@ -2501,13 +2765,14 @@ class MySQLDialect(default.DefaultDialect): pass else: self.logger.info( - "Converting unknown KEY type %s to a plain KEY" % flavor) + "Converting unknown KEY type %s to a plain KEY", flavor) pass index_d = {} index_d['name'] = spec['name'] index_d['column_names'] = [s[0] for s in spec['columns']] index_d['unique'] = unique - index_d['type'] = flavor + if flavor: + index_d['type'] = flavor indexes.append(index_d) return indexes @@ -2520,7 +2785,8 @@ class MySQLDialect(default.DefaultDialect): return [ { 'name': key['name'], - 'column_names': [col[0] for col in key['columns']] + 'column_names': [col[0] for col in key['columns']], + 'duplicates_index': key['name'], } for key in parsed_state.keys if key['type'] == 'UNIQUE' @@ -2568,7 +2834,7 @@ class MySQLDialect(default.DefaultDialect): schema, table_name)) sql = self._show_create_table(connection, None, charset, full_name=full_name) - if sql.startswith('CREATE ALGORITHM'): + if re.match(r'^CREATE (?:ALGORITHM)?.* VIEW', sql): # Adapt views to something table-like. columns = self._describe_table(connection, None, charset, full_name=full_name) @@ -2651,7 +2917,8 @@ class MySQLDialect(default.DefaultDialect): rp = None try: - rp = connection.execute(st) + rp = connection.execution_options( + skip_user_error_events=True).execute(st) except exc.DBAPIError as e: if self._extract_error_code(e.orig) == 1146: raise exc.NoSuchTableError(full_name) @@ -2675,7 +2942,8 @@ class MySQLDialect(default.DefaultDialect): rp, rows = None, None try: try: - rp = connection.execute(st) + rp = connection.execution_options( + skip_user_error_events=True).execute(st) except exc.DBAPIError as e: if self._extract_error_code(e.orig) == 1146: raise exc.NoSuchTableError(full_name) @@ -2689,7 +2957,6 @@ class MySQLDialect(default.DefaultDialect): class ReflectedState(object): - """Stores raw information about a SHOW CREATE TABLE statement.""" def __init__(self): @@ -2702,7 +2969,6 @@ class ReflectedState(object): @log.class_logger class MySQLTableDefinitionParser(object): - """Parses the results of a SHOW CREATE TABLE statement.""" def __init__(self, dialect, preparer): @@ -2839,8 +3105,7 @@ class MySQLTableDefinitionParser(object): if not spec['full']: util.warn("Incomplete reflection of column definition %r" % line) - name, type_, args, notnull = \ - spec['name'], spec['coltype'], spec['arg'], spec['notnull'] + name, type_, args = spec['name'], spec['coltype'], spec['arg'] try: col_type = self.dialect.ischema_names[type_] @@ -2859,23 +3124,31 @@ class MySQLTableDefinitionParser(object): # Column type keyword options type_kw = {} + + if issubclass(col_type, (DATETIME, TIME, TIMESTAMP)): + if type_args: + type_kw['fsp'] = type_args.pop(0) + for kw in ('unsigned', 'zerofill'): if spec.get(kw, False): type_kw[kw] = True for kw in ('charset', 'collate'): if spec.get(kw, False): type_kw[kw] = spec[kw] - if 
issubclass(col_type, _EnumeratedValues): type_args = _EnumeratedValues._strip_values(type_args) + if issubclass(col_type, SET) and '' in type_args: + type_kw['retrieve_as_bitwise'] = True + type_instance = col_type(*type_args, **type_kw) - col_args, col_kw = [], {} + col_kw = {} # NOT NULL col_kw['nullable'] = True - if spec.get('notnull', False): + # this can be "NULL" in the case of TIMESTAMP + if spec.get('notnull', False) == 'NOT NULL': col_kw['nullable'] = False # AUTO_INCREMENT @@ -2994,7 +3267,7 @@ class MySQLTableDefinitionParser(object): r'(?: +(?P<zerofill>ZEROFILL))?' r'(?: +CHARACTER SET +(?P<charset>[\w_]+))?' r'(?: +COLLATE +(?P<collate>[\w_]+))?' - r'(?: +(?P<notnull>NOT NULL))?' + r'(?: +(?P<notnull>(?:NOT )?NULL))?' r'(?: +DEFAULT +(?P<default>' r'(?:NULL|\x27(?:\x27\x27|[^\x27])*\x27|\w+' r'(?: +ON UPDATE \w+)?)' @@ -3014,7 +3287,7 @@ class MySQLTableDefinitionParser(object): r'%(iq)s(?P<name>(?:%(esc_fq)s|[^%(fq)s])+)%(fq)s +' r'(?P<coltype>\w+)' r'(?:\((?P<arg>(?:\d+|\d+,\d+|\x27(?:\x27\x27|[^\x27])+\x27))\))?' - r'.*?(?P<notnull>NOT NULL)?' + r'.*?(?P<notnull>(?:NOT )NULL)?' % quotes ) @@ -3109,7 +3382,6 @@ _options_of_type_string = ('COMMENT', 'DATA DIRECTORY', 'INDEX DIRECTORY', class _DecodingRowProxy(object): - """Return unicode-decoded values based on type inspection. Smooth over data type issues (esp. with alpha driver versions) and @@ -3122,9 +3394,17 @@ class _DecodingRowProxy(object): # sets.Set(['value']) (seriously) but thankfully that doesn't # seem to come up in DDL queries. + _encoding_compat = { + 'koi8r': 'koi8_r', + 'koi8u': 'koi8_u', + 'utf16': 'utf-16-be', # MySQL's utf16 is always big-endian + 'utf8mb4': 'utf8', # real utf8 + 'eucjpms': 'ujis', + } + def __init__(self, rowproxy, charset): self.rowproxy = rowproxy - self.charset = charset + self.charset = self._encoding_compat.get(charset, charset) def __getitem__(self, index): item = self.rowproxy[index] diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/mysql/cymysql.py b/lib/python3.4/site-packages/sqlalchemy/dialects/mysql/cymysql.py index 51b6304..8bc0ae3 100644 --- a/lib/python3.4/site-packages/sqlalchemy/dialects/mysql/cymysql.py +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/mysql/cymysql.py @@ -1,5 +1,5 @@ # mysql/cymysql.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/mysql/gaerdbms.py b/lib/python3.4/site-packages/sqlalchemy/dialects/mysql/gaerdbms.py index 0059f5a..4e36588 100644 --- a/lib/python3.4/site-packages/sqlalchemy/dialects/mysql/gaerdbms.py +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/mysql/gaerdbms.py @@ -1,5 +1,5 @@ # mysql/gaerdbms.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -17,6 +17,13 @@ developers-guide .. versionadded:: 0.7.8 + .. deprecated:: 1.0 This dialect is **no longer necessary** for + Google Cloud SQL; the MySQLdb dialect can be used directly.
+ Cloud SQL now recommends creating connections via the + mysql dialect using the URL format + + ``mysql+mysqldb://root@/?unix_socket=/cloudsql/:`` + Pooling ------- @@ -33,6 +40,7 @@ import os from .mysqldb import MySQLDialect_mysqldb from ...pool import NullPool import re +from sqlalchemy.util import warn_deprecated def _is_dev_environment(): @@ -43,6 +51,14 @@ class MySQLDialect_gaerdbms(MySQLDialect_mysqldb): @classmethod def dbapi(cls): + + warn_deprecated( + "Google Cloud SQL now recommends creating connections via the " + "MySQLdb dialect directly, using the URL format " + "mysql+mysqldb://root@/?unix_socket=/cloudsql/" + ":" + ) + # from django: # http://code.google.com/p/googleappengine/source/ # browse/trunk/python/google/storage/speckle/ diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/mysql/mysqlconnector.py b/lib/python3.4/site-packages/sqlalchemy/dialects/mysql/mysqlconnector.py index a2535d0..a3a3f2b 100644 --- a/lib/python3.4/site-packages/sqlalchemy/dialects/mysql/mysqlconnector.py +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/mysql/mysqlconnector.py @@ -1,5 +1,5 @@ # mysql/mysqlconnector.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -14,6 +14,12 @@ :url: http://dev.mysql.com/downloads/connector/python/ +Unicode +------- + +Please see :ref:`mysql_unicode` for current recommendations on unicode +handling. + """ from .base import (MySQLDialect, MySQLExecutionContext, @@ -21,6 +27,7 @@ from .base import (MySQLDialect, MySQLExecutionContext, BIT) from ... import util +import re class MySQLExecutionContext_mysqlconnector(MySQLExecutionContext): @@ -31,18 +38,34 @@ class MySQLExecutionContext_mysqlconnector(MySQLExecutionContext): class MySQLCompiler_mysqlconnector(MySQLCompiler): def visit_mod_binary(self, binary, operator, **kw): - return self.process(binary.left, **kw) + " %% " + \ - self.process(binary.right, **kw) + if self.dialect._mysqlconnector_double_percents: + return self.process(binary.left, **kw) + " %% " + \ + self.process(binary.right, **kw) + else: + return self.process(binary.left, **kw) + " % " + \ + self.process(binary.right, **kw) def post_process_text(self, text): - return text.replace('%', '%%') + if self.dialect._mysqlconnector_double_percents: + return text.replace('%', '%%') + else: + return text + + def escape_literal_column(self, text): + if self.dialect._mysqlconnector_double_percents: + return text.replace('%', '%%') + else: + return text class MySQLIdentifierPreparer_mysqlconnector(MySQLIdentifierPreparer): def _escape_identifier(self, value): value = value.replace(self.escape_quote, self.escape_to_quote) - return value.replace("%", "%%") + if self.dialect._mysqlconnector_double_percents: + return value.replace("%", "%%") + else: + return value class _myconnpyBIT(BIT): @@ -55,8 +78,6 @@ class _myconnpyBIT(BIT): class MySQLDialect_mysqlconnector(MySQLDialect): driver = 'mysqlconnector' - if util.py2k: - supports_unicode_statements = False supports_unicode_binds = True supports_sane_rowcount = True @@ -77,6 +98,10 @@ class MySQLDialect_mysqlconnector(MySQLDialect): } ) + @util.memoized_property + def supports_unicode_statements(self): + return util.py3k or self._mysqlconnector_version_info > (2, 0) + @classmethod def dbapi(cls): from mysql import connector @@ -89,8 +114,10 @@ class MySQLDialect_mysqlconnector(MySQLDialect): util.coerce_kw_type(opts, 'buffered', bool) 
util.coerce_kw_type(opts, 'raise_on_warnings', bool) + + # unfortunately, MySQL/connector python refuses to release a + # cursor without reading fully, so non-buffered isn't an option opts.setdefault('buffered', True) - opts.setdefault('raise_on_warnings', True) # FOUND_ROWS must be set in ClientFlag to enable # supports_sane_rowcount. @@ -101,10 +128,25 @@ 'client_flags', ClientFlag.get_default()) client_flags |= ClientFlag.FOUND_ROWS opts['client_flags'] = client_flags - except: + except Exception: pass return [[], opts] + @util.memoized_property + def _mysqlconnector_version_info(self): + if self.dbapi and hasattr(self.dbapi, '__version__'): + m = re.match(r'(\d+)\.(\d+)(?:\.(\d+))?', + self.dbapi.__version__) + if m: + return tuple( + int(x) + for x in m.group(1, 2, 3) + if x is not None) + + @util.memoized_property + def _mysqlconnector_double_percents(self): + return not util.py3k and self._mysqlconnector_version_info < (2, 0) + def _get_server_version_info(self, connection): dbapi_con = connection.connection version = dbapi_con.get_server_version() diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/mysql/mysqldb.py b/lib/python3.4/site-packages/sqlalchemy/dialects/mysql/mysqldb.py index 7cb2e66..9c35eb7 100644 --- a/lib/python3.4/site-packages/sqlalchemy/dialects/mysql/mysqldb.py +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/mysql/mysqldb.py @@ -1,5 +1,5 @@ # mysql/mysqldb.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -13,78 +13,101 @@ :connectstring: mysql+mysqldb://:@[:]/ :url: http://sourceforge.net/projects/mysql-python +.. _mysqldb_unicode: Unicode ------- -MySQLdb requires a "charset" parameter to be passed in order for it -to handle non-ASCII characters correctly. When this parameter is passed, -MySQLdb will also implicitly set the "use_unicode" flag to true, which means -that it will return Python unicode objects instead of bytestrings. -However, SQLAlchemy's decode process, when C extensions are enabled, -is orders of magnitude faster than that of MySQLdb as it does not call into -Python functions to do so. Therefore, the **recommended URL to use for -unicode** will include both charset and use_unicode=0:: +Please see :ref:`mysql_unicode` for current recommendations on unicode +handling. - create_engine("mysql+mysqldb://user:pass@host/dbname?charset=utf8&use_unicode=0") +Py3K Support +------------ -As of this writing, MySQLdb only runs on Python 2. It is not known how -MySQLdb behaves on Python 3 as far as unicode decoding. +Currently, MySQLdb only runs on Python 2 and development has been stopped. +`mysqlclient`_ is a fork of MySQLdb and provides Python 3 support as well +as some bugfixes. +.. _mysqlclient: https://github.com/PyMySQL/mysqlclient-python -Known Issues ------------- +Using MySQLdb with Google Cloud SQL +----------------------------------- -MySQL-python version 1.2.2 has a serious memory leak related -to unicode conversion, a feature which is disabled via ``use_unicode=0``. -It is strongly advised to use the latest version of MySQL-Python. +Google Cloud SQL now recommends use of the MySQLdb dialect.
Connect +using a URL like the following:: + + mysql+mysqldb://root@/<dbname>?unix_socket=/cloudsql/<projectid>:<instancename> """ from .base import (MySQLDialect, MySQLExecutionContext, MySQLCompiler, MySQLIdentifierPreparer) -from ...connectors.mysqldb import ( - MySQLDBExecutionContext, - MySQLDBCompiler, - MySQLDBIdentifierPreparer, - MySQLDBConnector -) from .base import TEXT from ... import sql +from ... import util +import re -class MySQLExecutionContext_mysqldb( - MySQLDBExecutionContext, - MySQLExecutionContext): - pass +class MySQLExecutionContext_mysqldb(MySQLExecutionContext): + + @property + def rowcount(self): + if hasattr(self, '_rowcount'): + return self._rowcount + else: + return self.cursor.rowcount -class MySQLCompiler_mysqldb(MySQLDBCompiler, MySQLCompiler): - pass +class MySQLCompiler_mysqldb(MySQLCompiler): + def visit_mod_binary(self, binary, operator, **kw): + return self.process(binary.left, **kw) + " %% " + \ + self.process(binary.right, **kw) + + def post_process_text(self, text): + return text.replace('%', '%%') -class MySQLIdentifierPreparer_mysqldb( - MySQLDBIdentifierPreparer, - MySQLIdentifierPreparer): - pass +class MySQLIdentifierPreparer_mysqldb(MySQLIdentifierPreparer): + + def _escape_identifier(self, value): + value = value.replace(self.escape_quote, self.escape_to_quote) + return value.replace("%", "%%") -class MySQLDialect_mysqldb(MySQLDBConnector, MySQLDialect): +class MySQLDialect_mysqldb(MySQLDialect): + driver = 'mysqldb' + supports_unicode_statements = True + supports_sane_rowcount = True + supports_sane_multi_rowcount = True + + supports_native_decimal = True + + default_paramstyle = 'format' execution_ctx_cls = MySQLExecutionContext_mysqldb statement_compiler = MySQLCompiler_mysqldb preparer = MySQLIdentifierPreparer_mysqldb + @classmethod + def dbapi(cls): + return __import__('MySQLdb') + + def do_executemany(self, cursor, statement, parameters, context=None): + rowcount = cursor.executemany(statement, parameters) + if context is not None: + context._rowcount = rowcount + def _check_unicode_returns(self, connection): # work around issue fixed in # https://github.com/farcepest/MySQLdb1/commit/cd44524fef63bd3fcb71947392326e9742d520e8 # specific issue w/ the utf8_bin collation and unicode returns - has_utf8_bin = connection.scalar( - "show collation where %s = 'utf8' and %s = 'utf8_bin'" - % ( - self.identifier_preparer.quote("Charset"), - self.identifier_preparer.quote("Collation") - )) + has_utf8_bin = self.server_version_info > (5, ) and \ + connection.scalar( + "show collation where %s = 'utf8' and %s = 'utf8_bin'" + % ( + self.identifier_preparer.quote("Charset"), + self.identifier_preparer.quote("Collation") + )) if has_utf8_bin: additional_tests = [ sql.collate(sql.cast( @@ -94,7 +117,82 @@ class MySQLDialect_mysqldb(MySQLDBConnector, MySQLDialect): ] else: additional_tests = [] - return super(MySQLDBConnector, self)._check_unicode_returns( + return super(MySQLDialect_mysqldb, self)._check_unicode_returns( connection, additional_tests) + def create_connect_args(self, url): + opts = url.translate_connect_args(database='db', username='user', + password='passwd') + opts.update(url.query) + + util.coerce_kw_type(opts, 'compress', bool) + util.coerce_kw_type(opts, 'connect_timeout', int) + util.coerce_kw_type(opts, 'read_timeout', int) + util.coerce_kw_type(opts, 'client_flag', int) + util.coerce_kw_type(opts, 'local_infile', int) + # Note: using either of the below will cause all strings to be + # returned as Unicode, both in raw SQL operations and with column + # types like
String and MSString. + util.coerce_kw_type(opts, 'use_unicode', bool) + util.coerce_kw_type(opts, 'charset', str) + + # Rich values 'cursorclass' and 'conv' are not supported via + # query string. + + ssl = {} + keys = ['ssl_ca', 'ssl_key', 'ssl_cert', 'ssl_capath', 'ssl_cipher'] + for key in keys: + if key in opts: + ssl[key[4:]] = opts[key] + util.coerce_kw_type(ssl, key[4:], str) + del opts[key] + if ssl: + opts['ssl'] = ssl + + # FOUND_ROWS must be set in CLIENT_FLAGS to enable + # supports_sane_rowcount. + client_flag = opts.get('client_flag', 0) + if self.dbapi is not None: + try: + CLIENT_FLAGS = __import__( + self.dbapi.__name__ + '.constants.CLIENT' + ).constants.CLIENT + client_flag |= CLIENT_FLAGS.FOUND_ROWS + except (AttributeError, ImportError): + self.supports_sane_rowcount = False + opts['client_flag'] = client_flag + return [[], opts] + + def _get_server_version_info(self, connection): + dbapi_con = connection.connection + version = [] + r = re.compile('[.\-]') + for n in r.split(dbapi_con.get_server_info()): + try: + version.append(int(n)) + except ValueError: + version.append(n) + return tuple(version) + + def _extract_error_code(self, exception): + return exception.args[0] + + def _detect_charset(self, connection): + """Sniff out the character set in use for connection results.""" + + try: + # note: the SQL here would be + # "SHOW VARIABLES LIKE 'character_set%%'" + cset_name = connection.connection.character_set_name + except AttributeError: + util.warn( + "No 'character_set_name' can be detected with " + "this MySQL-Python version; " + "please upgrade to a recent version of MySQL-Python. " + "Assuming latin1.") + return 'latin1' + else: + return cset_name() + + dialect = MySQLDialect_mysqldb diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/mysql/oursql.py b/lib/python3.4/site-packages/sqlalchemy/dialects/mysql/oursql.py index fa127f3..b91db18 100644 --- a/lib/python3.4/site-packages/sqlalchemy/dialects/mysql/oursql.py +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/mysql/oursql.py @@ -1,5 +1,5 @@ # mysql/oursql.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -16,22 +16,10 @@ Unicode ------- -oursql defaults to using ``utf8`` as the connection charset, but other -encodings may be used instead. Like the MySQL-Python driver, unicode support -can be completely disabled:: +Please see :ref:`mysql_unicode` for current recommendations on unicode +handling. 
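As an illustrative aside for the mysqldb ``ssl_*`` handling added above (a sketch only; the certificate paths and credentials are hypothetical), the individual ``ssl_ca``, ``ssl_cert`` and ``ssl_key`` query arguments are collected into the single ``ssl`` dictionary that MySQLdb expects::

    from sqlalchemy import create_engine

    engine = create_engine(
        "mysql+mysqldb://scott:tiger@localhost/test"
        "?ssl_ca=/path/to/ca.pem"
        "&ssl_cert=/path/to/client-cert.pem"
        "&ssl_key=/path/to/client-key.pem"
    )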
- # oursql sets the connection charset to utf8 automatically; all strings come - # back as utf8 str - create_engine('mysql+oursql:///mydb?use_unicode=0') -To not automatically use ``utf8`` and instead use whatever the connection -defaults to, there is a separate parameter:: - - # use the default connection charset; all strings come back as unicode - create_engine('mysql+oursql:///mydb?default_charset=1') - - # use latin1 as the connection charset; all strings come back as unicode - create_engine('mysql+oursql:///mydb?charset=latin1') """ import re diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/mysql/pymysql.py b/lib/python3.4/site-packages/sqlalchemy/dialects/mysql/pymysql.py index 31226ce..3c493fb 100644 --- a/lib/python3.4/site-packages/sqlalchemy/dialects/mysql/pymysql.py +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/mysql/pymysql.py @@ -1,5 +1,5 @@ # mysql/pymysql.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -12,7 +12,13 @@ :dbapi: pymysql :connectstring: mysql+pymysql://<username>:<password>@<host>/<dbname>\ [?<options>] - :url: http://code.google.com/p/pymysql/ + :url: http://www.pymysql.org/ + +Unicode +------- + +Please see :ref:`mysql_unicode` for current recommendations on unicode +handling. MySQL-Python Compatibility -------------------------- @@ -31,8 +37,12 @@ class MySQLDialect_pymysql(MySQLDialect_mysqldb): driver = 'pymysql' description_encoding = None - if py3k: - supports_unicode_statements = True + + # generally, these two values should be both True + # or both False. PyMySQL unicode tests pass all the way back + # to 0.4 either way. See [ticket:3337] + supports_unicode_statements = True + supports_unicode_binds = True @classmethod def dbapi(cls): diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/mysql/pyodbc.py b/lib/python3.4/site-packages/sqlalchemy/dialects/mysql/pyodbc.py index 58e8b30..882d3ea 100644 --- a/lib/python3.4/site-packages/sqlalchemy/dialects/mysql/pyodbc.py +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/mysql/pyodbc.py @@ -1,5 +1,5 @@ # mysql/pyodbc.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -14,14 +14,11 @@ :connectstring: mysql+pyodbc://<username>:<password>@<dsnname> :url: http://pypi.python.org/pypi/pyodbc/ - -Limitations ------------ - -The mysql-pyodbc dialect is subject to unresolved character encoding issues -which exist within the current ODBC drivers available. -(see http://code.google.com/p/pyodbc/issues/detail?id=25). Consider usage -of OurSQL, MySQLdb, or MySQL-connector/Python. + .. note:: The PyODBC for MySQL dialect is not well supported, and + is subject to unresolved character encoding issues + which exist within the current ODBC drivers available. + (see http://code.google.com/p/pyodbc/issues/detail?id=25). + Other dialects for MySQL are recommended.
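As a hedged illustration of the recommendation above (credentials and database name are hypothetical), moving from the pyodbc dialect to the pure-Python pymysql driver documented earlier is a URL-only change::

    from sqlalchemy import create_engine

    engine = create_engine("mysql+pymysql://scott:tiger@localhost/test")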
""" diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/mysql/zxjdbc.py b/lib/python3.4/site-packages/sqlalchemy/dialects/mysql/zxjdbc.py index 0cf92cd..fe4c137 100644 --- a/lib/python3.4/site-packages/sqlalchemy/dialects/mysql/zxjdbc.py +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/mysql/zxjdbc.py @@ -1,5 +1,5 @@ # mysql/zxjdbc.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -14,6 +14,9 @@ :driverurl: http://dev.mysql.com/downloads/connector/j/ + .. note:: Jython is not supported by current versions of SQLAlchemy. The + zxjdbc dialect should be considered as experimental. + Character Sets -------------- diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/oracle/__init__.py b/lib/python3.4/site-packages/sqlalchemy/dialects/oracle/__init__.py index fd32f22..0c5c317 100644 --- a/lib/python3.4/site-packages/sqlalchemy/dialects/oracle/__init__.py +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/oracle/__init__.py @@ -1,5 +1,5 @@ # oracle/__init__.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/oracle/base.py b/lib/python3.4/site-packages/sqlalchemy/dialects/oracle/base.py index 4d45f85..eb63983 100644 --- a/lib/python3.4/site-packages/sqlalchemy/dialects/oracle/base.py +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/oracle/base.py @@ -1,5 +1,5 @@ # oracle/base.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -213,15 +213,81 @@ is reflected and the type is reported as ``DATE``, the time-supporting examining the type of column for use in special Python translations or for migrating schemas to other database backends. +.. _oracle_table_options: + +Oracle Table Options +------------------------- + +The CREATE TABLE phrase supports the following options with Oracle +in conjunction with the :class:`.Table` construct: + + +* ``ON COMMIT``:: + + Table( + "some_table", metadata, ..., + prefixes=['GLOBAL TEMPORARY'], oracle_on_commit='PRESERVE ROWS') + +.. versionadded:: 1.0.0 + +* ``COMPRESS``:: + + Table('mytable', metadata, Column('data', String(32)), + oracle_compress=True) + + Table('mytable', metadata, Column('data', String(32)), + oracle_compress=6) + + The ``oracle_compress`` parameter accepts either an integer compression + level, or ``True`` to use the default compression level. + +.. versionadded:: 1.0.0 + +.. _oracle_index_options: + +Oracle Specific Index Options +----------------------------- + +Bitmap Indexes +~~~~~~~~~~~~~~ + +You can specify the ``oracle_bitmap`` parameter to create a bitmap index +instead of a B-tree index:: + + Index('my_index', my_table.c.data, oracle_bitmap=True) + +Bitmap indexes cannot be unique and cannot be compressed. SQLAlchemy will not +check for such limitations, only the database will. + +.. versionadded:: 1.0.0 + +Index compression +~~~~~~~~~~~~~~~~~ + +Oracle has a more efficient storage mode for indexes containing lots of +repeated values. 
Use the ``oracle_compress`` parameter to turn on key +compression:: + + Index('my_index', my_table.c.data, oracle_compress=True) + + Index('my_index', my_table.c.data1, my_table.c.data2, unique=True, + oracle_compress=1) + +The ``oracle_compress`` parameter accepts either an integer specifying the +number of prefix columns to compress, or ``True`` to use the default (all +columns for non-unique indexes, all but the last column for unique indexes). + +.. versionadded:: 1.0.0 + """ import re from sqlalchemy import util, sql -from sqlalchemy.engine import default, base, reflection +from sqlalchemy.engine import default, reflection from sqlalchemy.sql import compiler, visitors, expression -from sqlalchemy.sql import (operators as sql_operators, - functions as sql_functions) +from sqlalchemy.sql import operators as sql_operators +from sqlalchemy.sql.elements import quoted_name from sqlalchemy import types as sqltypes, schema as sa_schema from sqlalchemy.types import VARCHAR, NVARCHAR, CHAR, \ BLOB, CLOB, TIMESTAMP, FLOAT @@ -300,7 +366,6 @@ class LONG(sqltypes.Text): class DATE(sqltypes.DateTime): - """Provide the oracle DATE type. This type has no special Python behavior, except that it subclasses @@ -349,7 +414,6 @@ class INTERVAL(sqltypes.TypeEngine): class ROWID(sqltypes.TypeEngine): - """Oracle ROWID type. When used in a cast() or similar, generates ROWID. @@ -359,7 +423,6 @@ class ROWID(sqltypes.TypeEngine): class _OracleBoolean(sqltypes.Boolean): - def get_dbapi_type(self, dbapi): return dbapi.NUMBER @@ -395,19 +458,19 @@ class OracleTypeCompiler(compiler.GenericTypeCompiler): # Oracle does not allow milliseconds in DATE # Oracle does not support TIME columns - def visit_datetime(self, type_): - return self.visit_DATE(type_) + def visit_datetime(self, type_, **kw): + return self.visit_DATE(type_, **kw) - def visit_float(self, type_): - return self.visit_FLOAT(type_) + def visit_float(self, type_, **kw): + return self.visit_FLOAT(type_, **kw) - def visit_unicode(self, type_): + def visit_unicode(self, type_, **kw): if self.dialect._supports_nchar: - return self.visit_NVARCHAR2(type_) + return self.visit_NVARCHAR2(type_, **kw) else: - return self.visit_VARCHAR2(type_) + return self.visit_VARCHAR2(type_, **kw) - def visit_INTERVAL(self, type_): + def visit_INTERVAL(self, type_, **kw): return "INTERVAL DAY%s TO SECOND%s" % ( type_.day_precision is not None and "(%d)" % type_.day_precision or @@ -417,22 +480,22 @@ class OracleTypeCompiler(compiler.GenericTypeCompiler): "", ) - def visit_LONG(self, type_): + def visit_LONG(self, type_, **kw): return "LONG" - def visit_TIMESTAMP(self, type_): + def visit_TIMESTAMP(self, type_, **kw): if type_.timezone: return "TIMESTAMP WITH TIME ZONE" else: return "TIMESTAMP" - def visit_DOUBLE_PRECISION(self, type_): - return self._generate_numeric(type_, "DOUBLE PRECISION") + def visit_DOUBLE_PRECISION(self, type_, **kw): + return self._generate_numeric(type_, "DOUBLE PRECISION", **kw) def visit_NUMBER(self, type_, **kw): return self._generate_numeric(type_, "NUMBER", **kw) - def _generate_numeric(self, type_, name, precision=None, scale=None): + def _generate_numeric(self, type_, name, precision=None, scale=None, **kw): if precision is None: precision = type_.precision @@ -448,17 +511,17 @@ class OracleTypeCompiler(compiler.GenericTypeCompiler): n = "%(name)s(%(precision)s, %(scale)s)" return n % {'name': name, 'precision': precision, 'scale': scale} - def visit_string(self, type_): - return self.visit_VARCHAR2(type_) + def visit_string(self, type_, **kw):
+ return self.visit_VARCHAR2(type_, **kw) - def visit_VARCHAR2(self, type_): + def visit_VARCHAR2(self, type_, **kw): return self._visit_varchar(type_, '', '2') - def visit_NVARCHAR2(self, type_): + def visit_NVARCHAR2(self, type_, **kw): return self._visit_varchar(type_, 'N', '2') visit_NVARCHAR = visit_NVARCHAR2 - def visit_VARCHAR(self, type_): + def visit_VARCHAR(self, type_, **kw): return self._visit_varchar(type_, '', '') def _visit_varchar(self, type_, n, num): @@ -471,36 +534,35 @@ class OracleTypeCompiler(compiler.GenericTypeCompiler): varchar = "%(n)sVARCHAR%(two)s(%(length)s)" return varchar % {'length': type_.length, 'two': num, 'n': n} - def visit_text(self, type_): - return self.visit_CLOB(type_) + def visit_text(self, type_, **kw): + return self.visit_CLOB(type_, **kw) - def visit_unicode_text(self, type_): + def visit_unicode_text(self, type_, **kw): if self.dialect._supports_nchar: - return self.visit_NCLOB(type_) + return self.visit_NCLOB(type_, **kw) else: - return self.visit_CLOB(type_) + return self.visit_CLOB(type_, **kw) - def visit_large_binary(self, type_): - return self.visit_BLOB(type_) + def visit_large_binary(self, type_, **kw): + return self.visit_BLOB(type_, **kw) - def visit_big_integer(self, type_): - return self.visit_NUMBER(type_, precision=19) + def visit_big_integer(self, type_, **kw): + return self.visit_NUMBER(type_, precision=19, **kw) - def visit_boolean(self, type_): - return self.visit_SMALLINT(type_) + def visit_boolean(self, type_, **kw): + return self.visit_SMALLINT(type_, **kw) - def visit_RAW(self, type_): + def visit_RAW(self, type_, **kw): if type_.length: return "RAW(%(length)s)" % {'length': type_.length} else: return "RAW" - def visit_ROWID(self, type_): + def visit_ROWID(self, type_, **kw): return "ROWID" class OracleCompiler(compiler.SQLCompiler): - """Oracle compiler modifies the lexical structure of Select statements to work under non-ANSI configured Oracle databases, if the use_ansi flag is False. @@ -538,6 +600,9 @@ class OracleCompiler(compiler.SQLCompiler): def visit_false(self, expr, **kw): return '0' + def get_cte_preamble(self, recursive): + return "WITH" + def get_select_hint_text(self, byfroms): return " ".join( "/*+ %s */" % text for table, text in byfroms.items() @@ -601,29 +666,17 @@ class OracleCompiler(compiler.SQLCompiler): else: return sql.and_(*clauses) - def visit_outer_join_column(self, vc): - return self.process(vc.column) + "(+)" + def visit_outer_join_column(self, vc, **kw): + return self.process(vc.column, **kw) + "(+)" def visit_sequence(self, seq): return (self.dialect.identifier_preparer.format_sequence(seq) + ".nextval") - def visit_alias(self, alias, asfrom=False, ashint=False, **kwargs): - """Oracle doesn't like ``FROM table AS alias``. Is the AS standard - SQL?? 
- """ + def get_render_as_alias_suffix(self, alias_name_text): + """Oracle doesn't like ``FROM table AS alias``""" - if asfrom or ashint: - alias_name = isinstance(alias.name, expression._truncated_label) and \ - self._truncated_identifier("alias", alias.name) or alias.name - - if ashint: - return alias_name - elif asfrom: - return self.process(alias.original, asfrom=asfrom, **kwargs) + \ - " " + self.preparer.format_alias(alias, alias_name) - else: - return self.process(alias.original, **kwargs) + return " " + alias_name_text def returning_clause(self, stmt, returning_cols): columns = [] @@ -640,8 +693,9 @@ class OracleCompiler(compiler.SQLCompiler): self.bindparam_string(self._truncate_bindparam(outparam))) columns.append( self.process(col_expr, within_columns_clause=False)) - self.result_map[outparam.key] = ( - outparam.key, + + self._add_to_result_map( + outparam.key, outparam.key, (column, getattr(column, 'name', None), getattr(column, 'key', None)), column.type @@ -669,9 +723,11 @@ class OracleCompiler(compiler.SQLCompiler): select = select.where(whereclause) select._oracle_visit = True - if select._limit is not None or select._offset is not None: + limit_clause = select._limit_clause + offset_clause = select._offset_clause + if limit_clause is not None or offset_clause is not None: # See http://www.oracle.com/technology/oramag/oracle/06-sep/\ - # o56asktom.html + # o56asktom.html # # Generalized form of an Oracle pagination query: # select ... from ( @@ -682,13 +738,15 @@ class OracleCompiler(compiler.SQLCompiler): # Outer select and "ROWNUM as ora_rn" can be dropped if # limit=0 - # TODO: use annotations instead of clone + attr set ? + kwargs['select_wraps_for'] = select select = select._generate() select._oracle_visit = True # Wrap the middle select and add the hint limitselect = sql.select([c for c in select.c]) - if select._limit and self.dialect.optimize_limits: + if limit_clause is not None and \ + self.dialect.optimize_limits and \ + select._simple_int_limit: limitselect = limitselect.prefix_with( "/*+ FIRST_ROWS(%d) */" % select._limit) @@ -697,17 +755,24 @@ class OracleCompiler(compiler.SQLCompiler): limitselect._is_wrapper = True # If needed, add the limiting clause - if select._limit is not None: - max_row = select._limit - if select._offset is not None: - max_row += select._offset + if limit_clause is not None: if not self.dialect.use_binds_for_limits: + # use simple int limits, will raise an exception + # if the limit isn't specified this way + max_row = select._limit + + if offset_clause is not None: + max_row += select._offset max_row = sql.literal_column("%d" % max_row) + else: + max_row = limit_clause + if offset_clause is not None: + max_row = max_row + offset_clause limitselect.append_whereclause( sql.literal_column("ROWNUM") <= max_row) # If needed, add the ora_rn, and wrap again with offset. 
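# (Aside, not part of the patch: one way to exercise this path is to
# compile a simple limit/offset query against the Oracle dialect --
#
#     from sqlalchemy import select, table, column
#     from sqlalchemy.dialects import oracle
#
#     t = table('mytable', column('id'))
#     print(select([t.c.id]).limit(10).offset(5)
#           .compile(dialect=oracle.dialect()))
#
# -- which renders the nested ROWNUM / ora_rn form sketched in the
# "generalized form" comment above.)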
- if select._offset is None: + if offset_clause is None: limitselect._for_update_arg = select._for_update_arg select = limitselect else: @@ -721,22 +786,21 @@ class OracleCompiler(compiler.SQLCompiler): offsetselect._oracle_visit = True offsetselect._is_wrapper = True - offset_value = select._offset if not self.dialect.use_binds_for_limits: - offset_value = sql.literal_column("%d" % offset_value) + offset_clause = sql.literal_column( + "%d" % select._offset) offsetselect.append_whereclause( - sql.literal_column("ora_rn") > offset_value) + sql.literal_column("ora_rn") > offset_clause) offsetselect._for_update_arg = select._for_update_arg select = offsetselect - kwargs['iswrapper'] = getattr(select, '_is_wrapper', False) return compiler.SQLCompiler.visit_select(self, select, **kwargs) - def limit_clause(self, select): + def limit_clause(self, select, **kw): return "" - def for_update_clause(self, select): + def for_update_clause(self, select, **kw): if self.is_subquery(): return "" @@ -744,7 +808,7 @@ class OracleCompiler(compiler.SQLCompiler): if select._for_update_arg.of: tmp += ' OF ' + ', '.join( - self.process(elem) for elem in + self.process(elem, **kw) for elem in select._for_update_arg.of ) @@ -773,15 +837,57 @@ class OracleDDLCompiler(compiler.DDLCompiler): return text - def visit_create_index(self, create, **kw): - return super(OracleDDLCompiler, self).\ - visit_create_index(create, include_schema=True) + def visit_create_index(self, create): + index = create.element + self._verify_index_table(index) + preparer = self.preparer + text = "CREATE " + if index.unique: + text += "UNIQUE " + if index.dialect_options['oracle']['bitmap']: + text += "BITMAP " + text += "INDEX %s ON %s (%s)" % ( + self._prepared_index_name(index, include_schema=True), + preparer.format_table(index.table, use_schema=True), + ', '.join( + self.sql_compiler.process( + expr, + include_table=False, literal_binds=True) + for expr in index.expressions) + ) + if index.dialect_options['oracle']['compress'] is not False: + if index.dialect_options['oracle']['compress'] is True: + text += " COMPRESS" + else: + text += " COMPRESS %d" % ( + index.dialect_options['oracle']['compress'] + ) + return text + + def post_create_table(self, table): + table_opts = [] + opts = table.dialect_options['oracle'] + + if opts['on_commit']: + on_commit_options = opts['on_commit'].replace("_", " ").upper() + table_opts.append('\n ON COMMIT %s' % on_commit_options) + + if opts['compress']: + if opts['compress'] is True: + table_opts.append("\n COMPRESS") + else: + table_opts.append("\n COMPRESS FOR %s" % ( + opts['compress'] + )) + + return ''.join(table_opts) class OracleIdentifierPreparer(compiler.IdentifierPreparer): reserved_words = set([x.lower() for x in RESERVED_WORDS]) - illegal_initial_characters = set(range(0, 10)).union(["_", "$"]) + illegal_initial_characters = set( + (str(dig) for dig in range(0, 10))).union(["_", "$"]) def _bindparam_requires_quotes(self, value): """Return True if the given identifier requires quoting.""" @@ -798,7 +904,6 @@ class OracleIdentifierPreparer(compiler.IdentifierPreparer): class OracleExecutionContext(default.DefaultExecutionContext): - def fire_sequence(self, seq, type_): return self._execute_scalar( "SELECT " + @@ -815,6 +920,8 @@ class OracleDialect(default.DefaultDialect): supports_sane_rowcount = True supports_sane_multi_rowcount = False + supports_simple_order_by_label = False + supports_sequences = True sequences_optional = False postfetch_lastrowid = False @@ -836,7 +943,15 @@ class 
OracleDialect(default.DefaultDialect): reflection_options = ('oracle_resolve_synonyms', ) construct_arguments = [ - (sa_schema.Table, {"resolve_synonyms": False}) + (sa_schema.Table, { + "resolve_synonyms": False, + "on_commit": None, + "compress": False + }), + (sa_schema.Index, { + "bitmap": False, + "compress": False + }) ] def __init__(self, @@ -866,6 +981,16 @@ class OracleDialect(default.DefaultDialect): return self.server_version_info and \ self.server_version_info < (9, ) + @property + def _supports_table_compression(self): + return self.server_version_info and \ + self.server_version_info >= (9, 2, ) + + @property + def _supports_table_compress_for(self): + return self.server_version_info and \ + self.server_version_info >= (11, ) + @property def _supports_char_length(self): return not self._is_oracle_8 @@ -908,6 +1033,8 @@ class OracleDialect(default.DefaultDialect): if name.upper() == name and not \ self.identifier_preparer._requires_quotes(name.lower()): return name.lower() + elif name.lower() == name: + return quoted_name(name, quote=True) else: return name @@ -1023,7 +1150,21 @@ class OracleDialect(default.DefaultDialect): "WHERE nvl(tablespace_name, 'no tablespace') NOT IN " "('SYSTEM', 'SYSAUX') " "AND OWNER = :owner " - "AND IOT_NAME IS NULL") + "AND IOT_NAME IS NULL " + "AND DURATION IS NULL") + cursor = connection.execute(s, owner=schema) + return [self.normalize_name(row[0]) for row in cursor] + + @reflection.cache + def get_temp_table_names(self, connection, **kw): + schema = self.denormalize_name(self.default_schema_name) + s = sql.text( + "SELECT table_name FROM all_tables " + "WHERE nvl(tablespace_name, 'no tablespace') NOT IN " + "('SYSTEM', 'SYSAUX') " + "AND OWNER = :owner " + "AND IOT_NAME IS NULL " + "AND DURATION IS NOT NULL") cursor = connection.execute(s, owner=schema) return [self.normalize_name(row[0]) for row in cursor] @@ -1034,6 +1175,50 @@ class OracleDialect(default.DefaultDialect): cursor = connection.execute(s, owner=self.denormalize_name(schema)) return [self.normalize_name(row[0]) for row in cursor] + @reflection.cache + def get_table_options(self, connection, table_name, schema=None, **kw): + options = {} + + resolve_synonyms = kw.get('oracle_resolve_synonyms', False) + dblink = kw.get('dblink', '') + info_cache = kw.get('info_cache') + + (table_name, schema, dblink, synonym) = \ + self._prepare_reflection_args(connection, table_name, schema, + resolve_synonyms, dblink, + info_cache=info_cache) + + params = {"table_name": table_name} + + columns = ["table_name"] + if self._supports_table_compression: + columns.append("compression") + if self._supports_table_compress_for: + columns.append("compress_for") + + text = "SELECT %(columns)s "\ + "FROM ALL_TABLES%(dblink)s "\ + "WHERE table_name = :table_name" + + if schema is not None: + params['owner'] = schema + text += " AND owner = :owner " + text = text % {'dblink': dblink, 'columns': ", ".join(columns)} + + result = connection.execute(sql.text(text), **params) + + enabled = dict(DISABLED=False, ENABLED=True) + + row = result.first() + if row: + if "compression" in row and enabled.get(row.compression, False): + if "compress_for" in row: + options['oracle_compress'] = row.compress_for + else: + options['oracle_compress'] = True + + return options + @reflection.cache def get_columns(self, connection, table_name, schema=None, **kw): """ @@ -1119,7 +1304,8 @@ class OracleDialect(default.DefaultDialect): params = {'table_name': table_name} text = \ - "SELECT a.index_name, a.column_name, b.uniqueness "\ 
+ "SELECT a.index_name, a.column_name, "\ + "\nb.index_type, b.uniqueness, b.compression, b.prefix_length "\ "\nFROM ALL_IND_COLUMNS%(dblink)s a, "\ "\nALL_INDEXES%(dblink)s b "\ "\nWHERE "\ @@ -1145,6 +1331,7 @@ class OracleDialect(default.DefaultDialect): dblink=dblink, info_cache=kw.get('info_cache')) pkeys = pk_constraint['constrained_columns'] uniqueness = dict(NONUNIQUE=False, UNIQUE=True) + enabled = dict(DISABLED=False, ENABLED=True) oracle_sys_col = re.compile(r'SYS_NC\d+\$', re.IGNORECASE) @@ -1164,10 +1351,15 @@ class OracleDialect(default.DefaultDialect): if rset.index_name != last_index_name: remove_if_primary_key(index) index = dict(name=self.normalize_name(rset.index_name), - column_names=[]) + column_names=[], dialect_options={}) indexes.append(index) index['unique'] = uniqueness.get(rset.uniqueness, False) + if rset.index_type in ('BITMAP', 'FUNCTION-BASED BITMAP'): + index['dialect_options']['oracle_bitmap'] = True + if enabled.get(rset.compression, False): + index['dialect_options']['oracle_compress'] = rset.prefix_length + # filter out Oracle SYS_NC names. could also do an outer join # to the all_tab_columns table and check for real col names there. if not oracle_sys_col.match(rset.column_name): diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/oracle/cx_oracle.py b/lib/python3.4/site-packages/sqlalchemy/dialects/oracle/cx_oracle.py index 4a1ceec..cfbb87e 100644 --- a/lib/python3.4/site-packages/sqlalchemy/dialects/oracle/cx_oracle.py +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/oracle/cx_oracle.py @@ -1,5 +1,5 @@ # oracle/cx_oracle.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -61,6 +61,14 @@ on the URL, or as keyword arguments to :func:`.create_engine()` are: Defaults to ``True``. Note that this is the opposite default of the cx_Oracle DBAPI itself. +* ``service_name`` - An option to use connection string (DSN) with + ``SERVICE_NAME`` instead of ``SID``. It can't be passed when a ``database`` + part is given. + E.g. ``oracle+cx_oracle://scott:tiger@host:1521/?service_name=hr`` + is a valid url. This value is only available as a URL query string argument. + + .. versionadded:: 1.0.0 + .. _cx_oracle_unicode: Unicode @@ -285,6 +293,7 @@ from .base import OracleCompiler, OracleDialect, OracleExecutionContext from . import base as oracle from ...engine import result as _result from sqlalchemy import types as sqltypes, util, exc, processors +from sqlalchemy import util import random import collections import decimal @@ -711,8 +720,10 @@ class OracleDialect_cx_oracle(OracleDialect): # this occurs in tests with mock DBAPIs self._cx_oracle_string_types = set() self._cx_oracle_with_unicode = False - elif self.cx_oracle_ver >= (5,) and not \ - hasattr(self.dbapi, 'UNICODE'): + elif util.py3k or ( + self.cx_oracle_ver >= (5,) and not \ + hasattr(self.dbapi, 'UNICODE') + ): # cx_Oracle WITH_UNICODE mode. 
*only* python # unicode objects accepted for anything self.supports_unicode_statements = True @@ -862,14 +873,26 @@ class OracleDialect_cx_oracle(OracleDialect): util.coerce_kw_type(dialect_opts, opt, bool) setattr(self, opt, dialect_opts[opt]) - if url.database: + database = url.database + service_name = dialect_opts.get('service_name', None) + if database or service_name: # if we have a database, then we have a remote host port = url.port if port: port = int(port) else: port = 1521 - dsn = self.dbapi.makedsn(url.host, port, url.database) + + if database and service_name: + raise exc.InvalidRequestError( + '"service_name" option shouldn\'t ' + 'be used with a "database" part of the url') + if database: + makedsn_kwargs = {'sid': database} + if service_name: + makedsn_kwargs = {'service_name': service_name} + + dsn = self.dbapi.makedsn(url.host, port, **makedsn_kwargs) else: # we have a local tnsname dsn = url.host diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/oracle/zxjdbc.py b/lib/python3.4/site-packages/sqlalchemy/dialects/oracle/zxjdbc.py index 82c8e2f..c3259fe 100644 --- a/lib/python3.4/site-packages/sqlalchemy/dialects/oracle/zxjdbc.py +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/oracle/zxjdbc.py @@ -1,5 +1,5 @@ # oracle/zxjdbc.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -10,8 +10,10 @@ :name: zxJDBC for Jython :dbapi: zxjdbc :connectstring: oracle+zxjdbc://user:pass@host/dbname - :driverurl: http://www.oracle.com/technology/software/tech/java/\ -sqlj_jdbc/index.html. + :driverurl: http://www.oracle.com/technetwork/database/features/jdbc/index-091264.html + + .. note:: Jython is not supported by current versions of SQLAlchemy. The + zxjdbc dialect should be considered as experimental. """ import decimal @@ -68,8 +70,7 @@ class OracleCompiler_zxjdbc(OracleCompiler): expression._select_iterables(returning_cols)) # within_columns_clause=False so that labels (foo AS bar) don't render - columns = [self.process(c, within_columns_clause=False, - result_map=self.result_map) + columns = [self.process(c, within_columns_clause=False) for c in self.returning_cols] if not hasattr(self, 'returning_parameters'): diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/postgres.py b/lib/python3.4/site-packages/sqlalchemy/dialects/postgres.py index f813e00..04d37a2 100644 --- a/lib/python3.4/site-packages/sqlalchemy/dialects/postgres.py +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/postgres.py @@ -1,5 +1,5 @@ # dialects/postgres.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/postgresql/__init__.py b/lib/python3.4/site-packages/sqlalchemy/dialects/postgresql/__init__.py index d755e6a..006afbd 100644 --- a/lib/python3.4/site-packages/sqlalchemy/dialects/postgresql/__init__.py +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/postgresql/__init__.py @@ -1,11 +1,11 @@ # postgresql/__init__.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php -from . 
import base, psycopg2, pg8000, pypostgresql, zxjdbc +from . import base, psycopg2, pg8000, pypostgresql, zxjdbc, psycopg2cffi base.dialect = psycopg2.dialect @@ -13,7 +13,7 @@ from .base import \ INTEGER, BIGINT, SMALLINT, VARCHAR, CHAR, TEXT, NUMERIC, FLOAT, REAL, \ INET, CIDR, UUID, BIT, MACADDR, OID, DOUBLE_PRECISION, TIMESTAMP, TIME, \ DATE, BYTEA, BOOLEAN, INTERVAL, ARRAY, ENUM, dialect, array, Any, All, \ - TSVECTOR + TSVECTOR, DropEnumType from .constraints import ExcludeConstraint from .hstore import HSTORE, hstore from .json import JSON, JSONElement, JSONB @@ -26,5 +26,6 @@ __all__ = ( 'DOUBLE_PRECISION', 'TIMESTAMP', 'TIME', 'DATE', 'BYTEA', 'BOOLEAN', 'INTERVAL', 'ARRAY', 'ENUM', 'dialect', 'Any', 'All', 'array', 'HSTORE', 'hstore', 'INT4RANGE', 'INT8RANGE', 'NUMRANGE', 'DATERANGE', - 'TSRANGE', 'TSTZRANGE', 'json', 'JSON', 'JSONB', 'JSONElement' + 'TSRANGE', 'TSTZRANGE', 'json', 'JSON', 'JSONB', 'JSONElement', + 'DropEnumType' ) diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/postgresql/base.py b/lib/python3.4/site-packages/sqlalchemy/dialects/postgresql/base.py index 369a87f..6a60c22 100644 --- a/lib/python3.4/site-packages/sqlalchemy/dialects/postgresql/base.py +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/postgresql/base.py @@ -1,5 +1,5 @@ # postgresql/base.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -48,7 +48,7 @@ Transaction Isolation Level --------------------------- All Postgresql dialects support setting of transaction isolation level -both via a dialect-specific parameter ``isolation_level`` +both via a dialect-specific parameter :paramref:`.create_engine.isolation_level` accepted by :func:`.create_engine`, as well as the ``isolation_level`` argument as passed to :meth:`.Connection.execution_options`. When using a non-psycopg2 dialect, @@ -102,7 +102,7 @@ via foreign key constraint, a decision must be made as to how the ``.schema`` is represented in those remote tables, in the case where that remote schema name is also a member of the current `Postgresql search path -`_. +`_. By default, the Postgresql dialect mimics the behavior encouraged by Postgresql's own ``pg_get_constraintdef()`` builtin procedure. This function @@ -266,7 +266,7 @@ will emit to the database:: The Postgresql text search functions such as ``to_tsquery()`` and ``to_tsvector()`` are available -explicitly using the standard :attr:`.func` construct. For example:: +explicitly using the standard :data:`.func` construct. For example:: select([ func.to_tsvector('fat cats ate rats').match('cat & rat') @@ -299,7 +299,7 @@ not re-compute the column on demand. In order to provide for this explicit query planning, or to use different search strategies, the ``match`` method accepts a ``postgresql_regconfig`` -keyword argument. +keyword argument:: select([mytable.c.id]).where( mytable.c.title.match('somestring', postgresql_regconfig='english') @@ -311,7 +311,7 @@ Emits the equivalent of:: WHERE mytable.title @@ to_tsquery('english', 'somestring') One can also specifically pass in a `'regconfig'` value to the -``to_tsvector()`` command as the initial argument. 
+``to_tsvector()`` command as the initial argument:: select([mytable.c.id]).where( func.to_tsvector('english', mytable.c.title )\ @@ -401,13 +401,179 @@ The value passed to the keyword argument will be simply passed through to the underlying CREATE INDEX command, so it *must* be a valid index type for your version of PostgreSQL. +.. _postgresql_index_storage: + +Index Storage Parameters +^^^^^^^^^^^^^^^^^^^^^^^^ + +PostgreSQL allows storage parameters to be set on indexes. The storage +parameters available depend on the index method used by the index. Storage +parameters can be specified on :class:`.Index` using the ``postgresql_with`` +keyword argument:: + + Index('my_index', my_table.c.data, postgresql_with={"fillfactor": 50}) + +.. versionadded:: 1.0.6 + +.. _postgresql_index_concurrently: + +Indexes with CONCURRENTLY +^^^^^^^^^^^^^^^^^^^^^^^^^ + +The Postgresql index option CONCURRENTLY is supported by passing the +flag ``postgresql_concurrently`` to the :class:`.Index` construct:: + + tbl = Table('testtbl', m, Column('data', Integer)) + + idx1 = Index('test_idx1', tbl.c.data, postgresql_concurrently=True) + +The above index construct will render SQL as:: + + CREATE INDEX CONCURRENTLY test_idx1 ON testtbl (data) + +.. versionadded:: 0.9.9 + +.. _postgresql_index_reflection: + +Postgresql Index Reflection +--------------------------- + +The Postgresql database creates a UNIQUE INDEX implicitly whenever the +UNIQUE CONSTRAINT construct is used. When inspecting a table using +:class:`.Inspector`, the :meth:`.Inspector.get_indexes` +and the :meth:`.Inspector.get_unique_constraints` will report on these +two constructs distinctly; in the case of the index, the key +``duplicates_constraint`` will be present in the index entry if it is +detected as mirroring a constraint. When performing reflection using +``Table(..., autoload=True)``, the UNIQUE INDEX is **not** returned +in :attr:`.Table.indexes` when it is detected as mirroring a +:class:`.UniqueConstraint` in the :attr:`.Table.constraints` collection. + +.. versionchanged:: 1.0.0 - :class:`.Table` reflection now includes + :class:`.UniqueConstraint` objects present in the :attr:`.Table.constraints` + collection; the Postgresql backend will no longer include a "mirrored" + :class:`.Index` construct in :attr:`.Table.indexes` if it is detected + as corresponding to a unique constraint. + +Special Reflection Options +-------------------------- + +The :class:`.Inspector` used for the Postgresql backend is an instance +of :class:`.PGInspector`, which offers additional methods:: + + from sqlalchemy import create_engine, inspect + + engine = create_engine("postgresql+psycopg2://localhost/test") + insp = inspect(engine) # will be a PGInspector + + print(insp.get_enums()) + +.. autoclass:: PGInspector + :members: + +.. 
_postgresql_table_options: + +PostgreSQL Table Options +------------------------- + +Several options for CREATE TABLE are supported directly by the PostgreSQL +dialect in conjunction with the :class:`.Table` construct: + +* ``TABLESPACE``:: + + Table("some_table", metadata, ..., postgresql_tablespace='some_tablespace') + +* ``ON COMMIT``:: + + Table("some_table", metadata, ..., postgresql_on_commit='PRESERVE ROWS') + +* ``WITH OIDS``:: + + Table("some_table", metadata, ..., postgresql_with_oids=True) + +* ``WITHOUT OIDS``:: + + Table("some_table", metadata, ..., postgresql_with_oids=False) + +* ``INHERITS``:: + + Table("some_table", metadata, ..., postgresql_inherits="some_supertable") + + Table("some_table", metadata, ..., postgresql_inherits=("t1", "t2", ...)) + +.. versionadded:: 1.0.0 + +.. seealso:: + + `Postgresql CREATE TABLE options + <http://www.postgresql.org/docs/current/static/sql-createtable.html>`_ + +ENUM Types +---------- + +Postgresql has an independently creatable TYPE structure which is used +to implement an enumerated type. This approach introduces significant +complexity on the SQLAlchemy side in terms of when this type should be +CREATED and DROPPED. The type object is also an independently reflectable +entity. The following sections should be consulted: + +* :class:`.postgresql.ENUM` - DDL and typing support for ENUM. + +* :meth:`.PGInspector.get_enums` - retrieve a listing of current ENUM types + +* :meth:`.postgresql.ENUM.create` , :meth:`.postgresql.ENUM.drop` - individual + CREATE and DROP commands for ENUM. + +.. _postgresql_array_of_enum: + +Using ENUM with ARRAY +^^^^^^^^^^^^^^^^^^^^^ + +The combination of ENUM and ARRAY is not directly supported by backend +DBAPIs at this time. In order to send and receive an ARRAY of ENUM, +use the following workaround type:: + + class ArrayOfEnum(ARRAY): + + def bind_expression(self, bindvalue): + return sa.cast(bindvalue, self) + + def result_processor(self, dialect, coltype): + super_rp = super(ArrayOfEnum, self).result_processor( + dialect, coltype) + + def handle_raw_string(value): + inner = re.match(r"^{(.*)}$", value).group(1) + return inner.split(",") if inner else [] + + def process(value): + if value is None: + return None + return super_rp(handle_raw_string(value)) + return process + +E.g.:: + + Table( + 'mydata', metadata, + Column('id', Integer, primary_key=True), + Column('data', ArrayOfEnum(ENUM('a', 'b', 'c', name='myenum'))) + + ) + +This type is not included as a built-in type as it would be incompatible +with a DBAPI that suddenly decides to support ARRAY of ENUM directly in +a new version. + """ from collections import defaultdict import re +import datetime as dt + from ... import sql, schema, exc, util from ...engine import default, reflection -from ...sql import compiler, expression, operators +from ...sql import compiler, expression, operators, default_comparator from ...
import types as sqltypes try: @@ -510,6 +676,10 @@ class INTERVAL(sqltypes.TypeEngine): def _type_affinity(self): return sqltypes.Interval + @property + def python_type(self): + return dt.timedelta + PGInterval = INTERVAL @@ -604,10 +774,10 @@ class _Slice(expression.ColumnElement): type = sqltypes.NULLTYPE def __init__(self, slice_, source_comparator): - self.start = source_comparator._check_literal( + self.start = default_comparator._check_literal( source_comparator.expr, operators.getitem, slice_.start) - self.stop = source_comparator._check_literal( + self.stop = default_comparator._check_literal( source_comparator.expr, operators.getitem, slice_.stop) @@ -695,7 +865,7 @@ class array(expression.Tuple): self.type = ARRAY(self.type) def _bind_param(self, operator, obj): - return array(*[ + return array([ expression.BindParameter(None, o, _compared_to_operator=operator, _compared_to_type=self.type, unique=True) for o in obj @@ -760,6 +930,16 @@ class ARRAY(sqltypes.Concatenable, sqltypes.TypeEngine): mytable.c.data[2:7]: [1, 2, 3] }) + .. note:: + + Multi-dimensional support for the ``[]`` operator is not supported + in SQLAlchemy 1.0. Please use the :func:`.type_coerce` function + to cast an intermediary expression to ARRAY again as a workaround:: + + expr = type_coerce(my_array_column[5], ARRAY(Integer))[6] + + Multi-dimensional support will be provided in a future release. + :class:`.ARRAY` provides special methods for containment operations, e.g.:: @@ -774,6 +954,10 @@ class ARRAY(sqltypes.Concatenable, sqltypes.TypeEngine): The :class:`.ARRAY` type may not be supported on all DBAPIs. It is known to work on psycopg2 and not pg8000. + Additionally, the :class:`.ARRAY` type does not work directly in + conjunction with the :class:`.ENUM` type. For a workaround, see the + special type at :ref:`postgresql_array_of_enum`. + See also: :class:`.postgresql.array` - produce a literal array value. @@ -800,8 +984,9 @@ class ARRAY(sqltypes.Concatenable, sqltypes.TypeEngine): index += shift_indexes return_type = self.type.item_type - return self._binary_operate(self.expr, operators.getitem, index, - result_type=return_type) + return default_comparator._binary_operate( + self.expr, operators.getitem, index, + result_type=return_type) def any(self, other, operator=operators.eq): """Return ``other operator ANY (array)`` clause. @@ -1004,21 +1189,76 @@ class ENUM(sqltypes.Enum): """Postgresql ENUM type. This is a subclass of :class:`.types.Enum` which includes - support for PG's ``CREATE TYPE``. + support for PG's ``CREATE TYPE`` and ``DROP TYPE``. - :class:`~.postgresql.ENUM` is used automatically when - using the :class:`.types.Enum` type on PG assuming - the ``native_enum`` is left as ``True``. However, the - :class:`~.postgresql.ENUM` class can also be instantiated - directly in order to access some additional Postgresql-specific - options, namely finer control over whether or not - ``CREATE TYPE`` should be emitted. + When the builtin type :class:`.types.Enum` is used and the + :paramref:`.Enum.native_enum` flag is left at its default of + True, the Postgresql backend will use a :class:`.postgresql.ENUM` + type as the implementation, so the special create/drop rules + will be used. - Note that both :class:`.types.Enum` as well as - :class:`~.postgresql.ENUM` feature create/drop - methods; the base :class:`.types.Enum` type ultimately - delegates to the :meth:`~.postgresql.ENUM.create` and - :meth:`~.postgresql.ENUM.drop` methods present here. 
+ The create/drop behavior of ENUM is necessarily intricate, due to the + awkward relationship the ENUM type has in relation to the + parent table, in that it may be "owned" by just a single table, or + may be shared among many tables. + + When using :class:`.types.Enum` or :class:`.postgresql.ENUM` + in an "inline" fashion, the ``CREATE TYPE`` and ``DROP TYPE`` are emitted + corresponding to when the :meth:`.Table.create` and :meth:`.Table.drop` + methods are called:: + + table = Table('sometable', metadata, + Column('some_enum', ENUM('a', 'b', 'c', name='myenum')) + ) + + table.create(engine) # will emit CREATE TYPE and CREATE TABLE + table.drop(engine) # will emit DROP TABLE and DROP TYPE + + To use a common enumerated type between multiple tables, the best + practice is to declare the :class:`.types.Enum` or + :class:`.postgresql.ENUM` independently, and associate it with the + :class:`.MetaData` object itself:: + + my_enum = ENUM('a', 'b', 'c', name='myenum', metadata=metadata) + + t1 = Table('sometable_one', metadata, + Column('some_enum', my_enum) + ) + + t2 = Table('sometable_two', metadata, + Column('some_enum', my_enum) + ) + + When this pattern is used, care must still be taken at the level + of individual table creates. Emitting CREATE TABLE without also + specifying ``checkfirst=True`` will still cause issues:: + + t1.create(engine) # will fail: no such type 'myenum' + + If we specify ``checkfirst=True``, the individual table-level create + operation will check for the ``ENUM`` and create if not exists:: + + # will check if enum exists, and emit CREATE TYPE if not + t1.create(engine, checkfirst=True) + + When using a metadata-level ENUM type, the type will always be created + and dropped whenever the metadata-wide create or drop is called:: + + metadata.create_all(engine) # will emit CREATE TYPE + metadata.drop_all(engine) # will emit DROP TYPE + + The type can also be created and dropped directly:: + + my_enum.create(engine) + my_enum.drop(engine) + + .. versionchanged:: 1.0.0 The Postgresql :class:`.postgresql.ENUM` type + now behaves more strictly with regards to CREATE/DROP. A metadata-level + ENUM type will only be created and dropped at the metadata level, + not the table level, with the exception of + ``table.create(checkfirst=True)``. + The ``table.drop()`` call will now emit a DROP TYPE for a table-level + enumerated type.
""" @@ -1124,9 +1364,18 @@ class ENUM(sqltypes.Enum): return False def _on_table_create(self, target, bind, checkfirst, **kw): - if not self._check_for_name_in_memos(checkfirst, kw): + if checkfirst or ( + not self.metadata and + not kw.get('_is_metadata_operation', False)) and \ + not self._check_for_name_in_memos(checkfirst, kw): self.create(bind=bind, checkfirst=checkfirst) + def _on_table_drop(self, target, bind, checkfirst, **kw): + if not self.metadata and \ + not kw.get('_is_metadata_operation', False) and \ + not self._check_for_name_in_memos(checkfirst, kw): + self.drop(bind=bind, checkfirst=checkfirst) + def _on_metadata_create(self, target, bind, checkfirst, **kw): if not self._check_for_name_in_memos(checkfirst, kw): self.create(bind=bind, checkfirst=checkfirst) @@ -1256,14 +1505,14 @@ class PGCompiler(compiler.SQLCompiler): def visit_sequence(self, seq): return "nextval('%s')" % self.preparer.format_sequence(seq) - def limit_clause(self, select): + def limit_clause(self, select, **kw): text = "" - if select._limit is not None: - text += " \n LIMIT " + self.process(sql.literal(select._limit)) - if select._offset is not None: - if select._limit is None: + if select._limit_clause is not None: + text += " \n LIMIT " + self.process(select._limit_clause, **kw) + if select._offset_clause is not None: + if select._limit_clause is None: text += " \n LIMIT ALL" - text += " OFFSET " + self.process(sql.literal(select._offset)) + text += " OFFSET " + self.process(select._offset_clause, **kw) return text def format_from_hint_text(self, sqltext, table, hint, iscrud): @@ -1271,7 +1520,7 @@ class PGCompiler(compiler.SQLCompiler): raise exc.CompileError("Unrecognized hint: %r" % hint) return "ONLY " + sqltext - def get_select_precolumns(self, select): + def get_select_precolumns(self, select, **kw): if select._distinct is not False: if select._distinct is True: return "DISTINCT " @@ -1280,11 +1529,12 @@ class PGCompiler(compiler.SQLCompiler): [self.process(col) for col in select._distinct] ) + ") " else: - return "DISTINCT ON (" + self.process(select._distinct) + ") " + return "DISTINCT ON (" + \ + self.process(select._distinct, **kw) + ") " else: return "" - def for_update_clause(self, select): + def for_update_clause(self, select, **kw): if select._for_update_arg.read: tmp = " FOR SHARE" @@ -1296,7 +1546,7 @@ class PGCompiler(compiler.SQLCompiler): c.table if isinstance(c, expression.ColumnClause) else c for c in select._for_update_arg.of) tmp += " OF " + ", ".join( - self.process(table, ashint=True) + self.process(table, ashint=True, use_schema=False, **kw) for table in tables ) @@ -1348,7 +1598,8 @@ class PGDDLCompiler(compiler.DDLCompiler): else: colspec += " SERIAL" else: - colspec += " " + self.dialect.type_compiler.process(column.type) + colspec += " " + self.dialect.type_compiler.process(column.type, + type_expression=column) default = self.get_column_default_string(column) if default is not None: colspec += " DEFAULT " + default @@ -1381,7 +1632,13 @@ class PGDDLCompiler(compiler.DDLCompiler): text = "CREATE " if index.unique: text += "UNIQUE " - text += "INDEX %s ON %s " % ( + text += "INDEX " + + concurrently = index.dialect_options['postgresql']['concurrently'] + if concurrently: + text += "CONCURRENTLY " + + text += "%s ON %s " % ( self._prepared_index_name(index, include_schema=False), preparer.format_table(index.table) @@ -1400,10 +1657,22 @@ class PGDDLCompiler(compiler.DDLCompiler): if not isinstance(expr, expression.ColumnClause) else expr, include_table=False, 
literal_binds=True) + - (c.key in ops and (' ' + ops[c.key]) or '') - for expr, c in zip(index.expressions, index.columns)]) + ( + (' ' + ops[expr.key]) + if hasattr(expr, 'key') + and expr.key in ops else '' + ) + for expr in index.expressions + ]) ) + withclause = index.dialect_options['postgresql']['with'] + + if withclause: + text += " WITH (%s)" % (', '.join( + ['%s = %s' % storage_parameter + for storage_parameter in withclause.items()])) + whereclause = index.dialect_options["postgresql"]["where"] if whereclause is not None: @@ -1413,15 +1682,17 @@ class PGDDLCompiler(compiler.DDLCompiler): text += " WHERE " + where_compiled return text - def visit_exclude_constraint(self, constraint): + def visit_exclude_constraint(self, constraint, **kw): text = "" if constraint.name is not None: text += "CONSTRAINT %s " % \ self.preparer.format_constraint(constraint) elements = [] - for c in constraint.columns: - op = constraint.operators[c.name] - elements.append(self.preparer.quote(c.name) + ' WITH ' + op) + for expr, name, op in constraint._render_exprs: + kw['include_table'] = False + elements.append( + "%s WITH %s" % (self.sql_compiler.process(expr, **kw), op) + ) text += "EXCLUDE USING %s (%s)" % (constraint.using, ', '.join(elements)) if constraint.where is not None: @@ -1431,96 +1702,125 @@ class PGDDLCompiler(compiler.DDLCompiler): text += self.define_constraint_deferrability(constraint) return text + def post_create_table(self, table): + table_opts = [] + pg_opts = table.dialect_options['postgresql'] + + inherits = pg_opts.get('inherits') + if inherits is not None: + if not isinstance(inherits, (list, tuple)): + inherits = (inherits, ) + table_opts.append( + '\n INHERITS ( ' + + ', '.join(self.preparer.quote(name) for name in inherits) + + ' )') + + if pg_opts['with_oids'] is True: + table_opts.append('\n WITH OIDS') + elif pg_opts['with_oids'] is False: + table_opts.append('\n WITHOUT OIDS') + + if pg_opts['on_commit']: + on_commit_options = pg_opts['on_commit'].replace("_", " ").upper() + table_opts.append('\n ON COMMIT %s' % on_commit_options) + + if pg_opts['tablespace']: + tablespace_name = pg_opts['tablespace'] + table_opts.append( + '\n TABLESPACE %s' % self.preparer.quote(tablespace_name) + ) + + return ''.join(table_opts) + class PGTypeCompiler(compiler.GenericTypeCompiler): - - def visit_TSVECTOR(self, type): + def visit_TSVECTOR(self, type, **kw): return "TSVECTOR" - def visit_INET(self, type_): + def visit_INET(self, type_, **kw): return "INET" - def visit_CIDR(self, type_): + def visit_CIDR(self, type_, **kw): return "CIDR" - def visit_MACADDR(self, type_): + def visit_MACADDR(self, type_, **kw): return "MACADDR" - def visit_OID(self, type_): + def visit_OID(self, type_, **kw): return "OID" - def visit_FLOAT(self, type_): + def visit_FLOAT(self, type_, **kw): if not type_.precision: return "FLOAT" else: return "FLOAT(%(precision)s)" % {'precision': type_.precision} - def visit_DOUBLE_PRECISION(self, type_): + def visit_DOUBLE_PRECISION(self, type_, **kw): return "DOUBLE PRECISION" - def visit_BIGINT(self, type_): + def visit_BIGINT(self, type_, **kw): return "BIGINT" - def visit_HSTORE(self, type_): + def visit_HSTORE(self, type_, **kw): return "HSTORE" - def visit_JSON(self, type_): + def visit_JSON(self, type_, **kw): return "JSON" - def visit_JSONB(self, type_): + def visit_JSONB(self, type_, **kw): return "JSONB" - def visit_INT4RANGE(self, type_): + def visit_INT4RANGE(self, type_, **kw): return "INT4RANGE" - def visit_INT8RANGE(self, type_): + def 
visit_INT8RANGE(self, type_, **kw): return "INT8RANGE" - def visit_NUMRANGE(self, type_): + def visit_NUMRANGE(self, type_, **kw): return "NUMRANGE" - def visit_DATERANGE(self, type_): + def visit_DATERANGE(self, type_, **kw): return "DATERANGE" - def visit_TSRANGE(self, type_): + def visit_TSRANGE(self, type_, **kw): return "TSRANGE" - def visit_TSTZRANGE(self, type_): + def visit_TSTZRANGE(self, type_, **kw): return "TSTZRANGE" - def visit_datetime(self, type_): - return self.visit_TIMESTAMP(type_) + def visit_datetime(self, type_, **kw): + return self.visit_TIMESTAMP(type_, **kw) - def visit_enum(self, type_): + def visit_enum(self, type_, **kw): if not type_.native_enum or not self.dialect.supports_native_enum: - return super(PGTypeCompiler, self).visit_enum(type_) + return super(PGTypeCompiler, self).visit_enum(type_, **kw) else: - return self.visit_ENUM(type_) + return self.visit_ENUM(type_, **kw) - def visit_ENUM(self, type_): + def visit_ENUM(self, type_, **kw): return self.dialect.identifier_preparer.format_type(type_) - def visit_TIMESTAMP(self, type_): + def visit_TIMESTAMP(self, type_, **kw): return "TIMESTAMP%s %s" % ( getattr(type_, 'precision', None) and "(%d)" % type_.precision or "", (type_.timezone and "WITH" or "WITHOUT") + " TIME ZONE" ) - def visit_TIME(self, type_): + def visit_TIME(self, type_, **kw): return "TIME%s %s" % ( getattr(type_, 'precision', None) and "(%d)" % type_.precision or "", (type_.timezone and "WITH" or "WITHOUT") + " TIME ZONE" ) - def visit_INTERVAL(self, type_): + def visit_INTERVAL(self, type_, **kw): if type_.precision is not None: return "INTERVAL(%d)" % type_.precision else: return "INTERVAL" - def visit_BIT(self, type_): + def visit_BIT(self, type_, **kw): if type_.varying: compiled = "BIT VARYING" if type_.length is not None: @@ -1529,16 +1829,16 @@ class PGTypeCompiler(compiler.GenericTypeCompiler): compiled = "BIT(%d)" % type_.length return compiled - def visit_UUID(self, type_): + def visit_UUID(self, type_, **kw): return "UUID" - def visit_large_binary(self, type_): - return self.visit_BYTEA(type_) + def visit_large_binary(self, type_, **kw): + return self.visit_BYTEA(type_, **kw) - def visit_BYTEA(self, type_): + def visit_BYTEA(self, type_, **kw): return "BYTEA" - def visit_ARRAY(self, type_): + def visit_ARRAY(self, type_, **kw): return self.process(type_.item_type) + ('[]' * (type_.dimensions if type_.dimensions is not None else 1)) @@ -1570,11 +1870,45 @@ class PGInspector(reflection.Inspector): reflection.Inspector.__init__(self, conn) def get_table_oid(self, table_name, schema=None): - """Return the oid from `table_name` and `schema`.""" + """Return the OID for the given table name.""" return self.dialect.get_table_oid(self.bind, table_name, schema, info_cache=self.info_cache) + def get_enums(self, schema=None): + """Return a list of ENUM objects. + + Each member is a dictionary containing these fields: + + * name - name of the enum + * schema - the schema name for the enum. + * visible - boolean, whether or not this enum is visible + in the default search path. + * labels - a list of string labels that apply to the enum. + + :param schema: schema name. If None, the default schema + (typically 'public') is used. May also be set to '*' to + indicate load enums for all schemas. + + .. versionadded:: 1.0.0 + + """ + schema = schema or self.default_schema_name + return self.dialect._load_enums(self.bind, schema) + + def get_foreign_table_names(self, schema=None): + """Return a list of FOREIGN TABLE names. 
+ + Behavior is similar to that of :meth:`.Inspector.get_table_names`, + except that the list is limited to those tables that report a + ``relkind`` value of ``f``. + + .. versionadded:: 1.0.0 + + """ + schema = schema or self.default_schema_name + return self.dialect._get_foreign_table_names(self.bind, schema) + class CreateEnumType(schema._CreateDropBase): __visit_name__ = "create_enum_type" @@ -1666,10 +2000,16 @@ class PGDialect(default.DefaultDialect): (schema.Index, { "using": False, "where": None, - "ops": {} + "ops": {}, + "concurrently": False, + "with": {} }), (schema.Table, { - "ignore_search_path": False + "ignore_search_path": False, + "tablespace": None, + "with_oids": None, + "on_commit": None, + "inherits": None }) ] @@ -1798,7 +2138,8 @@ class PGDialect(default.DefaultDialect): cursor = connection.execute( sql.text( "select relname from pg_class c join pg_namespace n on " - "n.oid=c.relnamespace where n.nspname=current_schema() " + "n.oid=c.relnamespace where " + "pg_catalog.pg_table_is_visible(c.oid) " "and relname=:name", bindparams=[ sql.bindparam('name', util.text_type(table_name), @@ -1916,7 +2257,7 @@ class PGDialect(default.DefaultDialect): FROM pg_catalog.pg_class c LEFT JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace WHERE (%s) - AND c.relname = :table_name AND c.relkind in ('r','v') + AND c.relname = :table_name AND c.relkind in ('r', 'v', 'm', 'f') """ % schema_where_clause # Since we're binding to unicode, table_name and schema_name must be # unicode. @@ -1969,6 +2310,24 @@ class PGDialect(default.DefaultDialect): ) return [row[0] for row in result] + @reflection.cache + def _get_foreign_table_names(self, connection, schema=None, **kw): + if schema is not None: + current_schema = schema + else: + current_schema = self.default_schema_name + + result = connection.execute( + sql.text("SELECT relname FROM pg_class c " + "WHERE relkind = 'f' " + "AND '%s' = (select nspname from pg_namespace n " + "where n.oid = c.relnamespace) " % + current_schema, + typemap={'relname': sqltypes.Unicode} + ) + ) + return [row[0] for row in result] + @reflection.cache def get_view_names(self, connection, schema=None, **kw): if schema is not None: @@ -1978,7 +2337,7 @@ s = """ SELECT relname FROM pg_class c - WHERE relkind = 'v' + WHERE relkind IN ('m', 'v') AND '%(schema)s' = (select nspname from pg_namespace n where n.oid = c.relnamespace) """ % dict(schema=current_schema) @@ -2039,7 +2398,12 @@ class PGDialect(default.DefaultDialect): c = connection.execute(s, table_oid=table_oid) rows = c.fetchall() domains = self._load_domains(connection) - enums = self._load_enums(connection) + enums = dict( + ( + "%s.%s" % (rec['schema'], rec['name']) + if not rec['visible'] else rec['name'], rec) for rec in + self._load_enums(connection, schema='*') + ) # format columns columns = [] @@ -2113,10 +2477,9 @@ class PGDialect(default.DefaultDialect): elif attype in enums: enum = enums[attype] coltype = ENUM - if "." 
in attype: - kwargs['schema'], kwargs['name'] = attype.split('.') - else: - kwargs['name'] = attype + kwargs['name'] = enum['name'] + if not enum['visible']: + kwargs['schema'] = enum['schema'] args = tuple(enum['labels']) break elif attype in domains: @@ -2323,32 +2686,69 @@ class PGDialect(default.DefaultDialect): # cast indkey as varchar since it's an int2vector, # returned as a list by some drivers such as pypostgresql - IDX_SQL = """ - SELECT - i.relname as relname, - ix.indisunique, ix.indexprs, ix.indpred, - a.attname, a.attnum, ix.indkey%s - FROM - pg_class t - join pg_index ix on t.oid = ix.indrelid - join pg_class i on i.oid=ix.indexrelid - left outer join - pg_attribute a - on t.oid=a.attrelid and %s - WHERE - t.relkind = 'r' - and t.oid = :table_oid - and ix.indisprimary = 'f' - ORDER BY - t.relname, - i.relname - """ % ( - # version 8.3 here was based on observing the - # cast does not work in PG 8.2.4, does work in 8.3.0. - # nothing in PG changelogs regarding this. - "::varchar" if self.server_version_info >= (8, 3) else "", - self._pg_index_any("a.attnum", "ix.indkey") - ) + if self.server_version_info < (8, 5): + IDX_SQL = """ + SELECT + i.relname as relname, + ix.indisunique, ix.indexprs, ix.indpred, + a.attname, a.attnum, NULL, ix.indkey%s, + %s, am.amname + FROM + pg_class t + join pg_index ix on t.oid = ix.indrelid + join pg_class i on i.oid = ix.indexrelid + left outer join + pg_attribute a + on t.oid = a.attrelid and %s + left outer join + pg_am am + on i.relam = am.oid + WHERE + t.relkind IN ('r', 'v', 'f', 'm') + and t.oid = :table_oid + and ix.indisprimary = 'f' + ORDER BY + t.relname, + i.relname + """ % ( + # version 8.3 here was based on observing the + # cast does not work in PG 8.2.4, does work in 8.3.0. + # nothing in PG changelogs regarding this. 
+ "::varchar" if self.server_version_info >= (8, 3) else "", + "i.reloptions" if self.server_version_info >= (8, 2) + else "NULL", + self._pg_index_any("a.attnum", "ix.indkey") + ) + else: + IDX_SQL = """ + SELECT + i.relname as relname, + ix.indisunique, ix.indexprs, ix.indpred, + a.attname, a.attnum, c.conrelid, ix.indkey::varchar, + i.reloptions, am.amname + FROM + pg_class t + join pg_index ix on t.oid = ix.indrelid + join pg_class i on i.oid = ix.indexrelid + left outer join + pg_attribute a + on t.oid = a.attrelid and a.attnum = ANY(ix.indkey) + left outer join + pg_constraint c + on (ix.indrelid = c.conrelid and + ix.indexrelid = c.conindid and + c.contype in ('p', 'u', 'x')) + left outer join + pg_am am + on i.relam = am.oid + WHERE + t.relkind IN ('r', 'v', 'f', 'm') + and t.oid = :table_oid + and ix.indisprimary = 'f' + ORDER BY + t.relname, + i.relname + """ t = sql.text(IDX_SQL, typemap={'attname': sqltypes.Unicode}) c = connection.execute(t, table_oid=table_oid) @@ -2357,7 +2757,8 @@ class PGDialect(default.DefaultDialect): sv_idx_name = None for row in c.fetchall(): - idx_name, unique, expr, prd, col, col_num, idx_key = row + (idx_name, unique, expr, prd, col, + col_num, conrelid, idx_key, options, amname) = row if expr: if idx_name != sv_idx_name: @@ -2374,18 +2775,43 @@ class PGDialect(default.DefaultDialect): % idx_name) sv_idx_name = idx_name + has_idx = idx_name in indexes index = indexes[idx_name] if col is not None: index['cols'][col_num] = col - index['key'] = [int(k.strip()) for k in idx_key.split()] - index['unique'] = unique + if not has_idx: + index['key'] = [int(k.strip()) for k in idx_key.split()] + index['unique'] = unique + if conrelid is not None: + index['duplicates_constraint'] = idx_name + if options: + index['options'] = dict( + [option.split("=") for option in options]) - return [ - {'name': name, - 'unique': idx['unique'], - 'column_names': [idx['cols'][i] for i in idx['key']]} - for name, idx in indexes.items() - ] + # it *might* be nice to include that this is 'btree' in the + # reflection info. But we don't want an Index object + # to have a ``postgresql_using`` in it that is just the + # default, so for the moment leaving this out. 
+ if amname and amname != 'btree': + index['amname'] = amname + + result = [] + for name, idx in indexes.items(): + entry = { + 'name': name, + 'unique': idx['unique'], + 'column_names': [idx['cols'][i] for i in idx['key']] + } + if 'duplicates_constraint' in idx: + entry['duplicates_constraint'] = idx['duplicates_constraint'] + if 'options' in idx: + entry.setdefault( + 'dialect_options', {})["postgresql_with"] = idx['options'] + if 'amname' in idx: + entry.setdefault( + 'dialect_options', {})["postgresql_using"] = idx['amname'] + result.append(entry) + return result @reflection.cache def get_unique_constraints(self, connection, table_name, @@ -2424,7 +2850,8 @@ class PGDialect(default.DefaultDialect): for name, uc in uniques.items() ] - def _load_enums(self, connection): + def _load_enums(self, connection, schema=None): + schema = schema or self.default_schema_name if not self.supports_native_enum: return {} @@ -2440,31 +2867,37 @@ class PGDialect(default.DefaultDialect): LEFT JOIN pg_catalog.pg_namespace n ON n.oid = t.typnamespace LEFT JOIN pg_catalog.pg_enum e ON t.oid = e.enumtypid WHERE t.typtype = 'e' - ORDER BY "name", e.oid -- e.oid gives us label order """ + if schema != '*': + SQL_ENUMS += "AND n.nspname = :schema " + + # e.oid gives us label order within an enum + SQL_ENUMS += 'ORDER BY "schema", "name", e.oid' + s = sql.text(SQL_ENUMS, typemap={ 'attname': sqltypes.Unicode, 'label': sqltypes.Unicode}) + + if schema != '*': + s = s.bindparams(schema=schema) + c = connection.execute(s) - enums = {} + enums = [] + enum_by_name = {} for enum in c.fetchall(): - if enum['visible']: - # 'visible' just means whether or not the enum is in a - # schema that's on the search path -- or not overridden by - # a schema with higher precedence. If it's not visible, - # it will be prefixed with the schema-name when it's used. - name = enum['name'] + key = (enum['schema'], enum['name']) + if key in enum_by_name: + enum_by_name[key]['labels'].append(enum['label']) else: - name = "%s.%s" % (enum['schema'], enum['name']) - - if name in enums: - enums[name]['labels'].append(enum['label']) - else: - enums[name] = { + enum_by_name[key] = enum_rec = { + 'name': enum['name'], + 'schema': enum['schema'], + 'visible': enum['visible'], 'labels': [enum['label']], } + enums.append(enum_rec) return enums diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/postgresql/constraints.py b/lib/python3.4/site-packages/sqlalchemy/dialects/postgresql/constraints.py index e8ebc75..c6bb890 100644 --- a/lib/python3.4/site-packages/sqlalchemy/dialects/postgresql/constraints.py +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/postgresql/constraints.py @@ -1,10 +1,11 @@ -# Copyright (C) 2013-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2013-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php -from sqlalchemy.schema import ColumnCollectionConstraint -from sqlalchemy.sql import expression +from ...sql.schema import ColumnCollectionConstraint +from ...sql import expression +from ... import util class ExcludeConstraint(ColumnCollectionConstraint): @@ -48,20 +49,42 @@ static/sql-createtable.html#SQL-CREATETABLE-EXCLUDE for this constraint. 
""" + columns = [] + render_exprs = [] + self.operators = {} + + expressions, operators = zip(*elements) + + for (expr, column, strname, add_element), operator in zip( + self._extract_col_expression_collection(expressions), + operators + ): + if add_element is not None: + columns.append(add_element) + + name = column.name if column is not None else strname + + if name is not None: + # backwards compat + self.operators[name] = operator + + expr = expression._literal_as_text(expr) + + render_exprs.append( + (expr, name, operator) + ) + + self._render_exprs = render_exprs ColumnCollectionConstraint.__init__( self, - *[col for col, op in elements], + *columns, name=kw.get('name'), deferrable=kw.get('deferrable'), initially=kw.get('initially') ) - self.operators = {} - for col_or_string, op in elements: - name = getattr(col_or_string, 'name', col_or_string) - self.operators[name] = op self.using = kw.get('using', 'gist') where = kw.get('where') - if where: + if where is not None: self.where = expression._literal_as_text(where) def copy(self, **kw): diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/postgresql/hstore.py b/lib/python3.4/site-packages/sqlalchemy/dialects/postgresql/hstore.py index 9601edc..a4ff461 100644 --- a/lib/python3.4/site-packages/sqlalchemy/dialects/postgresql/hstore.py +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/postgresql/hstore.py @@ -1,5 +1,5 @@ # postgresql/hstore.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/postgresql/json.py b/lib/python3.4/site-packages/sqlalchemy/dialects/postgresql/json.py index 25ac342..f7ede85 100644 --- a/lib/python3.4/site-packages/sqlalchemy/dialects/postgresql/json.py +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/postgresql/json.py @@ -1,5 +1,5 @@ # postgresql/json.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -12,7 +12,7 @@ from .base import ischema_names from ... import types as sqltypes from ...sql.operators import custom_op from ... import sql -from ...sql import elements +from ...sql import elements, default_comparator from ... import util __all__ = ('JSON', 'JSONElement', 'JSONB') @@ -46,7 +46,8 @@ class JSONElement(elements.BinaryExpression): self._json_opstring = opstring operator = custom_op(opstring, precedence=5) - right = left._check_literal(left, operator, right) + right = default_comparator._check_literal( + left, operator, right) super(JSONElement, self).__init__( left, right, operator, type_=result_type) @@ -77,7 +78,7 @@ class JSONElement(elements.BinaryExpression): def cast(self, type_): """Convert this :class:`.JSONElement` to apply both the 'astext' operator - as well as an explicit type cast when evaulated. + as well as an explicit type cast when evaluated. E.g.:: @@ -164,6 +165,23 @@ class JSON(sqltypes.TypeEngine): __visit_name__ = 'JSON' + def __init__(self, none_as_null=False): + """Construct a :class:`.JSON` type. + + :param none_as_null: if True, persist the value ``None`` as a + SQL NULL value, not the JSON encoding of ``null``. Note that + when this flag is False, the :func:`.null` construct can still + be used to persist a NULL value:: + + from sqlalchemy import null + conn.execute(table.insert(), data=null()) + + .. 
versionchanged:: 0.9.8 - Added ``none_as_null``, and :func:`.null` + is now supported in order to persist a NULL value. + + """ + self.none_as_null = none_as_null + class comparator_factory(sqltypes.Concatenable.Comparator): """Define comparison operations for :class:`.JSON`.""" @@ -185,9 +203,17 @@ class JSON(sqltypes.TypeEngine): encoding = dialect.encoding def process(value): + if isinstance(value, elements.Null) or ( + value is None and self.none_as_null + ): + return None return json_serializer(value).encode(encoding) else: def process(value): + if isinstance(value, elements.Null) or ( + value is None and self.none_as_null + ): + return None return json_serializer(value) return process @@ -197,9 +223,13 @@ class JSON(sqltypes.TypeEngine): encoding = dialect.encoding def process(value): + if value is None: + return None return json_deserializer(value.decode(encoding)) else: def process(value): + if value is None: + return None return json_deserializer(value) return process diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/postgresql/pg8000.py b/lib/python3.4/site-packages/sqlalchemy/dialects/postgresql/pg8000.py index 589567d..68e8e02 100644 --- a/lib/python3.4/site-packages/sqlalchemy/dialects/postgresql/pg8000.py +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/postgresql/pg8000.py @@ -1,5 +1,5 @@ # postgresql/pg8000.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under @@ -13,17 +13,30 @@ postgresql+pg8000://user:password@host:port/dbname[?key=value&key=value...] :url: https://pythonhosted.org/pg8000/ + +.. _pg8000_unicode: + Unicode ------- -When communicating with the server, pg8000 **always uses the server-side -character set**. SQLAlchemy has no ability to modify what character set -pg8000 chooses to use, and additionally SQLAlchemy does no unicode conversion -of any kind with the pg8000 backend. The origin of the client encoding setting -is ultimately the CLIENT_ENCODING setting in postgresql.conf. +pg8000 will encode / decode string values between it and the server using the +PostgreSQL ``client_encoding`` parameter; by default this is the value in +the ``postgresql.conf`` file, which often defaults to ``SQL_ASCII``. +Typically, this can be changed to ``utf-8``, as a more useful default:: -It is not necessary, though is also harmless, to pass the "encoding" parameter -to :func:`.create_engine` when using pg8000. + #client_encoding = sql_ascii # actually, defaults to database + # encoding + client_encoding = utf8 + +The ``client_encoding`` can be overridden for a session by executing the SQL: + +SET CLIENT_ENCODING TO 'utf8'; + +SQLAlchemy will execute this SQL on all new connections based on the value +passed to :func:`.create_engine` using the ``client_encoding`` parameter:: + + engine = create_engine( + "postgresql+pg8000://user:pass@host/dbname", client_encoding='utf8') .. _pg8000_isolation_level: @@ -58,6 +71,8 @@ from ... 
import types as sqltypes from .base import ( PGDialect, PGCompiler, PGIdentifierPreparer, PGExecutionContext, _DECIMAL_TYPES, _FLOAT_TYPES, _INT_TYPES) +import re +from sqlalchemy.dialects.postgresql.json import JSON class _PGNumeric(sqltypes.Numeric): @@ -88,6 +103,15 @@ class _PGNumericNoBind(_PGNumeric): return None +class _PGJSON(JSON): + + def result_processor(self, dialect, coltype): + if dialect._dbapi_version > (1, 10, 1): + return None # Has native JSON + else: + return super(_PGJSON, self).result_processor(dialect, coltype) + + class PGExecutionContext_pg8000(PGExecutionContext): pass @@ -119,7 +143,7 @@ class PGDialect_pg8000(PGDialect): supports_unicode_binds = True default_paramstyle = 'format' - supports_sane_multi_rowcount = False + supports_sane_multi_rowcount = True execution_ctx_cls = PGExecutionContext_pg8000 statement_compiler = PGCompiler_pg8000 preparer = PGIdentifierPreparer_pg8000 @@ -129,10 +153,29 @@ class PGDialect_pg8000(PGDialect): PGDialect.colspecs, { sqltypes.Numeric: _PGNumericNoBind, - sqltypes.Float: _PGNumeric + sqltypes.Float: _PGNumeric, + JSON: _PGJSON, } ) + def __init__(self, client_encoding=None, **kwargs): + PGDialect.__init__(self, **kwargs) + self.client_encoding = client_encoding + + def initialize(self, connection): + self.supports_sane_multi_rowcount = self._dbapi_version >= (1, 9, 14) + super(PGDialect_pg8000, self).initialize(connection) + + @util.memoized_property + def _dbapi_version(self): + if self.dbapi and hasattr(self.dbapi, '__version__'): + return tuple( + [ + int(x) for x in re.findall( + r'(\d+)(?:[-\.]?|$)', self.dbapi.__version__)]) + else: + return (99, 99, 99) + @classmethod def dbapi(cls): return __import__('pg8000') @@ -171,4 +214,51 @@ class PGDialect_pg8000(PGDialect): (level, self.name, ", ".join(self._isolation_lookup)) ) + def set_client_encoding(self, connection, client_encoding): + # adjust for ConnectionFairy possibly being present + if hasattr(connection, 'connection'): + connection = connection.connection + + cursor = connection.cursor() + cursor.execute("SET CLIENT_ENCODING TO '" + client_encoding + "'") + cursor.execute("COMMIT") + cursor.close() + + def do_begin_twophase(self, connection, xid): + connection.connection.tpc_begin((0, xid, '')) + + def do_prepare_twophase(self, connection, xid): + connection.connection.tpc_prepare() + + def do_rollback_twophase( + self, connection, xid, is_prepared=True, recover=False): + connection.connection.tpc_rollback((0, xid, '')) + + def do_commit_twophase( + self, connection, xid, is_prepared=True, recover=False): + connection.connection.tpc_commit((0, xid, '')) + + def do_recover_twophase(self, connection): + return [row[1] for row in connection.connection.tpc_recover()] + + def on_connect(self): + fns = [] + if self.client_encoding is not None: + def on_connect(conn): + self.set_client_encoding(conn, self.client_encoding) + fns.append(on_connect) + + if self.isolation_level is not None: + def on_connect(conn): + self.set_isolation_level(conn, self.isolation_level) + fns.append(on_connect) + + if len(fns) > 0: + def on_connect(conn): + for fn in fns: + fn(conn) + return on_connect + else: + return None + dialect = PGDialect_pg8000 diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/postgresql/psycopg2.py b/lib/python3.4/site-packages/sqlalchemy/dialects/postgresql/psycopg2.py index e6450c9..a0f0cca 100644 --- a/lib/python3.4/site-packages/sqlalchemy/dialects/postgresql/psycopg2.py +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/postgresql/psycopg2.py @@ 
-1,5 +1,5 @@ # postgresql/psycopg2.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -32,10 +32,25 @@ psycopg2-specific keyword arguments which are accepted by way of enabling this mode on a per-execution basis. * ``use_native_unicode``: Enable the usage of Psycopg2 "native unicode" mode per connection. True by default. + + .. seealso:: + + :ref:`psycopg2_disable_native_unicode` + * ``isolation_level``: This option, available for all PostgreSQL dialects, includes the ``AUTOCOMMIT`` isolation level when using the psycopg2 - dialect. See :ref:`psycopg2_isolation_level`. + dialect. + .. seealso:: + + :ref:`psycopg2_isolation_level` + +* ``client_encoding``: sets the client encoding in a libpq-agnostic way, + using psycopg2's ``set_client_encoding()`` method. + + .. seealso:: + + :ref:`psycopg2_unicode` Unix Domain Connections ------------------------ @@ -51,12 +66,15 @@ in ``/tmp``, or whatever socket directory was specified when PostgreSQL was built. This value can be overridden by passing a pathname to psycopg2, using ``host`` as an additional keyword argument:: - create_engine("postgresql+psycopg2://user:password@/dbname?host=/var/lib/postgresql") + create_engine("postgresql+psycopg2://user:password@/dbname?\ +host=/var/lib/postgresql") See also: -`PQconnectdbParams `_ +`PQconnectdbParams `_ + +.. _psycopg2_execution_options: Per-Statement/Connection Execution Options ------------------------------------------- @@ -65,18 +83,27 @@ The following DBAPI-specific options are respected when used with :meth:`.Connection.execution_options`, :meth:`.Executable.execution_options`, :meth:`.Query.execution_options`, in addition to those not specific to DBAPIs: -* isolation_level - Set the transaction isolation level for the lifespan of a +* ``isolation_level`` - Set the transaction isolation level for the lifespan of a :class:`.Connection` (can only be set on a connection, not a statement or query). See :ref:`psycopg2_isolation_level`. -* stream_results - Enable or disable usage of psycopg2 server side cursors - +* ``stream_results`` - Enable or disable usage of psycopg2 server side cursors - this feature makes use of "named" cursors in combination with special result handling methods so that result rows are not fully buffered. If ``None`` or not set, the ``server_side_cursors`` option of the :class:`.Engine` is used. -Unicode -------- +* ``max_row_buffer`` - when using ``stream_results``, an integer value that + specifies the maximum number of rows to buffer at a time. This is + interpreted by the :class:`.BufferedRowResultProxy`, and if omitted the + buffer will grow to ultimately store 1000 rows at a time. + + .. versionadded:: 1.0.6 + +.. _psycopg2_unicode: + +Unicode with Psycopg2 +---------------------- By default, the psycopg2 driver uses the ``psycopg2.extensions.UNICODE`` extension, such that the DBAPI receives and returns all strings as Python @@ -84,27 +111,51 @@ Unicode objects directly - SQLAlchemy passes these values through without change. Psycopg2 here will encode/decode string values based on the current "client encoding" setting; by default this is the value in the ``postgresql.conf`` file, which often defaults to ``SQL_ASCII``. 
-Typically, this can be changed to ``utf-8``, as a more useful default:: +Typically, this can be changed to ``utf8``, as a more useful default:: - #client_encoding = sql_ascii # actually, defaults to database + # postgresql.conf file + + # client_encoding = sql_ascii # actually, defaults to database # encoding client_encoding = utf8 A second way to affect the client encoding is to set it within Psycopg2 -locally. SQLAlchemy will call psycopg2's ``set_client_encoding()`` -method (see: -http://initd.org/psycopg/docs/connection.html#connection.set_client_encoding) +locally. SQLAlchemy will call psycopg2's +:meth:`psycopg2:connection.set_client_encoding` method on all new connections based on the value passed to :func:`.create_engine` using the ``client_encoding`` parameter:: + # set_client_encoding() setting; + # works for *all* Postgresql versions engine = create_engine("postgresql://user:pass@host/dbname", client_encoding='utf8') This overrides the encoding specified in the Postgresql client configuration. +When using the parameter in this way, the psycopg2 driver emits +``SET client_encoding TO 'utf8'`` on the connection explicitly, and works +in all Postgresql versions. -.. versionadded:: 0.7.3 - The psycopg2-specific ``client_encoding`` parameter to - :func:`.create_engine`. +Note that the ``client_encoding`` setting as passed to :func:`.create_engine` +is **not the same** as the more recently added ``client_encoding`` parameter +now supported by libpq directly. This is enabled when ``client_encoding`` +is passed directly to ``psycopg2.connect()``, and from SQLAlchemy is passed +using the :paramref:`.create_engine.connect_args` parameter:: + + # libpq direct parameter setting; + # only works for Postgresql **9.1 and above** + engine = create_engine("postgresql://user:pass@host/dbname", + connect_args={'client_encoding': 'utf8'}) + + # using the query string is equivalent + engine = create_engine("postgresql://user:pass@host/dbname?client_encoding=utf8") + +The above parameter was only added to libpq as of version 9.1 of Postgresql, +so using the previous method is better for cross-version support. + +.. _psycopg2_disable_native_unicode: + +Disabling Native Unicode +^^^^^^^^^^^^^^^^^^^^^^^^ SQLAlchemy can also be instructed to skip the usage of the psycopg2 ``UNICODE`` extension and to instead utilize its own unicode encode/decode @@ -116,8 +167,56 @@ in and coerce from bytes on the way back, using the value of the :func:`.create_engine` ``encoding`` parameter, which defaults to ``utf-8``. SQLAlchemy's own unicode encode/decode functionality is steadily becoming -obsolete as more DBAPIs support unicode fully along with the approach of -Python 3; in modern usage psycopg2 should be relied upon to handle unicode. +obsolete as most DBAPIs now support unicode fully. + +Bound Parameter Styles +---------------------- + +The default parameter style for the psycopg2 dialect is "pyformat", where +SQL is rendered using ``%(paramname)s`` style. This format has the limitation +that it does not accommodate the unusual case of parameter names that +actually contain percent or parenthesis symbols; as SQLAlchemy in many cases +generates bound parameter names based on the name of a column, the presence +of these characters in a column name can lead to problems. + +There are two solutions to the issue of a :class:`.schema.Column` that contains +one of these characters in its name. 
One is to specify the +:paramref:`.schema.Column.key` for columns that have such names:: + + measurement = Table('measurement', metadata, + Column('Size (meters)', Integer, key='size_meters') + ) + +Above, an INSERT statement such as ``measurement.insert()`` will use +``size_meters`` as the parameter name, and a SQL expression such as +``measurement.c.size_meters > 10`` will derive the bound parameter name +from the ``size_meters`` key as well. + +.. versionchanged:: 1.0.0 - SQL expressions will use :attr:`.Column.key` + as the source of naming when anonymous bound parameters are created + in SQL expressions; previously, this behavior only applied to + :meth:`.Table.insert` and :meth:`.Table.update` parameter names. + +The other solution is to use a positional format; psycopg2 allows use of the +"format" paramstyle, which can be passed to +:paramref:`.create_engine.paramstyle`:: + + engine = create_engine( + 'postgresql://scott:tiger@localhost:5432/test', paramstyle='format') + +With the above engine, instead of a statement like:: + + INSERT INTO measurement ("Size (meters)") VALUES (%(Size (meters))s) + {'Size (meters)': 1} + +we instead see:: + + INSERT INTO measurement ("Size (meters)") VALUES (%s) + (1, ) + +Where above, the dictionary style is converted into a tuple with positional +style. + Transactions ------------ @@ -148,7 +247,7 @@ The psycopg2 dialect supports these constants for isolation level: * ``AUTOCOMMIT`` .. versionadded:: 0.8.2 support for AUTOCOMMIT isolation level when using - psycopg2. + psycopg2. .. seealso:: @@ -173,14 +272,17 @@ HSTORE type The ``psycopg2`` DBAPI includes an extension to natively handle marshalling of the HSTORE type. The SQLAlchemy psycopg2 dialect will enable this extension -by default when it is detected that the target database has the HSTORE -type set up for use. In other words, when the dialect makes the first +by default when psycopg2 version 2.4 or greater is used, and +it is detected that the target database has the HSTORE type set up for use. +In other words, when the dialect makes the first connection, a sequence like the following is performed: 1. Request the available HSTORE oids using ``psycopg2.extras.HstoreAdapter.get_oids()``. If this function returns a list of HSTORE identifiers, we then determine that the ``HSTORE`` extension is present. + This function is **skipped** if the version of psycopg2 installed is + less than version 2.4. 2. If the ``use_native_hstore`` flag is at its default of ``True``, and we've detected that ``HSTORE`` oids are available, the @@ -219,9 +321,14 @@ from ... import types as sqltypes from .base import PGDialect, PGCompiler, \ PGIdentifierPreparer, PGExecutionContext, \ ENUM, ARRAY, _DECIMAL_TYPES, _FLOAT_TYPES,\ - _INT_TYPES + _INT_TYPES, UUID from .hstore import HSTORE -from .json import JSON +from .json import JSON, JSONB + +try: + from uuid import UUID as _python_UUID +except ImportError: + _python_UUID = None logger = logging.getLogger('sqlalchemy.dialects.postgresql') @@ -256,7 +363,7 @@ class _PGNumeric(sqltypes.Numeric): class _PGEnum(ENUM): def result_processor(self, dialect, coltype): - if util.py2k and self.convert_unicode is True: + if self.native_enum and util.py2k and self.convert_unicode is True: # we can't easily use PG's extensions here because # the OID is on the fly, and we need to give it a python # function anyway - not really worth it. 
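A usage sketch tying together the psycopg2 options documented above: the ``isolation_level`` parameter (including the psycopg2-specific ``AUTOCOMMIT``), plus the ``stream_results`` and ``max_row_buffer`` execution options. The URLs and query here are hypothetical illustrations, not part of the original change::

    from sqlalchemy import create_engine

    # engine-wide isolation level; "AUTOCOMMIT" is accepted by the
    # psycopg2 dialect in addition to the standard PostgreSQL levels
    autocommit_engine = create_engine(
        "postgresql+psycopg2://scott:tiger@localhost/test",
        isolation_level="AUTOCOMMIT")

    # per-connection execution options on a regular engine: a named
    # (server-side) cursor, buffering at most 100 rows at a time
    engine = create_engine("postgresql+psycopg2://scott:tiger@localhost/test")
    with engine.connect() as conn:
        result = conn.execution_options(
            stream_results=True, max_row_buffer=100
        ).execute("SELECT generate_series(1, 1000000)")
        for row in result:
            pass  # rows arrive in bounded batches
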
@@ -286,6 +393,35 @@ class _PGJSON(JSON): else: return super(_PGJSON, self).result_processor(dialect, coltype) + +class _PGJSONB(JSONB): + + def result_processor(self, dialect, coltype): + if dialect._has_native_jsonb: + return None + else: + return super(_PGJSONB, self).result_processor(dialect, coltype) + + +class _PGUUID(UUID): + def bind_processor(self, dialect): + if not self.as_uuid and dialect.use_native_uuid: + nonetype = type(None) + + def process(value): + if value is not None: + value = _python_UUID(value) + return value + return process + + def result_processor(self, dialect, coltype): + if not self.as_uuid and dialect.use_native_uuid: + def process(value): + if value is not None: + value = str(value) + return value + return process + # When we're handed literal SQL, ensure it's a SELECT query. Since # 8.3, combining cursors and "FOR UPDATE" has been fine. SERVER_SIDE_CURSOR_RE = re.compile( @@ -374,8 +510,21 @@ class PGDialect_psycopg2(PGDialect): preparer = PGIdentifierPreparer_psycopg2 psycopg2_version = (0, 0) + FEATURE_VERSION_MAP = dict( + native_json=(2, 5), + native_jsonb=(2, 5, 4), + sane_multi_rowcount=(2, 0, 9), + array_oid=(2, 4, 3), + hstore_adapter=(2, 4) + ) + _has_native_hstore = False _has_native_json = False + _has_native_jsonb = False + + engine_config_types = PGDialect.engine_config_types.union([ + ('use_native_unicode', util.asbool), + ]) colspecs = util.update_copy( PGDialect.colspecs, @@ -384,18 +533,21 @@ class PGDialect_psycopg2(PGDialect): ENUM: _PGEnum, # needs force_unicode sqltypes.Enum: _PGEnum, # needs force_unicode HSTORE: _PGHStore, - JSON: _PGJSON + JSON: _PGJSON, + JSONB: _PGJSONB, + UUID: _PGUUID } ) def __init__(self, server_side_cursors=False, use_native_unicode=True, client_encoding=None, - use_native_hstore=True, + use_native_hstore=True, use_native_uuid=True, **kwargs): PGDialect.__init__(self, **kwargs) self.server_side_cursors = server_side_cursors self.use_native_unicode = use_native_unicode self.use_native_hstore = use_native_hstore + self.use_native_uuid = use_native_uuid self.supports_unicode_binds = use_native_unicode self.client_encoding = client_encoding if self.dbapi and hasattr(self.dbapi, '__version__'): @@ -412,19 +564,34 @@ class PGDialect_psycopg2(PGDialect): self._has_native_hstore = self.use_native_hstore and \ self._hstore_oids(connection.connection) \ is not None - self._has_native_json = self.psycopg2_version >= (2, 5) + self._has_native_json = \ + self.psycopg2_version >= self.FEATURE_VERSION_MAP['native_json'] + self._has_native_jsonb = \ + self.psycopg2_version >= self.FEATURE_VERSION_MAP['native_jsonb'] # http://initd.org/psycopg/docs/news.html#what-s-new-in-psycopg-2-0-9 - self.supports_sane_multi_rowcount = self.psycopg2_version >= (2, 0, 9) + self.supports_sane_multi_rowcount = \ + self.psycopg2_version >= \ + self.FEATURE_VERSION_MAP['sane_multi_rowcount'] @classmethod def dbapi(cls): import psycopg2 return psycopg2 + @classmethod + def _psycopg2_extensions(cls): + from psycopg2 import extensions + return extensions + + @classmethod + def _psycopg2_extras(cls): + from psycopg2 import extras + return extras + @util.memoized_property def _isolation_lookup(self): - from psycopg2 import extensions + extensions = self._psycopg2_extensions() return { 'AUTOCOMMIT': extensions.ISOLATION_LEVEL_AUTOCOMMIT, 'READ COMMITTED': extensions.ISOLATION_LEVEL_READ_COMMITTED, @@ -446,7 +613,8 @@ class PGDialect_psycopg2(PGDialect): connection.set_isolation_level(level) def on_connect(self): - from psycopg2 import extras, 
extensions + extras = self._psycopg2_extras() + extensions = self._psycopg2_extensions() fns = [] if self.client_encoding is not None: @@ -459,6 +627,11 @@ class PGDialect_psycopg2(PGDialect): self.set_isolation_level(conn, self.isolation_level) fns.append(on_connect) + if self.dbapi and self.use_native_uuid: + def on_connect(conn): + extras.register_uuid(None, conn) + fns.append(on_connect) + if self.dbapi and self.use_native_unicode: def on_connect(conn): extensions.register_type(extensions.UNICODE, conn) @@ -470,19 +643,23 @@ class PGDialect_psycopg2(PGDialect): hstore_oids = self._hstore_oids(conn) if hstore_oids is not None: oid, array_oid = hstore_oids + kw = {'oid': oid} if util.py2k: - extras.register_hstore(conn, oid=oid, - array_oid=array_oid, - unicode=True) - else: - extras.register_hstore(conn, oid=oid, - array_oid=array_oid) + kw['unicode'] = True + if self.psycopg2_version >= \ + self.FEATURE_VERSION_MAP['array_oid']: + kw['array_oid'] = array_oid + extras.register_hstore(conn, **kw) fns.append(on_connect) if self.dbapi and self._json_deserializer: def on_connect(conn): - extras.register_default_json( - conn, loads=self._json_deserializer) + if self._has_native_json: + extras.register_default_json( + conn, loads=self._json_deserializer) + if self._has_native_jsonb: + extras.register_default_jsonb( + conn, loads=self._json_deserializer) fns.append(on_connect) if fns: @@ -495,8 +672,8 @@ class PGDialect_psycopg2(PGDialect): @util.memoized_instancemethod def _hstore_oids(self, conn): - if self.psycopg2_version >= (2, 4): - from psycopg2 import extras + if self.psycopg2_version >= self.FEATURE_VERSION_MAP['hstore_adapter']: + extras = self._psycopg2_extras() oids = extras.HstoreAdapter.get_oids(conn) if oids is not None and oids[0]: return oids[0:2] @@ -512,12 +689,14 @@ class PGDialect_psycopg2(PGDialect): def is_disconnect(self, e, connection, cursor): if isinstance(e, self.dbapi.Error): # check the "closed" flag. this might not be - # present on old psycopg2 versions + # present on old psycopg2 versions. Also, + # this flag doesn't actually help in a lot of disconnect + # situations, so don't rely on it. if getattr(connection, 'closed', False): return True - # legacy checks based on strings. the "closed" check - # above most likely obviates the need for any of these. + # checks based on strings. in the case that .closed + # didn't cut it, fall back onto these. str_e = str(e).partition("\n")[0] for msg in [ # these error messages from libpq: interfaces/libpq/fe-misc.c @@ -534,8 +713,10 @@ class PGDialect_psycopg2(PGDialect): # not sure where this path is originally from, it may # be obsolete. It really says "losed", not "closed". 
'losed the connection unexpectedly', - # this can occur in newer SSL - 'connection has been closed unexpectedly' + # these can occur in newer SSL + 'connection has been closed unexpectedly', + 'SSL SYSCALL error: Bad file descriptor', + 'SSL SYSCALL error: EOF detected', ]: idx = str_e.find(msg) if idx >= 0 and '"' not in str_e[:idx]: diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/postgresql/psycopg2cffi.py b/lib/python3.4/site-packages/sqlalchemy/dialects/postgresql/psycopg2cffi.py new file mode 100644 index 0000000..ab99a83 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/postgresql/psycopg2cffi.py @@ -0,0 +1,61 @@ +# testing/engines.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php +""" +.. dialect:: postgresql+psycopg2cffi + :name: psycopg2cffi + :dbapi: psycopg2cffi + :connectstring: \ +postgresql+psycopg2cffi://user:password@host:port/dbname\ +[?key=value&key=value...] + :url: http://pypi.python.org/pypi/psycopg2cffi/ + +``psycopg2cffi`` is an adaptation of ``psycopg2``, using CFFI for the C +layer. This makes it suitable for use in e.g. PyPy. Documentation +is as per ``psycopg2``. + +.. versionadded:: 1.0.0 + +.. seealso:: + + :mod:`sqlalchemy.dialects.postgresql.psycopg2` + +""" +from .psycopg2 import PGDialect_psycopg2 + + +class PGDialect_psycopg2cffi(PGDialect_psycopg2): + driver = 'psycopg2cffi' + supports_unicode_statements = True + + # psycopg2cffi's first release is 2.5.0, but reports + # __version__ as 2.4.4. Subsequent releases seem to have + # fixed this. + + FEATURE_VERSION_MAP = dict( + native_json=(2, 4, 4), + native_jsonb=(2, 7, 1), + sane_multi_rowcount=(2, 4, 4), + array_oid=(2, 4, 4), + hstore_adapter=(2, 4, 4) + ) + + @classmethod + def dbapi(cls): + return __import__('psycopg2cffi') + + @classmethod + def _psycopg2_extensions(cls): + root = __import__('psycopg2cffi', fromlist=['extensions']) + return root.extensions + + @classmethod + def _psycopg2_extras(cls): + root = __import__('psycopg2cffi', fromlist=['extras']) + return root.extras + + +dialect = PGDialect_psycopg2cffi diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/postgresql/pypostgresql.py b/lib/python3.4/site-packages/sqlalchemy/dialects/postgresql/pypostgresql.py index 3ebd013..f2b850a 100644 --- a/lib/python3.4/site-packages/sqlalchemy/dialects/postgresql/pypostgresql.py +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/postgresql/pypostgresql.py @@ -1,5 +1,5 @@ # postgresql/pypostgresql.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -65,6 +65,23 @@ class PGDialect_pypostgresql(PGDialect): from postgresql.driver import dbapi20 return dbapi20 + _DBAPI_ERROR_NAMES = [ + "Error", + "InterfaceError", "DatabaseError", "DataError", + "OperationalError", "IntegrityError", "InternalError", + "ProgrammingError", "NotSupportedError" + ] + + @util.memoized_property + def dbapi_exception_translation_map(self): + if self.dbapi is None: + return {} + + return dict( + (getattr(self.dbapi, name).__name__, name) + for name in self._DBAPI_ERROR_NAMES + ) + def create_connect_args(self, url): opts = url.translate_connect_args(username='user') if 'port' in opts: diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/postgresql/ranges.py 
b/lib/python3.4/site-packages/sqlalchemy/dialects/postgresql/ranges.py index 28f80d0..42a1cd4 100644 --- a/lib/python3.4/site-packages/sqlalchemy/dialects/postgresql/ranges.py +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/postgresql/ranges.py @@ -1,4 +1,4 @@ -# Copyright (C) 2013-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2013-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/postgresql/zxjdbc.py b/lib/python3.4/site-packages/sqlalchemy/dialects/postgresql/zxjdbc.py index 00b428f..cc46460 100644 --- a/lib/python3.4/site-packages/sqlalchemy/dialects/postgresql/zxjdbc.py +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/postgresql/zxjdbc.py @@ -1,5 +1,5 @@ # postgresql/zxjdbc.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/sqlite/__init__.py b/lib/python3.4/site-packages/sqlalchemy/dialects/sqlite/__init__.py index 0eceaa5..a8dec30 100644 --- a/lib/python3.4/site-packages/sqlalchemy/dialects/sqlite/__init__.py +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/sqlite/__init__.py @@ -1,11 +1,11 @@ # sqlite/__init__.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php -from sqlalchemy.dialects.sqlite import base, pysqlite +from sqlalchemy.dialects.sqlite import base, pysqlite, pysqlcipher # default dialect base.dialect = pysqlite.dialect diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/sqlite/base.py b/lib/python3.4/site-packages/sqlalchemy/dialects/sqlite/base.py index fbd580d..e623ff0 100644 --- a/lib/python3.4/site-packages/sqlalchemy/dialects/sqlite/base.py +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/sqlite/base.py @@ -1,5 +1,5 @@ # sqlite/base.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -9,6 +9,7 @@ .. dialect:: sqlite :name: SQLite +.. _sqlite_datetime: Date and Time Types ------------------- @@ -23,6 +24,20 @@ These types represent dates and times as ISO formatted strings, which also nicely support ordering. There's no reliance on typical "libc" internals for these functions so historical dates are fully supported. +Ensuring Text affinity +^^^^^^^^^^^^^^^^^^^^^^ + +The DDL rendered for these types is the standard ``DATE``, ``TIME`` +and ``DATETIME`` indicators. However, custom storage formats can also be +applied to these types. When the +storage format is detected as containing no alpha characters, the DDL for +these types is rendered as ``DATE_CHAR``, ``TIME_CHAR``, and ``DATETIME_CHAR``, +so that the column continues to have textual affinity. + +.. seealso:: + + `Type Affinity `_ - in the SQLite documentation + .. 
_sqlite_autoincrement: SQLite Auto Incrementing Behavior @@ -30,14 +45,20 @@ SQLite Auto Incrementing Behavior Background on SQLite's autoincrement is at: http://sqlite.org/autoinc.html -Two things to note: +Key concepts: -* The AUTOINCREMENT keyword is **not** required for SQLite tables to - generate primary key values automatically. AUTOINCREMENT only means that the - algorithm used to generate ROWID values should be slightly different. -* SQLite does **not** generate primary key (i.e. ROWID) values, even for - one column, if the table has a composite (i.e. multi-column) primary key. - This is regardless of the AUTOINCREMENT keyword being present or not. +* SQLite has an implicit "auto increment" feature that takes place for any + non-composite primary-key column that is specifically created using + "INTEGER PRIMARY KEY" for the type + primary key. + +* SQLite also has an explicit "AUTOINCREMENT" keyword, that is **not** + equivalent to the implicit autoincrement feature; this keyword is not + recommended for general use. SQLAlchemy does not render this keyword + unless a special SQLite-specific directive is used (see below). However, + it still requires that the column's type is named "INTEGER". + +Using the AUTOINCREMENT Keyword +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ To specifically render the AUTOINCREMENT keyword on the primary key column when rendering DDL, add the flag ``sqlite_autoincrement=True`` to the Table @@ -47,50 +68,172 @@ construct:: Column('id', Integer, primary_key=True), sqlite_autoincrement=True) -Transaction Isolation Level ---------------------------- +Allowing autoincrement behavior SQLAlchemy types other than Integer/INTEGER +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -:func:`.create_engine` accepts an ``isolation_level`` parameter which results -in the command ``PRAGMA read_uncommitted `` being invoked for every new -connection. Valid values for this parameter are ``SERIALIZABLE`` and ``READ -UNCOMMITTED`` corresponding to a value of 0 and 1, respectively. See the -section :ref:`pysqlite_serializable` for an important workaround when using -serializable isolation with Pysqlite. +SQLite's typing model is based on naming conventions. Among +other things, this means that any type name which contains the +substring ``"INT"`` will be determined to be of "integer affinity". A +type named ``"BIGINT"``, ``"SPECIAL_INT"`` or even ``"XYZINTQPR"``, will be considered by +SQLite to be of "integer" affinity. However, **the SQLite +autoincrement feature, whether implicitly or explicitly enabled, +requires that the name of the column's type +is exactly the string "INTEGER"**. Therefore, if an +application uses a type like :class:`.BigInteger` for a primary key, on +SQLite this type will need to be rendered as the name ``"INTEGER"`` when +emitting the initial ``CREATE TABLE`` statement in order for the autoincrement +behavior to be available. 
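To make the INTEGER-only requirement concrete, here is a minimal sketch of the default rendering that defeats autoincrement; the table and column names are hypothetical::

    from sqlalchemy import BigInteger, Column, MetaData, Table, create_engine

    metadata = MetaData()
    accounts = Table(
        'accounts', metadata,
        Column('id', BigInteger, primary_key=True))

    engine = create_engine("sqlite://")
    metadata.create_all(engine)
    # emits: CREATE TABLE accounts (id BIGINT NOT NULL, PRIMARY KEY (id))
    # "BIGINT" is not the exact string "INTEGER", so SQLite's rowid-alias
    # autoincrement does not apply; the fix is to render this type as
    # plain "INTEGER" on SQLite only
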
+ +One approach to achieve this is to use :class:`.Integer` on SQLite +only using :meth:`.TypeEngine.with_variant`:: + + table = Table( + "my_table", metadata, + Column("id", BigInteger().with_variant(Integer, "sqlite"), primary_key=True) + ) + +Another is to use a subclass of :class:`.BigInteger` that overrides its DDL name +to be ``INTEGER`` when compiled against SQLite:: + + from sqlalchemy import BigInteger + from sqlalchemy.ext.compiler import compiles + + class SLBigInteger(BigInteger): + pass + + @compiles(SLBigInteger, 'sqlite') + def bi_c(element, compiler, **kw): + return "INTEGER" + + @compiles(SLBigInteger) + def bi_c(element, compiler, **kw): + return compiler.visit_BIGINT(element, **kw) + + + table = Table( + "my_table", metadata, + Column("id", SLBigInteger(), primary_key=True) + ) + +.. seealso:: + + :meth:`.TypeEngine.with_variant` + + :ref:`sqlalchemy.ext.compiler_toplevel` + + `Datatypes In SQLite Version 3 `_ + +.. _sqlite_concurrency: Database Locking Behavior / Concurrency --------------------------------------- -Note that SQLite is not designed for a high level of concurrency. The database -itself, being a file, is locked completely during write operations and within -transactions, meaning exactly one connection has exclusive access to the -database during this period - all other connections will be blocked during -this time. +SQLite is not designed for a high level of write concurrency. The database +itself, being a file, is locked completely during write operations within +transactions, meaning exactly one "connection" (in reality a file handle) +has exclusive access to the database during this period - all other +"connections" will be blocked during this time. The Python DBAPI specification also calls for a connection model that is -always in a transaction; there is no BEGIN method, only commit and rollback. -This implies that a SQLite DBAPI driver would technically allow only -serialized access to a particular database file at all times. The pysqlite -driver attempts to ameliorate this by deferring the actual BEGIN statement -until the first DML (INSERT, UPDATE, or DELETE) is received within a -transaction. While this breaks serializable isolation, it at least delays the -exclusive locking inherent in SQLite's design. +always in a transaction; there is no ``connection.begin()`` method, +only ``connection.commit()`` and ``connection.rollback()``, upon which a +new transaction is to be begun immediately. This may seem to imply +that the SQLite driver would in theory allow only a single filehandle on a +particular database file at any time; however, there are several +factors both within SQLite itself as well as within the pysqlite driver +which loosen this restriction significantly. -SQLAlchemy's default mode of usage with the ORM is known as -"autocommit=False", which means the moment the :class:`.Session` begins to be -used, a transaction is begun. As the :class:`.Session` is used, the autoflush -feature, also on by default, will flush out pending changes to the database -before each query. The effect of this is that a :class:`.Session` used in its -default mode will often emit DML early on, long before the transaction is -actually committed. This again will have the effect of serializing access to -the SQLite database. 
If highly concurrent reads are desired against the SQLite -database, it is advised that the autoflush feature be disabled, and -potentially even that autocommit be re-enabled, which has the effect of each -SQL statement and flush committing changes immediately. +However, no matter what locking modes are used, SQLite will still always +lock the database file once a transaction is started and DML (e.g. INSERT, +UPDATE, DELETE) has at least been emitted, and this will block +other transactions at least at the point that they also attempt to emit DML. +By default, the length of time on this block is very short before it times out +with an error. -For more information on SQLite's lack of concurrency by design, please see +This behavior becomes more critical when used in conjunction with the +SQLAlchemy ORM. SQLAlchemy's :class:`.Session` object by default runs +within a transaction, and with its autoflush model, may emit DML preceding +any SELECT statement. This may lead to a SQLite database that locks +more quickly than is expected. The locking mode of SQLite and the pysqlite +driver can be manipulated to some degree, however it should be noted that +achieving a high degree of write-concurrency with SQLite is a losing battle. + +For more information on SQLite's lack of write concurrency by design, please +see `Situations Where Another RDBMS May Work Better - High Concurrency `_ near the bottom of the page. +The following subsections introduce areas that are impacted by SQLite's +file-based architecture and additionally will usually require workarounds to +work when using the pysqlite driver. + +.. _sqlite_isolation_level: + +Transaction Isolation Level +---------------------------- + +SQLite supports "transaction isolation" in a non-standard way, along two +axes. One is that of the `PRAGMA read_uncommitted `_ +instruction. This setting can essentially switch SQLite between its +default mode of ``SERIALIZABLE`` isolation, and a "dirty read" isolation +mode normally referred to as ``READ UNCOMMITTED``. + +SQLAlchemy ties into this PRAGMA statement using the +:paramref:`.create_engine.isolation_level` parameter of :func:`.create_engine`. +Valid values for this parameter when used with SQLite are ``"SERIALIZABLE"`` +and ``"READ UNCOMMITTED"`` corresponding to a value of 0 and 1, respectively. +SQLite defaults to ``SERIALIZABLE``, however its behavior is impacted by +the pysqlite driver's default behavior. + +The other axis along which SQLite's transactional locking is impacted is +via the nature of the ``BEGIN`` statement used. The three varieties +are "deferred", "immediate", and "exclusive", as described at +`BEGIN TRANSACTION `_. A straight +``BEGIN`` statement uses the "deferred" mode, where the database file is +not locked until the first read or write operation, and read access remains +open to other transactions until the first write operation. But again, +it is critical to note that the pysqlite driver interferes with this behavior +by *not even emitting BEGIN* until the first write operation. + +.. warning:: + + SQLite's transactional scope is impacted by unresolved + issues in the pysqlite driver, which defers BEGIN statements to a greater + degree than is often feasible. See the section :ref:`pysqlite_serializable` + for techniques to work around this behavior. + +SAVEPOINT Support +---------------------------- + +SQLite supports SAVEPOINTs, which only function once a transaction is +begun. 
SQLAlchemy's SAVEPOINT support is available using the +:meth:`.Connection.begin_nested` method at the Core level, and +:meth:`.Session.begin_nested` at the ORM level. However, SAVEPOINTs +won't work at all with pysqlite unless workarounds are taken. + +.. warning:: + + SQLite's SAVEPOINT feature is impacted by unresolved + issues in the pysqlite driver, which defers BEGIN statements to a greater + degree than is often feasible. See the section :ref:`pysqlite_serializable` + for techniques to work around this behavior. + +Transactional DDL +---------------------------- + +The SQLite database supports transactional :term:`DDL` as well. +In this case, the pysqlite driver is not only failing to start transactions, +it also is ending any existing transaction when DDL is detected, so again, +workarounds are required. + +.. warning:: + + SQLite's transactional DDL is impacted by unresolved issues + in the pysqlite driver, which fails to emit BEGIN and additionally + forces a COMMIT to cancel any transaction when DDL is encountered. + See the section :ref:`pysqlite_serializable` + for techniques to work around this behavior. + .. _sqlite_foreign_keys: Foreign Key Support @@ -120,6 +263,15 @@ new connections through the usage of events:: cursor.execute("PRAGMA foreign_keys=ON") cursor.close() +.. warning:: + + When SQLite foreign keys are enabled, it is **not possible** + to emit CREATE or DROP statements for tables that contain + mutually-dependent foreign key constraints; + to emit the DDL for these tables requires that ALTER TABLE be used to + create or drop these constraints separately, for which SQLite has + no support. + .. seealso:: `SQLite Foreign Key Support `_ @@ -127,6 +279,9 @@ new connections through the usage of events:: :ref:`event_toplevel` - SQLAlchemy event API. + :ref:`use_alter` - more information on SQLAlchemy's facilities for handling + mutually-dependent foreign key constraints. + .. _sqlite_type_reflection: Type Reflection @@ -177,6 +332,133 @@ lookup is used instead: .. versionadded:: 0.9.3 Support for SQLite type affinity rules when reflecting columns. + +.. _sqlite_partial_index: + +Partial Indexes +--------------- + +A partial index, e.g. one which uses a WHERE clause, can be specified +with the DDL system using the argument ``sqlite_where``:: + + tbl = Table('testtbl', m, Column('data', Integer)) + idx = Index('test_idx1', tbl.c.data, + sqlite_where=and_(tbl.c.data > 5, tbl.c.data < 10)) + +The index will be rendered at create time as:: + + CREATE INDEX test_idx1 ON testtbl (data) + WHERE data > 5 AND data < 10 + +.. versionadded:: 0.9.9 + +Dotted Column Names +------------------- + +Using table or column names that explicitly have periods in them is +**not recommended**. While this is generally a bad idea for relational +databases in general, as the dot is a syntactically significant character, +the SQLite driver up until version **3.10.0** of SQLite has a bug which +requires that SQLAlchemy filter out these dots in result sets. + +.. note:: + + The following SQLite issue has been resolved as of version 3.10.0 + of SQLite. SQLAlchemy as of **1.1** automatically disables its internal + workarounds based on detection of this version. 
+ +The bug, entirely outside of SQLAlchemy, can be illustrated thusly:: + + import sqlite3 + + conn = sqlite3.connect(":memory:") + cursor = conn.cursor() + + cursor.execute("create table x (a integer, b integer)") + cursor.execute("insert into x (a, b) values (1, 1)") + cursor.execute("insert into x (a, b) values (2, 2)") + + cursor.execute("select x.a, x.b from x") + assert [c[0] for c in cursor.description] == ['a', 'b'] + + cursor.execute(''' + select x.a, x.b from x where a=1 + union + select x.a, x.b from x where a=2 + ''') + assert [c[0] for c in cursor.description] == ['a', 'b'], \\ + [c[0] for c in cursor.description] + +The second assertion fails:: + + Traceback (most recent call last): + File "test.py", line 19, in + [c[0] for c in cursor.description] + AssertionError: ['x.a', 'x.b'] + +Where above, the driver incorrectly reports the names of the columns +including the name of the table, which is entirely inconsistent vs. +when the UNION is not present. + +SQLAlchemy relies upon column names being predictable in how they match +to the original statement, so the SQLAlchemy dialect has no choice but +to filter these out:: + + + from sqlalchemy import create_engine + + eng = create_engine("sqlite://") + conn = eng.connect() + + conn.execute("create table x (a integer, b integer)") + conn.execute("insert into x (a, b) values (1, 1)") + conn.execute("insert into x (a, b) values (2, 2)") + + result = conn.execute("select x.a, x.b from x") + assert result.keys() == ["a", "b"] + + result = conn.execute(''' + select x.a, x.b from x where a=1 + union + select x.a, x.b from x where a=2 + ''') + assert result.keys() == ["a", "b"] + +Note that above, even though SQLAlchemy filters out the dots, *both +names are still addressable*:: + + >>> row = result.first() + >>> row["a"] + 1 + >>> row["x.a"] + 1 + >>> row["b"] + 1 + >>> row["x.b"] + 1 + +Therefore, the workaround applied by SQLAlchemy only impacts +:meth:`.ResultProxy.keys` and :meth:`.RowProxy.keys()` in the public API. +In the very specific case where +an application is forced to use column names that contain dots, and the +functionality of :meth:`.ResultProxy.keys` and :meth:`.RowProxy.keys()` +is required to return these dotted names unmodified, the ``sqlite_raw_colnames`` +execution option may be provided, either on a per-:class:`.Connection` basis:: + + result = conn.execution_options(sqlite_raw_colnames=True).execute(''' + select x.a, x.b from x where a=1 + union + select x.a, x.b from x where a=2 + ''') + assert result.keys() == ["x.a", "x.b"] + +or on a per-:class:`.Engine` basis:: + + engine = create_engine("sqlite://", execution_options={"sqlite_raw_colnames": True}) + +When using the per-:class:`.Engine` execution option, note that +**Core and ORM queries that use UNION may not function properly**. + """ import datetime @@ -189,7 +471,7 @@ from ... import util from ...engine import default, reflection from ...sql import compiler -from ...types import (BLOB, BOOLEAN, CHAR, DATE, DECIMAL, FLOAT, +from ...types import (BLOB, BOOLEAN, CHAR, DECIMAL, FLOAT, INTEGER, REAL, NUMERIC, SMALLINT, TEXT, TIMESTAMP, VARCHAR) @@ -205,6 +487,25 @@ class _DateTimeMixin(object): if storage_format is not None: self._storage_format = storage_format + @property + def format_is_text_affinity(self): + """return True if the storage format will automatically imply + a TEXT affinity. 
+ + If the storage format contains no non-numeric characters, + it will imply a NUMERIC storage format on SQLite; in this case, + the type will generate its DDL as DATE_CHAR, DATETIME_CHAR, + TIME_CHAR. + + .. versionadded:: 1.0.0 + + """ + spec = self._storage_format % { + "year": 0, "month": 0, "day": 0, "hour": 0, + "minute": 0, "second": 0, "microsecond": 0 + } + return bool(re.search(r'[^0-9]', spec)) + def adapt(self, cls, **kw): if issubclass(cls, _DateTimeMixin): if self._storage_format: @@ -460,7 +761,9 @@ ischema_names = { 'BOOLEAN': sqltypes.BOOLEAN, 'CHAR': sqltypes.CHAR, 'DATE': sqltypes.DATE, + 'DATE_CHAR': sqltypes.DATE, 'DATETIME': sqltypes.DATETIME, + 'DATETIME_CHAR': sqltypes.DATETIME, 'DOUBLE': sqltypes.FLOAT, 'DECIMAL': sqltypes.DECIMAL, 'FLOAT': sqltypes.FLOAT, @@ -471,6 +774,7 @@ ischema_names = { 'SMALLINT': sqltypes.SMALLINT, 'TEXT': sqltypes.TEXT, 'TIME': sqltypes.TIME, + 'TIME_CHAR': sqltypes.TIME, 'TIMESTAMP': sqltypes.TIMESTAMP, 'VARCHAR': sqltypes.VARCHAR, 'NVARCHAR': sqltypes.NVARCHAR, @@ -525,19 +829,19 @@ class SQLiteCompiler(compiler.SQLCompiler): raise exc.CompileError( "%s is not a valid extract argument." % extract.field) - def limit_clause(self, select): + def limit_clause(self, select, **kw): text = "" - if select._limit is not None: - text += "\n LIMIT " + self.process(sql.literal(select._limit)) - if select._offset is not None: - if select._limit is None: + if select._limit_clause is not None: + text += "\n LIMIT " + self.process(select._limit_clause, **kw) + if select._offset_clause is not None: + if select._limit_clause is None: text += "\n LIMIT " + self.process(sql.literal(-1)) - text += " OFFSET " + self.process(sql.literal(select._offset)) + text += " OFFSET " + self.process(select._offset_clause, **kw) else: - text += " OFFSET " + self.process(sql.literal(0)) + text += " OFFSET " + self.process(sql.literal(0), **kw) return text - def for_update_clause(self, select): + def for_update_clause(self, select, **kw): # sqlite has no "FOR UPDATE" AFAICT return '' @@ -545,7 +849,8 @@ class SQLiteCompiler(compiler.SQLCompiler): class SQLiteDDLCompiler(compiler.DDLCompiler): def get_column_specification(self, column, **kwargs): - coltype = self.dialect.type_compiler.process(column.type) + coltype = self.dialect.type_compiler.process( + column.type, type_expression=column) colspec = self.preparer.format_column(column) + " " + coltype default = self.get_column_default_string(column) if default is not None: @@ -580,8 +885,8 @@ class SQLiteDDLCompiler(compiler.DDLCompiler): def visit_foreign_key_constraint(self, constraint): - local_table = list(constraint._elements.values())[0].parent.table - remote_table = list(constraint._elements.values())[0].column.table + local_table = constraint.elements[0].parent.table + remote_table = constraint.elements[0].column.table if local_table.schema != remote_table.schema: return None @@ -596,14 +901,46 @@ class SQLiteDDLCompiler(compiler.DDLCompiler): return preparer.format_table(table, use_schema=False) def visit_create_index(self, create): - return super(SQLiteDDLCompiler, self).visit_create_index( + index = create.element + + text = super(SQLiteDDLCompiler, self).visit_create_index( create, include_table_schema=False) + whereclause = index.dialect_options["sqlite"]["where"] + if whereclause is not None: + where_compiled = self.sql_compiler.process( + whereclause, include_table=False, + literal_binds=True) + text += " WHERE " + where_compiled + + return text + class SQLiteTypeCompiler(compiler.GenericTypeCompiler): - def 
visit_large_binary(self, type_): + def visit_large_binary(self, type_, **kw): return self.visit_BLOB(type_) + def visit_DATETIME(self, type_, **kw): + if not isinstance(type_, _DateTimeMixin) or \ + type_.format_is_text_affinity: + return super(SQLiteTypeCompiler, self).visit_DATETIME(type_) + else: + return "DATETIME_CHAR" + + def visit_DATE(self, type_, **kw): + if not isinstance(type_, _DateTimeMixin) or \ + type_.format_is_text_affinity: + return super(SQLiteTypeCompiler, self).visit_DATE(type_) + else: + return "DATE_CHAR" + + def visit_TIME(self, type_, **kw): + if not isinstance(type_, _DateTimeMixin) or \ + type_.format_is_text_affinity: + return super(SQLiteTypeCompiler, self).visit_TIME(type_) + else: + return "TIME_CHAR" + class SQLiteIdentifierPreparer(compiler.IdentifierPreparer): reserved_words = set([ @@ -647,10 +984,15 @@ class SQLiteExecutionContext(default.DefaultExecutionContext): return self.execution_options.get("sqlite_raw_colnames", False) def _translate_colname(self, colname): - # adjust for dotted column names. SQLite in the case of UNION may - # store col names as "tablename.colname" in cursor.description + # TODO: detect SQLite version 3.10.0 or greater; + # see [ticket:3633] + + # adjust for dotted column names. SQLite + # in the case of UNION may store col names as + # "tablename.colname", or if using an attached database, + # "database.tablename.colname", in cursor.description if not self._preserve_raw_colnames and "." in colname: - return colname.split(".")[1], colname + return colname.split(".")[-1], colname else: return colname, None @@ -664,6 +1006,9 @@ class SQLiteDialect(default.DefaultDialect): supports_empty_insert = False supports_cast = True supports_multivalues_insert = True + + # TODO: detect version 3.7.16 or greater; + # see [ticket:3634] supports_right_nested_joins = False default_paramstyle = 'qmark' @@ -682,7 +1027,10 @@ class SQLiteDialect(default.DefaultDialect): construct_arguments = [ (sa_schema.Table, { "autoincrement": False - }) + }), + (sa_schema.Index, { + "where": None, + }), ] _broken_fk_pragma_quotes = False @@ -763,60 +1111,44 @@ class SQLiteDialect(default.DefaultDialect): if schema is not None: qschema = self.identifier_preparer.quote_identifier(schema) master = '%s.sqlite_master' % qschema - s = ("SELECT name FROM %s " - "WHERE type='table' ORDER BY name") % (master,) - rs = connection.execute(s) else: - try: - s = ("SELECT name FROM " - " (SELECT * FROM sqlite_master UNION ALL " - " SELECT * FROM sqlite_temp_master) " - "WHERE type='table' ORDER BY name") - rs = connection.execute(s) - except exc.DBAPIError: - s = ("SELECT name FROM sqlite_master " - "WHERE type='table' ORDER BY name") - rs = connection.execute(s) + master = "sqlite_master" + s = ("SELECT name FROM %s " + "WHERE type='table' ORDER BY name") % (master,) + rs = connection.execute(s) + return [row[0] for row in rs] + + @reflection.cache + def get_temp_table_names(self, connection, **kw): + s = "SELECT name FROM sqlite_temp_master "\ + "WHERE type='table' ORDER BY name " + rs = connection.execute(s) + + return [row[0] for row in rs] + + @reflection.cache + def get_temp_view_names(self, connection, **kw): + s = "SELECT name FROM sqlite_temp_master "\ + "WHERE type='view' ORDER BY name " + rs = connection.execute(s) return [row[0] for row in rs] def has_table(self, connection, table_name, schema=None): - quote = self.identifier_preparer.quote_identifier - if schema is not None: - pragma = "PRAGMA %s." 
% quote(schema) - else: - pragma = "PRAGMA " - qtable = quote(table_name) - statement = "%stable_info(%s)" % (pragma, qtable) - cursor = _pragma_cursor(connection.execute(statement)) - row = cursor.fetchone() - - # consume remaining rows, to work around - # http://www.sqlite.org/cvstrac/tktview?tn=1884 - while not cursor.closed and cursor.fetchone() is not None: - pass - - return row is not None + info = self._get_table_pragma( + connection, "table_info", table_name, schema=schema) + return bool(info) @reflection.cache def get_view_names(self, connection, schema=None, **kw): if schema is not None: qschema = self.identifier_preparer.quote_identifier(schema) master = '%s.sqlite_master' % qschema - s = ("SELECT name FROM %s " - "WHERE type='view' ORDER BY name") % (master,) - rs = connection.execute(s) else: - try: - s = ("SELECT name FROM " - " (SELECT * FROM sqlite_master UNION ALL " - " SELECT * FROM sqlite_temp_master) " - "WHERE type='view' ORDER BY name") - rs = connection.execute(s) - except exc.DBAPIError: - s = ("SELECT name FROM sqlite_master " - "WHERE type='view' ORDER BY name") - rs = connection.execute(s) + master = "sqlite_master" + s = ("SELECT name FROM %s " + "WHERE type='view' ORDER BY name") % (master,) + rs = connection.execute(s) return [row[0] for row in rs] @@ -847,18 +1179,11 @@ class SQLiteDialect(default.DefaultDialect): @reflection.cache def get_columns(self, connection, table_name, schema=None, **kw): - quote = self.identifier_preparer.quote_identifier - if schema is not None: - pragma = "PRAGMA %s." % quote(schema) - else: - pragma = "PRAGMA " - qtable = quote(table_name) - statement = "%stable_info(%s)" % (pragma, qtable) - c = _pragma_cursor(connection.execute(statement)) + info = self._get_table_pragma( + connection, "table_info", table_name, schema=schema) - rows = c.fetchall() columns = [] - for row in rows: + for row in info: (name, type_, nullable, default, primary_key) = ( row[1], row[2].upper(), not row[3], row[4], row[5]) @@ -945,116 +1270,219 @@ class SQLiteDialect(default.DefaultDialect): @reflection.cache def get_foreign_keys(self, connection, table_name, schema=None, **kw): - quote = self.identifier_preparer.quote_identifier - if schema is not None: - pragma = "PRAGMA %s." % quote(schema) - else: - pragma = "PRAGMA " - qtable = quote(table_name) - statement = "%sforeign_key_list(%s)" % (pragma, qtable) - c = _pragma_cursor(connection.execute(statement)) - fkeys = [] + # sqlite makes this *extremely difficult*. + # First, use the pragma to get the actual FKs. 
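+        # per the SQLite documentation, rows from PRAGMA foreign_key_list
+        # take the form (id, seq, "table", "from", "to", on_update,
+        # on_delete, match); a composite foreign key repeats the same id
+        # across multiple rows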
+ pragma_fks = self._get_table_pragma( + connection, "foreign_key_list", + table_name, schema=schema + ) + fks = {} - while True: - row = c.fetchone() - if row is None: - break + + for row in pragma_fks: (numerical_id, rtbl, lcol, rcol) = ( row[0], row[2], row[3], row[4]) - self._parse_fk(fks, fkeys, numerical_id, rtbl, lcol, rcol) + if rcol is None: + rcol = lcol + + if self._broken_fk_pragma_quotes: + rtbl = re.sub(r'^[\"\[`\']|[\"\]`\']$', '', rtbl) + + if numerical_id in fks: + fk = fks[numerical_id] + else: + fk = fks[numerical_id] = { + 'name': None, + 'constrained_columns': [], + 'referred_schema': None, + 'referred_table': rtbl, + 'referred_columns': [], + } + fks[numerical_id] = fk + + fk['constrained_columns'].append(lcol) + fk['referred_columns'].append(rcol) + + def fk_sig(constrained_columns, referred_table, referred_columns): + return tuple(constrained_columns) + (referred_table,) + \ + tuple(referred_columns) + + # then, parse the actual SQL and attempt to find DDL that matches + # the names as well. SQLite saves the DDL in whatever format + # it was typed in as, so need to be liberal here. + + keys_by_signature = dict( + ( + fk_sig( + fk['constrained_columns'], + fk['referred_table'], fk['referred_columns']), + fk + ) for fk in fks.values() + ) + + table_data = self._get_table_sql(connection, table_name, schema=schema) + if table_data is None: + # system tables, etc. + return [] + + def parse_fks(): + FK_PATTERN = ( + '(?:CONSTRAINT (\w+) +)?' + 'FOREIGN KEY *\( *(.+?) *\) +' + 'REFERENCES +(?:(?:"(.+?)")|([a-z0-9_]+)) *\((.+?)\)' + ) + + for match in re.finditer(FK_PATTERN, table_data, re.I): + ( + constraint_name, constrained_columns, + referred_quoted_name, referred_name, + referred_columns) = match.group(1, 2, 3, 4, 5) + constrained_columns = list( + self._find_cols_in_sig(constrained_columns)) + if not referred_columns: + referred_columns = constrained_columns + else: + referred_columns = list( + self._find_cols_in_sig(referred_columns)) + referred_name = referred_quoted_name or referred_name + yield ( + constraint_name, constrained_columns, + referred_name, referred_columns) + fkeys = [] + + for ( + constraint_name, constrained_columns, + referred_name, referred_columns) in parse_fks(): + sig = fk_sig( + constrained_columns, referred_name, referred_columns) + if sig not in keys_by_signature: + util.warn( + "WARNING: SQL-parsed foreign key constraint " + "'%s' could not be located in PRAGMA " + "foreign_keys for table %s" % ( + sig, + table_name + )) + continue + key = keys_by_signature.pop(sig) + key['name'] = constraint_name + fkeys.append(key) + # assume the remainders are the unnamed, inline constraints, just + # use them as is as it's extremely difficult to parse inline + # constraints + fkeys.extend(keys_by_signature.values()) return fkeys - def _parse_fk(self, fks, fkeys, numerical_id, rtbl, lcol, rcol): - # sqlite won't return rcol if the table was created with REFERENCES - # , no col - if rcol is None: - rcol = lcol - - if self._broken_fk_pragma_quotes: - rtbl = re.sub(r'^[\"\[`\']|[\"\]`\']$', '', rtbl) - - try: - fk = fks[numerical_id] - except KeyError: - fk = { - 'name': None, - 'constrained_columns': [], - 'referred_schema': None, - 'referred_table': rtbl, - 'referred_columns': [], - } - fkeys.append(fk) - fks[numerical_id] = fk - - if lcol not in fk['constrained_columns']: - fk['constrained_columns'].append(lcol) - if rcol not in fk['referred_columns']: - fk['referred_columns'].append(rcol) - return fk - - @reflection.cache - def get_indexes(self, 
connection, table_name, schema=None, **kw): - quote = self.identifier_preparer.quote_identifier - if schema is not None: - pragma = "PRAGMA %s." % quote(schema) - else: - pragma = "PRAGMA " - include_auto_indexes = kw.pop('include_auto_indexes', False) - qtable = quote(table_name) - statement = "%sindex_list(%s)" % (pragma, qtable) - c = _pragma_cursor(connection.execute(statement)) - indexes = [] - while True: - row = c.fetchone() - if row is None: - break - # ignore implicit primary key index. - # http://www.mail-archive.com/sqlite-users@sqlite.org/msg30517.html - elif (not include_auto_indexes and - row[1].startswith('sqlite_autoindex')): - continue - - indexes.append(dict(name=row[1], column_names=[], unique=row[2])) - # loop thru unique indexes to get the column names. - for idx in indexes: - statement = "%sindex_info(%s)" % (pragma, quote(idx['name'])) - c = connection.execute(statement) - cols = idx['column_names'] - while True: - row = c.fetchone() - if row is None: - break - cols.append(row[2]) - return indexes + def _find_cols_in_sig(self, sig): + for match in re.finditer(r'(?:"(.+?)")|([a-z0-9_]+)', sig, re.I): + yield match.group(1) or match.group(2) @reflection.cache def get_unique_constraints(self, connection, table_name, schema=None, **kw): - UNIQUE_SQL = """ - SELECT sql - FROM - sqlite_master - WHERE - type='table' AND - name=:table_name - """ - c = connection.execute(UNIQUE_SQL, table_name=table_name) - table_data = c.fetchone()[0] - UNIQUE_PATTERN = 'CONSTRAINT (\w+) UNIQUE \(([^\)]+)\)' - return [ - {'name': name, - 'column_names': [col.strip(' "') for col in cols.split(',')]} - for name, cols in re.findall(UNIQUE_PATTERN, table_data) - ] + auto_index_by_sig = {} + for idx in self.get_indexes( + connection, table_name, schema=schema, + include_auto_indexes=True, **kw): + if not idx['name'].startswith("sqlite_autoindex"): + continue + sig = tuple(idx['column_names']) + auto_index_by_sig[sig] = idx + table_data = self._get_table_sql( + connection, table_name, schema=schema, **kw) + if not table_data: + return [] -def _pragma_cursor(cursor): - """work around SQLite issue whereby cursor.description - is blank when PRAGMA returns no rows.""" + unique_constraints = [] - if cursor.closed: - cursor.fetchone = lambda: None - cursor.fetchall = lambda: [] - return cursor + def parse_uqs(): + UNIQUE_PATTERN = '(?:CONSTRAINT "?(.+?)"? +)?UNIQUE *\((.+?)\)' + INLINE_UNIQUE_PATTERN = ( + '(?:(".+?")|([a-z0-9]+)) ' + '+[a-z0-9_ ]+? +UNIQUE') + + for match in re.finditer(UNIQUE_PATTERN, table_data, re.I): + name, cols = match.group(1, 2) + yield name, list(self._find_cols_in_sig(cols)) + + # we need to match inlines as well, as we seek to differentiate + # a UNIQUE constraint from a UNIQUE INDEX, even though these + # are kind of the same thing :) + for match in re.finditer(INLINE_UNIQUE_PATTERN, table_data, re.I): + cols = list( + self._find_cols_in_sig(match.group(1) or match.group(2))) + yield None, cols + + for name, cols in parse_uqs(): + sig = tuple(cols) + if sig in auto_index_by_sig: + auto_index_by_sig.pop(sig) + parsed_constraint = { + 'name': name, + 'column_names': cols + } + unique_constraints.append(parsed_constraint) + # NOTE: auto_index_by_sig might not be empty here, + # the PRIMARY KEY may have an entry. 
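+        # any sqlite_autoindex entries still remaining at this point are
+        # deliberately not reported as UNIQUE constraints; they stay
+        # visible through get_indexes(..., include_auto_indexes=True)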
+ return unique_constraints + + @reflection.cache + def get_indexes(self, connection, table_name, schema=None, **kw): + pragma_indexes = self._get_table_pragma( + connection, "index_list", table_name, schema=schema) + indexes = [] + + include_auto_indexes = kw.pop('include_auto_indexes', False) + for row in pragma_indexes: + # ignore implicit primary key index. + # http://www.mail-archive.com/sqlite-users@sqlite.org/msg30517.html + if (not include_auto_indexes and + row[1].startswith('sqlite_autoindex')): + continue + + indexes.append(dict(name=row[1], column_names=[], unique=row[2])) + + # loop thru unique indexes to get the column names. + for idx in indexes: + pragma_index = self._get_table_pragma( + connection, "index_info", idx['name']) + + for row in pragma_index: + idx['column_names'].append(row[2]) + return indexes + + @reflection.cache + def _get_table_sql(self, connection, table_name, schema=None, **kw): + try: + s = ("SELECT sql FROM " + " (SELECT * FROM sqlite_master UNION ALL " + " SELECT * FROM sqlite_temp_master) " + "WHERE name = '%s' " + "AND type = 'table'") % table_name + rs = connection.execute(s) + except exc.DBAPIError: + s = ("SELECT sql FROM sqlite_master WHERE name = '%s' " + "AND type = 'table'") % table_name + rs = connection.execute(s) + return rs.scalar() + + def _get_table_pragma(self, connection, pragma, table_name, schema=None): + quote = self.identifier_preparer.quote_identifier + if schema is not None: + statement = "PRAGMA %s." % quote(schema) + else: + statement = "PRAGMA " + qtable = quote(table_name) + statement = "%s%s(%s)" % (statement, pragma, qtable) + cursor = connection.execute(statement) + if not cursor._soft_closed: + # work around SQLite issue whereby cursor.description + # is blank when PRAGMA returns no rows: + # http://www.sqlite.org/cvstrac/tktview?tn=1884 + result = cursor.fetchall() + else: + result = [] + return result diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/sqlite/pysqlcipher.py b/lib/python3.4/site-packages/sqlalchemy/dialects/sqlite/pysqlcipher.py new file mode 100644 index 0000000..bbafc8d --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/sqlite/pysqlcipher.py @@ -0,0 +1,116 @@ +# sqlite/pysqlcipher.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +""" +.. dialect:: sqlite+pysqlcipher + :name: pysqlcipher + :dbapi: pysqlcipher + :connectstring: sqlite+pysqlcipher://:passphrase/file_path[?kdf_iter=] + :url: https://pypi.python.org/pypi/pysqlcipher + + ``pysqlcipher`` is a fork of the standard ``pysqlite`` driver to make + use of the `SQLCipher `_ backend. + + .. versionadded:: 0.9.9 + +Driver +------ + +The driver here is the `pysqlcipher `_ +driver, which makes use of the SQLCipher engine. This system essentially +introduces new PRAGMA commands to SQLite which allows the setting of a +passphrase and other encryption parameters, allowing the database +file to be encrypted. 
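+
+At the DBAPI level, this amounts to the passphrase being supplied via the
+``key`` PRAGMA before any other operation occurs; a minimal sketch using the
+pysqlcipher DBAPI directly (the file name and passphrase here are
+illustrative only)::
+
+    from pysqlcipher import dbapi2 as sqlcipher
+
+    conn = sqlcipher.connect("encrypted.db")
+    conn.execute('pragma key="testing"')   # must precede other statements
+    conn.execute("create table t (x integer)")
+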
+
+Connect Strings
+---------------
+
+The format of the connect string is in every way the same as that
+of the :mod:`~sqlalchemy.dialects.sqlite.pysqlite` driver, except that the
+"password" field is now accepted, which should contain a passphrase::
+
+    e = create_engine('sqlite+pysqlcipher://:testing@/foo.db')
+
+For an absolute file path, two leading slashes should be used for the
+database name::
+
+    e = create_engine('sqlite+pysqlcipher://:testing@//path/to/foo.db')
+
+A selection of additional encryption-related pragmas supported by SQLCipher
+as documented at https://www.zetetic.net/sqlcipher/sqlcipher-api/ can be passed
+in the query string, and will result in that PRAGMA being called for each
+new connection. Currently, ``cipher``, ``kdf_iter``,
+``cipher_page_size`` and ``cipher_use_hmac`` are supported::
+
+    e = create_engine('sqlite+pysqlcipher://:testing@/foo.db?cipher=aes-256-cfb&kdf_iter=64000')
+
+
+Pooling Behavior
+----------------
+
+The driver makes a change to the default pool behavior of pysqlite
+as described in :ref:`pysqlite_threading_pooling`. The pysqlcipher driver
+has been observed to be significantly slower on connection than the
+pysqlite driver, most likely due to the encryption overhead, so the
+dialect here defaults to using the :class:`.SingletonThreadPool`
+implementation, instead of the :class:`.NullPool` pool used by pysqlite.
+As always, the pool implementation is entirely configurable using the
+:paramref:`.create_engine.poolclass` parameter; the :class:`.StaticPool` may
+be more feasible for single-threaded use, or :class:`.NullPool` may be used
+to prevent unencrypted connections from being held open for long periods of
+time, at the expense of slower startup time for new connections.
+
+
+"""
+from __future__ import absolute_import
+from .pysqlite import SQLiteDialect_pysqlite
+from ...engine import url as _url
+from ...
import pool
+
+
+class SQLiteDialect_pysqlcipher(SQLiteDialect_pysqlite):
+    driver = 'pysqlcipher'
+
+    pragmas = ('kdf_iter', 'cipher', 'cipher_page_size', 'cipher_use_hmac')
+
+    @classmethod
+    def dbapi(cls):
+        from pysqlcipher import dbapi2 as sqlcipher
+        return sqlcipher
+
+    @classmethod
+    def get_pool_class(cls, url):
+        return pool.SingletonThreadPool
+
+    def connect(self, *cargs, **cparams):
+        passphrase = cparams.pop('passphrase', '')
+
+        pragmas = dict(
+            (key, cparams.pop(key, None)) for key in
+            self.pragmas
+        )
+
+        conn = super(SQLiteDialect_pysqlcipher, self).\
+            connect(*cargs, **cparams)
+        conn.execute('pragma key="%s"' % passphrase)
+        for prag, value in pragmas.items():
+            if value is not None:
+                conn.execute('pragma %s=%s' % (prag, value))
+
+        return conn
+
+    def create_connect_args(self, url):
+        super_url = _url.URL(
+            url.drivername, username=url.username,
+            host=url.host, database=url.database, query=url.query)
+        c_args, opts = super(SQLiteDialect_pysqlcipher, self).\
+            create_connect_args(super_url)
+        opts['passphrase'] = url.password
+        return c_args, opts
+
+dialect = SQLiteDialect_pysqlcipher
diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/sqlite/pysqlite.py b/lib/python3.4/site-packages/sqlalchemy/dialects/sqlite/pysqlite.py
index c673332..33d04de 100644
--- a/lib/python3.4/site-packages/sqlalchemy/dialects/sqlite/pysqlite.py
+++ b/lib/python3.4/site-packages/sqlalchemy/dialects/sqlite/pysqlite.py
@@ -1,5 +1,5 @@
# sqlite/pysqlite.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
#
#
# This module is part of SQLAlchemy and is released under
@@ -200,30 +200,68 @@ is passed containing non-ASCII characters.

.. _pysqlite_serializable:

-Serializable Transaction Isolation
----------------------------------
+Serializable isolation / Savepoints / Transactional DDL
+-------------------------------------------------------

-The pysqlite DBAPI driver has a long-standing bug in which transactional
-state is not begun until the first DML statement, that is INSERT, UPDATE
-or DELETE, is emitted. A SELECT statement will not cause transactional
-state to begin. While this mode of usage is fine for typical situations
-and has the advantage that the SQLite database file is not prematurely
-locked, it breaks serializable transaction isolation, which requires
-that the database file be locked upon any SQL being emitted.
+In the section :ref:`sqlite_concurrency`, we refer to the pysqlite
+driver's assortment of issues that prevent several features of SQLite
+from working correctly. The pysqlite DBAPI driver has several
+long-standing bugs which impact the correctness of its transactional
+behavior. In its default mode of operation, SQLite features such as
+SERIALIZABLE isolation, transactional DDL, and SAVEPOINT support are
+non-functional, and in order to use these features, workarounds must
+be taken.

-To work around this issue, the ``BEGIN`` keyword can be emitted
-at the start of each transaction. The following recipe establishes
-a :meth:`.ConnectionEvents.begin` handler to achieve this::
+The issue is essentially that the driver attempts to second-guess the user's
+intent, failing to start transactions and sometimes ending them prematurely, in
+an effort to minimize the SQLite database's file locking behavior, even
+though SQLite itself uses "shared" locks for read-only activities.
+ +SQLAlchemy chooses to not alter this behavior by default, as it is the +long-expected behavior of the pysqlite driver; if and when the pysqlite +driver attempts to repair these issues, that will be more of a driver towards +defaults for SQLAlchemy. + +The good news is that with a few events, we can implement transactional +support fully, by disabling pysqlite's feature entirely and emitting BEGIN +ourselves. This is achieved using two event listeners:: from sqlalchemy import create_engine, event - engine = create_engine("sqlite:///myfile.db", - isolation_level='SERIALIZABLE') + engine = create_engine("sqlite:///myfile.db") + + @event.listens_for(engine, "connect") + def do_connect(dbapi_connection, connection_record): + # disable pysqlite's emitting of the BEGIN statement entirely. + # also stops it from emitting COMMIT before any DDL. + dbapi_connection.isolation_level = None @event.listens_for(engine, "begin") def do_begin(conn): + # emit our own BEGIN conn.execute("BEGIN") +Above, we intercept a new pysqlite connection and disable any transactional +integration. Then, at the point at which SQLAlchemy knows that transaction +scope is to begin, we emit ``"BEGIN"`` ourselves. + +When we take control of ``"BEGIN"``, we can also control directly SQLite's +locking modes, introduced at `BEGIN TRANSACTION `_, +by adding the desired locking mode to our ``"BEGIN"``:: + + @event.listens_for(engine, "begin") + def do_begin(conn): + conn.execute("BEGIN EXCLUSIVE") + +.. seealso:: + + `BEGIN TRANSACTION `_ - on the SQLite site + + `sqlite3 SELECT does not BEGIN a transaction `_ - on the Python bug tracker + + `sqlite3 module breaks transactions and potentially corrupts data `_ - on the Python bug tracker + + """ from sqlalchemy.dialects.sqlite.base import SQLiteDialect, DATETIME, DATE diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/sybase/__init__.py b/lib/python3.4/site-packages/sqlalchemy/dialects/sybase/__init__.py index eb31359..18535ed 100644 --- a/lib/python3.4/site-packages/sqlalchemy/dialects/sybase/__init__.py +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/sybase/__init__.py @@ -1,5 +1,5 @@ # sybase/__init__.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/sybase/base.py b/lib/python3.4/site-packages/sqlalchemy/dialects/sybase/base.py index 43ddc43..1e38534 100644 --- a/lib/python3.4/site-packages/sqlalchemy/dialects/sybase/base.py +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/sybase/base.py @@ -1,5 +1,5 @@ # sybase/base.py -# Copyright (C) 2010-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2010-2016 the SQLAlchemy authors and contributors # # get_select_precolumns(), limit_clause() implementation # copyright (C) 2007 Fisch Asset Management @@ -98,7 +98,6 @@ RESERVED_WORDS = set([ class _SybaseUnitypeMixin(object): - """these types appear to return a buffer object.""" def result_processor(self, dialect, coltype): @@ -147,41 +146,40 @@ class IMAGE(sqltypes.LargeBinary): class SybaseTypeCompiler(compiler.GenericTypeCompiler): - - def visit_large_binary(self, type_): + def visit_large_binary(self, type_, **kw): return self.visit_IMAGE(type_) - def visit_boolean(self, type_): + def visit_boolean(self, type_, **kw): return self.visit_BIT(type_) - def visit_unicode(self, type_): + def visit_unicode(self, type_, **kw): return 
self.visit_NVARCHAR(type_) - def visit_UNICHAR(self, type_): + def visit_UNICHAR(self, type_, **kw): return "UNICHAR(%d)" % type_.length - def visit_UNIVARCHAR(self, type_): + def visit_UNIVARCHAR(self, type_, **kw): return "UNIVARCHAR(%d)" % type_.length - def visit_UNITEXT(self, type_): + def visit_UNITEXT(self, type_, **kw): return "UNITEXT" - def visit_TINYINT(self, type_): + def visit_TINYINT(self, type_, **kw): return "TINYINT" - def visit_IMAGE(self, type_): + def visit_IMAGE(self, type_, **kw): return "IMAGE" - def visit_BIT(self, type_): + def visit_BIT(self, type_, **kw): return "BIT" - def visit_MONEY(self, type_): + def visit_MONEY(self, type_, **kw): return "MONEY" - def visit_SMALLMONEY(self, type_): + def visit_SMALLMONEY(self, type_, **kw): return "SMALLMONEY" - def visit_UNIQUEIDENTIFIER(self, type_): + def visit_UNIQUEIDENTIFIER(self, type_, **kw): return "UNIQUEIDENTIFIER" ischema_names = { @@ -325,28 +323,30 @@ class SybaseSQLCompiler(compiler.SQLCompiler): 'milliseconds': 'millisecond' }) - def get_select_precolumns(self, select): + def get_select_precolumns(self, select, **kw): s = select._distinct and "DISTINCT " or "" # TODO: don't think Sybase supports # bind params for FIRST / TOP - if select._limit: + limit = select._limit + if limit: # if select._limit == 1: - # s += "FIRST " + # s += "FIRST " # else: - # s += "TOP %s " % (select._limit,) - s += "TOP %s " % (select._limit,) - if select._offset: - if not select._limit: + # s += "TOP %s " % (select._limit,) + s += "TOP %s " % (limit,) + offset = select._offset + if offset: + if not limit: # FIXME: sybase doesn't allow an offset without a limit # so use a huge value for TOP here s += "TOP 1000000 " - s += "START AT %s " % (select._offset + 1,) + s += "START AT %s " % (offset + 1,) return s def get_from_hint_text(self, table, text): return text - def limit_clause(self, select): + def limit_clause(self, select, **kw): # Limit in sybase is after the select keyword return "" @@ -375,10 +375,10 @@ class SybaseSQLCompiler(compiler.SQLCompiler): class SybaseDDLCompiler(compiler.DDLCompiler): - def get_column_specification(self, column, **kwargs): colspec = self.preparer.format_column(column) + " " + \ - self.dialect.type_compiler.process(column.type) + self.dialect.type_compiler.process( + column.type, type_expression=column) if column.table is None: raise exc.CompileError( @@ -608,8 +608,8 @@ class SybaseDialect(default.DefaultDialect): FROM sysreferences r JOIN sysobjects o on r.tableid = o.id WHERE r.tableid = :table_id """) - referential_constraints = connection.execute(REFCONSTRAINT_SQL, - table_id=table_id) + referential_constraints = connection.execute( + REFCONSTRAINT_SQL, table_id=table_id).fetchall() REFTABLE_SQL = text(""" SELECT o.name AS name, u.name AS 'schema' @@ -740,10 +740,13 @@ class SybaseDialect(default.DefaultDialect): results.close() constrained_columns = [] - for i in range(1, pks["count"] + 1): - constrained_columns.append(pks["pk_%i" % (i,)]) - return {"constrained_columns": constrained_columns, - "name": pks["name"]} + if pks: + for i in range(1, pks["count"] + 1): + constrained_columns.append(pks["pk_%i" % (i,)]) + return {"constrained_columns": constrained_columns, + "name": pks["name"]} + else: + return {"constrained_columns": [], "name": None} @reflection.cache def get_schema_names(self, connection, **kw): diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/sybase/mxodbc.py b/lib/python3.4/site-packages/sqlalchemy/dialects/sybase/mxodbc.py index 373bea0..60e6510 100644 --- 
a/lib/python3.4/site-packages/sqlalchemy/dialects/sybase/mxodbc.py +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/sybase/mxodbc.py @@ -1,5 +1,5 @@ # sybase/mxodbc.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/sybase/pyodbc.py b/lib/python3.4/site-packages/sqlalchemy/dialects/sybase/pyodbc.py index cb76d13..348ca32 100644 --- a/lib/python3.4/site-packages/sqlalchemy/dialects/sybase/pyodbc.py +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/sybase/pyodbc.py @@ -1,5 +1,5 @@ # sybase/pyodbc.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/python3.4/site-packages/sqlalchemy/dialects/sybase/pysybase.py b/lib/python3.4/site-packages/sqlalchemy/dialects/sybase/pysybase.py index 6843eb4..41ca47f 100644 --- a/lib/python3.4/site-packages/sqlalchemy/dialects/sybase/pysybase.py +++ b/lib/python3.4/site-packages/sqlalchemy/dialects/sybase/pysybase.py @@ -1,5 +1,5 @@ # sybase/pysybase.py -# Copyright (C) 2010-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2010-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/python3.4/site-packages/sqlalchemy/engine/__init__.py b/lib/python3.4/site-packages/sqlalchemy/engine/__init__.py index 9c9e038..09054d9 100644 --- a/lib/python3.4/site-packages/sqlalchemy/engine/__init__.py +++ b/lib/python3.4/site-packages/sqlalchemy/engine/__init__.py @@ -1,5 +1,5 @@ # engine/__init__.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -72,6 +72,7 @@ from .base import ( ) from .result import ( + BaseRowProxy, BufferedColumnResultProxy, BufferedColumnRow, BufferedRowResultProxy, @@ -248,6 +249,34 @@ def create_engine(*args, **kwargs): Microsoft SQL Server. Set this to ``False`` to disable the automatic usage of RETURNING. + :param isolation_level: this string parameter is interpreted by various + dialects in order to affect the transaction isolation level of the + database connection. The parameter essentially accepts some subset of + these string arguments: ``"SERIALIZABLE"``, ``"REPEATABLE_READ"``, + ``"READ_COMMITTED"``, ``"READ_UNCOMMITTED"`` and ``"AUTOCOMMIT"``. + Behavior here varies per backend, and + individual dialects should be consulted directly. + + Note that the isolation level can also be set on a per-:class:`.Connection` + basis as well, using the + :paramref:`.Connection.execution_options.isolation_level` + feature. + + .. seealso:: + + :attr:`.Connection.default_isolation_level` - view default level + + :paramref:`.Connection.execution_options.isolation_level` + - set per :class:`.Connection` isolation level + + :ref:`SQLite Transaction Isolation ` + + :ref:`Postgresql Transaction Isolation ` + + :ref:`MySQL Transaction Isolation ` + + :ref:`session_transaction_isolation` - for the ORM + :param label_length=None: optional integer value which limits the size of dynamically generated column labels to that many characters. If less than 6, labels are generated as @@ -276,6 +305,17 @@ def create_engine(*args, **kwargs): be used instead. 
Can be used for testing of DBAPIs as well as to inject "mock" DBAPI implementations into the :class:`.Engine`. + :param paramstyle=None: The `paramstyle `_ + to use when rendering bound parameters. This style defaults to the + one recommended by the DBAPI itself, which is retrieved from the + ``.paramstyle`` attribute of the DBAPI. However, most DBAPIs accept + more than one paramstyle, and in particular it may be desirable + to change a "named" paramstyle into a "positional" one, or vice versa. + When this attribute is passed, it should be one of the values + ``"qmark"``, ``"numeric"``, ``"named"``, ``"format"`` or + ``"pyformat"``, and should correspond to a parameter style known + to be supported by the DBAPI in use. + :param pool=None: an already-constructed instance of :class:`~sqlalchemy.pool.Pool`, such as a :class:`~sqlalchemy.pool.QueuePool` instance. If non-None, this @@ -349,14 +389,33 @@ def create_engine(*args, **kwargs): def engine_from_config(configuration, prefix='sqlalchemy.', **kwargs): """Create a new Engine instance using a configuration dictionary. - The dictionary is typically produced from a config file where keys - are prefixed, such as sqlalchemy.url, sqlalchemy.echo, etc. The - 'prefix' argument indicates the prefix to be searched for. + The dictionary is typically produced from a config file. + + The keys of interest to ``engine_from_config()`` should be prefixed, e.g. + ``sqlalchemy.url``, ``sqlalchemy.echo``, etc. The 'prefix' argument + indicates the prefix to be searched for. Each matching key (after the + prefix is stripped) is treated as though it were the corresponding keyword + argument to a :func:`.create_engine` call. + + The only required key is (assuming the default prefix) ``sqlalchemy.url``, + which provides the :ref:`database URL `. A select set of keyword arguments will be "coerced" to their - expected type based on string values. In a future release, this - functionality will be expanded and include dialect-specific - arguments. + expected type based on string values. The set of arguments + is extensible per-dialect using the ``engine_config_types`` accessor. + + :param configuration: A dictionary (typically produced from a config file, + but this is not a requirement). Items whose keys start with the value + of 'prefix' will have that prefix stripped, and will then be passed to + :ref:`create_engine`. + + :param prefix: Prefix to match and then strip from keys + in 'configuration'. + + :param kwargs: Each keyword argument to ``engine_from_config()`` itself + overrides the corresponding item taken from the 'configuration' + dictionary. Keyword arguments should *not* be prefixed. + """ options = dict((key[len(prefix):], configuration[key]) diff --git a/lib/python3.4/site-packages/sqlalchemy/engine/base.py b/lib/python3.4/site-packages/sqlalchemy/engine/base.py index cf06896..80edd95 100644 --- a/lib/python3.4/site-packages/sqlalchemy/engine/base.py +++ b/lib/python3.4/site-packages/sqlalchemy/engine/base.py @@ -1,5 +1,5 @@ # engine/base.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -45,7 +45,7 @@ class Connection(Connectable): """ def __init__(self, engine, connection=None, close_with_result=False, - _branch=False, _execution_options=None, + _branch_from=None, _execution_options=None, _dispatch=None, _has_events=None): """Construct a new Connection. 
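
A short usage sketch for ``engine_from_config()`` as documented above
(the configuration values here are hypothetical; string values such as
``"true"`` for ``echo`` are coerced to their expected types)::

    from sqlalchemy import engine_from_config

    config = {
        "sqlalchemy.url": "sqlite:///some.db",
        "sqlalchemy.echo": "true",
    }
    engine = engine_from_config(config, prefix="sqlalchemy.")
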
@@ -57,48 +57,80 @@ class Connection(Connectable): """ self.engine = engine self.dialect = engine.dialect - self.__connection = connection or engine.raw_connection() - self.__transaction = None - self.should_close_with_result = close_with_result - self.__savepoint_seq = 0 - self.__branch = _branch - self.__invalid = False - self.__can_reconnect = True - if _dispatch: - self.dispatch = _dispatch - elif _has_events is None: - # if _has_events is sent explicitly as False, - # then don't join the dispatch of the engine; we don't - # want to handle any of the engine's events in that case. - self.dispatch = self.dispatch._join(engine.dispatch) - self._has_events = _has_events or ( - _has_events is None and engine._has_events) + self.__branch_from = _branch_from + self.__branch = _branch_from is not None - self._echo = self.engine._should_log_info() - if _execution_options: - self._execution_options =\ - engine._execution_options.union(_execution_options) + if _branch_from: + self.__connection = connection + self._execution_options = _execution_options + self._echo = _branch_from._echo + self.should_close_with_result = False + self.dispatch = _dispatch + self._has_events = _branch_from._has_events else: + self.__connection = connection \ + if connection is not None else engine.raw_connection() + self.__transaction = None + self.__savepoint_seq = 0 + self.should_close_with_result = close_with_result + self.__invalid = False + self.__can_reconnect = True + self._echo = self.engine._should_log_info() + + if _has_events is None: + # if _has_events is sent explicitly as False, + # then don't join the dispatch of the engine; we don't + # want to handle any of the engine's events in that case. + self.dispatch = self.dispatch._join(engine.dispatch) + self._has_events = _has_events or ( + _has_events is None and engine._has_events) + + assert not _execution_options self._execution_options = engine._execution_options if self._has_events or self.engine._has_events: - self.dispatch.engine_connect(self, _branch) + self.dispatch.engine_connect(self, self.__branch) def _branch(self): """Return a new Connection which references this Connection's engine and connection; but does not have close_with_result enabled, and also whose close() method does nothing. - This is used to execute "sub" statements within a single execution, - usually an INSERT statement. + The Core uses this very sparingly, only in the case of + custom SQL default functions that are to be INSERTed as the + primary key of a row where we need to get the value back, so we have + to invoke it distinctly - this is a very uncommon case. + + Userland code accesses _branch() when the connect() or + contextual_connect() methods are called. The branched connection + acts as much as possible like the parent, except that it stays + connected when a close() event occurs. + + """ + if self.__branch_from: + return self.__branch_from._branch() + else: + return self.engine._connection_cls( + self.engine, + self.__connection, + _branch_from=self, + _execution_options=self._execution_options, + _has_events=self._has_events, + _dispatch=self.dispatch) + + @property + def _root(self): + """return the 'root' connection. + + Returns 'self' if this connection is not a branch, else + returns the root connection from which we ultimately branched. 
+ """ - return self.engine._connection_cls( - self.engine, - self.__connection, - _branch=True, - _has_events=self._has_events, - _dispatch=self.dispatch) + if self.__branch_from: + return self.__branch_from + else: + return self def _clone(self): """Create a shallow copy of this Connection. @@ -169,14 +201,19 @@ class Connection(Connectable): used by the ORM internally supersedes a cache dictionary specified here. - :param isolation_level: Available on: Connection. + :param isolation_level: Available on: :class:`.Connection`. Set the transaction isolation level for - the lifespan of this connection. Valid values include - those string values accepted by the ``isolation_level`` - parameter passed to :func:`.create_engine`, and are - database specific, including those for :ref:`sqlite_toplevel`, - :ref:`postgresql_toplevel` - see those dialect's documentation - for further info. + the lifespan of this :class:`.Connection` object (*not* the + underyling DBAPI connection, for which the level is reset + to its original setting upon termination of this + :class:`.Connection` object). + + Valid values include + those string values accepted by the + :paramref:`.create_engine.isolation_level` + parameter passed to :func:`.create_engine`. These levels are + semi-database specific; see individual dialect documentation for + valid levels. Note that this option necessarily affects the underlying DBAPI connection for the lifespan of the originating @@ -185,6 +222,41 @@ class Connection(Connectable): is returned to the connection pool, i.e. the :meth:`.Connection.close` method is called. + .. warning:: The ``isolation_level`` execution option should + **not** be used when a transaction is already established, that + is, the :meth:`.Connection.begin` method or similar has been + called. A database cannot change the isolation level on a + transaction in progress, and different DBAPIs and/or + SQLAlchemy dialects may implicitly roll back or commit + the transaction, or not affect the connection at all. + + .. versionchanged:: 0.9.9 A warning is emitted when the + ``isolation_level`` execution option is used after a + transaction has been started with :meth:`.Connection.begin` + or similar. + + .. note:: The ``isolation_level`` execution option is implicitly + reset if the :class:`.Connection` is invalidated, e.g. via + the :meth:`.Connection.invalidate` method, or if a + disconnection error occurs. The new connection produced after + the invalidation will not have the isolation level re-applied + to it automatically. + + .. seealso:: + + :paramref:`.create_engine.isolation_level` + - set per :class:`.Engine` isolation level + + :meth:`.Connection.get_isolation_level` - view current level + + :ref:`SQLite Transaction Isolation ` + + :ref:`Postgresql Transaction Isolation ` + + :ref:`MySQL Transaction Isolation ` + + :ref:`session_transaction_isolation` - for the ORM + :param no_parameters: When ``True``, if the final parameter list or dictionary is totally empty, will invoke the statement on the cursor as ``cursor.execute(statement)``, @@ -224,24 +296,101 @@ class Connection(Connectable): def invalidated(self): """Return True if this connection was invalidated.""" - return self.__invalid + return self._root.__invalid @property def connection(self): - "The underlying DB-API connection managed by this Connection." + """The underlying DB-API connection managed by this Connection. + + .. 
seealso:: + + + :ref:`dbapi_connections` + + """ try: return self.__connection except AttributeError: - return self._revalidate_connection() + try: + return self._revalidate_connection() + except Exception as e: + self._handle_dbapi_exception(e, None, None, None, None) + + def get_isolation_level(self): + """Return the current isolation level assigned to this + :class:`.Connection`. + + This will typically be the default isolation level as determined + by the dialect, unless if the + :paramref:`.Connection.execution_options.isolation_level` + feature has been used to alter the isolation level on a + per-:class:`.Connection` basis. + + This attribute will typically perform a live SQL operation in order + to procure the current isolation level, so the value returned is the + actual level on the underlying DBAPI connection regardless of how + this state was set. Compare to the + :attr:`.Connection.default_isolation_level` accessor + which returns the dialect-level setting without performing a SQL + query. + + .. versionadded:: 0.9.9 + + .. seealso:: + + :attr:`.Connection.default_isolation_level` - view default level + + :paramref:`.create_engine.isolation_level` + - set per :class:`.Engine` isolation level + + :paramref:`.Connection.execution_options.isolation_level` + - set per :class:`.Connection` isolation level + + """ + try: + return self.dialect.get_isolation_level(self.connection) + except Exception as e: + self._handle_dbapi_exception(e, None, None, None, None) + + @property + def default_isolation_level(self): + """The default isolation level assigned to this :class:`.Connection`. + + This is the isolation level setting that the :class:`.Connection` + has when first procured via the :meth:`.Engine.connect` method. + This level stays in place until the + :paramref:`.Connection.execution_options.isolation_level` is used + to change the setting on a per-:class:`.Connection` basis. + + Unlike :meth:`.Connection.get_isolation_level`, this attribute is set + ahead of time from the first connection procured by the dialect, + so SQL query is not invoked when this accessor is called. + + .. versionadded:: 0.9.9 + + .. seealso:: + + :meth:`.Connection.get_isolation_level` - view current level + + :paramref:`.create_engine.isolation_level` + - set per :class:`.Engine` isolation level + + :paramref:`.Connection.execution_options.isolation_level` + - set per :class:`.Connection` isolation level + + """ + return self.dialect.default_isolation_level def _revalidate_connection(self): + if self.__branch_from: + return self.__branch_from._revalidate_connection() if self.__can_reconnect and self.__invalid: if self.__transaction is not None: raise exc.InvalidRequestError( "Can't reconnect until invalid " "transaction is rolled back") - self.__connection = self.engine.raw_connection() + self.__connection = self.engine.raw_connection(_connection=self) self.__invalid = False return self.__connection raise exc.ResourceClosedError("This Connection is closed") @@ -343,16 +492,17 @@ class Connection(Connectable): :ref:`pool_connection_invalidation` """ + if self.invalidated: return if self.closed: raise exc.ResourceClosedError("This Connection is closed") - if self._connection_is_valid: - self.__connection.invalidate(exception) - del self.__connection - self.__invalid = True + if self._root._connection_is_valid: + self._root.__connection.invalidate(exception) + del self._root.__connection + self._root.__invalid = True def detach(self): """Detach the underlying DB-API connection from its connection pool. 
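
Tying together the isolation accessors described above, a short sketch
(using the SQLite dialect's accepted values; the file name is hypothetical)::

    from sqlalchemy import create_engine

    eng = create_engine("sqlite:///some.db")
    conn = eng.connect()
    print(conn.default_isolation_level)   # no SQL emitted

    conn = conn.execution_options(isolation_level="READ UNCOMMITTED")
    print(conn.get_isolation_level())     # queries the database directly
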
@@ -415,6 +565,8 @@ class Connection(Connectable): :class:`.Engine`. """ + if self.__branch_from: + return self.__branch_from.begin() if self.__transaction is None: self.__transaction = RootTransaction(self) @@ -436,6 +588,9 @@ class Connection(Connectable): See also :meth:`.Connection.begin`, :meth:`.Connection.begin_twophase`. """ + if self.__branch_from: + return self.__branch_from.begin_nested() + if self.__transaction is None: self.__transaction = RootTransaction(self) else: @@ -459,6 +614,9 @@ class Connection(Connectable): """ + if self.__branch_from: + return self.__branch_from.begin_twophase(xid=xid) + if self.__transaction is not None: raise exc.InvalidRequestError( "Cannot start a two phase transaction when a transaction " @@ -479,10 +637,11 @@ class Connection(Connectable): def in_transaction(self): """Return True if a transaction is in progress.""" - - return self.__transaction is not None + return self._root.__transaction is not None def _begin_impl(self, transaction): + assert not self.__branch_from + if self._echo: self.engine.logger.info("BEGIN (implicit)") @@ -497,6 +656,8 @@ class Connection(Connectable): self._handle_dbapi_exception(e, None, None, None, None) def _rollback_impl(self): + assert not self.__branch_from + if self._has_events or self.engine._has_events: self.dispatch.rollback(self) @@ -516,6 +677,8 @@ class Connection(Connectable): self.__transaction = None def _commit_impl(self, autocommit=False): + assert not self.__branch_from + if self._has_events or self.engine._has_events: self.dispatch.commit(self) @@ -532,6 +695,8 @@ class Connection(Connectable): self.__transaction = None def _savepoint_impl(self, name=None): + assert not self.__branch_from + if self._has_events or self.engine._has_events: self.dispatch.savepoint(self, name) @@ -543,6 +708,8 @@ class Connection(Connectable): return name def _rollback_to_savepoint_impl(self, name, context): + assert not self.__branch_from + if self._has_events or self.engine._has_events: self.dispatch.rollback_savepoint(self, name, context) @@ -551,6 +718,8 @@ class Connection(Connectable): self.__transaction = context def _release_savepoint_impl(self, name, context): + assert not self.__branch_from + if self._has_events or self.engine._has_events: self.dispatch.release_savepoint(self, name, context) @@ -559,6 +728,8 @@ class Connection(Connectable): self.__transaction = context def _begin_twophase_impl(self, transaction): + assert not self.__branch_from + if self._echo: self.engine.logger.info("BEGIN TWOPHASE (implicit)") if self._has_events or self.engine._has_events: @@ -571,6 +742,8 @@ class Connection(Connectable): self.connection._reset_agent = transaction def _prepare_twophase_impl(self, xid): + assert not self.__branch_from + if self._has_events or self.engine._has_events: self.dispatch.prepare_twophase(self, xid) @@ -579,6 +752,8 @@ class Connection(Connectable): self.engine.dialect.do_prepare_twophase(self, xid) def _rollback_twophase_impl(self, xid, is_prepared): + assert not self.__branch_from + if self._has_events or self.engine._has_events: self.dispatch.rollback_twophase(self, xid, is_prepared) @@ -595,6 +770,8 @@ class Connection(Connectable): self.__transaction = None def _commit_twophase_impl(self, xid, is_prepared): + assert not self.__branch_from + if self._has_events or self.engine._has_events: self.dispatch.commit_twophase(self, xid, is_prepared) @@ -610,8 +787,8 @@ class Connection(Connectable): self.__transaction = None def _autorollback(self): - if not self.in_transaction(): - 
self._rollback_impl() + if not self._root.in_transaction(): + self._root._rollback_impl() def close(self): """Close this :class:`.Connection`. @@ -632,13 +809,21 @@ class Connection(Connectable): and will allow no further operations. """ + if self.__branch_from: + try: + del self.__connection + except AttributeError: + pass + finally: + self.__can_reconnect = False + return try: conn = self.__connection except AttributeError: pass else: - if not self.__branch: - conn.close() + + conn.close() if conn._reset_agent is self.__transaction: conn._reset_agent = None @@ -670,7 +855,7 @@ class Connection(Connectable): a subclass of :class:`.Executable`, such as a :func:`~.expression.select` construct * a :class:`.FunctionElement`, such as that generated - by :attr:`.func`, will be automatically wrapped in + by :data:`.func`, will be automatically wrapped in a SELECT statement, which is then executed. * a :class:`.DDLElement` object * a :class:`.DefaultGenerator` object @@ -798,17 +983,16 @@ class Connection(Connectable): distilled_params = _distill_params(multiparams, params) if distilled_params: # note this is usually dict but we support RowProxy - # as well; but dict.keys() as an iterator is OK + # as well; but dict.keys() as an iterable is OK keys = distilled_params[0].keys() else: keys = [] dialect = self.dialect if 'compiled_cache' in self._execution_options: - key = dialect, elem, tuple(keys), len(distilled_params) > 1 - if key in self._execution_options['compiled_cache']: - compiled_sql = self._execution_options['compiled_cache'][key] - else: + key = dialect, elem, tuple(sorted(keys)), len(distilled_params) > 1 + compiled_sql = self._execution_options['compiled_cache'].get(key) + if compiled_sql is None: compiled_sql = elem.compile( dialect=dialect, column_keys=keys, inline=len(distilled_params) > 1) @@ -888,9 +1072,10 @@ class Connection(Connectable): context = constructor(dialect, self, conn, *args) except Exception as e: - self._handle_dbapi_exception(e, - util.text_type(statement), parameters, - None, None) + self._handle_dbapi_exception( + e, + util.text_type(statement), parameters, + None, None) if context.compiled: context.pre_exec() @@ -914,36 +1099,39 @@ class Connection(Connectable): "%r", sql_util._repr_params(parameters, batches=10) ) + + evt_handled = False try: if context.executemany: - for fn in () if not self.dialect._has_events \ - else self.dialect.dispatch.do_executemany: - if fn(cursor, statement, parameters, context): - break - else: + if self.dialect._has_events: + for fn in self.dialect.dispatch.do_executemany: + if fn(cursor, statement, parameters, context): + evt_handled = True + break + if not evt_handled: self.dialect.do_executemany( cursor, statement, parameters, context) - elif not parameters and context.no_parameters: - for fn in () if not self.dialect._has_events \ - else self.dialect.dispatch.do_execute_no_params: - if fn(cursor, statement, context): - break - else: + if self.dialect._has_events: + for fn in self.dialect.dispatch.do_execute_no_params: + if fn(cursor, statement, context): + evt_handled = True + break + if not evt_handled: self.dialect.do_execute_no_params( cursor, statement, context) - else: - for fn in () if not self.dialect._has_events \ - else self.dialect.dispatch.do_execute: - if fn(cursor, statement, parameters, context): - break - else: + if self.dialect._has_events: + for fn in self.dialect.dispatch.do_execute: + if fn(cursor, statement, parameters, context): + evt_handled = True + break + if not evt_handled: self.dialect.do_execute( 
cursor, statement, @@ -967,36 +1155,17 @@ class Connection(Connectable): if context.compiled: context.post_exec() - if context.isinsert and not context.executemany: - context.post_insert() + if context.is_crud or context.is_text: + result = context._setup_crud_result_proxy() + else: + result = context.get_result_proxy() + if result._metadata is None: + result._soft_close(_autoclose_connection=False) - # create a resultproxy, get rowcount/implicit RETURNING - # rows, close cursor if no further results pending - result = context.get_result_proxy() - if context.isinsert: - if context._is_implicit_returning: - context._fetch_implicit_returning(result) - result.close(_autoclose_connection=False) - result._metadata = None - elif not context._is_explicit_returning: - result.close(_autoclose_connection=False) - result._metadata = None - elif context.isupdate and context._is_implicit_returning: - context._fetch_implicit_update_returning(result) - result.close(_autoclose_connection=False) - result._metadata = None + if context.should_autocommit and self._root.__transaction is None: + self._root._commit_impl(autocommit=True) - elif result._metadata is None: - # no results, get rowcount - # (which requires open cursor on some drivers - # such as kintersbasdb, mxodbc), - result.rowcount - result.close(_autoclose_connection=False) - - if self.__transaction is None and context.should_autocommit: - self._commit_impl(autocommit=True) - - if result.closed and self.should_close_with_result: + if result._soft_closed and self.should_close_with_result: self.close() return result @@ -1055,8 +1224,6 @@ class Connection(Connectable): """ try: cursor.close() - except (SystemExit, KeyboardInterrupt): - raise except Exception: # log the error through the connection pool's logger. 
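
The reworked dispatch above consults the dialect-level ``do_execute``, ``do_executemany`` and ``do_execute_no_params`` events before falling back to the dialect itself; a listener that returns a true value marks the execution as handled. A minimal sketch of such a hook, assuming an illustrative in-memory SQLite engine:

```python
from sqlalchemy import create_engine, event

engine = create_engine("sqlite://")  # illustrative engine

@event.listens_for(engine, "do_execute")
def do_execute(cursor, statement, parameters, context):
    # returning a true value marks the execution as handled,
    # so dialect.do_execute() is skipped afterwards
    cursor.execute(statement, parameters)
    return True
```
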
self.engine.pool.logger.error( @@ -1071,7 +1238,6 @@ class Connection(Connectable): parameters, cursor, context): - exc_info = sys.exc_info() if context and context.exception is None: @@ -1081,16 +1247,22 @@ class Connection(Connectable): self._is_disconnect = \ isinstance(e, self.dialect.dbapi.Error) and \ not self.closed and \ - self.dialect.is_disconnect(e, self.__connection, cursor) + self.dialect.is_disconnect( + e, + self.__connection if not self.invalidated else None, + cursor) if context: context.is_disconnect = self._is_disconnect + invalidate_pool_on_disconnect = True + if self._reentrant_error: util.raise_from_cause( exc.DBAPIError.instance(statement, parameters, e, - self.dialect.dbapi.Error), + self.dialect.dbapi.Error, + dialect=self.dialect), exc_info ) self._reentrant_error = True @@ -1106,13 +1278,16 @@ class Connection(Connectable): parameters, e, self.dialect.dbapi.Error, - connection_invalidated=self._is_disconnect) + connection_invalidated=self._is_disconnect, + dialect=self.dialect) else: sqlalchemy_exception = None newraise = None - if self._has_events or self.engine._has_events: + if (self._has_events or self.engine._has_events) and \ + not self._execution_options.get( + 'skip_user_error_events', False): # legacy dbapi_error event if should_wrap and context: self.dispatch.dbapi_error(self, @@ -1124,7 +1299,8 @@ class Connection(Connectable): # new handle_error event ctx = ExceptionContextImpl( - e, sqlalchemy_exception, self, cursor, statement, + e, sqlalchemy_exception, self.engine, + self, cursor, statement, parameters, context, self._is_disconnect) for fn in self.dispatch.handle_error: @@ -1144,6 +1320,11 @@ class Connection(Connectable): sqlalchemy_exception.connection_invalidated = \ self._is_disconnect = ctx.is_disconnect + # set up potentially user-defined value for + # invalidate pool. 
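
The ``handle_error`` dispatch loop above lets a listener return a replacement exception, which is then raised in place of the original and chained to it via ``raise_from_cause()``. A hedged sketch of such a listener; ``engine`` and ``AppDatabaseError`` are illustrative stand-ins:

```python
from sqlalchemy import create_engine, event

engine = create_engine("sqlite://")       # illustrative engine

class AppDatabaseError(Exception):        # illustrative app-level exception
    pass

@event.listens_for(engine, "handle_error")
def translate_exception(context):
    # an exception returned here becomes the raised exception,
    # with the original chained underneath
    if context.is_disconnect:
        return AppDatabaseError("database connection was lost")
```
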
+ invalidate_pool_on_disconnect = \ + ctx.invalidate_pool_on_disconnect + if should_wrap and context: context.handle_dbapi_exception(e) @@ -1166,12 +1347,66 @@ class Connection(Connectable): del self._reentrant_error if self._is_disconnect: del self._is_disconnect - dbapi_conn_wrapper = self.connection - self.engine.pool._invalidate(dbapi_conn_wrapper, e) - self.invalidate(e) + if not self.invalidated: + dbapi_conn_wrapper = self.__connection + if invalidate_pool_on_disconnect: + self.engine.pool._invalidate(dbapi_conn_wrapper, e) + self.invalidate(e) if self.should_close_with_result: self.close() + @classmethod + def _handle_dbapi_exception_noconnection(cls, e, dialect, engine): + + exc_info = sys.exc_info() + + is_disconnect = dialect.is_disconnect(e, None, None) + + should_wrap = isinstance(e, dialect.dbapi.Error) + + if should_wrap: + sqlalchemy_exception = exc.DBAPIError.instance( + None, + None, + e, + dialect.dbapi.Error, + connection_invalidated=is_disconnect) + else: + sqlalchemy_exception = None + + newraise = None + + if engine._has_events: + ctx = ExceptionContextImpl( + e, sqlalchemy_exception, engine, None, None, None, + None, None, is_disconnect) + for fn in engine.dispatch.handle_error: + try: + # handler returns an exception; + # call next handler in a chain + per_fn = fn(ctx) + if per_fn is not None: + ctx.chained_exception = newraise = per_fn + except Exception as _raised: + # handler raises an exception - stop processing + newraise = _raised + break + + if sqlalchemy_exception and \ + is_disconnect != ctx.is_disconnect: + sqlalchemy_exception.connection_invalidated = \ + is_disconnect = ctx.is_disconnect + + if newraise: + util.raise_from_cause(newraise, exc_info) + elif should_wrap: + util.raise_from_cause( + sqlalchemy_exception, + exc_info + ) + else: + util.reraise(*exc_info) + def default_schema_name(self): return self.engine.dialect.get_default_schema_name(self) @@ -1250,8 +1485,9 @@ class ExceptionContextImpl(ExceptionContext): """Implement the :class:`.ExceptionContext` interface.""" def __init__(self, exception, sqlalchemy_exception, - connection, cursor, statement, parameters, + engine, connection, cursor, statement, parameters, context, is_disconnect): + self.engine = engine self.connection = connection self.sqlalchemy_exception = sqlalchemy_exception self.original_exception = exception @@ -1295,9 +1531,13 @@ class Transaction(object): def __init__(self, connection, parent): self.connection = connection - self._parent = parent or self + self._actual_parent = parent self.is_active = True + @property + def _parent(self): + return self._actual_parent or self + def close(self): """Close this :class:`.Transaction`. @@ -1575,29 +1815,28 @@ class Engine(Connectable, log.Identified): def dispose(self): """Dispose of the connection pool used by this :class:`.Engine`. + This has the effect of fully closing all **currently checked in** + database connections. Connections that are still checked out + will **not** be closed, however they will no longer be associated + with this :class:`.Engine`, so when they are closed individually, + eventually the :class:`.Pool` which they are associated with will + be garbage collected and they will be closed out fully, if + not already closed on checkin. + A new connection pool is created immediately after the old one has been disposed. This new pool, like all SQLAlchemy connection pools, does not make any actual connections to the database until one is - first requested. 
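
The ``invalidate_pool_on_disconnect`` value captured above can be switched off from within the same event, so that a disconnect invalidates only the failing connection rather than the whole pool. A minimal sketch, again using an illustrative engine:

```python
from sqlalchemy import create_engine, event

engine = create_engine("sqlite://")  # illustrative engine

@event.listens_for(engine, "handle_error")
def keep_pool_on_disconnect(context):
    # invalidate only the connection that failed; leave the rest of
    # the pool alone for a custom recovery scheme
    if context.is_disconnect:
        context.invalidate_pool_on_disconnect = False
```
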
+ first requested, so as long as the :class:`.Engine` isn't used again, + no new connections will be made. - This method has two general use cases: + .. seealso:: - * When a dropped connection is detected, it is assumed that all - connections held by the pool are potentially dropped, and - the entire pool is replaced. - - * An application may want to use :meth:`dispose` within a test - suite that is creating multiple engines. - - It is critical to note that :meth:`dispose` does **not** guarantee - that the application will release all open database connections - only - those connections that are checked into the pool are closed. - Connections which remain checked out or have been detached from - the engine are not affected. + :ref:`engine_disposal` """ self.pool.dispose() self.pool = self.pool.recreate() + self.dispatch.engine_disposed(self) def _execute_default(self, default): with self.contextual_connect() as conn: @@ -1795,10 +2034,11 @@ class Engine(Connectable, log.Identified): """ - return self._connection_cls(self, - self.pool.connect(), - close_with_result=close_with_result, - **kwargs) + return self._connection_cls( + self, + self._wrap_pool_connect(self.pool.connect, None), + close_with_result=close_with_result, + **kwargs) def table_names(self, schema=None, connection=None): """Return a list of all table names available in the database. @@ -1828,7 +2068,18 @@ class Engine(Connectable, log.Identified): """ return self.run_callable(self.dialect.has_table, table_name, schema) - def raw_connection(self): + def _wrap_pool_connect(self, fn, connection): + dialect = self.dialect + try: + return fn() + except dialect.dbapi.Error as e: + if connection is None: + Connection._handle_dbapi_exception_noconnection( + e, dialect, self) + else: + util.reraise(*sys.exc_info()) + + def raw_connection(self, _connection=None): """Return a "raw" DBAPI connection from the connection pool. The returned object is a proxied version of the DBAPI @@ -1839,13 +2090,18 @@ class Engine(Connectable, log.Identified): for real. This method provides direct DBAPI connection access for - special situations. In most situations, the :class:`.Connection` - object should be used, which is procured using the - :meth:`.Engine.connect` method. + special situations when the API provided by :class:`.Connection` + is not needed. When a :class:`.Connection` object is already + present, the DBAPI connection is available using + the :attr:`.Connection.connection` accessor. + + .. 
seealso:: + + :ref:`dbapi_connections` """ - - return self.pool.unique_connection() + return self._wrap_pool_connect( + self.pool.unique_connection, _connection) class OptionEngine(Engine): diff --git a/lib/python3.4/site-packages/sqlalchemy/engine/default.py b/lib/python3.4/site-packages/sqlalchemy/engine/default.py index 2fece76..9798d13 100644 --- a/lib/python3.4/site-packages/sqlalchemy/engine/default.py +++ b/lib/python3.4/site-packages/sqlalchemy/engine/default.py @@ -1,5 +1,5 @@ # engine/default.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -61,14 +61,13 @@ class DefaultDialect(interfaces.Dialect): engine_config_types = util.immutabledict([ ('convert_unicode', util.bool_or_str('force')), - ('pool_timeout', int), + ('pool_timeout', util.asint), ('echo', util.bool_or_str('debug')), ('echo_pool', util.bool_or_str('debug')), - ('pool_recycle', int), - ('pool_size', int), - ('max_overflow', int), - ('pool_threadlocal', bool), - ('use_native_unicode', bool), + ('pool_recycle', util.asint), + ('pool_size', util.asint), + ('max_overflow', util.asint), + ('pool_threadlocal', util.asbool), ]) # if the NUMERIC type @@ -157,6 +156,15 @@ class DefaultDialect(interfaces.Dialect): reflection_options = () + dbapi_exception_translation_map = util.immutabledict() + """mapping used in the extremely unusual case that a DBAPI's + published exceptions don't actually have the __name__ that they + are linked towards. + + .. versionadded:: 1.0.5 + + """ + def __init__(self, convert_unicode=False, encoding='utf-8', paramstyle=None, dbapi=None, implicit_returning=None, @@ -395,6 +403,12 @@ class DefaultDialect(interfaces.Dialect): self._set_connection_isolation(connection, opts['isolation_level']) def _set_connection_isolation(self, connection, level): + if connection.in_transaction(): + util.warn( + "Connection is already established with a Transaction; " + "setting isolation_level may implicitly rollback or commit " + "the existing transaction, or have no effect until " + "next transaction") self.set_isolation_level(connection.connection, level) connection.connection._connection_record.\ finalize_callback.append(self.reset_isolation_level) @@ -452,14 +466,13 @@ class DefaultExecutionContext(interfaces.ExecutionContext): isinsert = False isupdate = False isdelete = False + is_crud = False + is_text = False isddl = False executemany = False - result_map = None compiled = None statement = None - postfetch_cols = None - prefetch_cols = None - returning_cols = None + result_column_struct = None _is_implicit_returning = False _is_explicit_returning = False @@ -472,10 +485,9 @@ class DefaultExecutionContext(interfaces.ExecutionContext): """Initialize execution context for a DDLElement construct.""" self = cls.__new__(cls) - self.dialect = dialect self.root_connection = connection self._dbapi_connection = dbapi_connection - self.engine = connection.engine + self.dialect = connection.dialect self.compiled = compiled = compiled_ddl self.isddl = True @@ -507,25 +519,20 @@ class DefaultExecutionContext(interfaces.ExecutionContext): """Initialize execution context for a Compiled construct.""" self = cls.__new__(cls) - self.dialect = dialect self.root_connection = connection self._dbapi_connection = dbapi_connection - self.engine = connection.engine + self.dialect = connection.dialect self.compiled = compiled if not compiled.can_execute: raise exc.ArgumentError("Not 
an executable clause") - self.execution_options = compiled.statement._execution_options - if connection._execution_options: - self.execution_options = dict(self.execution_options) - self.execution_options.update(connection._execution_options) + self.execution_options = compiled.statement._execution_options.union( + connection._execution_options) - # compiled clauseelement. process bind params, process table defaults, - # track collections used by ResultProxy to target and process results - - self.result_map = compiled.result_map + self.result_column_struct = ( + compiled._result_columns, compiled._ordered_columns) self.unicode_statement = util.text_type(compiled) if not dialect.supports_unicode_statements: @@ -537,11 +544,7 @@ class DefaultExecutionContext(interfaces.ExecutionContext): self.isinsert = compiled.isinsert self.isupdate = compiled.isupdate self.isdelete = compiled.isdelete - - if self.isinsert or self.isupdate or self.isdelete: - self._is_explicit_returning = bool(compiled.statement._returning) - self._is_implicit_returning = bool( - compiled.returning and not compiled.statement._returning) + self.is_text = compiled.isplaintext if not parameters: self.compiled_parameters = [compiled.construct_params()] @@ -553,11 +556,19 @@ class DefaultExecutionContext(interfaces.ExecutionContext): self.executemany = len(parameters) > 1 self.cursor = self.create_cursor() - if self.isinsert or self.isupdate: - self.postfetch_cols = self.compiled.postfetch - self.prefetch_cols = self.compiled.prefetch - self.returning_cols = self.compiled.returning - self.__process_defaults() + + if self.isinsert or self.isupdate or self.isdelete: + self.is_crud = True + self._is_explicit_returning = bool(compiled.statement._returning) + self._is_implicit_returning = bool( + compiled.returning and not compiled.statement._returning) + + if not self.isdelete: + if self.compiled.prefetch: + if self.executemany: + self._process_executemany_defaults() + else: + self._process_executesingle_defaults() processors = compiled._bind_processors @@ -577,21 +588,28 @@ class DefaultExecutionContext(interfaces.ExecutionContext): else: encode = not dialect.supports_unicode_statements for compiled_params in self.compiled_parameters: - param = {} + if encode: - for key in compiled_params: - if key in processors: - param[dialect._encoder(key)[0]] = \ - processors[key](compiled_params[key]) - else: - param[dialect._encoder(key)[0]] = \ - compiled_params[key] + param = dict( + ( + dialect._encoder(key)[0], + processors[key](compiled_params[key]) + if key in processors + else compiled_params[key] + ) + for key in compiled_params + ) else: - for key in compiled_params: - if key in processors: - param[key] = processors[key](compiled_params[key]) - else: - param[key] = compiled_params[key] + param = dict( + ( + key, + processors[key](compiled_params[key]) + if key in processors + else compiled_params[key] + ) + for key in compiled_params + ) + parameters.append(param) self.parameters = dialect.execute_sequence_format(parameters) @@ -603,10 +621,10 @@ class DefaultExecutionContext(interfaces.ExecutionContext): """Initialize execution context for a string SQL statement.""" self = cls.__new__(cls) - self.dialect = dialect self.root_connection = connection self._dbapi_connection = dbapi_connection - self.engine = connection.engine + self.dialect = connection.dialect + self.is_text = True # plain text statement self.execution_options = connection._execution_options @@ -647,21 +665,32 @@ class 
DefaultExecutionContext(interfaces.ExecutionContext):
         """Initialize execution context for a ColumnDefault construct."""
 
         self = cls.__new__(cls)
-        self.dialect = dialect
         self.root_connection = connection
         self._dbapi_connection = dbapi_connection
-        self.engine = connection.engine
+        self.dialect = connection.dialect
 
         self.execution_options = connection._execution_options
 
         self.cursor = self.create_cursor()
         return self
 
     @util.memoized_property
-    def no_parameters(self):
-        return self.execution_options.get("no_parameters", False)
+    def engine(self):
+        return self.root_connection.engine
 
     @util.memoized_property
-    def is_crud(self):
-        return self.isinsert or self.isupdate or self.isdelete
+    def postfetch_cols(self):
+        return self.compiled.postfetch
+
+    @util.memoized_property
+    def prefetch_cols(self):
+        return self.compiled.prefetch
+
+    @util.memoized_property
+    def returning_cols(self):
+        return self.compiled.returning
+
+    @util.memoized_property
+    def no_parameters(self):
+        return self.execution_options.get("no_parameters", False)
 
     @util.memoized_property
     def should_autocommit(self):
@@ -778,16 +807,51 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
     def supports_sane_multi_rowcount(self):
         return self.dialect.supports_sane_multi_rowcount
 
-    def post_insert(self):
-        if not self._is_implicit_returning and \
-                not self._is_explicit_returning and \
-                not self.compiled.inline and \
-                self.dialect.postfetch_lastrowid and \
-                (not self.inserted_primary_key or
-                 None in self.inserted_primary_key):
+    def _setup_crud_result_proxy(self):
+        if self.isinsert and \
+                not self.executemany:
+            if not self._is_implicit_returning and \
+                    not self.compiled.inline and \
+                    self.dialect.postfetch_lastrowid:
 
-            table = self.compiled.statement.table
-            lastrowid = self.get_lastrowid()
+                self._setup_ins_pk_from_lastrowid()
+
+            elif not self._is_implicit_returning:
+                self._setup_ins_pk_from_empty()
+
+        result = self.get_result_proxy()
+
+        if self.isinsert:
+            if self._is_implicit_returning:
+                row = result.fetchone()
+                self.returned_defaults = row
+                self._setup_ins_pk_from_implicit_returning(row)
+                result._soft_close(_autoclose_connection=False)
+                result._metadata = None
+            elif not self._is_explicit_returning:
+                result._soft_close(_autoclose_connection=False)
+                result._metadata = None
+        elif self.isupdate and self._is_implicit_returning:
+            row = result.fetchone()
+            self.returned_defaults = row
+            result._soft_close(_autoclose_connection=False)
+            result._metadata = None
+
+        elif result._metadata is None:
+            # no results, get rowcount
+            # (which requires open cursor on some drivers
+            # such as kintersbasdb, mxodbc)
+            result.rowcount
+            result._soft_close(_autoclose_connection=False)
+        return result
+
+    def _setup_ins_pk_from_lastrowid(self):
+        key_getter = self.compiled._key_getters_for_crud_column[2]
+        table = self.compiled.statement.table
+        compiled_params = self.compiled_parameters[0]
+
+        lastrowid = self.get_lastrowid()
+        if lastrowid is not None:
             autoinc_col = table._autoincrement_column
             if autoinc_col is not None:
                 # apply type post processors to the lastrowid
@@ -795,35 +859,44 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
                     self.dialect, None)
                 if proc is not None:
                     lastrowid = proc(lastrowid)
-            self.inserted_primary_key = [
-                lastrowid if c is autoinc_col else v
-                for c, v in zip(
-                    table.primary_key,
-                    self.inserted_primary_key)
+            self.inserted_primary_key = [
+                lastrowid if c is autoinc_col else
+                compiled_params.get(key_getter(c), None)
+                for c in table.primary_key
+            ]
+        else:
+            # don't have a usable lastrowid, so
+            # do the same as 
_setup_ins_pk_from_empty + self.inserted_primary_key = [ + compiled_params.get(key_getter(c), None) + for c in table.primary_key ] - def _fetch_implicit_returning(self, resultproxy): + def _setup_ins_pk_from_empty(self): + key_getter = self.compiled._key_getters_for_crud_column[2] table = self.compiled.statement.table - row = resultproxy.fetchone() + compiled_params = self.compiled_parameters[0] + self.inserted_primary_key = [ + compiled_params.get(key_getter(c), None) + for c in table.primary_key + ] - ipk = [] - for c, v in zip(table.primary_key, self.inserted_primary_key): - if v is not None: - ipk.append(v) - else: - ipk.append(row[c]) + def _setup_ins_pk_from_implicit_returning(self, row): + key_getter = self.compiled._key_getters_for_crud_column[2] + table = self.compiled.statement.table + compiled_params = self.compiled_parameters[0] - self.inserted_primary_key = ipk - self.returned_defaults = row - - def _fetch_implicit_update_returning(self, resultproxy): - row = resultproxy.fetchone() - self.returned_defaults = row + self.inserted_primary_key = [ + row[col] if value is None else value + for col, value in [ + (col, compiled_params.get(key_getter(col), None)) + for col in table.primary_key + ] + ] def lastrow_has_defaults(self): return (self.isinsert or self.isupdate) and \ - bool(self.postfetch_cols) + bool(self.compiled.postfetch) def set_input_sizes(self, translate=None, exclude_types=None): """Given a cursor and ClauseParameters, call the appropriate @@ -901,58 +974,53 @@ class DefaultExecutionContext(interfaces.ExecutionContext): else: return self._exec_default(column.onupdate, column.type) - def __process_defaults(self): - """Generate default values for compiled insert/update statements, - and generate inserted_primary_key collection. 
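
The three ``_setup_ins_pk_from_*`` paths above all feed the public ``inserted_primary_key`` attribute, whether the value came from ``lastrowid``, an implicit RETURNING, or the statement's own parameters. A small usage sketch against an assumed in-memory SQLite table:

```python
from sqlalchemy import (
    create_engine, MetaData, Table, Column, Integer, String)

engine = create_engine("sqlite://")  # illustrative engine and table
meta = MetaData()
users = Table(
    "users", meta,
    Column("id", Integer, primary_key=True),
    Column("name", String(50)))
meta.create_all(engine)

with engine.connect() as conn:
    result = conn.execute(users.insert().values(name="jack"))
    # populated by whichever _setup_ins_pk_from_* path applied
    print(result.inserted_primary_key)   # e.g. [1]
```
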
- """ - + def _process_executemany_defaults(self): key_getter = self.compiled._key_getters_for_crud_column[2] - if self.executemany: - if len(self.compiled.prefetch): - scalar_defaults = {} + prefetch = self.compiled.prefetch + scalar_defaults = {} - # pre-determine scalar Python-side defaults - # to avoid many calls of get_insert_default()/ - # get_update_default() - for c in self.prefetch_cols: - if self.isinsert and c.default and c.default.is_scalar: - scalar_defaults[c] = c.default.arg - elif self.isupdate and c.onupdate and c.onupdate.is_scalar: - scalar_defaults[c] = c.onupdate.arg + # pre-determine scalar Python-side defaults + # to avoid many calls of get_insert_default()/ + # get_update_default() + for c in prefetch: + if self.isinsert and c.default and c.default.is_scalar: + scalar_defaults[c] = c.default.arg + elif self.isupdate and c.onupdate and c.onupdate.is_scalar: + scalar_defaults[c] = c.onupdate.arg - for param in self.compiled_parameters: - self.current_parameters = param - for c in self.prefetch_cols: - if c in scalar_defaults: - val = scalar_defaults[c] - elif self.isinsert: - val = self.get_insert_default(c) - else: - val = self.get_update_default(c) - if val is not None: - param[key_getter(c)] = val - del self.current_parameters - else: - self.current_parameters = compiled_parameters = \ - self.compiled_parameters[0] - - for c in self.compiled.prefetch: - if self.isinsert: + for param in self.compiled_parameters: + self.current_parameters = param + for c in prefetch: + if c in scalar_defaults: + val = scalar_defaults[c] + elif self.isinsert: val = self.get_insert_default(c) else: val = self.get_update_default(c) - if val is not None: - compiled_parameters[key_getter(c)] = val - del self.current_parameters + param[key_getter(c)] = val + del self.current_parameters + def _process_executesingle_defaults(self): + key_getter = self.compiled._key_getters_for_crud_column[2] + prefetch = self.compiled.prefetch + self.current_parameters = compiled_parameters = \ + self.compiled_parameters[0] + + for c in prefetch: if self.isinsert: - self.inserted_primary_key = [ - self.compiled_parameters[0].get(key_getter(c), None) - for c in self.compiled. - statement.table.primary_key - ] + if c.default and \ + not c.default.is_sequence and c.default.is_scalar: + val = c.default.arg + else: + val = self.get_insert_default(c) + else: + val = self.get_update_default(c) + + if val is not None: + compiled_parameters[key_getter(c)] = val + del self.current_parameters DefaultDialect.execution_ctx_cls = DefaultExecutionContext diff --git a/lib/python3.4/site-packages/sqlalchemy/engine/interfaces.py b/lib/python3.4/site-packages/sqlalchemy/engine/interfaces.py index 71df29c..1948342 100644 --- a/lib/python3.4/site-packages/sqlalchemy/engine/interfaces.py +++ b/lib/python3.4/site-packages/sqlalchemy/engine/interfaces.py @@ -1,5 +1,5 @@ # engine/interfaces.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -150,6 +150,16 @@ class Dialect(object): This will prevent types.Boolean from generating a CHECK constraint when that type is used. + dbapi_exception_translation_map + A dictionary of names that will contain as values the names of + pep-249 exceptions ("IntegrityError", "OperationalError", etc) + keyed to alternate class names, to support the case where a + DBAPI has exception classes that aren't named as they are + referred to (e.g. 
IntegrityError = MyException). In the vast + majority of cases this dictionary is empty. + + .. versionadded:: 1.0.5 + """ _has_events = False @@ -242,7 +252,9 @@ class Dialect(object): sequence a dictionary of the form - {'name' : str, 'start' :int, 'increment': int} + {'name' : str, 'start' :int, 'increment': int, 'minvalue': int, + 'maxvalue': int, 'nominvalue': bool, 'nomaxvalue': bool, + 'cycle': bool} Additional column attributes may be present. """ @@ -308,7 +320,15 @@ class Dialect(object): def get_table_names(self, connection, schema=None, **kw): """Return a list of table names for `schema`.""" - raise NotImplementedError + raise NotImplementedError() + + def get_temp_table_names(self, connection, schema=None, **kw): + """Return a list of temporary table names on the given connection, + if supported by the underlying backend. + + """ + + raise NotImplementedError() def get_view_names(self, connection, schema=None, **kw): """Return a list of all view names available in the database. @@ -319,6 +339,14 @@ class Dialect(object): raise NotImplementedError() + def get_temp_view_names(self, connection, schema=None, **kw): + """Return a list of temporary view names on the given connection, + if supported by the underlying backend. + + """ + + raise NotImplementedError() + def get_view_definition(self, connection, view_name, schema=None, **kw): """Return view definition. @@ -638,20 +666,120 @@ class Dialect(object): return None def reset_isolation_level(self, dbapi_conn): - """Given a DBAPI connection, revert its isolation to the default.""" + """Given a DBAPI connection, revert its isolation to the default. + + Note that this is a dialect-level method which is used as part + of the implementation of the :class:`.Connection` and + :class:`.Engine` + isolation level facilities; these APIs should be preferred for + most typical use cases. + + .. seealso:: + + :meth:`.Connection.get_isolation_level` - view current level + + :attr:`.Connection.default_isolation_level` - view default level + + :paramref:`.Connection.execution_options.isolation_level` - + set per :class:`.Connection` isolation level + + :paramref:`.create_engine.isolation_level` - + set per :class:`.Engine` isolation level + + """ raise NotImplementedError() def set_isolation_level(self, dbapi_conn, level): - """Given a DBAPI connection, set its isolation level.""" + """Given a DBAPI connection, set its isolation level. + + Note that this is a dialect-level method which is used as part + of the implementation of the :class:`.Connection` and + :class:`.Engine` + isolation level facilities; these APIs should be preferred for + most typical use cases. + + .. seealso:: + + :meth:`.Connection.get_isolation_level` - view current level + + :attr:`.Connection.default_isolation_level` - view default level + + :paramref:`.Connection.execution_options.isolation_level` - + set per :class:`.Connection` isolation level + + :paramref:`.create_engine.isolation_level` - + set per :class:`.Engine` isolation level + + """ raise NotImplementedError() def get_isolation_level(self, dbapi_conn): - """Given a DBAPI connection, return its isolation level.""" + """Given a DBAPI connection, return its isolation level. + + When working with a :class:`.Connection` object, the corresponding + DBAPI connection may be procured using the + :attr:`.Connection.connection` accessor. 
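
The dialect-level isolation methods documented above are normally reached through the ``Connection``-level APIs they reference. A brief sketch of that preferred route, assuming an illustrative SQLite engine:

```python
from sqlalchemy import create_engine

engine = create_engine("sqlite://")       # illustrative engine
conn = engine.connect()
print(conn.default_isolation_level)       # the dialect-level default
# set_isolation_level() is invoked on the DBAPI connection under the hood
conn = conn.execution_options(isolation_level="SERIALIZABLE")
print(conn.get_isolation_level())         # the level now in effect
conn.close()
```
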
+ + Note that this is a dialect-level method which is used as part + of the implementation of the :class:`.Connection` and + :class:`.Engine` isolation level facilities; + these APIs should be preferred for most typical use cases. + + + .. seealso:: + + :meth:`.Connection.get_isolation_level` - view current level + + :attr:`.Connection.default_isolation_level` - view default level + + :paramref:`.Connection.execution_options.isolation_level` - + set per :class:`.Connection` isolation level + + :paramref:`.create_engine.isolation_level` - + set per :class:`.Engine` isolation level + + + """ raise NotImplementedError() + @classmethod + def get_dialect_cls(cls, url): + """Given a URL, return the :class:`.Dialect` that will be used. + + This is a hook that allows an external plugin to provide functionality + around an existing dialect, by allowing the plugin to be loaded + from the url based on an entrypoint, and then the plugin returns + the actual dialect to be used. + + By default this just returns the cls. + + .. versionadded:: 1.0.3 + + """ + return cls + + @classmethod + def engine_created(cls, engine): + """A convenience hook called before returning the final :class:`.Engine`. + + If the dialect returned a different class from the + :meth:`.get_dialect_cls` + method, then the hook is called on both classes, first on + the dialect class returned by the :meth:`.get_dialect_cls` method and + then on the class on which the method was called. + + The hook should be used by dialects and/or wrappers to apply special + events to the engine or its components. In particular, it allows + a dialect-wrapping class to apply dialect-level events. + + .. versionadded:: 1.0.3 + + """ + pass + class ExecutionContext(object): """A messenger object for a Dialect that corresponds to a single @@ -901,7 +1029,23 @@ class ExceptionContext(object): connection = None """The :class:`.Connection` in use during the exception. - This member is always present. + This member is present, except in the case of a failure when + first connecting. + + .. seealso:: + + :attr:`.ExceptionContext.engine` + + + """ + + engine = None + """The :class:`.Engine` in use during the exception. + + This member should always be present, even in the case of a failure + when first connecting. + + .. versionadded:: 1.0.0 """ @@ -988,3 +1132,21 @@ class ExceptionContext(object): changing this flag. """ + + invalidate_pool_on_disconnect = True + """Represent whether all connections in the pool should be invalidated + when a "disconnect" condition is in effect. + + Setting this flag to False within the scope of the + :meth:`.ConnectionEvents.handle_error` event will have the effect such + that the full collection of connections in the pool will not be + invalidated during a disconnect; only the current connection that is the + subject of the error will actually be invalidated. + + The purpose of this flag is for custom disconnect-handling schemes where + the invalidation of other connections in the pool is to be performed + based on other conditions, or even on a per-connection basis. + + .. 
versionadded:: 1.0.3
+
+    """
diff --git a/lib/python3.4/site-packages/sqlalchemy/engine/reflection.py b/lib/python3.4/site-packages/sqlalchemy/engine/reflection.py
index 012d1d3..98fcfa0 100644
--- a/lib/python3.4/site-packages/sqlalchemy/engine/reflection.py
+++ b/lib/python3.4/site-packages/sqlalchemy/engine/reflection.py
@@ -1,5 +1,5 @@
 # engine/reflection.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 #
 #
 # This module is part of SQLAlchemy and is released under
@@ -173,7 +173,14 @@ class Inspector(object):
         passed as ``None``.  For special quoting, use :class:`.quoted_name`.
 
         :param order_by: Optional, may be the string "foreign_key" to sort
-            the result on foreign key dependencies.
+            the result on foreign key dependencies. Does not automatically
+            resolve cycles, and will raise :class:`.CircularDependencyError`
+            if cycles exist.
+
+            .. deprecated:: 1.0.0 - see
+                :meth:`.Inspector.get_sorted_table_and_fkc_names` for a version
+                of this which resolves foreign key cycles between tables
+                automatically.
 
         .. versionchanged:: 0.8 the "foreign_key" sorting sorts tables
             in order of dependee to dependent; that is, in creation
@@ -183,6 +190,8 @@ class Inspector(object):
 
         .. seealso::
 
+            :meth:`.Inspector.get_sorted_table_and_fkc_names`
+
             :attr:`.MetaData.sorted_tables`
 
         """
@@ -201,6 +210,88 @@ class Inspector(object):
             tnames = list(topological.sort(tuples, tnames))
         return tnames
 
+    def get_sorted_table_and_fkc_names(self, schema=None):
+        """Return dependency-sorted table and foreign key constraint names
+        as referred to within a particular schema.
+
+        This will yield 2-tuples of
+        ``(tablename, [(tname, fkname), (tname, fkname), ...])``
+        consisting of table names in CREATE order grouped with the foreign key
+        constraint names that are not detected as belonging to a cycle.
+        The final element
+        will be ``(None, [(tname, fkname), (tname, fkname), ..])``
+        which will consist of remaining
+        foreign key constraint names that would require a separate CREATE
+        step after-the-fact, based on dependencies between tables.
+
+        .. versionadded:: 1.0.0
+
+        .. seealso::
+
+            :meth:`.Inspector.get_table_names`
+
+            :func:`.sort_tables_and_constraints` - similar method which works
+            with an already-given :class:`.MetaData`.
+
+        """
+        if hasattr(self.dialect, 'get_table_names'):
+            tnames = self.dialect.get_table_names(
+                self.bind, schema, info_cache=self.info_cache)
+        else:
+            tnames = self.engine.table_names(schema)
+
+        tuples = set()
+        remaining_fkcs = set()
+
+        fknames_for_table = {}
+        for tname in tnames:
+            fkeys = self.get_foreign_keys(tname, schema)
+            fknames_for_table[tname] = set(
+                [fk['name'] for fk in fkeys]
+            )
+            for fkey in fkeys:
+                if tname != fkey['referred_table']:
+                    tuples.add((fkey['referred_table'], tname))
+        try:
+            candidate_sort = list(topological.sort(tuples, tnames))
+        except exc.CircularDependencyError as err:
+            for edge in err.edges:
+                tuples.remove(edge)
+                remaining_fkcs.update(
+                    (edge[1], fkc)
+                    for fkc in fknames_for_table[edge[1]]
+                )
+
+            candidate_sort = list(topological.sort(tuples, tnames))
+        return [
+            (tname, fknames_for_table[tname].difference(remaining_fkcs))
+            for tname in candidate_sort
+        ] + [(None, list(remaining_fkcs))]
+
+    def get_temp_table_names(self):
+        """return a list of temporary table names for the current bind.
+
+        This method is unsupported by most dialects; currently
+        only SQLite implements it.
+
+        .. 
versionadded:: 1.0.0 + + """ + return self.dialect.get_temp_table_names( + self.bind, info_cache=self.info_cache) + + def get_temp_view_names(self): + """return a list of temporary view names for the current bind. + + This method is unsupported by most dialects; currently + only SQLite implements it. + + .. versionadded:: 1.0.0 + + """ + return self.dialect.get_temp_view_names( + self.bind, info_cache=self.info_cache) + def get_table_options(self, table_name, schema=None, **kw): """Return a dictionary of options specified when the table of the given name was created. @@ -370,6 +461,12 @@ class Inspector(object): unique boolean + dialect_options + dict of dialect-specific index options. May not be present + for all dialects. + + .. versionadded:: 1.0.0 + :param table_name: string name of the table. For special quoting, use :class:`.quoted_name`. @@ -465,55 +562,87 @@ class Inspector(object): for col_d in self.get_columns( table_name, schema, **table.dialect_kwargs): found_table = True - orig_name = col_d['name'] - table.dispatch.column_reflect(self, table, col_d) - - name = col_d['name'] - if include_columns and name not in include_columns: - continue - if exclude_columns and name in exclude_columns: - continue - - coltype = col_d['type'] - - col_kw = dict( - (k, col_d[k]) - for k in ['nullable', 'autoincrement', 'quote', 'info', 'key'] - if k in col_d - ) - - colargs = [] - if col_d.get('default') is not None: - # the "default" value is assumed to be a literal SQL - # expression, so is wrapped in text() so that no quoting - # occurs on re-issuance. - colargs.append( - sa_schema.DefaultClause( - sql.text(col_d['default']), _reflected=True - ) - ) - - if 'sequence' in col_d: - # TODO: mssql and sybase are using this. - seq = col_d['sequence'] - sequence = sa_schema.Sequence(seq['name'], 1, 1) - if 'start' in seq: - sequence.start = seq['start'] - if 'increment' in seq: - sequence.increment = seq['increment'] - colargs.append(sequence) - - cols_by_orig_name[orig_name] = col = \ - sa_schema.Column(name, coltype, *colargs, **col_kw) - - if col.key in table.primary_key: - col.primary_key = True - table.append_column(col) + self._reflect_column( + table, col_d, include_columns, + exclude_columns, cols_by_orig_name) if not found_table: raise exc.NoSuchTableError(table.name) + self._reflect_pk( + table_name, schema, table, cols_by_orig_name, exclude_columns) + + self._reflect_fk( + table_name, schema, table, cols_by_orig_name, + exclude_columns, reflection_options) + + self._reflect_indexes( + table_name, schema, table, cols_by_orig_name, + include_columns, exclude_columns, reflection_options) + + self._reflect_unique_constraints( + table_name, schema, table, cols_by_orig_name, + include_columns, exclude_columns, reflection_options) + + def _reflect_column( + self, table, col_d, include_columns, + exclude_columns, cols_by_orig_name): + + orig_name = col_d['name'] + + table.dispatch.column_reflect(self, table, col_d) + + # fetch name again as column_reflect is allowed to + # change it + name = col_d['name'] + if (include_columns and name not in include_columns) \ + or (exclude_columns and name in exclude_columns): + return + + coltype = col_d['type'] + + col_kw = dict( + (k, col_d[k]) + for k in ['nullable', 'autoincrement', 'quote', 'info', 'key'] + if k in col_d + ) + + colargs = [] + if col_d.get('default') is not None: + # the "default" value is assumed to be a literal SQL + # expression, so is wrapped in text() so that no quoting + # occurs on re-issuance. 
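
As a usage sketch of the new inspection methods above (an in-memory SQLite engine stands in for a real database):

```python
from sqlalchemy import create_engine, inspect

engine = create_engine("sqlite://")  # illustrative engine
insp = inspect(engine)

# tables in CREATE order, grouped with the FK constraint names that
# are safe to create inline; a final (None, [...]) entry collects
# constraints that had to be broken out of a dependency cycle
for tname, fkcs in insp.get_sorted_table_and_fkc_names():
    print(tname, fkcs)
```
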
+ colargs.append( + sa_schema.DefaultClause( + sql.text(col_d['default']), _reflected=True + ) + ) + + if 'sequence' in col_d: + self._reflect_col_sequence(col_d, colargs) + + cols_by_orig_name[orig_name] = col = \ + sa_schema.Column(name, coltype, *colargs, **col_kw) + + if col.key in table.primary_key: + col.primary_key = True + table.append_column(col) + + def _reflect_col_sequence(self, col_d, colargs): + if 'sequence' in col_d: + # TODO: mssql and sybase are using this. + seq = col_d['sequence'] + sequence = sa_schema.Sequence(seq['name'], 1, 1) + if 'start' in seq: + sequence.start = seq['start'] + if 'increment' in seq: + sequence.increment = seq['increment'] + colargs.append(sequence) + + def _reflect_pk( + self, table_name, schema, table, + cols_by_orig_name, exclude_columns): pk_cons = self.get_pk_constraint( table_name, schema, **table.dialect_kwargs) if pk_cons: @@ -530,6 +659,9 @@ class Inspector(object): # its column collection table.primary_key._reload(pk_cols) + def _reflect_fk( + self, table_name, schema, table, cols_by_orig_name, + exclude_columns, reflection_options): fkeys = self.get_foreign_keys( table_name, schema, **table.dialect_kwargs) for fkey_d in fkeys: @@ -572,24 +704,85 @@ class Inspector(object): sa_schema.ForeignKeyConstraint(constrained_columns, refspec, conname, link_to_name=True, **options)) + + def _reflect_indexes( + self, table_name, schema, table, cols_by_orig_name, + include_columns, exclude_columns, reflection_options): # Indexes indexes = self.get_indexes(table_name, schema) for index_d in indexes: name = index_d['name'] columns = index_d['column_names'] unique = index_d['unique'] - flavor = index_d.get('type', 'unknown type') + flavor = index_d.get('type', 'index') + dialect_options = index_d.get('dialect_options', {}) + + duplicates = index_d.get('duplicates_constraint') if include_columns and \ not set(columns).issubset(include_columns): util.warn( - "Omitting %s KEY for (%s), key covers omitted columns." % + "Omitting %s key for (%s), key covers omitted columns." % (flavor, ', '.join(columns))) continue + if duplicates: + continue # look for columns by orig name in cols_by_orig_name, # but support columns that are in-Python only as fallback - sa_schema.Index(name, *[ - cols_by_orig_name[c] if c in cols_by_orig_name - else table.c[c] - for c in columns - ], - **dict(unique=unique)) + idx_cols = [] + for c in columns: + try: + idx_col = cols_by_orig_name[c] \ + if c in cols_by_orig_name else table.c[c] + except KeyError: + util.warn( + "%s key '%s' was not located in " + "columns for table '%s'" % ( + flavor, c, table_name + )) + else: + idx_cols.append(idx_col) + + sa_schema.Index( + name, *idx_cols, + **dict(list(dialect_options.items()) + [('unique', unique)]) + ) + + def _reflect_unique_constraints( + self, table_name, schema, table, cols_by_orig_name, + include_columns, exclude_columns, reflection_options): + + # Unique Constraints + try: + constraints = self.get_unique_constraints(table_name, schema) + except NotImplementedError: + # optional dialect feature + return + + for const_d in constraints: + conname = const_d['name'] + columns = const_d['column_names'] + duplicates = const_d.get('duplicates_index') + if include_columns and \ + not set(columns).issubset(include_columns): + util.warn( + "Omitting unique constraint key for (%s), " + "key covers omitted columns." 
% + ', '.join(columns)) + continue + if duplicates: + continue + # look for columns by orig name in cols_by_orig_name, + # but support columns that are in-Python only as fallback + constrained_cols = [] + for c in columns: + try: + constrained_col = cols_by_orig_name[c] \ + if c in cols_by_orig_name else table.c[c] + except KeyError: + util.warn( + "unique constraint key '%s' was not located in " + "columns for table '%s'" % (c, table_name)) + else: + constrained_cols.append(constrained_col) + table.append_constraint( + sa_schema.UniqueConstraint(*constrained_cols, name=conname)) diff --git a/lib/python3.4/site-packages/sqlalchemy/engine/result.py b/lib/python3.4/site-packages/sqlalchemy/engine/result.py index e7d67c0..689382f 100644 --- a/lib/python3.4/site-packages/sqlalchemy/engine/result.py +++ b/lib/python3.4/site-packages/sqlalchemy/engine/result.py @@ -1,5 +1,5 @@ # engine/result.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -110,7 +110,7 @@ class RowProxy(BaseRowProxy): __slots__ = () def __contains__(self, key): - return self._parent._has_key(self._row, key) + return self._parent._has_key(key) def __getstate__(self): return { @@ -155,7 +155,7 @@ class RowProxy(BaseRowProxy): def has_key(self, key): """Return True if this RowProxy contains the given key.""" - return self._parent._has_key(self._row, key) + return self._parent._has_key(key) def items(self): """Return a list of tuples, each tuple containing a key/value pair.""" @@ -187,90 +187,165 @@ class ResultMetaData(object): context.""" def __init__(self, parent, metadata): - self._processors = processors = [] - - # We do not strictly need to store the processor in the key mapping, - # though it is faster in the Python version (probably because of the - # saved attribute lookup self._processors) - self._keymap = keymap = {} - self.keys = [] context = parent.context dialect = context.dialect typemap = dialect.dbapi_type_map translate_colname = context._translate_colname - self.case_sensitive = dialect.case_sensitive + self.case_sensitive = case_sensitive = dialect.case_sensitive - # high precedence key values. - primary_keymap = {} + if context.result_column_struct: + result_columns, cols_are_ordered = context.result_column_struct + num_ctx_cols = len(result_columns) + else: + num_ctx_cols = None - for i, rec in enumerate(metadata): - colname = rec[0] - coltype = rec[1] + if num_ctx_cols and \ + cols_are_ordered and \ + num_ctx_cols == len(metadata): + # case 1 - SQL expression statement, number of columns + # in result matches number of cols in compiled. This is the + # vast majority case for SQL expression constructs. In this + # case we don't bother trying to parse or match up to + # the colnames in the result description. + raw = [ + ( + idx, + key, + name.lower() if not case_sensitive else name, + context.get_result_processor( + type_, key, metadata[idx][1] + ), + obj, + None + ) for idx, (key, name, obj, type_) + in enumerate(result_columns) + ] + self.keys = [ + elem[0] for elem in result_columns + ] + else: + # case 2 - raw string, or number of columns in result does + # not match number of cols in compiled. The raw string case + # is very common. The latter can happen + # when text() is used with only a partial typemap, or + # in the extremely unlikely cases where the compiled construct + # has a single element with multiple col expressions in it + # (e.g. 
has commas embedded) or there's some kind of statement + # that is adding extra columns. + # In all these cases we fall back to the "named" approach + # that SQLAlchemy has used up through 0.9. - if dialect.description_encoding: - colname = dialect._description_decoder(colname) + if num_ctx_cols: + result_map = self._create_result_map( + result_columns, case_sensitive) + raw = [] + self.keys = [] + untranslated = None + for idx, rec in enumerate(metadata): + colname = rec[0] + coltype = rec[1] + + if dialect.description_encoding: + colname = dialect._description_decoder(colname) + + if translate_colname: + colname, untranslated = translate_colname(colname) + + if dialect.requires_name_normalize: + colname = dialect.normalize_name(colname) + + self.keys.append(colname) + if not case_sensitive: + colname = colname.lower() + + if num_ctx_cols: + try: + ctx_rec = result_map[colname] + except KeyError: + mapped_type = typemap.get(coltype, sqltypes.NULLTYPE) + obj = None + else: + obj = ctx_rec[1] + mapped_type = ctx_rec[2] + else: + mapped_type = typemap.get(coltype, sqltypes.NULLTYPE) + obj = None + processor = context.get_result_processor( + mapped_type, colname, coltype) + + raw.append( + (idx, colname, colname, processor, obj, untranslated) + ) + + # keymap indexes by integer index... + self._keymap = dict([ + (elem[0], (elem[3], elem[4], elem[0])) + for elem in raw + ]) + + # processors in key order for certain per-row + # views like __iter__ and slices + self._processors = [elem[3] for elem in raw] + + if num_ctx_cols: + # keymap by primary string... + by_key = dict([ + (elem[2], (elem[3], elem[4], elem[0])) + for elem in raw + ]) + + # if by-primary-string dictionary smaller (or bigger?!) than + # number of columns, assume we have dupes, rewrite + # dupe records with "None" for index which results in + # ambiguous column exception when accessed. + if len(by_key) != num_ctx_cols: + seen = set() + for rec in raw: + key = rec[1] + if key in seen: + by_key[key] = (None, by_key[key][1], None) + seen.add(key) + + # update keymap with secondary "object"-based keys + self._keymap.update([ + (obj_elem, by_key[elem[2]]) + for elem in raw if elem[4] + for obj_elem in elem[4] + ]) + + # update keymap with primary string names taking + # precedence + self._keymap.update(by_key) + else: + self._keymap.update([ + (elem[2], (elem[3], elem[4], elem[0])) + for elem in raw + ]) + # update keymap with "translated" names (sqlite-only thing) if translate_colname: - colname, untranslated = translate_colname(colname) + self._keymap.update([ + (elem[5], self._keymap[elem[2]]) + for elem in raw if elem[5] + ]) - if dialect.requires_name_normalize: - colname = dialect.normalize_name(colname) - - if context.result_map: - try: - name, obj, type_ = context.result_map[ - colname if self.case_sensitive else colname.lower()] - except KeyError: - name, obj, type_ = \ - colname, None, typemap.get(coltype, sqltypes.NULLTYPE) + @classmethod + def _create_result_map(cls, result_columns, case_sensitive=True): + d = {} + for elem in result_columns: + key, rec = elem[0], elem[1:] + if not case_sensitive: + key = key.lower() + if key in d: + # conflicting keyname, just double up the list + # of objects. this will cause an "ambiguous name" + # error if an attempt is made by the result set to + # access. 
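
The duplicate-key rewriting above is what produces the "ambiguous column" error on by-name access while leaving integer access intact. A sketch of the observable behavior, assuming an in-memory SQLite engine:

```python
from sqlalchemy import create_engine, literal, select

engine = create_engine("sqlite://")  # illustrative engine
with engine.connect() as conn:
    stmt = select([literal(1).label("x"), literal(2).label("x")])
    row = conn.execute(stmt).fetchone()
    print(row[0], row[1])   # integer access is always unambiguous
    try:
        row["x"]            # duplicate name: index was rewritten to None
    except Exception as err:
        print(err)          # "Ambiguous column name 'x' ..."
```
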
+ e_name, e_obj, e_type = d[key] + d[key] = e_name, e_obj + rec[1], e_type else: - name, obj, type_ = \ - colname, None, typemap.get(coltype, sqltypes.NULLTYPE) - - processor = context.get_result_processor(type_, colname, coltype) - - processors.append(processor) - rec = (processor, obj, i) - - # indexes as keys. This is only needed for the Python version of - # RowProxy (the C version uses a faster path for integer indexes). - primary_keymap[i] = rec - - # populate primary keymap, looking for conflicts. - if primary_keymap.setdefault( - name if self.case_sensitive - else name.lower(), - rec) is not rec: - # place a record that doesn't have the "index" - this - # is interpreted later as an AmbiguousColumnError, - # but only when actually accessed. Columns - # colliding by name is not a problem if those names - # aren't used; integer access is always - # unambiguous. - primary_keymap[name - if self.case_sensitive - else name.lower()] = rec = (None, obj, None) - - self.keys.append(colname) - if obj: - for o in obj: - keymap[o] = rec - # technically we should be doing this but we - # are saving on callcounts by not doing so. - # if keymap.setdefault(o, rec) is not rec: - # keymap[o] = (None, obj, None) - - if translate_colname and \ - untranslated: - keymap[untranslated] = rec - - # overwrite keymap values with those of the - # high precedence keymap. - keymap.update(primary_keymap) - - if parent._echo: - context.engine.logger.debug( - "Col %r", tuple(x[0] for x in metadata)) + d[key] = rec + return d @util.pending_deprecation("0.8", "sqlite dialect uses " "_translate_colname() now") @@ -335,12 +410,28 @@ class ResultMetaData(object): map[key] = result return result - def _has_key(self, row, key): + def _has_key(self, key): if key in self._keymap: return True else: return self._key_fallback(key, False) is not None + def _getter(self, key): + if key in self._keymap: + processor, obj, index = self._keymap[key] + else: + ret = self._key_fallback(key, False) + if ret is None: + return None + processor, obj, index = ret + + if index is None: + raise exc.InvalidRequestError( + "Ambiguous column name '%s' in result set! " + "try 'use_labels' option on select statement." 
% key)
+
+        return operator.itemgetter(index)
+
     def __getstate__(self):
         return {
             '_pickled_keymap': dict(
@@ -391,21 +482,49 @@ class ResultProxy(object):
     out_parameters = None
     _can_close_connection = False
     _metadata = None
+    _soft_closed = False
+    closed = False
 
     def __init__(self, context):
         self.context = context
         self.dialect = context.dialect
-        self.closed = False
         self.cursor = self._saved_cursor = context.cursor
         self.connection = context.root_connection
         self._echo = self.connection._echo and \
             context.engine._should_log_debug()
         self._init_metadata()
 
+    def _getter(self, key):
+        try:
+            getter = self._metadata._getter
+        except AttributeError:
+            return self._non_result(None)
+        else:
+            return getter(key)
+
+    def _has_key(self, key):
+        try:
+            has_key = self._metadata._has_key
+        except AttributeError:
+            return self._non_result(None)
+        else:
+            return has_key(key)
+
     def _init_metadata(self):
         metadata = self._cursor_description()
         if metadata is not None:
-            self._metadata = ResultMetaData(self, metadata)
+            if self.context.compiled and \
+                    'compiled_cache' in self.context.execution_options:
+                if self.context.compiled._cached_metadata:
+                    self._metadata = self.context.compiled._cached_metadata
+                else:
+                    self._metadata = self.context.compiled._cached_metadata = \
+                        ResultMetaData(self, metadata)
+            else:
+                self._metadata = ResultMetaData(self, metadata)
+            if self._echo:
+                self.context.engine.logger.debug(
+                    "Col %r", tuple(x[0] for x in metadata))
 
     def keys(self):
         """Return the current set of string keys for rows."""
@@ -515,39 +634,85 @@ class ResultProxy(object):
 
         return self._saved_cursor.description
 
-    def close(self, _autoclose_connection=True):
-        """Close this ResultProxy.
+    def _soft_close(self, _autoclose_connection=True):
+        """Soft close this :class:`.ResultProxy`.
 
-        Closes the underlying DBAPI cursor corresponding to the execution.
-
-        Note that any data cached within this ResultProxy is still available.
-        For some types of results, this may include buffered rows.
-
-        If this ResultProxy was generated from an implicit execution,
-        the underlying Connection will also be closed (returns the
-        underlying DBAPI connection to the connection pool.)
+        This releases all DBAPI cursor resources, but leaves the
+        ResultProxy "open" from a semantic perspective, meaning the
+        fetchXXX() methods will continue to return empty results.
 
         This method is called automatically when:
 
         * all result rows are exhausted using the fetchXXX() methods.
        * cursor.description is None.
 
+        This method is **not public**, but is documented in order to clarify
+        the "autoclose" process used.
+
+        .. versionadded:: 1.0.0
+
+        .. seealso::
+
+            :meth:`.ResultProxy.close`
+
+
+        """
+        if self._soft_closed:
+            return
+        self._soft_closed = True
+        cursor = self.cursor
+        self.connection._safe_close_cursor(cursor)
+        if _autoclose_connection and \
+                self.connection.should_close_with_result:
+            self.connection.close()
+        self.cursor = None
+
+    def close(self):
+        """Close this ResultProxy.
+
+        This closes out the underlying DBAPI cursor corresponding
+        to the statement execution, if one is still present.  Note that the
+        DBAPI cursor is automatically released when the :class:`.ResultProxy`
+        exhausts all available rows.  :meth:`.ResultProxy.close` is generally
+        an optional method except in the case when discarding a
+        :class:`.ResultProxy` that still has additional rows pending for fetch.
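
The split between ``_soft_close()`` and ``close()`` above means an exhausted result stays usable until explicitly closed. A short sketch of the intended semantics, assuming an in-memory SQLite engine:

```python
from sqlalchemy import create_engine, text

engine = create_engine("sqlite://")    # illustrative engine
with engine.connect() as conn:
    result = conn.execute(text("select 1"))
    result.fetchall()                  # exhausts rows: cursor soft-closed
    assert result.fetchone() is None   # still usable, just empty
    result.close()                     # the real close
    # a further result.fetchone() would raise ResourceClosedError
```
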
+
+        In the case of a result that is the product of
+        :ref:`connectionless execution `,
+        the underlying :class:`.Connection` object is also closed, which
+        :term:`releases` DBAPI connection resources.
+
+        After this method is called, it is no longer valid to call upon
+        the fetch methods, which will raise a :class:`.ResourceClosedError`
+        on subsequent use.
+
+        .. versionchanged:: 1.0.0 - the :meth:`.ResultProxy.close` method
+           has been separated out from the process that releases the underlying
+           DBAPI cursor resource.  The "auto close" feature of the
+           :class:`.Connection` now performs a so-called "soft close", which
+           releases the underlying DBAPI cursor, but allows the
+           :class:`.ResultProxy` to still behave as an open-but-exhausted
+           result set; the actual :meth:`.ResultProxy.close` method is never
+           called.  It is still safe to discard a :class:`.ResultProxy`
+           that has been fully exhausted without calling this method.
+
+        .. seealso::
+
+            :ref:`connections_toplevel`
+
+            :meth:`.ResultProxy._soft_close`
+
         """
         if not self.closed:
+            self._soft_close()
             self.closed = True
-            self.connection._safe_close_cursor(self.cursor)
-            if _autoclose_connection and \
-                    self.connection.should_close_with_result:
-                self.connection.close()
-            # allow consistent errors
-            self.cursor = None
 
     def __iter__(self):
         while True:
             row = self.fetchone()
             if row is None:
-                raise StopIteration
+                return
             else:
                 yield row
@@ -732,7 +897,7 @@ class ResultProxy(object):
         try:
             return self.cursor.fetchone()
         except AttributeError:
-            self._non_result()
+            return self._non_result(None)
 
     def _fetchmany_impl(self, size=None):
         try:
@@ -741,22 +906,24 @@ class ResultProxy(object):
             else:
                 return self.cursor.fetchmany(size)
         except AttributeError:
-            self._non_result()
+            return self._non_result([])
 
     def _fetchall_impl(self):
         try:
             return self.cursor.fetchall()
         except AttributeError:
-            self._non_result()
+            return self._non_result([])
 
-    def _non_result(self):
+    def _non_result(self, default):
         if self._metadata is None:
             raise exc.ResourceClosedError(
                 "This result object does not return rows. "
                 "It has been closed automatically.",
             )
-        else:
+        elif self.closed:
             raise exc.ResourceClosedError("This result object is closed.")
+        else:
+            return default
 
     def process_rows(self, rows):
         process_row = self._process_row
@@ -775,11 +942,25 @@ class ResultProxy(object):
             for row in rows]
 
     def fetchall(self):
-        """Fetch all rows, just like DB-API ``cursor.fetchall()``."""
+        """Fetch all rows, just like DB-API ``cursor.fetchall()``.
+
+        After all rows have been exhausted, the underlying DBAPI
+        cursor resource is released, and the object may be safely
+        discarded.
+
+        Subsequent calls to :meth:`.ResultProxy.fetchall` will return
+        an empty list.  After the :meth:`.ResultProxy.close` method is
+        called, the method will raise :class:`.ResourceClosedError`.
+
+        .. versionchanged:: 1.0.0 - Added "soft close" behavior which
+           allows the result to be used in an "exhausted" state prior to
+           calling the :meth:`.ResultProxy.close` method.
+
+        """
         try:
             l = self.process_rows(self._fetchall_impl())
-            self.close()
+            self._soft_close()
             return l
         except Exception as e:
             self.connection._handle_dbapi_exception(
@@ -790,15 +971,25 @@ class ResultProxy(object):
         """Fetch many rows, just like DB-API
         ``cursor.fetchmany(size=cursor.arraysize)``.
 
-        If rows are present, the cursor remains open after this is called.
-        Else the cursor is automatically closed and an empty list is returned.
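
Note the ``__iter__`` change above from ``raise StopIteration`` to a bare ``return``: under PEP 479 semantics (default in Python 3.7+), a ``StopIteration`` raised inside a generator is converted to ``RuntimeError``, so ending the generator with ``return`` is the safe form. Iteration continues to ride on ``fetchone()``; a small sketch with an illustrative engine:

```python
from sqlalchemy import create_engine, text

engine = create_engine("sqlite://")  # illustrative engine
with engine.connect() as conn:
    result = conn.execute(text("select 1 union select 2"))
    # each step calls fetchone(); the loop ends cleanly when rows
    # run out, with no StopIteration escaping the generator
    for row in result:
        print(row)
```
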
+ After all rows have been exhausted, the underlying DBAPI + cursor resource is released, and the object may be safely + discarded. + + Calls to :meth:`.ResultProxy.fetchmany` after all rows have been + exhausted will return + an empty list. After the :meth:`.ResultProxy.close` method is + called, the method will raise :class:`.ResourceClosedError`. + + .. versionchanged:: 1.0.0 - Added "soft close" behavior which + allows the result to be used in an "exhausted" state prior to + calling the :meth:`.ResultProxy.close` method. """ try: l = self.process_rows(self._fetchmany_impl(size)) if len(l) == 0: - self.close() + self._soft_close() return l except Exception as e: self.connection._handle_dbapi_exception( @@ -808,8 +999,18 @@ class ResultProxy(object): def fetchone(self): """Fetch one row, just like DB-API ``cursor.fetchone()``. - If a row is present, the cursor remains open after this is called. - Else the cursor is automatically closed and None is returned. + After all rows have been exhausted, the underlying DBAPI + cursor resource is released, and the object may be safely + discarded. + + Calls to :meth:`.ResultProxy.fetchone` after all rows have + been exhausted will return ``None``. + After the :meth:`.ResultProxy.close` method is + called, the method will raise :class:`.ResourceClosedError`. + + .. versionchanged:: 1.0.0 - Added "soft close" behavior which + allows the result to be used in an "exhausted" state prior to + calling the :meth:`.ResultProxy.close` method. """ try: @@ -817,7 +1018,7 @@ class ResultProxy(object): if row is not None: return self.process_rows([row])[0] else: - self.close() + self._soft_close() return None except Exception as e: self.connection._handle_dbapi_exception( @@ -829,9 +1030,12 @@ class ResultProxy(object): Returns None if no row is present. + After calling this method, the object is fully closed, + e.g. the :meth:`.ResultProxy.close` method will have been called. + """ if self._metadata is None: - self._non_result() + return self._non_result(None) try: row = self._fetchone_impl() @@ -853,6 +1057,9 @@ class ResultProxy(object): Returns None if no row is present. + After calling this method, the object is fully closed, + e.g. the :meth:`.ResultProxy.close` method will have been called. + """ row = self.first() if row is not None: @@ -873,10 +1080,27 @@ class BufferedRowResultProxy(ResultProxy): The pre-fetching behavior fetches only one row initially, and then grows its buffer size by a fixed amount with each successive need - for additional rows up to a size of 100. + for additional rows up to a size of 1000. + + The size argument is configurable using the ``max_row_buffer`` + execution option:: + + with psycopg2_engine.connect() as conn: + + result = conn.execution_options( + stream_results=True, max_row_buffer=50 + ).execute("select * from table") + + .. versionadded:: 1.0.6 Added the ``max_row_buffer`` option. + + ..
seealso:: + + :ref:`psycopg2_execution_options` """ def _init_metadata(self): + self._max_row_buffer = self.context.execution_options.get( + 'max_row_buffer', None) self.__buffer_rows() super(BufferedRowResultProxy, self)._init_metadata() @@ -896,13 +1120,21 @@ class BufferedRowResultProxy(ResultProxy): } def __buffer_rows(self): + if self.cursor is None: + return size = getattr(self, '_bufsize', 1) self.__rowbuffer = collections.deque(self.cursor.fetchmany(size)) self._bufsize = self.size_growth.get(size, size) + if self._max_row_buffer is not None: + self._bufsize = min(self._max_row_buffer, self._bufsize) + + def _soft_close(self, **kw): + self.__rowbuffer.clear() + super(BufferedRowResultProxy, self)._soft_close(**kw) def _fetchone_impl(self): - if self.closed: - return None + if self.cursor is None: + return self._non_result(None) if not self.__rowbuffer: self.__buffer_rows() if not self.__rowbuffer: @@ -921,6 +1153,8 @@ class BufferedRowResultProxy(ResultProxy): return result def _fetchall_impl(self): + if self.cursor is None: + return self._non_result([]) self.__rowbuffer.extend(self.cursor.fetchall()) ret = self.__rowbuffer self.__rowbuffer = collections.deque() @@ -943,11 +1177,15 @@ class FullyBufferedResultProxy(ResultProxy): def _buffer_rows(self): return collections.deque(self.cursor.fetchall()) + def _soft_close(self, **kw): + self.__rowbuffer.clear() + super(FullyBufferedResultProxy, self)._soft_close(**kw) + def _fetchone_impl(self): if self.__rowbuffer: return self.__rowbuffer.popleft() else: - return None + return self._non_result(None) def _fetchmany_impl(self, size=None): if size is None: @@ -961,6 +1199,8 @@ class FullyBufferedResultProxy(ResultProxy): return result def _fetchall_impl(self): + if not self.cursor: + return self._non_result([]) ret = self.__rowbuffer self.__rowbuffer = collections.deque() return ret diff --git a/lib/python3.4/site-packages/sqlalchemy/engine/strategies.py b/lib/python3.4/site-packages/sqlalchemy/engine/strategies.py index 38206be..2a018f8 100644 --- a/lib/python3.4/site-packages/sqlalchemy/engine/strategies.py +++ b/lib/python3.4/site-packages/sqlalchemy/engine/strategies.py @@ -1,5 +1,5 @@ # engine/strategies.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -48,7 +48,8 @@ class DefaultEngineStrategy(EngineStrategy): # create url.URL object u = url.make_url(name_or_url) - dialect_cls = u.get_dialect() + entrypoint = u._get_entrypoint() + dialect_cls = entrypoint.get_dialect_cls(u) if kwargs.pop('_coerce_config', False): def pop_kwarg(key, default=None): @@ -81,21 +82,19 @@ class DefaultEngineStrategy(EngineStrategy): # assemble connection arguments (cargs, cparams) = dialect.create_connect_args(u) cparams.update(pop_kwarg('connect_args', {})) + cargs = list(cargs) # allow mutability # look for existing pool or create pool = pop_kwarg('pool', None) if pool is None: - def connect(): - try: - return dialect.connect(*cargs, **cparams) - except dialect.dbapi.Error as e: - invalidated = dialect.is_disconnect(e, None, None) - util.raise_from_cause( - exc.DBAPIError.instance( - None, None, e, dialect.dbapi.Error, - connection_invalidated=invalidated - ) - ) + def connect(connection_record=None): + if dialect._has_events: + for fn in dialect.dispatch.do_connect: + connection = fn( + dialect, connection_record, cargs, cparams) + if connection is not None: + return connection + return 
dialect.connect(*cargs, **cparams) creator = pop_kwarg('creator', connect) @@ -162,9 +161,14 @@ class DefaultEngineStrategy(EngineStrategy): def first_connect(dbapi_connection, connection_record): c = base.Connection(engine, connection=dbapi_connection, _has_events=False) + c._execution_options = util.immutabledict() dialect.initialize(c) event.listen(pool, 'first_connect', first_connect, once=True) + dialect_cls.engine_created(engine) + if entrypoint is not dialect_cls: + entrypoint.engine_created(engine) + return engine diff --git a/lib/python3.4/site-packages/sqlalchemy/engine/threadlocal.py b/lib/python3.4/site-packages/sqlalchemy/engine/threadlocal.py index 637523a..505d1fa 100644 --- a/lib/python3.4/site-packages/sqlalchemy/engine/threadlocal.py +++ b/lib/python3.4/site-packages/sqlalchemy/engine/threadlocal.py @@ -1,5 +1,5 @@ # engine/threadlocal.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -59,7 +59,10 @@ class TLEngine(base.Engine): # guards against pool-level reapers, if desired. # or not connection.connection.is_valid: connection = self._tl_connection_cls( - self, self.pool.connect(), **kw) + self, + self._wrap_pool_connect( + self.pool.connect, connection), + **kw) self._connections.conn = weakref.ref(connection) return connection._increment_connect() diff --git a/lib/python3.4/site-packages/sqlalchemy/engine/url.py b/lib/python3.4/site-packages/sqlalchemy/engine/url.py index e362961..3cc2f35 100644 --- a/lib/python3.4/site-packages/sqlalchemy/engine/url.py +++ b/lib/python3.4/site-packages/sqlalchemy/engine/url.py @@ -1,5 +1,5 @@ # engine/url.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -105,11 +105,25 @@ class URL(object): self.database == other.database and \ self.query == other.query - def get_dialect(self): - """Return the SQLAlchemy database dialect class corresponding - to this URL's driver name. - """ + def get_backend_name(self): + if '+' not in self.drivername: + return self.drivername + else: + return self.drivername.split('+')[0] + def get_driver_name(self): + if '+' not in self.drivername: + return self.get_dialect().driver + else: + return self.drivername.split('+')[1] + + def _get_entrypoint(self): + """Return the "entry point" dialect class. + + This is normally the dialect itself except in the case when the + returned class implements the get_dialect_cls() method. + + """ if '+' not in self.drivername: name = self.drivername else: @@ -125,6 +139,14 @@ class URL(object): else: return cls + def get_dialect(self): + """Return the SQLAlchemy database dialect class corresponding + to this URL's driver name. + """ + entrypoint = self._get_entrypoint() + dialect_cls = entrypoint.get_dialect_cls(self) + return dialect_cls + def translate_connect_args(self, names=[], **kw): """Translate url attributes into a dictionary of connection arguments. 
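(A quick sketch of the URL accessors introduced in the engine/url.py hunks above, which split ``drivername`` on ``+``; the URL string here is illustrative)::

    from sqlalchemy.engine import url

    u = url.make_url("postgresql+psycopg2://scott:tiger@localhost/test")
    u.get_backend_name()   # "postgresql" - the portion before the "+"
    u.get_driver_name()    # "psycopg2" - the portion after the "+"
    u.get_dialect()        # resolved via the entrypoint's get_dialect_cls()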
diff --git a/lib/python3.4/site-packages/sqlalchemy/engine/util.py b/lib/python3.4/site-packages/sqlalchemy/engine/util.py index d9eb1df..d28d870 100644 --- a/lib/python3.4/site-packages/sqlalchemy/engine/util.py +++ b/lib/python3.4/site-packages/sqlalchemy/engine/util.py @@ -1,5 +1,5 @@ # engine/util.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/python3.4/site-packages/sqlalchemy/event/__init__.py b/lib/python3.4/site-packages/sqlalchemy/event/__init__.py index b93c0ef..dddb924 100644 --- a/lib/python3.4/site-packages/sqlalchemy/event/__init__.py +++ b/lib/python3.4/site-packages/sqlalchemy/event/__init__.py @@ -1,5 +1,5 @@ # event/__init__.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/python3.4/site-packages/sqlalchemy/event/api.py b/lib/python3.4/site-packages/sqlalchemy/event/api.py index 270e95c..0af48df 100644 --- a/lib/python3.4/site-packages/sqlalchemy/event/api.py +++ b/lib/python3.4/site-packages/sqlalchemy/event/api.py @@ -1,5 +1,5 @@ # event/api.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -58,6 +58,32 @@ def listen(target, identifier, fn, *args, **kw): .. versionadded:: 0.9.4 Added ``once=True`` to :func:`.event.listen` and :func:`.event.listens_for`. + .. note:: + + The :func:`.listen` function cannot be called at the same time + that the target event is being run. This has implications + for thread safety, and also means an event cannot be added + from inside the listener function for itself. The list of + events to be run is present inside of a mutable collection + that can't be changed during iteration. + + Event registration and removal is not intended to be a "high + velocity" operation; it is a configurational operation. For + systems that need to quickly associate and deassociate with + events at high scale, use a mutable structure that is handled + from inside of a single listener. + + .. versionchanged:: 1.0.0 - a ``collections.deque()`` object is now + used as the container for the list of events, which explicitly + disallows collection mutation while the collection is being + iterated. + + .. seealso:: + + :func:`.listens_for` + + :func:`.remove` + """ _event_key(target, identifier, fn).listen(*args, **kw) @@ -89,6 +115,10 @@ def listens_for(target, identifier, *args, **kw): .. versionadded:: 0.9.4 Added ``once=True`` to :func:`.event.listen` and :func:`.event.listens_for`. + .. seealso:: + + :func:`.listen` - general description of event listening + """ def decorate(fn): listen(target, identifier, fn, *args, **kw) @@ -120,6 +150,30 @@ def remove(target, identifier, fn): .. versionadded:: 0.9.0 + .. note:: + + The :func:`.remove` function cannot be called at the same time + that the target event is being run. This has implications + for thread safety, and also means an event cannot be removed + from inside the listener function for itself. The list of + events to be run is present inside of a mutable collection + that can't be changed during iteration. + + Event registration and removal is not intended to be a "high + velocity" operation; it is a configurational operation.
For + systems that need to quickly associate and deassociate with + events at high scale, use a mutable structure that is handled + from inside of a single listener. + + .. versionchanged:: 1.0.0 - a ``collections.deque()`` object is now + used as the container for the list of events, which explicitly + disallows collection mutation while the collection is being + iterated. + + .. seealso:: + + :func:`.listen` + """ _event_key(target, identifier, fn).remove() diff --git a/lib/python3.4/site-packages/sqlalchemy/event/attr.py b/lib/python3.4/site-packages/sqlalchemy/event/attr.py index 7641b59..1494013 100644 --- a/lib/python3.4/site-packages/sqlalchemy/event/attr.py +++ b/lib/python3.4/site-packages/sqlalchemy/event/attr.py @@ -1,5 +1,5 @@ # event/attr.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -37,19 +37,24 @@ from . import registry from . import legacy from itertools import chain import weakref +import collections -class RefCollection(object): - @util.memoized_property - def ref(self): +class RefCollection(util.MemoizedSlots): + __slots__ = 'ref', + + def _memoized_attr_ref(self): return weakref.ref(self, registry._collection_gced) -class _DispatchDescriptor(RefCollection): - """Class-level attributes on :class:`._Dispatch` classes.""" +class _ClsLevelDispatch(RefCollection): + """Class-level events on :class:`._Dispatch` classes.""" + + __slots__ = ('name', 'arg_names', 'has_kw', + 'legacy_signatures', '_clslevel', '__weakref__') def __init__(self, parent_dispatch_cls, fn): - self.__name__ = fn.__name__ + self.name = fn.__name__ argspec = util.inspect_getargspec(fn) self.arg_names = argspec.args[1:] self.has_kw = bool(argspec.keywords) @@ -59,11 +64,9 @@ class _DispatchDescriptor(RefCollection): key=lambda s: s[0] ) )) - self.__doc__ = fn.__doc__ = legacy._augment_fn_docs( - self, parent_dispatch_cls, fn) + fn.__doc__ = legacy._augment_fn_docs(self, parent_dispatch_cls, fn) self._clslevel = weakref.WeakKeyDictionary() - self._empty_listeners = weakref.WeakKeyDictionary() def _adjust_fn_spec(self, fn, named): if named: @@ -96,8 +99,8 @@ class _DispatchDescriptor(RefCollection): self.update_subclass(cls) else: if cls not in self._clslevel: - self._clslevel[cls] = [] - self._clslevel[cls].insert(0, event_key._listen_fn) + self._clslevel[cls] = collections.deque() + self._clslevel[cls].appendleft(event_key._listen_fn) registry._stored_in_collection(event_key, self) def append(self, event_key, propagate): @@ -113,13 +116,13 @@ class _DispatchDescriptor(RefCollection): self.update_subclass(cls) else: if cls not in self._clslevel: - self._clslevel[cls] = [] + self._clslevel[cls] = collections.deque() self._clslevel[cls].append(event_key._listen_fn) registry._stored_in_collection(event_key, self) def update_subclass(self, target): if target not in self._clslevel: - self._clslevel[target] = [] + self._clslevel[target] = collections.deque() clslevel = self._clslevel[target] for cls in target.__mro__[1:]: if cls in self._clslevel: @@ -145,40 +148,29 @@ class _DispatchDescriptor(RefCollection): to_clear = set() for dispatcher in self._clslevel.values(): to_clear.update(dispatcher) - dispatcher[:] = [] + dispatcher.clear() registry._clear(self, to_clear) def for_modify(self, obj): """Return an event collection which can be modified. 
- For _DispatchDescriptor at the class level of + For _ClsLevelDispatch at the class level of a dispatcher, this returns self. """ return self - def __get__(self, obj, cls): - if obj is None: - return self - elif obj._parent_cls in self._empty_listeners: - ret = self._empty_listeners[obj._parent_cls] - else: - self._empty_listeners[obj._parent_cls] = ret = \ - _EmptyListener(self, obj._parent_cls) - # assigning it to __dict__ means - # memoized for fast re-access. but more memory. - obj.__dict__[self.__name__] = ret - return ret +class _InstanceLevelDispatch(RefCollection): + __slots__ = () -class _HasParentDispatchDescriptor(object): def _adjust_fn_spec(self, fn, named): return self.parent._adjust_fn_spec(fn, named) -class _EmptyListener(_HasParentDispatchDescriptor): - """Serves as a class-level interface to the events - served by a _DispatchDescriptor, when there are no +class _EmptyListener(_InstanceLevelDispatch): + """Serves as a proxy interface to the events + served by a _ClsLevelDispatch, when there are no instance-level events present. Is replaced by _ListenerCollection when instance-level @@ -186,14 +178,17 @@ class _EmptyListener(_HasParentDispatchDescriptor): """ + propagate = frozenset() + listeners = () + + __slots__ = 'parent', 'parent_listeners', 'name' + def __init__(self, parent, target_cls): if target_cls not in parent._clslevel: parent.update_subclass(target_cls) - self.parent = parent # _DispatchDescriptor + self.parent = parent # _ClsLevelDispatch self.parent_listeners = parent._clslevel[target_cls] - self.name = parent.__name__ - self.propagate = frozenset() - self.listeners = () + self.name = parent.name def for_modify(self, obj): """Return an event collection which can be modified. @@ -204,9 +199,11 @@ class _EmptyListener(_HasParentDispatchDescriptor): and returns it. """ - result = _ListenerCollection(self.parent, obj._parent_cls) - if obj.__dict__[self.name] is self: - obj.__dict__[self.name] = result + result = _ListenerCollection(self.parent, obj._instance_cls) + if getattr(obj, self.name) is self: + setattr(obj, self.name, result) + else: + assert isinstance(getattr(obj, self.name), _JoinedListener) return result def _needs_modify(self, *args, **kw): @@ -232,11 +229,10 @@ class _EmptyListener(_HasParentDispatchDescriptor): __nonzero__ = __bool__ -class _CompoundListener(_HasParentDispatchDescriptor): - _exec_once = False +class _CompoundListener(_InstanceLevelDispatch): + __slots__ = '_exec_once_mutex', '_exec_once' - @util.memoized_property - def _exec_once_mutex(self): + def _memoized_attr__exec_once_mutex(self): return threading.Lock() def exec_once(self, *args, **kw): @@ -271,7 +267,7 @@ class _CompoundListener(_HasParentDispatchDescriptor): __nonzero__ = __bool__ -class _ListenerCollection(RefCollection, _CompoundListener): +class _ListenerCollection(_CompoundListener): """Instance-level attributes on instances of :class:`._Dispatch`. Represents a collection of listeners. 
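(A sketch of the pattern recommended in the :func:`.listen` / :func:`.remove` notes above - routing "high velocity" association through one stable listener over a mutable structure; the engine, event name, and handler set are illustrative)::

    from sqlalchemy import create_engine, event

    engine = create_engine("sqlite://")
    handlers = set()   # may be mutated freely; it lives outside the event system

    @event.listens_for(engine, "before_cursor_execute")
    def fan_out(conn, cursor, statement, parameters, context, executemany):
        # the single registered listener dispatches to the mutable set
        for handler in list(handlers):
            handler(statement)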
@@ -281,13 +277,18 @@ class _ListenerCollection(RefCollection, _CompoundListener): """ + __slots__ = ( + 'parent_listeners', 'parent', 'name', 'listeners', + 'propagate', '__weakref__') + def __init__(self, parent, target_cls): if target_cls not in parent._clslevel: parent.update_subclass(target_cls) + self._exec_once = False self.parent_listeners = parent._clslevel[target_cls] self.parent = parent - self.name = parent.__name__ - self.listeners = [] + self.name = parent.name + self.listeners = collections.deque() self.propagate = set() def for_modify(self, obj): @@ -318,14 +319,12 @@ class _ListenerCollection(RefCollection, _CompoundListener): registry._stored_in_collection_multi(self, other, to_associate) def insert(self, event_key, propagate): - if event_key._listen_fn not in self.listeners: - event_key.prepend_to_list(self, self.listeners) + if event_key.prepend_to_list(self, self.listeners): if propagate: self.propagate.add(event_key._listen_fn) def append(self, event_key, propagate): - if event_key._listen_fn not in self.listeners: - event_key.append_to_list(self, self.listeners) + if event_key.append_to_list(self, self.listeners): if propagate: self.propagate.add(event_key._listen_fn) @@ -337,28 +336,14 @@ class _ListenerCollection(RefCollection, _CompoundListener): def clear(self): registry._clear(self, self.listeners) self.propagate.clear() - self.listeners[:] = [] - - -class _JoinedDispatchDescriptor(object): - def __init__(self, name): - self.name = name - - def __get__(self, obj, cls): - if obj is None: - return self - else: - obj.__dict__[self.name] = ret = _JoinedListener( - obj.parent, self.name, - getattr(obj.local, self.name) - ) - return ret + self.listeners.clear() class _JoinedListener(_CompoundListener): - _exec_once = False + __slots__ = 'parent', 'name', 'local', 'parent_listeners' def __init__(self, parent, name, local): + self._exec_once = False self.parent = parent self.name = name self.local = local diff --git a/lib/python3.4/site-packages/sqlalchemy/event/base.py b/lib/python3.4/site-packages/sqlalchemy/event/base.py index 4925f6f..81ef5d8 100644 --- a/lib/python3.4/site-packages/sqlalchemy/event/base.py +++ b/lib/python3.4/site-packages/sqlalchemy/event/base.py @@ -1,5 +1,5 @@ # event/base.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -17,9 +17,11 @@ instances of ``_Dispatch``. """ from __future__ import absolute_import +import weakref + from .. import util -from .attr import _JoinedDispatchDescriptor, \ - _EmptyListener, _DispatchDescriptor +from .attr import _JoinedListener, \ + _EmptyListener, _ClsLevelDispatch _registrars = util.defaultdict(list) @@ -34,10 +36,11 @@ class _UnpickleDispatch(object): """ - def __call__(self, _parent_cls): - for cls in _parent_cls.__mro__: + def __call__(self, _instance_cls): + for cls in _instance_cls.__mro__: if 'dispatch' in cls.__dict__: - return cls.__dict__['dispatch'].dispatch_cls(_parent_cls) + return cls.__dict__['dispatch'].\ + dispatch_cls._for_class(_instance_cls) else: raise AttributeError("No class with a 'dispatch' member present.") @@ -62,16 +65,53 @@ class _Dispatch(object): """ - _events = None - """reference the :class:`.Events` class which this - :class:`._Dispatch` is created for.""" + # in one ORM edge case, an attribute is added to _Dispatch, + # so __dict__ is used in just that case and potentially others. 
+ __slots__ = '_parent', '_instance_cls', '__dict__', '_empty_listeners' - def __init__(self, _parent_cls): - self._parent_cls = _parent_cls + _empty_listener_reg = weakref.WeakKeyDictionary() - @util.classproperty - def _listen(cls): - return cls._events._listen + def __init__(self, parent, instance_cls=None): + self._parent = parent + self._instance_cls = instance_cls + if instance_cls: + try: + self._empty_listeners = self._empty_listener_reg[instance_cls] + except KeyError: + self._empty_listeners = \ + self._empty_listener_reg[instance_cls] = dict( + (ls.name, _EmptyListener(ls, instance_cls)) + for ls in parent._event_descriptors + ) + else: + self._empty_listeners = {} + + def __getattr__(self, name): + # assign EmptyListeners as attributes on demand + # to reduce startup time for new dispatch objects + try: + ls = self._empty_listeners[name] + except KeyError: + raise AttributeError(name) + else: + setattr(self, ls.name, ls) + return ls + + @property + def _event_descriptors(self): + for k in self._event_names: + yield getattr(self, k) + + def _for_class(self, instance_cls): + return self.__class__(self, instance_cls) + + def _for_instance(self, instance): + instance_cls = instance.__class__ + return self._for_class(instance_cls) + + @property + def _listen(self): + return self._events._listen def _join(self, other): """Create a 'join' of this :class:`._Dispatch` and another. @@ -83,36 +123,27 @@ class _Dispatch(object): if '_joined_dispatch_cls' not in self.__class__.__dict__: cls = type( "Joined%s" % self.__class__.__name__, - (_JoinedDispatcher, self.__class__), {} + (_JoinedDispatcher, ), {'__slots__': self._event_names} ) - for ls in _event_descriptors(self): - setattr(cls, ls.name, _JoinedDispatchDescriptor(ls.name)) self.__class__._joined_dispatch_cls = cls return self._joined_dispatch_cls(self, other) def __reduce__(self): - return _UnpickleDispatch(), (self._parent_cls, ) + return _UnpickleDispatch(), (self._instance_cls, ) def _update(self, other, only_propagate=True): """Populate from the listeners in another :class:`_Dispatch` object.""" - - for ls in _event_descriptors(other): + for ls in other._event_descriptors: if isinstance(ls, _EmptyListener): continue getattr(self, ls.name).\ for_modify(self)._update(ls, only_propagate=only_propagate) - @util.hybridmethod def _clear(self): - for attr in dir(self): - if _is_event_name(attr): - getattr(self, attr).for_modify(self).clear() - - -def _event_descriptors(target): - return [getattr(target, k) for k in dir(target) if _is_event_name(k)] + for ls in self._event_descriptors: + ls.for_modify(self).clear() class _EventMeta(type): @@ -131,26 +162,37 @@ def _create_dispatcher_class(cls, classname, bases, dict_): # there's all kinds of ways to do this, # i.e. make a Dispatch class that shares the '_listen' method # of the Event class, this is the straight monkeypatch. 
- dispatch_base = getattr(cls, 'dispatch', _Dispatch) - dispatch_cls = type("%sDispatch" % classname, - (dispatch_base, ), {}) - cls._set_dispatch(cls, dispatch_cls) + if hasattr(cls, 'dispatch'): + dispatch_base = cls.dispatch.__class__ + else: + dispatch_base = _Dispatch - for k in dict_: - if _is_event_name(k): - setattr(dispatch_cls, k, _DispatchDescriptor(cls, dict_[k])) - _registrars[k].append(cls) + event_names = [k for k in dict_ if _is_event_name(k)] + dispatch_cls = type("%sDispatch" % classname, + (dispatch_base, ), {'__slots__': event_names}) + + dispatch_cls._event_names = event_names + + dispatch_inst = cls._set_dispatch(cls, dispatch_cls) + for k in dispatch_cls._event_names: + setattr(dispatch_inst, k, _ClsLevelDispatch(cls, dict_[k])) + _registrars[k].append(cls) + + for super_ in dispatch_cls.__bases__: + if issubclass(super_, _Dispatch) and super_ is not _Dispatch: + for ls in super_._events.dispatch._event_descriptors: + setattr(dispatch_inst, ls.name, ls) + dispatch_cls._event_names.append(ls.name) if getattr(cls, '_dispatch_target', None): cls._dispatch_target.dispatch = dispatcher(cls) def _remove_dispatcher(cls): - for k in dir(cls): - if _is_event_name(k): - _registrars[k].remove(cls) - if not _registrars[k]: - del _registrars[k] + for k in cls.dispatch._event_names: + _registrars[k].remove(cls) + if not _registrars[k]: + del _registrars[k] class Events(util.with_metaclass(_EventMeta, object)): @@ -163,17 +205,30 @@ class Events(util.with_metaclass(_EventMeta, object)): # "self.dispatch._events." # @staticemethod to allow easy "super" calls while in a metaclass # constructor. - cls.dispatch = dispatch_cls + cls.dispatch = dispatch_cls(None) dispatch_cls._events = cls + return cls.dispatch @classmethod def _accept_with(cls, target): # Mapper, ClassManager, Session override this to # also accept classes, scoped_sessions, sessionmakers, etc. 
if hasattr(target, 'dispatch') and ( - isinstance(target.dispatch, cls.dispatch) or - isinstance(target.dispatch, type) and - issubclass(target.dispatch, cls.dispatch) + + isinstance(target.dispatch, cls.dispatch.__class__) or + + + ( + isinstance(target.dispatch, type) and + isinstance(target.dispatch, cls.dispatch.__class__) + ) or + + ( + isinstance(target.dispatch, _JoinedDispatcher) and + isinstance(target.dispatch.parent, cls.dispatch.__class__) + ) + + ): return target else: @@ -195,10 +250,24 @@ class Events(util.with_metaclass(_EventMeta, object)): class _JoinedDispatcher(object): """Represent a connection between two _Dispatch objects.""" + __slots__ = 'local', 'parent', '_instance_cls' + def __init__(self, local, parent): self.local = local self.parent = parent - self._parent_cls = local._parent_cls + self._instance_cls = self.local._instance_cls + + def __getattr__(self, name): + # assign _JoinedListeners as attributes on demand + # to reduce startup time for new dispatch objects + ls = getattr(self.local, name) + jl = _JoinedListener(self.parent, ls.name, ls) + setattr(self, ls.name, jl) + return jl + + @property + def _listen(self): + return self.parent._listen class dispatcher(object): @@ -216,5 +285,5 @@ class dispatcher(object): def __get__(self, obj, cls): if obj is None: return self.dispatch_cls - obj.__dict__['dispatch'] = disp = self.dispatch_cls(cls) + obj.__dict__['dispatch'] = disp = self.dispatch_cls._for_instance(obj) return disp diff --git a/lib/python3.4/site-packages/sqlalchemy/event/legacy.py b/lib/python3.4/site-packages/sqlalchemy/event/legacy.py index 3b1519c..b359bf4 100644 --- a/lib/python3.4/site-packages/sqlalchemy/event/legacy.py +++ b/lib/python3.4/site-packages/sqlalchemy/event/legacy.py @@ -1,5 +1,5 @@ # event/legacy.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -22,8 +22,8 @@ def _legacy_signature(since, argnames, converter=None): return leg -def _wrap_fn_for_legacy(dispatch_descriptor, fn, argspec): - for since, argnames, conv in dispatch_descriptor.legacy_signatures: +def _wrap_fn_for_legacy(dispatch_collection, fn, argspec): + for since, argnames, conv in dispatch_collection.legacy_signatures: if argnames[-1] == "**kw": has_kw = True argnames = argnames[0:-1] @@ -40,7 +40,7 @@ def _wrap_fn_for_legacy(dispatch_descriptor, fn, argspec): return fn(*conv(*args)) else: def wrap_leg(*args, **kw): - argdict = dict(zip(dispatch_descriptor.arg_names, args)) + argdict = dict(zip(dispatch_collection.arg_names, args)) args = [argdict[name] for name in argnames] if has_kw: return fn(*args, **kw) @@ -58,16 +58,16 @@ def _indent(text, indent): ) -def _standard_listen_example(dispatch_descriptor, sample_target, fn): +def _standard_listen_example(dispatch_collection, sample_target, fn): example_kw_arg = _indent( "\n".join( "%(arg)s = kw['%(arg)s']" % {"arg": arg} - for arg in dispatch_descriptor.arg_names[0:2] + for arg in dispatch_collection.arg_names[0:2] ), " ") - if dispatch_descriptor.legacy_signatures: + if dispatch_collection.legacy_signatures: current_since = max(since for since, args, conv - in dispatch_descriptor.legacy_signatures) + in dispatch_collection.legacy_signatures) else: current_since = None text = ( @@ -80,7 +80,7 @@ def _standard_listen_example(dispatch_descriptor, sample_target, fn): "\n # ... 
(event handling logic) ...\n" ) - if len(dispatch_descriptor.arg_names) > 3: + if len(dispatch_collection.arg_names) > 3: text += ( "\n# named argument style (new in 0.9)\n" @@ -96,17 +96,17 @@ def _standard_listen_example(dispatch_descriptor, sample_target, fn): "current_since": " (arguments as of %s)" % current_since if current_since else "", "event_name": fn.__name__, - "has_kw_arguments": ", **kw" if dispatch_descriptor.has_kw else "", - "named_event_arguments": ", ".join(dispatch_descriptor.arg_names), + "has_kw_arguments": ", **kw" if dispatch_collection.has_kw else "", + "named_event_arguments": ", ".join(dispatch_collection.arg_names), "example_kw_arg": example_kw_arg, "sample_target": sample_target } return text -def _legacy_listen_examples(dispatch_descriptor, sample_target, fn): +def _legacy_listen_examples(dispatch_collection, sample_target, fn): text = "" - for since, args, conv in dispatch_descriptor.legacy_signatures: + for since, args, conv in dispatch_collection.legacy_signatures: text += ( "\n# legacy calling style (pre-%(since)s)\n" "@event.listens_for(%(sample_target)s, '%(event_name)s')\n" @@ -117,7 +117,7 @@ def _legacy_listen_examples(dispatch_descriptor, sample_target, fn): "since": since, "event_name": fn.__name__, "has_kw_arguments": " **kw" - if dispatch_descriptor.has_kw else "", + if dispatch_collection.has_kw else "", "named_event_arguments": ", ".join(args), "sample_target": sample_target } @@ -125,8 +125,8 @@ def _legacy_listen_examples(dispatch_descriptor, sample_target, fn): return text -def _version_signature_changes(dispatch_descriptor): - since, args, conv = dispatch_descriptor.legacy_signatures[0] +def _version_signature_changes(dispatch_collection): + since, args, conv = dispatch_collection.legacy_signatures[0] return ( "\n.. versionchanged:: %(since)s\n" " The ``%(event_name)s`` event now accepts the \n" @@ -135,14 +135,14 @@ def _version_signature_changes(dispatch_descriptor): " signature(s) listed above will be automatically \n" " adapted to the new signature." % { "since": since, - "event_name": dispatch_descriptor.__name__, - "named_event_arguments": ", ".join(dispatch_descriptor.arg_names), - "has_kw_arguments": ", **kw" if dispatch_descriptor.has_kw else "" + "event_name": dispatch_collection.name, + "named_event_arguments": ", ".join(dispatch_collection.arg_names), + "has_kw_arguments": ", **kw" if dispatch_collection.has_kw else "" } ) -def _augment_fn_docs(dispatch_descriptor, parent_dispatch_cls, fn): +def _augment_fn_docs(dispatch_collection, parent_dispatch_cls, fn): header = ".. 
container:: event_signatures\n\n"\ " Example argument forms::\n"\ "\n" @@ -152,16 +152,16 @@ def _augment_fn_docs(dispatch_descriptor, parent_dispatch_cls, fn): header + _indent( _standard_listen_example( - dispatch_descriptor, sample_target, fn), + dispatch_collection, sample_target, fn), " " * 8) ) - if dispatch_descriptor.legacy_signatures: + if dispatch_collection.legacy_signatures: text += _indent( _legacy_listen_examples( - dispatch_descriptor, sample_target, fn), + dispatch_collection, sample_target, fn), " " * 8) - text += _version_signature_changes(dispatch_descriptor) + text += _version_signature_changes(dispatch_collection) return util.inject_docstring_text(fn.__doc__, text, diff --git a/lib/python3.4/site-packages/sqlalchemy/event/registry.py b/lib/python3.4/site-packages/sqlalchemy/event/registry.py index a34de3c..e1e9262 100644 --- a/lib/python3.4/site-packages/sqlalchemy/event/registry.py +++ b/lib/python3.4/site-packages/sqlalchemy/event/registry.py @@ -1,5 +1,5 @@ # event/registry.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -37,7 +37,7 @@ listener collections and the listener fn contained _collection_to_key = collections.defaultdict(dict) """ -Given a _ListenerCollection or _DispatchDescriptor, can locate +Given a _ListenerCollection or _ClsLevelListener, can locate all the original listen() arguments and the listener fn contained ref(listenercollection) -> { @@ -71,13 +71,15 @@ def _stored_in_collection(event_key, owner): listen_ref = weakref.ref(event_key._listen_fn) if owner_ref in dispatch_reg: - assert dispatch_reg[owner_ref] == listen_ref - else: - dispatch_reg[owner_ref] = listen_ref + return False + + dispatch_reg[owner_ref] = listen_ref listener_to_key = _collection_to_key[owner_ref] listener_to_key[listen_ref] = key + return True + def _removed_from_collection(event_key, owner): key = event_key._key @@ -138,6 +140,10 @@ class _EventKey(object): """Represent :func:`.listen` arguments. 
""" + __slots__ = ( + 'target', 'identifier', 'fn', 'fn_key', 'fn_wrap', 'dispatch_target' + ) + def __init__(self, target, identifier, fn, dispatch_target, _fn_wrap=None): self.target = target @@ -180,6 +186,17 @@ class _EventKey(object): def listen(self, *args, **kw): once = kw.pop("once", False) + named = kw.pop("named", False) + + target, identifier, fn = \ + self.dispatch_target, self.identifier, self._listen_fn + + dispatch_collection = getattr(target.dispatch, identifier) + + adjusted_fn = dispatch_collection._adjust_fn_spec(fn, named) + + self = self.with_wrapper(adjusted_fn) + if once: self.with_wrapper( util.only_once(self._listen_fn)).listen(*args, **kw) @@ -213,34 +230,33 @@ class _EventKey(object): target, identifier, fn = \ self.dispatch_target, self.identifier, self._listen_fn - dispatch_descriptor = getattr(target.dispatch, identifier) - - fn = dispatch_descriptor._adjust_fn_spec(fn, named) - self = self.with_wrapper(fn) + dispatch_collection = getattr(target.dispatch, identifier) if insert: - dispatch_descriptor.\ + dispatch_collection.\ for_modify(target.dispatch).insert(self, propagate) else: - dispatch_descriptor.\ + dispatch_collection.\ for_modify(target.dispatch).append(self, propagate) @property def _listen_fn(self): return self.fn_wrap or self.fn - def append_value_to_list(self, owner, list_, value): - _stored_in_collection(self, owner) - list_.append(value) - def append_to_list(self, owner, list_): - _stored_in_collection(self, owner) - list_.append(self._listen_fn) + if _stored_in_collection(self, owner): + list_.append(self._listen_fn) + return True + else: + return False def remove_from_list(self, owner, list_): _removed_from_collection(self, owner) list_.remove(self._listen_fn) def prepend_to_list(self, owner, list_): - _stored_in_collection(self, owner) - list_.insert(0, self._listen_fn) + if _stored_in_collection(self, owner): + list_.appendleft(self._listen_fn) + return True + else: + return False diff --git a/lib/python3.4/site-packages/sqlalchemy/events.py b/lib/python3.4/site-packages/sqlalchemy/events.py index 42bbbfc..1abef26 100644 --- a/lib/python3.4/site-packages/sqlalchemy/events.py +++ b/lib/python3.4/site-packages/sqlalchemy/events.py @@ -1,5 +1,5 @@ # sqlalchemy/events.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -338,7 +338,7 @@ class PoolEvents(event.Events): """ - def reset(self, dbapi_connnection, connection_record): + def reset(self, dbapi_connection, connection_record): """Called before the "reset" action occurs for a pooled connection. This event represents @@ -371,7 +371,9 @@ class PoolEvents(event.Events): """Called when a DBAPI connection is to be "invalidated". This event is called any time the :meth:`._ConnectionRecord.invalidate` - method is invoked, either from API usage or via "auto-invalidation". + method is invoked, either from API usage or via "auto-invalidation", + without the ``soft`` flag. + The event occurs before a final attempt to call ``.close()`` on the connection occurs. @@ -392,6 +394,21 @@ class PoolEvents(event.Events): """ + def soft_invalidate(self, dbapi_connection, connection_record, exception): + """Called when a DBAPI connection is to be "soft invalidated". + + This event is called any time the :meth:`._ConnectionRecord.invalidate` + method is invoked with the ``soft`` flag. 
+ + Soft invalidation refers to when the connection record that tracks + this connection will force a reconnect after the current connection + is checked in. It does not actively close the dbapi_connection + at the point at which it is called. + + .. versionadded:: 1.0.3 + + """ + class ConnectionEvents(event.Events): """Available events for :class:`.Connectable`, which includes @@ -420,6 +437,12 @@ class ConnectionEvents(event.Events): context, executemany): log.info("Received statement: %s" % statement) + When the methods are called with a `statement` parameter, such as in + :meth:`.after_cursor_execute`, :meth:`.before_cursor_execute` and + :meth:`.dbapi_error`, the statement is the exact SQL string that was + prepared for transmission to the DBAPI ``cursor`` in the connection's + :class:`.Dialect`. + The :meth:`.before_execute` and :meth:`.before_cursor_execute` events can also be established with the ``retval=True`` flag, which allows modification of the statement and parameters to be sent @@ -470,7 +493,8 @@ class ConnectionEvents(event.Events): @classmethod def _listen(cls, event_key, retval=False): target, identifier, fn = \ - event_key.dispatch_target, event_key.identifier, event_key.fn + event_key.dispatch_target, event_key.identifier, \ + event_key._listen_fn target._has_events = True @@ -548,9 +572,8 @@ class ConnectionEvents(event.Events): def before_cursor_execute(self, conn, cursor, statement, parameters, context, executemany): """Intercept low-level cursor execute() events before execution, - receiving the string - SQL statement and DBAPI-specific parameter list to be invoked - against a cursor. + receiving the string SQL statement and DBAPI-specific parameter list to + be invoked against a cursor. This event is a good choice for logging as well as late modifications to the SQL string. It's less ideal for parameter modifications except @@ -570,7 +593,7 @@ class ConnectionEvents(event.Events): :param conn: :class:`.Connection` object :param cursor: DBAPI cursor object - :param statement: string SQL statement + :param statement: string SQL statement, as to be passed to the DBAPI :param parameters: Dictionary, tuple, or list of parameters being passed to the ``execute()`` or ``executemany()`` method of the DBAPI ``cursor``. In some cases may be ``None``. @@ -595,7 +618,7 @@ class ConnectionEvents(event.Events): :param cursor: DBAPI cursor object. Will have results pending if the statement was a SELECT, but these should not be consumed as they will be needed by the :class:`.ResultProxy`. - :param statement: string SQL statement + :param statement: string SQL statement, as passed to the DBAPI :param parameters: Dictionary, tuple, or list of parameters being passed to the ``execute()`` or ``executemany()`` method of the DBAPI ``cursor``. In some cases may be ``None``. @@ -639,7 +662,7 @@ class ConnectionEvents(event.Events): :param conn: :class:`.Connection` object :param cursor: DBAPI cursor object - :param statement: string SQL statement + :param statement: string SQL statement, as passed to the DBAPI :param parameters: Dictionary, tuple, or list of parameters being passed to the ``execute()`` or ``executemany()`` method of the DBAPI ``cursor``. In some cases may be ``None``. @@ -701,6 +724,16 @@ class ConnectionEvents(event.Events): "failed" in str(context.original_exception): raise MySpecialException("failed operation") + .. 
warning:: Because the :meth:`.ConnectionEvents.handle_error` + event specifically provides for exceptions to be re-thrown as + the ultimate exception raised by the failed statement, + **stack traces will be misleading** if the user-defined event + handler itself fails and throws an unexpected exception; + the stack trace may not illustrate the actual code line that + failed! It is advised to code carefully here and use + logging and/or inline debugging if unexpected exceptions are + occurring. + + Alternatively, a "chained" style of event handling can be used, + by configuring the handler with the ``retval=True`` + modifier and returning the new exception instance from the @@ -733,6 +766,22 @@ class ConnectionEvents(event.Events): .. versionadded:: 0.9.7 Added the :meth:`.ConnectionEvents.handle_error` hook. + .. versionchanged:: 1.0.0 The :meth:`.handle_error` event is now + invoked when an :class:`.Engine` fails during the initial + call to :meth:`.Engine.connect`, as well as when a + :class:`.Connection` object encounters an error during a + reconnect operation. + + .. versionchanged:: 1.0.0 The :meth:`.handle_error` event is + not fired off when a dialect makes use of the + ``skip_user_error_events`` execution option. This is used + by dialects which intend to catch SQLAlchemy-specific exceptions + within specific operations, such as when the MySQL dialect detects + a table not present within the ``has_table()`` dialect method. + Prior to 1.0.0, code which implements :meth:`.handle_error` needs + to ensure that exceptions thrown in these scenarios are re-raised + without modification. + """ def engine_connect(self, conn, branch): @@ -770,6 +819,11 @@ class ConnectionEvents(event.Events): .. seealso:: + :ref:`pool_disconnects_pessimistic` - illustrates how to use + :meth:`.ConnectionEvents.engine_connect` + to transparently ensure pooled connections are connected to the + database. + :meth:`.PoolEvents.checkout` the lower-level pool checkout event for an individual DBAPI connection @@ -833,6 +887,23 @@ class ConnectionEvents(event.Events): """ + def engine_disposed(self, engine): + """Intercept when the :meth:`.Engine.dispose` method is called. + + The :meth:`.Engine.dispose` method instructs the engine to + "dispose" of its connection pool (e.g. :class:`.Pool`), and + replaces it with a new one. Disposing of the old pool has the + effect that existing checked-in connections are closed. The new + pool does not establish any new connections until it is first used. + + This event can be used to indicate that resources related to the + :class:`.Engine` should also be cleaned up, keeping in mind that the + :class:`.Engine` can still be used for new requests in which case + it re-acquires connection resources. + + .. versionadded:: 1.0.5 + + """ + def begin(self, conn): """Intercept begin() events. @@ -985,6 +1056,23 @@ class DialectEvents(event.Events): else: return target + def do_connect(self, dialect, conn_rec, cargs, cparams): + """Receive connection arguments before a connection is made. + + Return a DBAPI connection to halt further events from invoking; + the returned connection will be used. + + Alternatively, the event can manipulate the cargs and/or cparams + collections; cargs will always be a Python list that can be mutated + in-place and cparams a Python dictionary. Return None to + allow control to pass to the next event handler and ultimately + to allow the dialect to connect normally, given the updated + arguments. + + ..
versionadded:: 1.0.3 + + """ + def do_executemany(self, cursor, statement, parameters, context): """Receive a cursor to have executemany() called. diff --git a/lib/python3.4/site-packages/sqlalchemy/exc.py b/lib/python3.4/site-packages/sqlalchemy/exc.py index 7d333fc..2729842 100644 --- a/lib/python3.4/site-packages/sqlalchemy/exc.py +++ b/lib/python3.4/site-packages/sqlalchemy/exc.py @@ -1,5 +1,5 @@ # sqlalchemy/exc.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -13,8 +13,6 @@ raised as a result of DBAPI exceptions are all subclasses of """ -import traceback - class SQLAlchemyError(Exception): """Generic error class.""" @@ -54,8 +52,7 @@ class CircularDependencyError(SQLAlchemyError): or pre-deassociate one of the foreign key constrained values. The ``post_update`` flag described at :ref:`post_update` can resolve this cycle. - * In a :meth:`.MetaData.create_all`, :meth:`.MetaData.drop_all`, - :attr:`.MetaData.sorted_tables` operation, two :class:`.ForeignKey` + * In a :attr:`.MetaData.sorted_tables` operation, two :class:`.ForeignKey` or :class:`.ForeignKeyConstraint` objects mutually refer to each other. Apply the ``use_alter=True`` flag to one or both, see :ref:`use_alter`. @@ -63,7 +60,7 @@ class CircularDependencyError(SQLAlchemyError): """ def __init__(self, message, cycles, edges, msg=None): if msg is None: - message += " Cycles: %r all edges: %r" % (cycles, edges) + message += " (%s)" % ", ".join(repr(s) for s in cycles) else: message = msg SQLAlchemyError.__init__(self, message) @@ -238,14 +235,16 @@ class StatementError(SQLAlchemyError): def __str__(self): from sqlalchemy.sql import util - params_repr = util._repr_params(self.params, 10) + details = [SQLAlchemyError.__str__(self)] + if self.statement: + details.append("[SQL: %r]" % self.statement) + if self.params: + params_repr = util._repr_params(self.params, 10) + details.append("[parameters: %r]" % params_repr) return ' '.join([ "(%s)" % det for det in self.detail - ] + [ - SQLAlchemyError.__str__(self), - repr(self.statement), repr(params_repr) - ]) + ] + details) def __unicode__(self): return self.__str__() @@ -277,26 +276,35 @@ class DBAPIError(StatementError): @classmethod def instance(cls, statement, params, orig, dbapi_base_err, - connection_invalidated=False): + connection_invalidated=False, + dialect=None): # Don't ever wrap these, just return them directly as if # DBAPIError didn't exist. - if isinstance(orig, (KeyboardInterrupt, SystemExit, DontWrapMixin)): + if (isinstance(orig, BaseException) and + not isinstance(orig, Exception)) or \ + isinstance(orig, DontWrapMixin): return orig if orig is not None: # not a DBAPI error, statement is present. 
# raise a StatementError if not isinstance(orig, dbapi_base_err) and statement: - msg = traceback.format_exception_only( - orig.__class__, orig)[-1].strip() return StatementError( - "%s (original cause: %s)" % (str(orig), msg), + "(%s.%s) %s" % + (orig.__class__.__module__, orig.__class__.__name__, + orig), statement, params, orig ) - name, glob = orig.__class__.__name__, globals() - if name in glob and issubclass(glob[name], DBAPIError): - cls = glob[name] + glob = globals() + for super_ in orig.__class__.__mro__: + name = super_.__name__ + if dialect: + name = dialect.dbapi_exception_translation_map.get( + name, name) + if name in glob and issubclass(glob[name], DBAPIError): + cls = glob[name] + break return cls(statement, params, orig, connection_invalidated) @@ -307,13 +315,12 @@ class DBAPIError(StatementError): def __init__(self, statement, params, orig, connection_invalidated=False): try: text = str(orig) - except (KeyboardInterrupt, SystemExit): - raise except Exception as e: text = 'Error in str() of DB-API-generated exception: ' + str(e) StatementError.__init__( self, - '(%s) %s' % (orig.__class__.__name__, text), + '(%s.%s) %s' % ( + orig.__class__.__module__, orig.__class__.__name__, text, ), statement, params, orig diff --git a/lib/python3.4/site-packages/sqlalchemy/ext/__init__.py b/lib/python3.4/site-packages/sqlalchemy/ext/__init__.py index d213a0d..1c8a59a 100644 --- a/lib/python3.4/site-packages/sqlalchemy/ext/__init__.py +++ b/lib/python3.4/site-packages/sqlalchemy/ext/__init__.py @@ -1,6 +1,11 @@ # ext/__init__.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php + +from .. import util as _sa_util + +_sa_util.dependencies.resolve_all("sqlalchemy.ext") + diff --git a/lib/python3.4/site-packages/sqlalchemy/ext/associationproxy.py b/lib/python3.4/site-packages/sqlalchemy/ext/associationproxy.py index a987ab4..fdc44f3 100644 --- a/lib/python3.4/site-packages/sqlalchemy/ext/associationproxy.py +++ b/lib/python3.4/site-packages/sqlalchemy/ext/associationproxy.py @@ -1,5 +1,5 @@ # ext/associationproxy.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -77,16 +77,16 @@ def association_proxy(target_collection, attr, **kw): ASSOCIATION_PROXY = util.symbol('ASSOCIATION_PROXY') -"""Symbol indicating an :class:`_InspectionAttr` that's +"""Symbol indicating an :class:`InspectionAttr` that's of type :class:`.AssociationProxy`. - Is assigned to the :attr:`._InspectionAttr.extension_type` + Is assigned to the :attr:`.InspectionAttr.extension_type` attibute. """ -class AssociationProxy(interfaces._InspectionAttr): +class AssociationProxy(interfaces.InspectionAttrInfo): """A descriptor that presents a read/write view of an object attribute.""" is_attribute = False @@ -94,7 +94,7 @@ class AssociationProxy(interfaces._InspectionAttr): def __init__(self, target_collection, attr, creator=None, getset_factory=None, proxy_factory=None, - proxy_bulk_set=None): + proxy_bulk_set=None, info=None): """Construct a new :class:`.AssociationProxy`. The :func:`.association_proxy` function is provided as the usual @@ -138,6 +138,11 @@ class AssociationProxy(interfaces._InspectionAttr): :param proxy_bulk_set: Optional, use with proxy_factory. 
See the _set() method for details. + :param info: optional, will be assigned to + :attr:`.AssociationProxy.info` if present. + + .. versionadded:: 1.0.9 + """ self.target_collection = target_collection self.value_attr = attr @@ -150,6 +155,8 @@ class AssociationProxy(interfaces._InspectionAttr): self.key = '_%s_%s_%s' % ( type(self).__name__, target_collection, id(self)) self.collection_class = None + if info: + self.info = info @property def remote_attr(self): @@ -365,13 +372,17 @@ class AssociationProxy(interfaces._InspectionAttr): operators of the underlying proxied attributes. """ - - if self._value_is_scalar: - value_expr = getattr( - self.target_class, self.value_attr).has(criterion, **kwargs) + if self._target_is_object: + if self._value_is_scalar: + value_expr = getattr( + self.target_class, self.value_attr).has( + criterion, **kwargs) + else: + value_expr = getattr( + self.target_class, self.value_attr).any( + criterion, **kwargs) else: - value_expr = getattr( - self.target_class, self.value_attr).any(criterion, **kwargs) + value_expr = criterion # check _value_is_scalar here, otherwise # we're scalar->scalar - call .any() so that @@ -527,7 +538,10 @@ class _AssociationList(_AssociationCollection): return self.setter(object, value) def __getitem__(self, index): - return self._get(self.col[index]) + if not isinstance(index, slice): + return self._get(self.col[index]) + else: + return [self._get(member) for member in self.col[index]] def __setitem__(self, index, value): if not isinstance(index, slice): @@ -589,7 +603,7 @@ class _AssociationList(_AssociationCollection): for member in self.col: yield self._get(member) - raise StopIteration + return def append(self, value): item = self._create(value) @@ -893,7 +907,7 @@ class _AssociationSet(_AssociationCollection): """ for member in self.col: yield self._get(member) - raise StopIteration + return def add(self, value): if value not in self: diff --git a/lib/python3.4/site-packages/sqlalchemy/ext/automap.py b/lib/python3.4/site-packages/sqlalchemy/ext/automap.py index 121285a..023d11c 100644 --- a/lib/python3.4/site-packages/sqlalchemy/ext/automap.py +++ b/lib/python3.4/site-packages/sqlalchemy/ext/automap.py @@ -1,5 +1,5 @@ # ext/automap.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -11,12 +11,6 @@ schema, typically though not necessarily one which is reflected. .. versionadded:: 0.9.1 Added :mod:`sqlalchemy.ext.automap`. -.. note:: - - The :mod:`sqlalchemy.ext.automap` extension should be considered - **experimental** as of 0.9.1. Featureset and API stability is - not guaranteed at this time. - It is hoped that the :class:`.AutomapBase` system provides a quick and modernized solution to the problem that the very famous `SQLSoup `_ @@ -67,7 +61,7 @@ asking it to reflect the schema and produce mappings:: Above, calling :meth:`.AutomapBase.prepare` while passing along the :paramref:`.AutomapBase.prepare.reflect` parameter indicates that the :meth:`.MetaData.reflect` method will be called on this declarative base -classes' :class:`.MetaData` collection; then, each viable +classes' :class:`.MetaData` collection; then, each **viable** :class:`.Table` within the :class:`.MetaData` will get a new mapped class generated automatically. 
The :class:`.ForeignKeyConstraint` objects which link the various tables together will be used to produce new, bidirectional @@ -76,6 +70,12 @@ follow along a default naming scheme that we can customize. At this point, our basic mapping consisting of related ``User`` and ``Address`` classes is ready to use in the traditional way. +.. note:: By **viable**, we mean that for a table to be mapped, it must + specify a primary key. Additionally, if the table is detected as being + a pure association table between two other tables, it will not be directly + mapped and will instead be configured as a many-to-many table between + the mappings for the two referring tables. + Generating Mappings from an Existing MetaData ============================================= @@ -111,8 +111,8 @@ explicit table declaration:: User, Address, Order = Base.classes.user, Base.classes.address,\ Base.classes.user_order -Specifying Classes Explcitly -============================ +Specifying Classes Explicitly +============================= The :mod:`.sqlalchemy.ext.automap` extension allows classes to be defined explicitly, in a way similar to that of the :class:`.DeferredReflection` class. @@ -188,7 +188,7 @@ scheme for class names and a "pluralizer" for collection names using the "'words_and_underscores' -> 'WordsAndUnderscores'" return str(tablename[0].upper() + \\ - re.sub(r'_(\w)', lambda m: m.group(1).upper(), tablename[1:])) + re.sub(r'_([a-z])', lambda m: m.group(1).upper(), tablename[1:])) _pluralizer = inflect.engine() def pluralize_collection(base, local_cls, referred_cls, constraint): @@ -196,10 +196,9 @@ scheme for class names and a "pluralizer" for collection names using the "'SomeTerm' -> 'some_terms'" referred_name = referred_cls.__name__ - uncamelized = referred_name[0].lower() + \\ - re.sub(r'\W', - lambda m: "_%s" % m.group(0).lower(), - referred_name[1:]) + uncamelized = re.sub(r'[A-Z]', + lambda m: "_%s" % m.group(0).lower(), + referred_name)[1:] pluralized = _pluralizer.plural(uncamelized) return pluralized @@ -243,7 +242,26 @@ follows: one-to-many backref will be created on the referred class referring to this class. -4. The names of the relationships are determined using the +4. If any of the columns that are part of the :class:`.ForeignKeyConstraint` + are not nullable (e.g. ``nullable=False``), a + :paramref:`~.relationship.cascade` keyword argument + of ``all, delete-orphan`` will be added to the keyword arguments to + be passed to the relationship or backref. If the + :class:`.ForeignKeyConstraint` reports that + :paramref:`.ForeignKeyConstraint.ondelete` + is set to ``CASCADE`` for a not null or ``SET NULL`` for a nullable + set of columns, the option :paramref:`~.relationship.passive_deletes` + flag is set to ``True`` in the set of relationship keyword arguments. + Note that not all backends support reflection of ON DELETE. + + .. versionadded:: 1.0.0 - automap will detect non-nullable foreign key + constraints when producing a one-to-many relationship and establish + a default cascade of ``all, delete-orphan`` if so; additionally, + if the constraint specifies :paramref:`.ForeignKeyConstraint.ondelete` + of ``CASCADE`` for non-nullable or ``SET NULL`` for nullable columns, + the ``passive_deletes=True`` option is also added. + +5. The names of the relationships are determined using the :paramref:`.AutomapBase.prepare.name_for_scalar_relationship` and :paramref:`.AutomapBase.prepare.name_for_collection_relationship` callable functions. 
It is important to note that the default relationship @@ -252,18 +270,18 @@ follows: alternate class naming scheme, that's the name from which the relationship name will be derived. -5. The classes are inspected for an existing mapped property matching these +6. The classes are inspected for an existing mapped property matching these names. If one is detected on one side, but none on the other side, :class:`.AutomapBase` attempts to create a relationship on the missing side, then uses the :paramref:`.relationship.back_populates` parameter in order to point the new relationship to the other side. -6. In the usual case where no relationship is on either side, +7. In the usual case where no relationship is on either side, :meth:`.AutomapBase.prepare` produces a :func:`.relationship` on the "many-to-one" side and matches it to the other using the :paramref:`.relationship.backref` parameter. -7. Production of the :func:`.relationship` and optionally the :func:`.backref` +8. Production of the :func:`.relationship` and optionally the :func:`.backref` is handed off to the :paramref:`.AutomapBase.prepare.generate_relationship` function, which can be supplied by the end-user in order to augment the arguments passed to :func:`.relationship` or :func:`.backref` or to @@ -606,7 +624,7 @@ def generate_relationship( :param base: the :class:`.AutomapBase` class doing the prepare. :param direction: indicate the "direction" of the relationship; this will - be one of :data:`.ONETOMANY`, :data:`.MANYTOONE`, :data:`.MANYTOONE`. + be one of :data:`.ONETOMANY`, :data:`.MANYTOONE`, :data:`.MANYTOMANY`. :param return_fn: the function that is used by default to create the relationship. This will be either :func:`.relationship` or @@ -877,6 +895,19 @@ def _relationships_for_fks(automap_base, map_config, table_to_map_config, constraint ) + o2m_kws = {} + nullable = False not in set([fk.parent.nullable for fk in fks]) + if not nullable: + o2m_kws['cascade'] = "all, delete-orphan" + + if constraint.ondelete and \ + constraint.ondelete.lower() == "cascade": + o2m_kws['passive_deletes'] = True + else: + if constraint.ondelete and \ + constraint.ondelete.lower() == "set null": + o2m_kws['passive_deletes'] = True + create_backref = backref_name not in referred_cfg.properties if relationship_name not in map_config.properties: @@ -885,7 +916,8 @@ def _relationships_for_fks(automap_base, map_config, table_to_map_config, automap_base, interfaces.ONETOMANY, backref, backref_name, referred_cls, local_cls, - collection_class=collection_class) + collection_class=collection_class, + **o2m_kws) else: backref_obj = None rel = generate_relationship(automap_base, @@ -916,7 +948,8 @@ def _relationships_for_fks(automap_base, map_config, table_to_map_config, fk.parent for fk in constraint.elements], back_populates=relationship_name, - collection_class=collection_class) + collection_class=collection_class, + **o2m_kws) if rel is not None: referred_cfg.properties[backref_name] = rel map_config.properties[ diff --git a/lib/python3.4/site-packages/sqlalchemy/ext/baked.py b/lib/python3.4/site-packages/sqlalchemy/ext/baked.py new file mode 100644 index 0000000..2504be9 --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/ext/baked.py @@ -0,0 +1,523 @@ +# sqlalchemy/ext/baked.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php +"""Baked query extension. 
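Before moving on to the new ``baked.py`` module, here is a hedged sketch of the ``o2m_kws`` logic just above (step 4 in the earlier list), run against a throwaway SQLite schema. The table and column names are hypothetical, and whether ``passive_deletes`` actually gets set depends on the backend reflecting ON DELETE, which not all dialects do::

    from sqlalchemy import create_engine
    from sqlalchemy.ext.automap import automap_base

    engine = create_engine('sqlite://')
    engine.execute("CREATE TABLE user (id INTEGER PRIMARY KEY)")
    engine.execute(
        "CREATE TABLE address ("
        "id INTEGER PRIMARY KEY, "
        "user_id INTEGER NOT NULL "
        "REFERENCES user(id) ON DELETE CASCADE)")

    Base = automap_base()
    Base.prepare(engine, reflect=True)
    User = Base.classes.user

    # user_id is NOT NULL, so the generated one-to-many should carry
    # cascade="all, delete-orphan"; ON DELETE CASCADE additionally
    # yields passive_deletes=True where the dialect reflects it
    rel = User.address_collection.property
    print(rel.cascade)
    print(rel.passive_deletes)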
+ +Provides a creational pattern for the :class:`.query.Query` object which +allows the fully constructed object, Core select statement, and string +compiled result to be fully cached. + + +""" + +from ..orm.query import Query +from ..orm import strategies, attributes, properties, \ + strategy_options, util as orm_util, interfaces +from .. import log as sqla_log +from ..sql import util as sql_util +from ..orm import exc as orm_exc +from .. import exc as sa_exc +from .. import util + +import copy +import logging + +log = logging.getLogger(__name__) + + +class BakedQuery(object): + """A builder object for :class:`.query.Query` objects.""" + + __slots__ = 'steps', '_bakery', '_cache_key', '_spoiled' + + def __init__(self, bakery, initial_fn, args=()): + self._cache_key = () + self._update_cache_key(initial_fn, args) + self.steps = [initial_fn] + self._spoiled = False + self._bakery = bakery + + @classmethod + def bakery(cls, size=200): + """Construct a new bakery.""" + + _bakery = util.LRUCache(size) + + def call(initial_fn, *args): + return cls(_bakery, initial_fn, args) + + return call + + def _clone(self): + b1 = BakedQuery.__new__(BakedQuery) + b1._cache_key = self._cache_key + b1.steps = list(self.steps) + b1._bakery = self._bakery + b1._spoiled = self._spoiled + return b1 + + def _update_cache_key(self, fn, args=()): + self._cache_key += (fn.__code__,) + args + + def __iadd__(self, other): + if isinstance(other, tuple): + self.add_criteria(*other) + else: + self.add_criteria(other) + return self + + def __add__(self, other): + if isinstance(other, tuple): + return self.with_criteria(*other) + else: + return self.with_criteria(other) + + def add_criteria(self, fn, *args): + """Add a criteria function to this :class:`.BakedQuery`. + + This is equivalent to using the ``+=`` operator to + modify a :class:`.BakedQuery` in-place. + + """ + self._update_cache_key(fn, args) + self.steps.append(fn) + return self + + def with_criteria(self, fn, *args): + """Add a criteria function to a :class:`.BakedQuery` cloned from this one. + + This is equivalent to using the ``+`` operator to + produce a new :class:`.BakedQuery` with modifications. + + """ + return self._clone().add_criteria(fn, *args) + + def for_session(self, session): + """Return a :class:`.Result` object for this :class:`.BakedQuery`. + + This is equivalent to calling the :class:`.BakedQuery` as a + Python callable, e.g. ``result = my_baked_query(session)``. + + """ + return Result(self, session) + + def __call__(self, session): + return self.for_session(session) + + def spoil(self, full=False): + """Cancel any query caching that will occur on this BakedQuery object. + + The BakedQuery can continue to be used normally, however additional + creational functions will not be cached; they will be called + on every invocation. + + This is to support the case where a particular step in constructing + a baked query disqualifies the query from being cacheable, such + as a variant that relies upon some uncacheable value. + + :param full: if False, only functions added to this + :class:`.BakedQuery` object subsequent to the spoil step will be + non-cached; the state of the :class:`.BakedQuery` up until + this point will be pulled from the cache. If True, then the + entire :class:`.Query` object is built from scratch each + time, with all creational functions being called on each + invocation. 
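The intended pattern for the builder shown above is a module-level ``bakery`` plus a series of lambdas; per ``_update_cache_key``, each lambda's ``__code__`` object (plus any args) extends the cache key, so both the :class:`.Query` construction and its compiled SQL happen only once. A minimal sketch, assuming a hypothetical mapped ``User`` class::

    from sqlalchemy import bindparam
    from sqlalchemy.ext import baked

    # one LRU cache (default size 200) shared by every call below
    bakery = baked.bakery()

    def lookup_user(session, username):
        baked_query = bakery(lambda session: session.query(User))
        # bindparam() keeps the cached statement parameterized
        baked_query += lambda q: q.filter(User.name == bindparam('username'))
        return baked_query(session).params(username=username).all()

Note that ``+=`` is the in-place form of :meth:`.BakedQuery.add_criteria`, per the operator methods above.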
+ + """ + if not full: + _spoil_point = self._clone() + _spoil_point._cache_key += ('_query_only', ) + self.steps = [_spoil_point._retrieve_baked_query] + self._spoiled = True + return self + + def _retrieve_baked_query(self, session): + query = self._bakery.get(self._cache_key, None) + if query is None: + query = self._as_query(session) + self._bakery[self._cache_key] = query.with_session(None) + return query.with_session(session) + + def _bake(self, session): + query = self._as_query(session) + + context = query._compile_context() + self._bake_subquery_loaders(session, context) + context.session = None + context.query = query = context.query.with_session(None) + query._execution_options = query._execution_options.union( + {"compiled_cache": self._bakery} + ) + # we'll be holding onto the query for some of its state, + # so delete some compilation-use-only attributes that can take up + # space + for attr in ( + '_correlate', '_from_obj', '_mapper_adapter_map', + '_joinpath', '_joinpoint'): + query.__dict__.pop(attr, None) + self._bakery[self._cache_key] = context + return context + + def _as_query(self, session): + query = self.steps[0](session) + + for step in self.steps[1:]: + query = step(query) + return query + + def _bake_subquery_loaders(self, session, context): + """convert subquery eager loaders in the cache into baked queries. + + For subquery eager loading to work, all we need here is that the + Query point to the correct session when it is run. However, since + we are "baking" anyway, we may as well also turn the query into + a "baked" query so that we save on performance too. + + """ + context.attributes['baked_queries'] = baked_queries = [] + for k, v in list(context.attributes.items()): + if isinstance(v, Query): + if 'subquery' in k: + bk = BakedQuery(self._bakery, lambda *args: v) + bk._cache_key = self._cache_key + k + bk._bake(session) + baked_queries.append((k, bk._cache_key, v)) + del context.attributes[k] + + def _unbake_subquery_loaders(self, session, context, params): + """Retrieve subquery eager loaders stored by _bake_subquery_loaders + and turn them back into Result objects that will iterate just + like a Query object. + + """ + for k, cache_key, query in context.attributes["baked_queries"]: + bk = BakedQuery(self._bakery, lambda sess: query.with_session(sess)) + bk._cache_key = cache_key + context.attributes[k] = bk.for_session(session).params(**params) + + +class Result(object): + """Invokes a :class:`.BakedQuery` against a :class:`.Session`. + + The :class:`.Result` object is where the actual :class:`.query.Query` + object gets created, or retrieved from the cache, + against a target :class:`.Session`, and is then invoked for results. 
+ + """ + __slots__ = 'bq', 'session', '_params' + + def __init__(self, bq, session): + self.bq = bq + self.session = session + self._params = {} + + def params(self, *args, **kw): + """Specify parameters to be replaced into the string SQL statement.""" + + if len(args) == 1: + kw.update(args[0]) + elif len(args) > 0: + raise sa_exc.ArgumentError( + "params() takes zero or one positional argument, " + "which is a dictionary.") + self._params.update(kw) + return self + + def _as_query(self): + return self.bq._as_query(self.session).params(self._params) + + def __str__(self): + return str(self._as_query()) + + def __iter__(self): + bq = self.bq + if bq._spoiled: + return iter(self._as_query()) + + baked_context = bq._bakery.get(bq._cache_key, None) + if baked_context is None: + baked_context = bq._bake(self.session) + + context = copy.copy(baked_context) + context.session = self.session + context.attributes = context.attributes.copy() + + bq._unbake_subquery_loaders(self.session, context, self._params) + + context.statement.use_labels = True + if context.autoflush and not context.populate_existing: + self.session._autoflush() + return context.query.params(self._params).\ + with_session(self.session)._execute_and_instances(context) + + def first(self): + """Return the first row. + + Equivalent to :meth:`.Query.first`. + + """ + bq = self.bq.with_criteria(lambda q: q.slice(0, 1)) + ret = list(bq.for_session(self.session).params(self._params)) + if len(ret) > 0: + return ret[0] + else: + return None + + def one(self): + """Return exactly one result or raise an exception. + + Equivalent to :meth:`.Query.one`. + + """ + ret = list(self) + + l = len(ret) + if l == 1: + return ret[0] + elif l == 0: + raise orm_exc.NoResultFound("No row was found for one()") + else: + raise orm_exc.MultipleResultsFound( + "Multiple rows were found for one()") + + def one_or_none(self): + """Return one or zero results, or raise an exception for multiple + rows. + + Equivalent to :meth:`.Query.one_or_none`. + + .. versionadded:: 1.0.9 + + """ + ret = list(self) + + l = len(ret) + if l == 1: + return ret[0] + elif l == 0: + return None + else: + raise orm_exc.MultipleResultsFound( + "Multiple rows were found for one_or_none()") + + def all(self): + """Return all rows. + + Equivalent to :meth:`.Query.all`. + + """ + return list(self) + + def get(self, ident): + """Retrieve an object based on identity. + + Equivalent to :meth:`.Query.get`. 
+ + """ + + query = self.bq.steps[0](self.session) + return query._get_impl(ident, self._load_on_ident) + + def _load_on_ident(self, query, key): + """Load the given identity key from the database.""" + + ident = key[1] + + mapper = query._mapper_zero() + + _get_clause, _get_params = mapper._get_clause + + def setup(query): + _lcl_get_clause = _get_clause + q = query._clone() + q._get_condition() + q._order_by = None + + # None present in ident - turn those comparisons + # into "IS NULL" + if None in ident: + nones = set([ + _get_params[col].key for col, value in + zip(mapper.primary_key, ident) if value is None + ]) + _lcl_get_clause = sql_util.adapt_criterion_to_null( + _lcl_get_clause, nones) + + _lcl_get_clause = q._adapt_clause(_lcl_get_clause, True, False) + q._criterion = _lcl_get_clause + return q + + # cache the query against a key that includes + # which positions in the primary key are NULL + # (remember, we can map to an OUTER JOIN) + bq = self.bq + + # add the clause we got from mapper._get_clause to the cache + # key so that if a race causes multiple calls to _get_clause, + # we've cached on ours + bq = bq._clone() + bq._cache_key += (_get_clause, ) + + bq = bq.with_criteria(setup, tuple(elem is None for elem in ident)) + + params = dict([ + (_get_params[primary_key].key, id_val) + for id_val, primary_key in zip(ident, mapper.primary_key) + ]) + + result = list(bq.for_session(self.session).params(**params)) + l = len(result) + if l > 1: + raise orm_exc.MultipleResultsFound() + elif l: + return result[0] + else: + return None + + +def bake_lazy_loaders(): + """Enable the use of baked queries for all lazyloaders systemwide. + + This operation should be safe for all lazy loaders, and will reduce + Python overhead for these operations. + + """ + BakedLazyLoader._strategy_keys[:] = [] + + properties.RelationshipProperty.strategy_for( + lazy="select")(BakedLazyLoader) + properties.RelationshipProperty.strategy_for( + lazy=True)(BakedLazyLoader) + properties.RelationshipProperty.strategy_for( + lazy="baked_select")(BakedLazyLoader) + + strategies.LazyLoader._strategy_keys[:] = BakedLazyLoader._strategy_keys[:] + + +def unbake_lazy_loaders(): + """Disable the use of baked queries for all lazyloaders systemwide. + + This operation reverts the changes produced by :func:`.bake_lazy_loaders`. 
+ + """ + strategies.LazyLoader._strategy_keys[:] = [] + BakedLazyLoader._strategy_keys[:] = [] + + properties.RelationshipProperty.strategy_for( + lazy="select")(strategies.LazyLoader) + properties.RelationshipProperty.strategy_for( + lazy=True)(strategies.LazyLoader) + properties.RelationshipProperty.strategy_for( + lazy="baked_select")(BakedLazyLoader) + assert strategies.LazyLoader._strategy_keys + + +@sqla_log.class_logger +@properties.RelationshipProperty.strategy_for(lazy="baked_select") +class BakedLazyLoader(strategies.LazyLoader): + + def _emit_lazyload(self, session, state, ident_key, passive): + q = BakedQuery( + self.mapper._compiled_cache, + lambda session: session.query(self.mapper)) + q.add_criteria( + lambda q: q._adapt_all_clauses()._with_invoke_all_eagers(False), + self.parent_property) + + if not self.parent_property.bake_queries: + q.spoil(full=True) + + if self.parent_property.secondary is not None: + q.add_criteria( + lambda q: + q.select_from(self.mapper, self.parent_property.secondary)) + + pending = not state.key + + # don't autoflush on pending + if pending or passive & attributes.NO_AUTOFLUSH: + q.add_criteria(lambda q: q.autoflush(False)) + + if state.load_path: + q.spoil() + q.add_criteria( + lambda q: + q._with_current_path(state.load_path[self.parent_property])) + + if state.load_options: + q.spoil() + q.add_criteria( + lambda q: q._conditional_options(*state.load_options)) + + if self.use_get: + return q(session)._load_on_ident( + session.query(self.mapper), ident_key) + + if self.parent_property.order_by: + q.add_criteria( + lambda q: + q.order_by(*util.to_list(self.parent_property.order_by))) + + for rev in self.parent_property._reverse_property: + # reverse props that are MANYTOONE are loading *this* + # object from get(), so don't need to eager out to those. + if rev.direction is interfaces.MANYTOONE and \ + rev._use_get and \ + not isinstance(rev.strategy, strategies.LazyLoader): + q.add_criteria( + lambda q: + q.options( + strategy_options.Load( + rev.parent).baked_lazyload(rev.key))) + + lazy_clause, params = self._generate_lazy_clause(state, passive) + + if pending: + if orm_util._none_set.intersection(params.values()): + return None + + q.add_criteria(lambda q: q.filter(lazy_clause)) + result = q(session).params(**params).all() + if self.uselist: + return result + else: + l = len(result) + if l: + if l > 1: + util.warn( + "Multiple rows returned with " + "uselist=False for lazily-loaded attribute '%s' " + % self.parent_property) + + return result[0] + else: + return None + + +@strategy_options.loader_option() +def baked_lazyload(loadopt, attr): + """Indicate that the given attribute should be loaded using "lazy" + loading with a "baked" query used in the load. 
+ + """ + return loadopt.set_relationship_strategy(attr, {"lazy": "baked_select"}) + + +@baked_lazyload._add_unbound_fn +def baked_lazyload(*keys): + return strategy_options._UnboundLoad._from_keys( + strategy_options._UnboundLoad.baked_lazyload, keys, False, {}) + + +@baked_lazyload._add_unbound_all_fn +def baked_lazyload_all(*keys): + return strategy_options._UnboundLoad._from_keys( + strategy_options._UnboundLoad.baked_lazyload, keys, True, {}) + +baked_lazyload = baked_lazyload._unbound_fn +baked_lazyload_all = baked_lazyload_all._unbound_all_fn + +bakery = BakedQuery.bakery diff --git a/lib/python3.4/site-packages/sqlalchemy/ext/compiler.py b/lib/python3.4/site-packages/sqlalchemy/ext/compiler.py index 8d169aa..86156be 100644 --- a/lib/python3.4/site-packages/sqlalchemy/ext/compiler.py +++ b/lib/python3.4/site-packages/sqlalchemy/ext/compiler.py @@ -1,5 +1,5 @@ # ext/compiler.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -121,9 +121,19 @@ below where we generate a CHECK constraint that embeds a SQL expression:: def compile_my_constraint(constraint, ddlcompiler, **kw): return "CONSTRAINT %s CHECK (%s)" % ( constraint.name, - ddlcompiler.sql_compiler.process(constraint.expression) + ddlcompiler.sql_compiler.process( + constraint.expression, literal_binds=True) ) +Above, we add an additional flag to the process step as called by +:meth:`.SQLCompiler.process`, which is the ``literal_binds`` flag. This +indicates that any SQL expression which refers to a :class:`.BindParameter` +object or other "literal" object such as those which refer to strings or +integers should be rendered **in-place**, rather than being referred to as +a bound parameter; when emitting DDL, bound parameters are typically not +supported. + + .. _enabling_compiled_autocommit: Enabling Autocommit on a Construct diff --git a/lib/python3.4/site-packages/sqlalchemy/ext/declarative/__init__.py b/lib/python3.4/site-packages/sqlalchemy/ext/declarative/__init__.py index 3cbc85c..f96a402 100644 --- a/lib/python3.4/site-packages/sqlalchemy/ext/declarative/__init__.py +++ b/lib/python3.4/site-packages/sqlalchemy/ext/declarative/__init__.py @@ -1,1310 +1,10 @@ # ext/declarative/__init__.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php -""" -Synopsis -======== - -SQLAlchemy object-relational configuration involves the -combination of :class:`.Table`, :func:`.mapper`, and class -objects to define a mapped class. -:mod:`~sqlalchemy.ext.declarative` allows all three to be -expressed at once within the class declaration. As much as -possible, regular SQLAlchemy schema and ORM constructs are -used directly, so that configuration between "classical" ORM -usage and declarative remain highly similar. - -As a simple example:: - - from sqlalchemy.ext.declarative import declarative_base - - Base = declarative_base() - - class SomeClass(Base): - __tablename__ = 'some_table' - id = Column(Integer, primary_key=True) - name = Column(String(50)) - -Above, the :func:`declarative_base` callable returns a new base class from -which all mapped classes should inherit. When the class definition is -completed, a new :class:`.Table` and :func:`.mapper` will have been generated. 
- -The resulting table and mapper are accessible via -``__table__`` and ``__mapper__`` attributes on the -``SomeClass`` class:: - - # access the mapped Table - SomeClass.__table__ - - # access the Mapper - SomeClass.__mapper__ - -Defining Attributes -=================== - -In the previous example, the :class:`.Column` objects are -automatically named with the name of the attribute to which they are -assigned. - -To name columns explicitly with a name distinct from their mapped attribute, -just give the column a name. Below, column "some_table_id" is mapped to the -"id" attribute of `SomeClass`, but in SQL will be represented as -"some_table_id":: - - class SomeClass(Base): - __tablename__ = 'some_table' - id = Column("some_table_id", Integer, primary_key=True) - -Attributes may be added to the class after its construction, and they will be -added to the underlying :class:`.Table` and -:func:`.mapper` definitions as appropriate:: - - SomeClass.data = Column('data', Unicode) - SomeClass.related = relationship(RelatedInfo) - -Classes which are constructed using declarative can interact freely -with classes that are mapped explicitly with :func:`.mapper`. - -It is recommended, though not required, that all tables -share the same underlying :class:`~sqlalchemy.schema.MetaData` object, -so that string-configured :class:`~sqlalchemy.schema.ForeignKey` -references can be resolved without issue. - -Accessing the MetaData -======================= - -The :func:`declarative_base` base class contains a -:class:`.MetaData` object where newly defined -:class:`.Table` objects are collected. This object is -intended to be accessed directly for -:class:`.MetaData`-specific operations. Such as, to issue -CREATE statements for all tables:: - - engine = create_engine('sqlite://') - Base.metadata.create_all(engine) - -:func:`declarative_base` can also receive a pre-existing -:class:`.MetaData` object, which allows a -declarative setup to be associated with an already -existing traditional collection of :class:`~sqlalchemy.schema.Table` -objects:: - - mymetadata = MetaData() - Base = declarative_base(metadata=mymetadata) - - -.. _declarative_configuring_relationships: - -Configuring Relationships -========================= - -Relationships to other classes are done in the usual way, with the added -feature that the class specified to :func:`~sqlalchemy.orm.relationship` -may be a string name. 
The "class registry" associated with ``Base`` -is used at mapper compilation time to resolve the name into the actual -class object, which is expected to have been defined once the mapper -configuration is used:: - - class User(Base): - __tablename__ = 'users' - - id = Column(Integer, primary_key=True) - name = Column(String(50)) - addresses = relationship("Address", backref="user") - - class Address(Base): - __tablename__ = 'addresses' - - id = Column(Integer, primary_key=True) - email = Column(String(50)) - user_id = Column(Integer, ForeignKey('users.id')) - -Column constructs, since they are just that, are immediately usable, -as below where we define a primary join condition on the ``Address`` -class using them:: - - class Address(Base): - __tablename__ = 'addresses' - - id = Column(Integer, primary_key=True) - email = Column(String(50)) - user_id = Column(Integer, ForeignKey('users.id')) - user = relationship(User, primaryjoin=user_id == User.id) - -In addition to the main argument for :func:`~sqlalchemy.orm.relationship`, -other arguments which depend upon the columns present on an as-yet -undefined class may also be specified as strings. These strings are -evaluated as Python expressions. The full namespace available within -this evaluation includes all classes mapped for this declarative base, -as well as the contents of the ``sqlalchemy`` package, including -expression functions like :func:`~sqlalchemy.sql.expression.desc` and -:attr:`~sqlalchemy.sql.expression.func`:: - - class User(Base): - # .... - addresses = relationship("Address", - order_by="desc(Address.email)", - primaryjoin="Address.user_id==User.id") - -For the case where more than one module contains a class of the same name, -string class names can also be specified as module-qualified paths -within any of these string expressions:: - - class User(Base): - # .... - addresses = relationship("myapp.model.address.Address", - order_by="desc(myapp.model.address.Address.email)", - primaryjoin="myapp.model.address.Address.user_id==" - "myapp.model.user.User.id") - -The qualified path can be any partial path that removes ambiguity between -the names. For example, to disambiguate between -``myapp.model.address.Address`` and ``myapp.model.lookup.Address``, -we can specify ``address.Address`` or ``lookup.Address``:: - - class User(Base): - # .... - addresses = relationship("address.Address", - order_by="desc(address.Address.email)", - primaryjoin="address.Address.user_id==" - "User.id") - -.. versionadded:: 0.8 - module-qualified paths can be used when specifying string arguments - with Declarative, in order to specify specific modules. - -Two alternatives also exist to using string-based attributes. A lambda -can also be used, which will be evaluated after all mappers have been -configured:: - - class User(Base): - # ... - addresses = relationship(lambda: Address, - order_by=lambda: desc(Address.email), - primaryjoin=lambda: Address.user_id==User.id) - -Or, the relationship can be added to the class explicitly after the classes -are available:: - - User.addresses = relationship(Address, - primaryjoin=Address.user_id==User.id) - - - -.. _declarative_many_to_many: - -Configuring Many-to-Many Relationships -====================================== - -Many-to-many relationships are also declared in the same way -with declarative as with traditional mappings. The -``secondary`` argument to -:func:`.relationship` is as usual passed a -:class:`.Table` object, which is typically declared in the -traditional way. 
The :class:`.Table` usually shares -the :class:`.MetaData` object used by the declarative base:: - - keywords = Table( - 'keywords', Base.metadata, - Column('author_id', Integer, ForeignKey('authors.id')), - Column('keyword_id', Integer, ForeignKey('keywords.id')) - ) - - class Author(Base): - __tablename__ = 'authors' - id = Column(Integer, primary_key=True) - keywords = relationship("Keyword", secondary=keywords) - -Like other :func:`~sqlalchemy.orm.relationship` arguments, a string is accepted -as well, passing the string name of the table as defined in the -``Base.metadata.tables`` collection:: - - class Author(Base): - __tablename__ = 'authors' - id = Column(Integer, primary_key=True) - keywords = relationship("Keyword", secondary="keywords") - -As with traditional mapping, its generally not a good idea to use -a :class:`.Table` as the "secondary" argument which is also mapped to -a class, unless the :func:`.relationship` is declared with ``viewonly=True``. -Otherwise, the unit-of-work system may attempt duplicate INSERT and -DELETE statements against the underlying table. - -.. _declarative_sql_expressions: - -Defining SQL Expressions -======================== - -See :ref:`mapper_sql_expressions` for examples on declaratively -mapping attributes to SQL expressions. - -.. _declarative_table_args: - -Table Configuration -=================== - -Table arguments other than the name, metadata, and mapped Column -arguments are specified using the ``__table_args__`` class attribute. -This attribute accommodates both positional as well as keyword -arguments that are normally sent to the -:class:`~sqlalchemy.schema.Table` constructor. -The attribute can be specified in one of two forms. One is as a -dictionary:: - - class MyClass(Base): - __tablename__ = 'sometable' - __table_args__ = {'mysql_engine':'InnoDB'} - -The other, a tuple, where each argument is positional -(usually constraints):: - - class MyClass(Base): - __tablename__ = 'sometable' - __table_args__ = ( - ForeignKeyConstraint(['id'], ['remote_table.id']), - UniqueConstraint('foo'), - ) - -Keyword arguments can be specified with the above form by -specifying the last argument as a dictionary:: - - class MyClass(Base): - __tablename__ = 'sometable' - __table_args__ = ( - ForeignKeyConstraint(['id'], ['remote_table.id']), - UniqueConstraint('foo'), - {'autoload':True} - ) - -Using a Hybrid Approach with __table__ -======================================= - -As an alternative to ``__tablename__``, a direct -:class:`~sqlalchemy.schema.Table` construct may be used. The -:class:`~sqlalchemy.schema.Column` objects, which in this case require -their names, will be added to the mapping just like a regular mapping -to a table:: - - class MyClass(Base): - __table__ = Table('my_table', Base.metadata, - Column('id', Integer, primary_key=True), - Column('name', String(50)) - ) - -``__table__`` provides a more focused point of control for establishing -table metadata, while still getting most of the benefits of using declarative. 
-An application that uses reflection might want to load table metadata elsewhere -and pass it to declarative classes:: - - from sqlalchemy.ext.declarative import declarative_base - - Base = declarative_base() - Base.metadata.reflect(some_engine) - - class User(Base): - __table__ = metadata.tables['user'] - - class Address(Base): - __table__ = metadata.tables['address'] - -Some configuration schemes may find it more appropriate to use ``__table__``, -such as those which already take advantage of the data-driven nature of -:class:`.Table` to customize and/or automate schema definition. - -Note that when the ``__table__`` approach is used, the object is immediately -usable as a plain :class:`.Table` within the class declaration body itself, -as a Python class is only another syntactical block. Below this is illustrated -by using the ``id`` column in the ``primaryjoin`` condition of a -:func:`.relationship`:: - - class MyClass(Base): - __table__ = Table('my_table', Base.metadata, - Column('id', Integer, primary_key=True), - Column('name', String(50)) - ) - - widgets = relationship(Widget, - primaryjoin=Widget.myclass_id==__table__.c.id) - -Similarly, mapped attributes which refer to ``__table__`` can be placed inline, -as below where we assign the ``name`` column to the attribute ``_name``, -generating a synonym for ``name``:: - - from sqlalchemy.ext.declarative import synonym_for - - class MyClass(Base): - __table__ = Table('my_table', Base.metadata, - Column('id', Integer, primary_key=True), - Column('name', String(50)) - ) - - _name = __table__.c.name - - @synonym_for("_name") - def name(self): - return "Name: %s" % _name - -Using Reflection with Declarative -================================= - -It's easy to set up a :class:`.Table` that uses ``autoload=True`` -in conjunction with a mapped class:: - - class MyClass(Base): - __table__ = Table('mytable', Base.metadata, - autoload=True, autoload_with=some_engine) - -However, one improvement that can be made here is to not -require the :class:`.Engine` to be available when classes are -being first declared. To achieve this, use the -:class:`.DeferredReflection` mixin, which sets up mappings -only after a special ``prepare(engine)`` step is called:: - - from sqlalchemy.ext.declarative import declarative_base, DeferredReflection - - Base = declarative_base(cls=DeferredReflection) - - class Foo(Base): - __tablename__ = 'foo' - bars = relationship("Bar") - - class Bar(Base): - __tablename__ = 'bar' - - # illustrate overriding of "bar.foo_id" to have - # a foreign key constraint otherwise not - # reflected, such as when using MySQL - foo_id = Column(Integer, ForeignKey('foo.id')) - - Base.prepare(e) - -.. versionadded:: 0.8 - Added :class:`.DeferredReflection`. - -Mapper Configuration -==================== - -Declarative makes use of the :func:`~.orm.mapper` function internally -when it creates the mapping to the declared table. The options -for :func:`~.orm.mapper` are passed directly through via the -``__mapper_args__`` class attribute. As always, arguments which reference -locally mapped columns can reference them directly from within the -class declaration:: - - from datetime import datetime - - class Widget(Base): - __tablename__ = 'widgets' - - id = Column(Integer, primary_key=True) - timestamp = Column(DateTime, nullable=False) - - __mapper_args__ = { - 'version_id_col': timestamp, - 'version_id_generator': lambda v:datetime.now() - } - -.. 
_declarative_inheritance: - -Inheritance Configuration -========================= - -Declarative supports all three forms of inheritance as intuitively -as possible. The ``inherits`` mapper keyword argument is not needed -as declarative will determine this from the class itself. The various -"polymorphic" keyword arguments are specified using ``__mapper_args__``. - -Joined Table Inheritance -~~~~~~~~~~~~~~~~~~~~~~~~ - -Joined table inheritance is defined as a subclass that defines its own -table:: - - class Person(Base): - __tablename__ = 'people' - id = Column(Integer, primary_key=True) - discriminator = Column('type', String(50)) - __mapper_args__ = {'polymorphic_on': discriminator} - - class Engineer(Person): - __tablename__ = 'engineers' - __mapper_args__ = {'polymorphic_identity': 'engineer'} - id = Column(Integer, ForeignKey('people.id'), primary_key=True) - primary_language = Column(String(50)) - -Note that above, the ``Engineer.id`` attribute, since it shares the -same attribute name as the ``Person.id`` attribute, will in fact -represent the ``people.id`` and ``engineers.id`` columns together, -with the "Engineer.id" column taking precedence if queried directly. -To provide the ``Engineer`` class with an attribute that represents -only the ``engineers.id`` column, give it a different attribute name:: - - class Engineer(Person): - __tablename__ = 'engineers' - __mapper_args__ = {'polymorphic_identity': 'engineer'} - engineer_id = Column('id', Integer, ForeignKey('people.id'), - primary_key=True) - primary_language = Column(String(50)) - - -.. versionchanged:: 0.7 joined table inheritance favors the subclass - column over that of the superclass, such as querying above - for ``Engineer.id``. Prior to 0.7 this was the reverse. - -.. _declarative_single_table: - -Single Table Inheritance -~~~~~~~~~~~~~~~~~~~~~~~~ - -Single table inheritance is defined as a subclass that does not have -its own table; you just leave out the ``__table__`` and ``__tablename__`` -attributes:: - - class Person(Base): - __tablename__ = 'people' - id = Column(Integer, primary_key=True) - discriminator = Column('type', String(50)) - __mapper_args__ = {'polymorphic_on': discriminator} - - class Engineer(Person): - __mapper_args__ = {'polymorphic_identity': 'engineer'} - primary_language = Column(String(50)) - -When the above mappers are configured, the ``Person`` class is mapped -to the ``people`` table *before* the ``primary_language`` column is -defined, and this column will not be included in its own mapping. -When ``Engineer`` then defines the ``primary_language`` column, the -column is added to the ``people`` table so that it is included in the -mapping for ``Engineer`` and is also part of the table's full set of -columns. Columns which are not mapped to ``Person`` are also excluded -from any other single or joined inheriting classes using the -``exclude_properties`` mapper argument. Below, ``Manager`` will have -all the attributes of ``Person`` and ``Manager`` but *not* the -``primary_language`` attribute of ``Engineer``:: - - class Manager(Person): - __mapper_args__ = {'polymorphic_identity': 'manager'} - golf_swing = Column(String(50)) - -The attribute exclusion logic is provided by the -``exclude_properties`` mapper argument, and declarative's default -behavior can be disabled by passing an explicit ``exclude_properties`` -collection (empty or otherwise) to the ``__mapper_args__``. 
- -Resolving Column Conflicts -^^^^^^^^^^^^^^^^^^^^^^^^^^ - -Note above that the ``primary_language`` and ``golf_swing`` columns -are "moved up" to be applied to ``Person.__table__``, as a result of their -declaration on a subclass that has no table of its own. A tricky case -comes up when two subclasses want to specify *the same* column, as below:: - - class Person(Base): - __tablename__ = 'people' - id = Column(Integer, primary_key=True) - discriminator = Column('type', String(50)) - __mapper_args__ = {'polymorphic_on': discriminator} - - class Engineer(Person): - __mapper_args__ = {'polymorphic_identity': 'engineer'} - start_date = Column(DateTime) - - class Manager(Person): - __mapper_args__ = {'polymorphic_identity': 'manager'} - start_date = Column(DateTime) - -Above, the ``start_date`` column declared on both ``Engineer`` and ``Manager`` -will result in an error:: - - sqlalchemy.exc.ArgumentError: Column 'start_date' on class - conflicts with existing - column 'people.start_date' - -In a situation like this, Declarative can't be sure -of the intent, especially if the ``start_date`` columns had, for example, -different types. A situation like this can be resolved by using -:class:`.declared_attr` to define the :class:`.Column` conditionally, taking -care to return the **existing column** via the parent ``__table__`` if it -already exists:: - - from sqlalchemy.ext.declarative import declared_attr - - class Person(Base): - __tablename__ = 'people' - id = Column(Integer, primary_key=True) - discriminator = Column('type', String(50)) - __mapper_args__ = {'polymorphic_on': discriminator} - - class Engineer(Person): - __mapper_args__ = {'polymorphic_identity': 'engineer'} - - @declared_attr - def start_date(cls): - "Start date column, if not present already." - return Person.__table__.c.get('start_date', Column(DateTime)) - - class Manager(Person): - __mapper_args__ = {'polymorphic_identity': 'manager'} - - @declared_attr - def start_date(cls): - "Start date column, if not present already." - return Person.__table__.c.get('start_date', Column(DateTime)) - -Above, when ``Manager`` is mapped, the ``start_date`` column is -already present on the ``Person`` class. Declarative lets us return -that :class:`.Column` as a result in this case, where it knows to skip -re-assigning the same column. If the mapping is mis-configured such -that the ``start_date`` column is accidentally re-assigned to a -different table (such as, if we changed ``Manager`` to be joined -inheritance without fixing ``start_date``), an error is raised which -indicates an existing :class:`.Column` is trying to be re-assigned to -a different owning :class:`.Table`. - -.. versionadded:: 0.8 :class:`.declared_attr` can be used on a non-mixin - class, and the returned :class:`.Column` or other mapped attribute - will be applied to the mapping as any other attribute. Previously, - the resulting attribute would be ignored, and also result in a warning - being emitted when a subclass was created. - -.. versionadded:: 0.8 :class:`.declared_attr`, when used either with a - mixin or non-mixin declarative class, can return an existing - :class:`.Column` already assigned to the parent :class:`.Table`, - to indicate that the re-assignment of the :class:`.Column` should be - skipped, however should still be mapped on the target class, - in order to resolve duplicate column conflicts. 
- -The same concept can be used with mixin classes (see -:ref:`declarative_mixins`):: - - class Person(Base): - __tablename__ = 'people' - id = Column(Integer, primary_key=True) - discriminator = Column('type', String(50)) - __mapper_args__ = {'polymorphic_on': discriminator} - - class HasStartDate(object): - @declared_attr - def start_date(cls): - return cls.__table__.c.get('start_date', Column(DateTime)) - - class Engineer(HasStartDate, Person): - __mapper_args__ = {'polymorphic_identity': 'engineer'} - - class Manager(HasStartDate, Person): - __mapper_args__ = {'polymorphic_identity': 'manager'} - -The above mixin checks the local ``__table__`` attribute for the column. -Because we're using single table inheritance, we're sure that in this case, -``cls.__table__`` refers to ``People.__table__``. If we were mixing joined- -and single-table inheritance, we might want our mixin to check more carefully -if ``cls.__table__`` is really the :class:`.Table` we're looking for. - -Concrete Table Inheritance -~~~~~~~~~~~~~~~~~~~~~~~~~~ - -Concrete is defined as a subclass which has its own table and sets the -``concrete`` keyword argument to ``True``:: - - class Person(Base): - __tablename__ = 'people' - id = Column(Integer, primary_key=True) - name = Column(String(50)) - - class Engineer(Person): - __tablename__ = 'engineers' - __mapper_args__ = {'concrete':True} - id = Column(Integer, primary_key=True) - primary_language = Column(String(50)) - name = Column(String(50)) - -Usage of an abstract base class is a little less straightforward as it -requires usage of :func:`~sqlalchemy.orm.util.polymorphic_union`, -which needs to be created with the :class:`.Table` objects -before the class is built:: - - engineers = Table('engineers', Base.metadata, - Column('id', Integer, primary_key=True), - Column('name', String(50)), - Column('primary_language', String(50)) - ) - managers = Table('managers', Base.metadata, - Column('id', Integer, primary_key=True), - Column('name', String(50)), - Column('golf_swing', String(50)) - ) - - punion = polymorphic_union({ - 'engineer':engineers, - 'manager':managers - }, 'type', 'punion') - - class Person(Base): - __table__ = punion - __mapper_args__ = {'polymorphic_on':punion.c.type} - - class Engineer(Person): - __table__ = engineers - __mapper_args__ = {'polymorphic_identity':'engineer', 'concrete':True} - - class Manager(Person): - __table__ = managers - __mapper_args__ = {'polymorphic_identity':'manager', 'concrete':True} - -.. _declarative_concrete_helpers: - -Using the Concrete Helpers -^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -Helper classes provides a simpler pattern for concrete inheritance. -With these objects, the ``__declare_first__`` helper is used to configure the -"polymorphic" loader for the mapper after all subclasses have been declared. - -.. 
versionadded:: 0.7.3 - -An abstract base can be declared using the -:class:`.AbstractConcreteBase` class:: - - from sqlalchemy.ext.declarative import AbstractConcreteBase - - class Employee(AbstractConcreteBase, Base): - pass - -To have a concrete ``employee`` table, use :class:`.ConcreteBase` instead:: - - from sqlalchemy.ext.declarative import ConcreteBase - - class Employee(ConcreteBase, Base): - __tablename__ = 'employee' - employee_id = Column(Integer, primary_key=True) - name = Column(String(50)) - __mapper_args__ = { - 'polymorphic_identity':'employee', - 'concrete':True} - - -Either ``Employee`` base can be used in the normal fashion:: - - class Manager(Employee): - __tablename__ = 'manager' - employee_id = Column(Integer, primary_key=True) - name = Column(String(50)) - manager_data = Column(String(40)) - __mapper_args__ = { - 'polymorphic_identity':'manager', - 'concrete':True} - - class Engineer(Employee): - __tablename__ = 'engineer' - employee_id = Column(Integer, primary_key=True) - name = Column(String(50)) - engineer_info = Column(String(40)) - __mapper_args__ = {'polymorphic_identity':'engineer', - 'concrete':True} - - -The :class:`.AbstractConcreteBase` class is itself mapped, and can be -used as a target of relationships:: - - class Company(Base): - __tablename__ = 'company' - - id = Column(Integer, primary_key=True) - employees = relationship("Employee", - primaryjoin="Company.id == Employee.company_id") - - -.. versionchanged:: 0.9.3 Support for use of :class:`.AbstractConcreteBase` - as the target of a :func:`.relationship` has been improved. - -It can also be queried directly:: - - for employee in session.query(Employee).filter(Employee.name == 'qbert'): - print(employee) - - -.. _declarative_mixins: - -Mixin and Custom Base Classes -============================== - -A common need when using :mod:`~sqlalchemy.ext.declarative` is to -share some functionality, such as a set of common columns, some common -table options, or other mapped properties, across many -classes. The standard Python idioms for this is to have the classes -inherit from a base which includes these common features. - -When using :mod:`~sqlalchemy.ext.declarative`, this idiom is allowed -via the usage of a custom declarative base class, as well as a "mixin" class -which is inherited from in addition to the primary base. Declarative -includes several helper features to make this work in terms of how -mappings are declared. An example of some commonly mixed-in -idioms is below:: - - from sqlalchemy.ext.declarative import declared_attr - - class MyMixin(object): - - @declared_attr - def __tablename__(cls): - return cls.__name__.lower() - - __table_args__ = {'mysql_engine': 'InnoDB'} - __mapper_args__= {'always_refresh': True} - - id = Column(Integer, primary_key=True) - - class MyModel(MyMixin, Base): - name = Column(String(1000)) - -Where above, the class ``MyModel`` will contain an "id" column -as the primary key, a ``__tablename__`` attribute that derives -from the name of the class itself, as well as ``__table_args__`` -and ``__mapper_args__`` defined by the ``MyMixin`` mixin class. - -There's no fixed convention over whether ``MyMixin`` precedes -``Base`` or not. Normal Python method resolution rules apply, and -the above example would work just as well with:: - - class MyModel(Base, MyMixin): - name = Column(String(1000)) - -This works because ``Base`` here doesn't define any of the -variables that ``MyMixin`` defines, i.e. ``__tablename__``, -``__table_args__``, ``id``, etc. 
If the ``Base`` did define -an attribute of the same name, the class placed first in the -inherits list would determine which attribute is used on the -newly defined class. - -Augmenting the Base -~~~~~~~~~~~~~~~~~~~ - -In addition to using a pure mixin, most of the techniques in this -section can also be applied to the base class itself, for patterns that -should apply to all classes derived from a particular base. This is achieved -using the ``cls`` argument of the :func:`.declarative_base` function:: - - from sqlalchemy.ext.declarative import declared_attr - - class Base(object): - @declared_attr - def __tablename__(cls): - return cls.__name__.lower() - - __table_args__ = {'mysql_engine': 'InnoDB'} - - id = Column(Integer, primary_key=True) - - from sqlalchemy.ext.declarative import declarative_base - - Base = declarative_base(cls=Base) - - class MyModel(Base): - name = Column(String(1000)) - -Where above, ``MyModel`` and all other classes that derive from ``Base`` will -have a table name derived from the class name, an ``id`` primary key column, -as well as the "InnoDB" engine for MySQL. - -Mixing in Columns -~~~~~~~~~~~~~~~~~ - -The most basic way to specify a column on a mixin is by simple -declaration:: - - class TimestampMixin(object): - created_at = Column(DateTime, default=func.now()) - - class MyModel(TimestampMixin, Base): - __tablename__ = 'test' - - id = Column(Integer, primary_key=True) - name = Column(String(1000)) - -Where above, all declarative classes that include ``TimestampMixin`` -will also have a column ``created_at`` that applies a timestamp to -all row insertions. - -Those familiar with the SQLAlchemy expression language know that -the object identity of clause elements defines their role in a schema. -Two ``Table`` objects ``a`` and ``b`` may both have a column called -``id``, but the way these are differentiated is that ``a.c.id`` -and ``b.c.id`` are two distinct Python objects, referencing their -parent tables ``a`` and ``b`` respectively. - -In the case of the mixin column, it seems that only one -:class:`.Column` object is explicitly created, yet the ultimate -``created_at`` column above must exist as a distinct Python object -for each separate destination class. To accomplish this, the declarative -extension creates a **copy** of each :class:`.Column` object encountered on -a class that is detected as a mixin. - -This copy mechanism is limited to simple columns that have no foreign -keys, as a :class:`.ForeignKey` itself contains references to columns -which can't be properly recreated at this level. For columns that -have foreign keys, as well as for the variety of mapper-level constructs -that require destination-explicit context, the -:class:`~.declared_attr` decorator is provided so that -patterns common to many classes can be defined as callables:: - - from sqlalchemy.ext.declarative import declared_attr - - class ReferenceAddressMixin(object): - @declared_attr - def address_id(cls): - return Column(Integer, ForeignKey('address.id')) - - class User(ReferenceAddressMixin, Base): - __tablename__ = 'user' - id = Column(Integer, primary_key=True) - -Where above, the ``address_id`` class-level callable is executed at the -point at which the ``User`` class is constructed, and the declarative -extension can use the resulting :class:`.Column` object as returned by -the method without the need to copy it. - -.. versionchanged:: > 0.6.5 - Rename 0.6.5 ``sqlalchemy.util.classproperty`` - into :class:`~.declared_attr`. 
- -Columns generated by :class:`~.declared_attr` can also be -referenced by ``__mapper_args__`` to a limited degree, currently -by ``polymorphic_on`` and ``version_id_col``, by specifying the -classdecorator itself into the dictionary - the declarative extension -will resolve them at class construction time:: - - class MyMixin: - @declared_attr - def type_(cls): - return Column(String(50)) - - __mapper_args__= {'polymorphic_on':type_} - - class MyModel(MyMixin, Base): - __tablename__='test' - id = Column(Integer, primary_key=True) - - - -Mixing in Relationships -~~~~~~~~~~~~~~~~~~~~~~~ - -Relationships created by :func:`~sqlalchemy.orm.relationship` are provided -with declarative mixin classes exclusively using the -:class:`.declared_attr` approach, eliminating any ambiguity -which could arise when copying a relationship and its possibly column-bound -contents. Below is an example which combines a foreign key column and a -relationship so that two classes ``Foo`` and ``Bar`` can both be configured to -reference a common target class via many-to-one:: - - class RefTargetMixin(object): - @declared_attr - def target_id(cls): - return Column('target_id', ForeignKey('target.id')) - - @declared_attr - def target(cls): - return relationship("Target") - - class Foo(RefTargetMixin, Base): - __tablename__ = 'foo' - id = Column(Integer, primary_key=True) - - class Bar(RefTargetMixin, Base): - __tablename__ = 'bar' - id = Column(Integer, primary_key=True) - - class Target(Base): - __tablename__ = 'target' - id = Column(Integer, primary_key=True) - -Using Advanced Relationship Arguments (e.g. ``primaryjoin``, etc.) -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -:func:`~sqlalchemy.orm.relationship` definitions which require explicit -primaryjoin, order_by etc. expressions should in all but the most -simplistic cases use **late bound** forms -for these arguments, meaning, using either the string form or a lambda. -The reason for this is that the related :class:`.Column` objects which are to -be configured using ``@declared_attr`` are not available to another -``@declared_attr`` attribute; while the methods will work and return new -:class:`.Column` objects, those are not the :class:`.Column` objects that -Declarative will be using as it calls the methods on its own, thus using -*different* :class:`.Column` objects. - -The canonical example is the primaryjoin condition that depends upon -another mixed-in column:: - - class RefTargetMixin(object): - @declared_attr - def target_id(cls): - return Column('target_id', ForeignKey('target.id')) - - @declared_attr - def target(cls): - return relationship(Target, - primaryjoin=Target.id==cls.target_id # this is *incorrect* - ) - -Mapping a class using the above mixin, we will get an error like:: - - sqlalchemy.exc.InvalidRequestError: this ForeignKey's parent column is not - yet associated with a Table. - -This is because the ``target_id`` :class:`.Column` we've called upon in our -``target()`` method is not the same :class:`.Column` that declarative is -actually going to map to our table. 
- -The condition above is resolved using a lambda:: - - class RefTargetMixin(object): - @declared_attr - def target_id(cls): - return Column('target_id', ForeignKey('target.id')) - - @declared_attr - def target(cls): - return relationship(Target, - primaryjoin=lambda: Target.id==cls.target_id - ) - -or alternatively, the string form (which ultimately generates a lambda):: - - class RefTargetMixin(object): - @declared_attr - def target_id(cls): - return Column('target_id', ForeignKey('target.id')) - - @declared_attr - def target(cls): - return relationship("Target", - primaryjoin="Target.id==%s.target_id" % cls.__name__ - ) - -Mixing in deferred(), column_property(), and other MapperProperty classes -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -Like :func:`~sqlalchemy.orm.relationship`, all -:class:`~sqlalchemy.orm.interfaces.MapperProperty` subclasses such as -:func:`~sqlalchemy.orm.deferred`, :func:`~sqlalchemy.orm.column_property`, -etc. ultimately involve references to columns, and therefore, when -used with declarative mixins, have the :class:`.declared_attr` -requirement so that no reliance on copying is needed:: - - class SomethingMixin(object): - - @declared_attr - def dprop(cls): - return deferred(Column(Integer)) - - class Something(SomethingMixin, Base): - __tablename__ = "something" - -Mixing in Association Proxy and Other Attributes -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -Mixins can specify user-defined attributes as well as other extension -units such as :func:`.association_proxy`. The usage of -:class:`.declared_attr` is required in those cases where the attribute must -be tailored specifically to the target subclass. An example is when -constructing multiple :func:`.association_proxy` attributes which each -target a different type of child object. Below is an -:func:`.association_proxy` / mixin example which provides a scalar list of -string values to an implementing class:: - - from sqlalchemy import Column, Integer, ForeignKey, String - from sqlalchemy.orm import relationship - from sqlalchemy.ext.associationproxy import association_proxy - from sqlalchemy.ext.declarative import declarative_base, declared_attr - - Base = declarative_base() - - class HasStringCollection(object): - @declared_attr - def _strings(cls): - class StringAttribute(Base): - __tablename__ = cls.string_table_name - id = Column(Integer, primary_key=True) - value = Column(String(50), nullable=False) - parent_id = Column(Integer, - ForeignKey('%s.id' % cls.__tablename__), - nullable=False) - def __init__(self, value): - self.value = value - - return relationship(StringAttribute) - - @declared_attr - def strings(cls): - return association_proxy('_strings', 'value') - - class TypeA(HasStringCollection, Base): - __tablename__ = 'type_a' - string_table_name = 'type_a_strings' - id = Column(Integer(), primary_key=True) - - class TypeB(HasStringCollection, Base): - __tablename__ = 'type_b' - string_table_name = 'type_b_strings' - id = Column(Integer(), primary_key=True) - -Above, the ``HasStringCollection`` mixin produces a :func:`.relationship` -which refers to a newly generated class called ``StringAttribute``. The -``StringAttribute`` class is generated with its own :class:`.Table` -definition which is local to the parent class making usage of the -``HasStringCollection`` mixin. It also produces an :func:`.association_proxy` -object which proxies references to the ``strings`` attribute onto the ``value`` -attribute of each ``StringAttribute`` instance. 
- -``TypeA`` or ``TypeB`` can be instantiated given the constructor -argument ``strings``, a list of strings:: - - ta = TypeA(strings=['foo', 'bar']) - tb = TypeA(strings=['bat', 'bar']) - -This list will generate a collection -of ``StringAttribute`` objects, which are persisted into a table that's -local to either the ``type_a_strings`` or ``type_b_strings`` table:: - - >>> print ta._strings - [<__main__.StringAttribute object at 0x10151cd90>, - <__main__.StringAttribute object at 0x10151ce10>] - -When constructing the :func:`.association_proxy`, the -:class:`.declared_attr` decorator must be used so that a distinct -:func:`.association_proxy` object is created for each of the ``TypeA`` -and ``TypeB`` classes. - -.. versionadded:: 0.8 :class:`.declared_attr` is usable with non-mapped - attributes, including user-defined attributes as well as - :func:`.association_proxy`. - - -Controlling table inheritance with mixins -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -The ``__tablename__`` attribute in conjunction with the hierarchy of -classes involved in a declarative mixin scenario controls what type of -table inheritance, if any, -is configured by the declarative extension. - -If the ``__tablename__`` is computed by a mixin, you may need to -control which classes get the computed attribute in order to get the -type of table inheritance you require. - -For example, if you had a mixin that computes ``__tablename__`` but -where you wanted to use that mixin in a single table inheritance -hierarchy, you can explicitly specify ``__tablename__`` as ``None`` to -indicate that the class should not have a table mapped:: - - from sqlalchemy.ext.declarative import declared_attr - - class Tablename: - @declared_attr - def __tablename__(cls): - return cls.__name__.lower() - - class Person(Tablename, Base): - id = Column(Integer, primary_key=True) - discriminator = Column('type', String(50)) - __mapper_args__ = {'polymorphic_on': discriminator} - - class Engineer(Person): - __tablename__ = None - __mapper_args__ = {'polymorphic_identity': 'engineer'} - primary_language = Column(String(50)) - -Alternatively, you can make the mixin intelligent enough to only -return a ``__tablename__`` in the event that no table is already -mapped in the inheritance hierarchy. To help with this, a -:func:`~sqlalchemy.ext.declarative.has_inherited_table` helper -function is provided that returns ``True`` if a parent class already -has a mapped table. - -As an example, here's a mixin that will only allow single table -inheritance:: - - from sqlalchemy.ext.declarative import declared_attr - from sqlalchemy.ext.declarative import has_inherited_table - - class Tablename(object): - @declared_attr - def __tablename__(cls): - if has_inherited_table(cls): - return None - return cls.__name__.lower() - - class Person(Tablename, Base): - id = Column(Integer, primary_key=True) - discriminator = Column('type', String(50)) - __mapper_args__ = {'polymorphic_on': discriminator} - - class Engineer(Person): - primary_language = Column(String(50)) - __mapper_args__ = {'polymorphic_identity': 'engineer'} - - -Combining Table/Mapper Arguments from Multiple Mixins -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -In the case of ``__table_args__`` or ``__mapper_args__`` -specified with declarative mixins, you may want to combine -some parameters from several mixins with those you wish to -define on the class iteself. 
-:class:`.declared_attr` decorator can be used
-here to create user-defined routines that combine
-parameters pulled from multiple collections::
-
-    from sqlalchemy.ext.declarative import declared_attr
-
-    class MySQLSettings(object):
-        __table_args__ = {'mysql_engine':'InnoDB'}
-
-    class MyOtherMixin(object):
-        __table_args__ = {'info':'foo'}
-
-    class MyModel(MySQLSettings, MyOtherMixin, Base):
-        __tablename__ = 'my_model'
-
-        @declared_attr
-        def __table_args__(cls):
-            args = dict()
-            args.update(MySQLSettings.__table_args__)
-            args.update(MyOtherMixin.__table_args__)
-            return args
-
-        id = Column(Integer, primary_key=True)
-
-Creating Indexes with Mixins
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-To define a named, potentially multicolumn :class:`.Index` that applies to all
-tables derived from a mixin, use the "inline" form of :class:`.Index` and
-establish it as part of ``__table_args__``::
-
-    class MyMixin(object):
-        a = Column(Integer)
-        b = Column(Integer)
-
-        @declared_attr
-        def __table_args__(cls):
-            return (Index('test_idx_%s' % cls.__tablename__, 'a', 'b'),)
-
-    class MyModel(MyMixin, Base):
-        __tablename__ = 'atable'
-        c = Column(Integer, primary_key=True)
-
-Special Directives
-==================
-
-``__declare_last__()``
-~~~~~~~~~~~~~~~~~~~~~~
-
-The ``__declare_last__()`` hook allows definition of
-a class-level function that is automatically called by the
-:meth:`.MapperEvents.after_configured` event, which occurs after mappings are
-assumed to be completed and the 'configure' step has finished::
-
-    class MyClass(Base):
-        @classmethod
-        def __declare_last__(cls):
-            ""
-            # do something with mappings
-
-.. versionadded:: 0.7.3
-
-``__declare_first__()``
-~~~~~~~~~~~~~~~~~~~~~~~
-
-Like ``__declare_last__()``, but is called at the beginning of mapper
-configuration via the :meth:`.MapperEvents.before_configured` event::
-
-    class MyClass(Base):
-        @classmethod
-        def __declare_first__(cls):
-            ""
-            # do something before mappings are configured
-
-.. versionadded:: 0.9.3
-
-.. _declarative_abstract:
-
-``__abstract__``
-~~~~~~~~~~~~~~~~~~~
-
-``__abstract__`` causes declarative to skip the production
-of a table or mapper for the class entirely.  A class can be added within a
-hierarchy in the same way as a mixin (see :ref:`declarative_mixins`), allowing
-subclasses to extend just from the special class::
-
-    class SomeAbstractBase(Base):
-        __abstract__ = True
-
-        def some_helpful_method(self):
-            ""
-
-        @declared_attr
-        def __mapper_args__(cls):
-            return {"helpful mapper arguments":True}
-
-    class MyMappedClass(SomeAbstractBase):
-        ""
-
-One possible use of ``__abstract__`` is to use a distinct
-:class:`.MetaData` for different bases::
-
-    Base = declarative_base()
-
-    class DefaultBase(Base):
-        __abstract__ = True
-        metadata = MetaData()
-
-    class OtherBase(Base):
-        __abstract__ = True
-        metadata = MetaData()
-
-Above, classes which inherit from ``DefaultBase`` will use one
-:class:`.MetaData` as the registry of tables, and those which inherit from
-``OtherBase`` will use a different one.  The tables themselves can then be
-created within distinct databases::
-
-    DefaultBase.metadata.create_all(some_engine)
-    OtherBase.metadata.create_all(some_other_engine)
-
-.. versionadded:: 0.7.3
-
-Class Constructor
-=================
-
-As a convenience feature, the :func:`declarative_base` sets a default
-constructor on classes which accepts keyword arguments, assigning them
-to the named attributes::
-
-    e = Engineer(primary_language='python')
-
-Sessions
-========
-
-Note that ``declarative`` does nothing special with sessions, and is
-only intended as an easier way to configure mappers and
-:class:`~sqlalchemy.schema.Table` objects.  A typical application
-setup using :class:`~sqlalchemy.orm.scoping.scoped_session` might look like::
-
-    engine = create_engine('postgresql://scott:tiger@localhost/test')
-    Session = scoped_session(sessionmaker(autocommit=False,
-                                          autoflush=False,
-                                          bind=engine))
-    Base = declarative_base()
-
-Mapped instances then make use of
-:class:`~sqlalchemy.orm.session.Session` in the usual way.
-
-"""
 from .api import declarative_base, synonym_for, comparable_using, \
     instrument_declarative, ConcreteBase, AbstractConcreteBase, \
     DeclarativeMeta, DeferredReflection, has_inherited_table,\
@@ -1313,5 +13,6 @@ from .api import declarative_base, synonym_for, comparable_using, \
 
 __all__ = ['declarative_base', 'synonym_for', 'has_inherited_table',
            'comparable_using', 'instrument_declarative', 'declared_attr',
+           'as_declarative',
            'ConcreteBase', 'AbstractConcreteBase', 'DeclarativeMeta',
            'DeferredReflection']
diff --git a/lib/python3.4/site-packages/sqlalchemy/ext/declarative/api.py b/lib/python3.4/site-packages/sqlalchemy/ext/declarative/api.py
index daf8bff..54e78ee 100644
--- a/lib/python3.4/site-packages/sqlalchemy/ext/declarative/api.py
+++ b/lib/python3.4/site-packages/sqlalchemy/ext/declarative/api.py
@@ -1,5 +1,5 @@
 # ext/declarative/api.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 #
 #
 # This module is part of SQLAlchemy and is released under
@@ -7,13 +7,14 @@
 """Public API functions and helpers for declarative."""
 
-from ...schema import Table, MetaData
-from ...orm import synonym as _orm_synonym, mapper,\
+from ...schema import Table, MetaData, Column
+from ...orm import synonym as _orm_synonym, \
     comparable_property,\
-    interfaces, properties
+    interfaces, properties, attributes
 from ...orm.util import polymorphic_union
 from ...orm.base import _mapper_or_none
-from ...util import OrderedDict
+from ...util import OrderedDict, hybridmethod, hybridproperty
+from ... import util
 from ... import exc
 import weakref
 
@@ -21,7 +22,6 @@ from .base import _as_declarative, \
     _declarative_constructor,\
     _DeferredMapperConfig, _add_attribute
 from .clsregistry import _class_resolver
-from . import clsregistry
 
 
 def instrument_declarative(cls, registry, metadata):
@@ -157,12 +157,90 @@ class declared_attr(interfaces._MappedAttribute, property):
 
     """
 
-    def __init__(self, fget, *arg, **kw):
-        super(declared_attr, self).__init__(fget, *arg, **kw)
+    def __init__(self, fget, cascading=False):
+        super(declared_attr, self).__init__(fget)
         self.__doc__ = fget.__doc__
+        self._cascading = cascading
 
     def __get__(desc, self, cls):
-        return desc.fget(cls)
+        reg = cls.__dict__.get('_sa_declared_attr_reg', None)
+        if reg is None:
+            manager = attributes.manager_of_class(cls)
+            if manager is None:
+                util.warn(
+                    "Unmanaged access of declarative attribute %s from "
+                    "non-mapped class %s" %
+                    (desc.fget.__name__, cls.__name__))
+            return desc.fget(cls)
+        elif desc in reg:
+            return reg[desc]
+        else:
+            reg[desc] = obj = desc.fget(cls)
+            return obj
+
+    @hybridmethod
+    def _stateful(cls, **kw):
+        return _stateful_declared_attr(**kw)
+
+    @hybridproperty
+    def cascading(cls):
+        """Mark a :class:`.declared_attr` as cascading.
+
+        This is a special-use modifier which indicates that a column
+        or MapperProperty-based declared attribute should be configured
+        distinctly per mapped subclass, within a mapped-inheritance scenario.
+
+        Below, both ``MyClass`` and ``MySubClass`` will have a distinct
+        ``some_id`` Column object established::
+
+            class HasSomeAttribute(object):
+                @declared_attr.cascading
+                def some_id(cls):
+                    if has_inherited_table(cls):
+                        return Column(
+                            ForeignKey('myclass.id'), primary_key=True)
+                    else:
+                        return Column(Integer, primary_key=True)
+
+            class MyClass(HasSomeAttribute, Base):
+                ""
+                # ...
+
+            class MySubClass(MyClass):
+                ""
+                # ...
+
+        The behavior of the above configuration is that ``MySubClass``
+        will refer to both its own ``some_id`` column as well as that of
+        ``MyClass`` underneath the attribute named ``some_id``.
+
+        .. seealso::
+
+            :ref:`declarative_inheritance`
+
+            :ref:`mixin_inheritance_columns`
+
+
+        """
+        return cls._stateful(cascading=True)
+
+
+class _stateful_declared_attr(declared_attr):
+    def __init__(self, **kw):
+        self.kw = kw
+
+    def _stateful(self, **kw):
+        new_kw = self.kw.copy()
+        new_kw.update(kw)
+        return _stateful_declared_attr(**new_kw)
+
+    def __call__(self, fn):
+        return declared_attr(fn, **self.kw)
 
 
 def declarative_base(bind=None, metadata=None, mapper=None, cls=object,
@@ -319,6 +397,15 @@ class ConcreteBase(object):
                 'polymorphic_identity':'manager',
                 'concrete':True}
 
+    .. seealso::
+
+        :class:`.AbstractConcreteBase`
+
+        :ref:`concrete_inheritance`
+
+        :ref:`inheritance_concrete_helpers`
+
+
     """
 
     @classmethod
@@ -349,9 +436,11 @@ class AbstractConcreteBase(ConcreteBase):
     ``__declare_last__()`` function, which is essentially
     a hook for the :meth:`.after_configured` event.
 
-    :class:`.AbstractConcreteBase` does not produce a mapped
-    table for the class itself.  Compare to :class:`.ConcreteBase`,
-    which does.
+    :class:`.AbstractConcreteBase` does produce a mapped class
+    for the base class; however, it is not persisted to any table.  It
+    is instead mapped directly to the "polymorphic" selectable
+    and is only used for selecting.  Compare to :class:`.ConcreteBase`,
+    which does create a persisted table for the base class.
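In practical terms (a sketch, assuming the ``Employee`` / ``Manager``
mappings shown in the example that follows, plus an active ``session``),
the difference is visible at query time::

    # Employee itself has no table; a query against it SELECTs from the
    # generated polymorphic UNION of the concrete subclass tables
    session.query(Employee).all()

    # Manager is an ordinary concrete mapping against the "manager" table
    session.query(Manager).all()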
     Example::
 
@@ -365,20 +454,79 @@ class AbstractConcreteBase(ConcreteBase):
             employee_id = Column(Integer, primary_key=True)
             name = Column(String(50))
             manager_data = Column(String(40))
+
             __mapper_args__ = {
-                'polymorphic_identity':'manager',
-                'concrete':True}
+                'polymorphic_identity':'manager',
+                'concrete':True}
+
+    The abstract base class is handled by declarative in a special way;
+    at class configuration time, it behaves like a declarative mixin
+    or an ``__abstract__`` base class.  Once classes are configured
+    and mappings are produced, it then gets mapped itself, but
+    after all of its descendants.  This is a system of mapping
+    not found anywhere else in SQLAlchemy.
+
+    Using this approach, we can specify columns and properties
+    that will take effect on mapped subclasses, in the way that
+    we normally do as in :ref:`declarative_mixins`::
+
+        class Company(Base):
+            __tablename__ = 'company'
+            id = Column(Integer, primary_key=True)
+
+        class Employee(AbstractConcreteBase, Base):
+            employee_id = Column(Integer, primary_key=True)
+
+            @declared_attr
+            def company_id(cls):
+                return Column(ForeignKey('company.id'))
+
+            @declared_attr
+            def company(cls):
+                return relationship("Company")
+
+        class Manager(Employee):
+            __tablename__ = 'manager'
+
+            name = Column(String(50))
+            manager_data = Column(String(40))
+
+            __mapper_args__ = {
+                'polymorphic_identity':'manager',
+                'concrete':True}
+
+    When we make use of our mappings, however, both ``Manager`` and
+    ``Employee`` will have an independently usable ``.company`` attribute::
+
+        session.query(Employee).filter(Employee.company.has(id=5))
+
+    .. versionchanged:: 1.0.0 - The mechanics of :class:`.AbstractConcreteBase`
+       have been reworked to support relationships established directly
+       on the abstract base, without any special configurational steps.
+
+    .. seealso::
+
+        :class:`.ConcreteBase`
+
+        :ref:`concrete_inheritance`
+
+        :ref:`inheritance_concrete_helpers`
 
     """
 
-    __abstract__ = True
+    __no_table__ = True
 
     @classmethod
     def __declare_first__(cls):
-        if hasattr(cls, '__mapper__'):
+        cls._sa_decl_prepare_nocascade()
+
+    @classmethod
+    def _sa_decl_prepare_nocascade(cls):
+        if getattr(cls, '__mapper__', None):
             return
-        clsregistry.add_class(cls.__name__, cls)
+
+        to_map = _DeferredMapperConfig.config_for_cls(cls)
+
         # can't rely on 'self_and_descendants' here
         # since technically an immediate subclass
         # might not be mapped, but a subclass
@@ -392,11 +540,33 @@ class AbstractConcreteBase(ConcreteBase):
             if mn is not None:
                 mappers.append(mn)
         pjoin = cls._create_polymorphic_union(mappers)
-        cls.__mapper__ = m = mapper(cls, pjoin, polymorphic_on=pjoin.c.type)
+
+        # For columns that were declared on the class, these
+        # are normally ignored with the "__no_table__" mapping,
+        # unless they have a different attribute key vs. col name
+        # and are in the properties argument.
+        # In that case, ensure we update the properties entry
+        # to the correct column from the pjoin target table.
+ declared_cols = set(to_map.declared_columns) + for k, v in list(to_map.properties.items()): + if v in declared_cols: + to_map.properties[k] = pjoin.c[v.key] + + to_map.local_table = pjoin + + m_args = to_map.mapper_args_fn or dict + + def mapper_args(): + args = m_args() + args['polymorphic_on'] = pjoin.c.type + return args + to_map.mapper_args_fn = mapper_args + + m = to_map.map() for scls in cls.__subclasses__(): sm = _mapper_or_none(scls) - if sm.concrete and cls in scls.__bases__: + if sm and sm.concrete and cls in scls.__bases__: sm._set_concrete_base(m) diff --git a/lib/python3.4/site-packages/sqlalchemy/ext/declarative/base.py b/lib/python3.4/site-packages/sqlalchemy/ext/declarative/base.py index 94baeeb..59ebe37 100644 --- a/lib/python3.4/site-packages/sqlalchemy/ext/declarative/base.py +++ b/lib/python3.4/site-packages/sqlalchemy/ext/declarative/base.py @@ -1,5 +1,5 @@ # ext/declarative/base.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -19,6 +19,9 @@ from ... import event from . import clsregistry import collections import weakref +from sqlalchemy.orm import instrumentation + +declared_attr = declarative_props = None def _declared_mapping_info(cls): @@ -32,322 +35,431 @@ def _declared_mapping_info(cls): return None -def _as_declarative(cls, classname, dict_): - from .api import declared_attr +def _resolve_for_abstract(cls): + if cls is object: + return None - # dict_ will be a dictproxy, which we can't write to, and we need to! - dict_ = dict(dict_) + if _get_immediate_cls_attr(cls, '__abstract__', strict=True): + for sup in cls.__bases__: + sup = _resolve_for_abstract(sup) + if sup is not None: + return sup + else: + return None + else: + return cls - column_copies = {} - potential_columns = {} - mapper_args_fn = None - table_args = inherited_table_args = None - tablename = None +def _get_immediate_cls_attr(cls, attrname, strict=False): + """return an attribute of the class that is either present directly + on the class, e.g. not on a superclass, or is from a superclass but + this superclass is a mixin, that is, not a descendant of + the declarative base. - declarative_props = (declared_attr, util.classproperty) + This is used to detect attributes that indicate something about + a mapped class independently from any mapped classes that it may + inherit from. + + """ + if not issubclass(cls, object): + return None for base in cls.__mro__: _is_declarative_inherits = hasattr(base, '_decl_class_registry') - - if '__declare_last__' in base.__dict__: - @event.listens_for(mapper, "after_configured") - def go(): - cls.__declare_last__() - if '__declare_first__' in base.__dict__: - @event.listens_for(mapper, "before_configured") - def go(): - cls.__declare_first__() - if '__abstract__' in base.__dict__ and base.__abstract__: - if (base is cls or - (base in cls.__bases__ and not _is_declarative_inherits)): - return - - class_mapped = _declared_mapping_info(base) is not None - - for name, obj in vars(base).items(): - if name == '__mapper_args__': - if not mapper_args_fn and ( - not class_mapped or - isinstance(obj, declarative_props) - ): - # don't even invoke __mapper_args__ until - # after we've determined everything about the - # mapped table. - # make a copy of it so a class-level dictionary - # is not overwritten when we update column-based - # arguments. 
- mapper_args_fn = lambda: dict(cls.__mapper_args__) - elif name == '__tablename__': - if not tablename and ( - not class_mapped or - isinstance(obj, declarative_props) - ): - tablename = cls.__tablename__ - elif name == '__table_args__': - if not table_args and ( - not class_mapped or - isinstance(obj, declarative_props) - ): - table_args = cls.__table_args__ - if not isinstance(table_args, (tuple, dict, type(None))): - raise exc.ArgumentError( - "__table_args__ value must be a tuple, " - "dict, or None") - if base is not cls: - inherited_table_args = True - elif class_mapped: - if isinstance(obj, declarative_props): - util.warn("Regular (i.e. not __special__) " - "attribute '%s.%s' uses @declared_attr, " - "but owning class %s is mapped - " - "not applying to subclass %s." - % (base.__name__, name, base, cls)) - continue - elif base is not cls: - # we're a mixin. - if isinstance(obj, Column): - if getattr(cls, name) is not obj: - # if column has been overridden - # (like by the InstrumentedAttribute of the - # superclass), skip - continue - if obj.foreign_keys: - raise exc.InvalidRequestError( - "Columns with foreign keys to other columns " - "must be declared as @declared_attr callables " - "on declarative mixin classes. ") - if name not in dict_ and not ( - '__table__' in dict_ and - (obj.name or name) in dict_['__table__'].c - ) and name not in potential_columns: - potential_columns[name] = \ - column_copies[obj] = \ - obj.copy() - column_copies[obj]._creation_order = \ - obj._creation_order - elif isinstance(obj, MapperProperty): - raise exc.InvalidRequestError( - "Mapper properties (i.e. deferred," - "column_property(), relationship(), etc.) must " - "be declared as @declared_attr callables " - "on declarative mixin classes.") - elif isinstance(obj, declarative_props): - dict_[name] = ret = \ - column_copies[obj] = getattr(cls, name) - if isinstance(ret, (Column, MapperProperty)) and \ - ret.doc is None: - ret.doc = obj.__doc__ - - # apply inherited columns as we should - for k, v in potential_columns.items(): - dict_[k] = v - - if inherited_table_args and not tablename: - table_args = None - - clsregistry.add_class(classname, cls) - our_stuff = util.OrderedDict() - - for k in list(dict_): - - # TODO: improve this ? all dunders ? - if k in ('__table__', '__tablename__', '__mapper_args__'): - continue - - value = dict_[k] - if isinstance(value, declarative_props): - value = getattr(cls, k) - - elif isinstance(value, QueryableAttribute) and \ - value.class_ is not cls and \ - value.key != k: - # detect a QueryableAttribute that's already mapped being - # assigned elsewhere in userland, turn into a synonym() - value = synonym(value.key) - setattr(cls, k, value) - - if (isinstance(value, tuple) and len(value) == 1 and - isinstance(value[0], (Column, MapperProperty))): - util.warn("Ignoring declarative-like tuple value of attribute " - "%s: possibly a copy-and-paste error with a comma " - "left at the end of the line?" % k) - continue - if not isinstance(value, (Column, MapperProperty)): - if not k.startswith('__'): - dict_.pop(k) - setattr(cls, k, value) - continue - if k == 'metadata': - raise exc.InvalidRequestError( - "Attribute name 'metadata' is reserved " - "for the MetaData instance when using a " - "declarative base class." 
- ) - prop = clsregistry._deferred_relationship(cls, value) - our_stuff[k] = prop - - # set up attributes in the order they were created - our_stuff.sort(key=lambda key: our_stuff[key]._creation_order) - - # extract columns from the class dict - declared_columns = set() - name_to_prop_key = collections.defaultdict(set) - for key, c in list(our_stuff.items()): - if isinstance(c, (ColumnProperty, CompositeProperty)): - for col in c.columns: - if isinstance(col, Column) and \ - col.table is None: - _undefer_column_name(key, col) - if not isinstance(c, CompositeProperty): - name_to_prop_key[col.name].add(key) - declared_columns.add(col) - elif isinstance(c, Column): - _undefer_column_name(key, c) - name_to_prop_key[c.name].add(key) - declared_columns.add(c) - # if the column is the same name as the key, - # remove it from the explicit properties dict. - # the normal rules for assigning column-based properties - # will take over, including precedence of columns - # in multi-column ColumnProperties. - if key == c.key: - del our_stuff[key] - - for name, keys in name_to_prop_key.items(): - if len(keys) > 1: - util.warn( - "On class %r, Column object %r named directly multiple times, " - "only one will be used: %s" % - (classname, name, (", ".join(sorted(keys)))) - ) - - declared_columns = sorted( - declared_columns, key=lambda c: c._creation_order) - table = None - - if hasattr(cls, '__table_cls__'): - table_cls = util.unbound_method_to_callable(cls.__table_cls__) + if attrname in base.__dict__ and ( + base is cls or + ((base in cls.__bases__ if strict else True) + and not _is_declarative_inherits) + ): + return getattr(base, attrname) else: - table_cls = Table + return None - if '__table__' not in dict_: - if tablename is not None: - args, table_kw = (), {} - if table_args: - if isinstance(table_args, dict): - table_kw = table_args - elif isinstance(table_args, tuple): - if isinstance(table_args[-1], dict): - args, table_kw = table_args[0:-1], table_args[-1] - else: - args = table_args +def _as_declarative(cls, classname, dict_): + global declared_attr, declarative_props + if declared_attr is None: + from .api import declared_attr + declarative_props = (declared_attr, util.classproperty) - autoload = dict_.get('__autoload__') - if autoload: - table_kw['autoload'] = True + if _get_immediate_cls_attr(cls, '__abstract__', strict=True): + return - cls.__table__ = table = table_cls( - tablename, cls.metadata, - *(tuple(declared_columns) + tuple(args)), - **table_kw) - else: - table = cls.__table__ - if declared_columns: - for c in declared_columns: - if not table.c.contains_column(c): - raise exc.ArgumentError( - "Can't add additional column %r when " - "specifying __table__" % c.key - ) - - if hasattr(cls, '__mapper_cls__'): - mapper_cls = util.unbound_method_to_callable(cls.__mapper_cls__) - else: - mapper_cls = mapper - - for c in cls.__bases__: - if _declared_mapping_info(c) is not None: - inherits = c - break - else: - inherits = None - - if table is None and inherits is None: - raise exc.InvalidRequestError( - "Class %r does not have a __table__ or __tablename__ " - "specified and does not inherit from an existing " - "table-mapped class." % cls - ) - elif inherits: - inherited_mapper = _declared_mapping_info(inherits) - inherited_table = inherited_mapper.local_table - inherited_mapped_table = inherited_mapper.mapped_table - - if table is None: - # single table inheritance. 
- # ensure no table args - if table_args: - raise exc.ArgumentError( - "Can't place __table_args__ on an inherited class " - "with no table." - ) - # add any columns declared here to the inherited table. - for c in declared_columns: - if c.primary_key: - raise exc.ArgumentError( - "Can't place primary key columns on an inherited " - "class with no table." - ) - if c.name in inherited_table.c: - if inherited_table.c[c.name] is c: - continue - raise exc.ArgumentError( - "Column '%s' on class %s conflicts with " - "existing column '%s'" % - (c, cls, inherited_table.c[c.name]) - ) - inherited_table.append_column(c) - if inherited_mapped_table is not None and \ - inherited_mapped_table is not inherited_table: - inherited_mapped_table._refresh_for_new_column(c) - - defer_map = hasattr(cls, '_sa_decl_prepare') - if defer_map: - cfg_cls = _DeferredMapperConfig - else: - cfg_cls = _MapperConfig - mt = cfg_cls(mapper_cls, - cls, table, - inherits, - declared_columns, - column_copies, - our_stuff, - mapper_args_fn) - if not defer_map: - mt.map() + _MapperConfig.setup_mapping(cls, classname, dict_) class _MapperConfig(object): - mapped_table = None + @classmethod + def setup_mapping(cls, cls_, classname, dict_): + defer_map = _get_immediate_cls_attr( + cls_, '_sa_decl_prepare_nocascade', strict=True) or \ + hasattr(cls_, '_sa_decl_prepare') - def __init__(self, mapper_cls, - cls, - table, - inherits, - declared_columns, - column_copies, - properties, mapper_args_fn): - self.mapper_cls = mapper_cls - self.cls = cls - self.local_table = table - self.inherits = inherits - self.properties = properties + if defer_map: + cfg_cls = _DeferredMapperConfig + else: + cfg_cls = _MapperConfig + cfg_cls(cls_, classname, dict_) + + def __init__(self, cls_, classname, dict_): + + self.cls = cls_ + + # dict_ will be a dictproxy, which we can't write to, and we need to! + self.dict_ = dict(dict_) + self.classname = classname + self.mapped_table = None + self.properties = util.OrderedDict() + self.declared_columns = set() + self.column_copies = {} + self._setup_declared_events() + + # temporary registry. While early 1.0 versions + # set up the ClassManager here, by API contract + # we can't do that until there's a mapper. 
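+        # (the registry is a plain dict keyed on declared_attr
+        # descriptors; declared_attr.__get__ memoizes each fget(cls)
+        # result into it so that a given declared attribute is evaluated
+        # only once per class, and map() deletes the attribute again
+        # once the mapper is in place)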
+ self.cls._sa_declared_attr_reg = {} + + self._scan_attributes() + + clsregistry.add_class(self.classname, self.cls) + + self._extract_mappable_attributes() + + self._extract_declared_columns() + + self._setup_table() + + self._setup_inheritance() + + self._early_mapping() + + def _early_mapping(self): + self.map() + + def _setup_declared_events(self): + if _get_immediate_cls_attr(self.cls, '__declare_last__'): + @event.listens_for(mapper, "after_configured") + def after_configured(): + self.cls.__declare_last__() + + if _get_immediate_cls_attr(self.cls, '__declare_first__'): + @event.listens_for(mapper, "before_configured") + def before_configured(): + self.cls.__declare_first__() + + def _scan_attributes(self): + cls = self.cls + dict_ = self.dict_ + column_copies = self.column_copies + mapper_args_fn = None + table_args = inherited_table_args = None + tablename = None + + for base in cls.__mro__: + class_mapped = base is not cls and \ + _declared_mapping_info(base) is not None and \ + not _get_immediate_cls_attr( + base, '_sa_decl_prepare_nocascade', strict=True) + + if not class_mapped and base is not cls: + self._produce_column_copies(base) + + for name, obj in vars(base).items(): + if name == '__mapper_args__': + if not mapper_args_fn and ( + not class_mapped or + isinstance(obj, declarative_props) + ): + # don't even invoke __mapper_args__ until + # after we've determined everything about the + # mapped table. + # make a copy of it so a class-level dictionary + # is not overwritten when we update column-based + # arguments. + mapper_args_fn = lambda: dict(cls.__mapper_args__) + elif name == '__tablename__': + if not tablename and ( + not class_mapped or + isinstance(obj, declarative_props) + ): + tablename = cls.__tablename__ + elif name == '__table_args__': + if not table_args and ( + not class_mapped or + isinstance(obj, declarative_props) + ): + table_args = cls.__table_args__ + if not isinstance( + table_args, (tuple, dict, type(None))): + raise exc.ArgumentError( + "__table_args__ value must be a tuple, " + "dict, or None") + if base is not cls: + inherited_table_args = True + elif class_mapped: + if isinstance(obj, declarative_props): + util.warn("Regular (i.e. not __special__) " + "attribute '%s.%s' uses @declared_attr, " + "but owning class %s is mapped - " + "not applying to subclass %s." + % (base.__name__, name, base, cls)) + continue + elif base is not cls: + # we're a mixin, abstract base, or something that is + # acting like that for now. + if isinstance(obj, Column): + # already copied columns to the mapped class. + continue + elif isinstance(obj, MapperProperty): + raise exc.InvalidRequestError( + "Mapper properties (i.e. deferred," + "column_property(), relationship(), etc.) 
must " + "be declared as @declared_attr callables " + "on declarative mixin classes.") + elif isinstance(obj, declarative_props): + oldclassprop = isinstance(obj, util.classproperty) + if not oldclassprop and obj._cascading: + dict_[name] = column_copies[obj] = \ + ret = obj.__get__(obj, cls) + setattr(cls, name, ret) + else: + if oldclassprop: + util.warn_deprecated( + "Use of sqlalchemy.util.classproperty on " + "declarative classes is deprecated.") + dict_[name] = column_copies[obj] = \ + ret = getattr(cls, name) + if isinstance(ret, (Column, MapperProperty)) and \ + ret.doc is None: + ret.doc = obj.__doc__ + + if inherited_table_args and not tablename: + table_args = None + + self.table_args = table_args + self.tablename = tablename self.mapper_args_fn = mapper_args_fn - self.declared_columns = declared_columns - self.column_copies = column_copies + + def _produce_column_copies(self, base): + cls = self.cls + dict_ = self.dict_ + column_copies = self.column_copies + # copy mixin columns to the mapped class + for name, obj in vars(base).items(): + if isinstance(obj, Column): + if getattr(cls, name) is not obj: + # if column has been overridden + # (like by the InstrumentedAttribute of the + # superclass), skip + continue + elif obj.foreign_keys: + raise exc.InvalidRequestError( + "Columns with foreign keys to other columns " + "must be declared as @declared_attr callables " + "on declarative mixin classes. ") + elif name not in dict_ and not ( + '__table__' in dict_ and + (obj.name or name) in dict_['__table__'].c + ): + column_copies[obj] = copy_ = obj.copy() + copy_._creation_order = obj._creation_order + setattr(cls, name, copy_) + dict_[name] = copy_ + + def _extract_mappable_attributes(self): + cls = self.cls + dict_ = self.dict_ + + our_stuff = self.properties + + for k in list(dict_): + + if k in ('__table__', '__tablename__', '__mapper_args__'): + continue + + value = dict_[k] + if isinstance(value, declarative_props): + value = getattr(cls, k) + + elif isinstance(value, QueryableAttribute) and \ + value.class_ is not cls and \ + value.key != k: + # detect a QueryableAttribute that's already mapped being + # assigned elsewhere in userland, turn into a synonym() + value = synonym(value.key) + setattr(cls, k, value) + + if (isinstance(value, tuple) and len(value) == 1 and + isinstance(value[0], (Column, MapperProperty))): + util.warn("Ignoring declarative-like tuple value of attribute " + "%s: possibly a copy-and-paste error with a comma " + "left at the end of the line?" % k) + continue + elif not isinstance(value, (Column, MapperProperty)): + # using @declared_attr for some object that + # isn't Column/MapperProperty; remove from the dict_ + # and place the evaluated value onto the class. + if not k.startswith('__'): + dict_.pop(k) + setattr(cls, k, value) + continue + # we expect to see the name 'metadata' in some valid cases; + # however at this point we see it's assigned to something trying + # to be mapped, so raise for that. + elif k == 'metadata': + raise exc.InvalidRequestError( + "Attribute name 'metadata' is reserved " + "for the MetaData instance when using a " + "declarative base class." 
+ ) + prop = clsregistry._deferred_relationship(cls, value) + our_stuff[k] = prop + + def _extract_declared_columns(self): + our_stuff = self.properties + + # set up attributes in the order they were created + our_stuff.sort(key=lambda key: our_stuff[key]._creation_order) + + # extract columns from the class dict + declared_columns = self.declared_columns + name_to_prop_key = collections.defaultdict(set) + for key, c in list(our_stuff.items()): + if isinstance(c, (ColumnProperty, CompositeProperty)): + for col in c.columns: + if isinstance(col, Column) and \ + col.table is None: + _undefer_column_name(key, col) + if not isinstance(c, CompositeProperty): + name_to_prop_key[col.name].add(key) + declared_columns.add(col) + elif isinstance(c, Column): + _undefer_column_name(key, c) + name_to_prop_key[c.name].add(key) + declared_columns.add(c) + # if the column is the same name as the key, + # remove it from the explicit properties dict. + # the normal rules for assigning column-based properties + # will take over, including precedence of columns + # in multi-column ColumnProperties. + if key == c.key: + del our_stuff[key] + + for name, keys in name_to_prop_key.items(): + if len(keys) > 1: + util.warn( + "On class %r, Column object %r named " + "directly multiple times, " + "only one will be used: %s" % + (self.classname, name, (", ".join(sorted(keys)))) + ) + + def _setup_table(self): + cls = self.cls + tablename = self.tablename + table_args = self.table_args + dict_ = self.dict_ + declared_columns = self.declared_columns + + declared_columns = self.declared_columns = sorted( + declared_columns, key=lambda c: c._creation_order) + table = None + + if hasattr(cls, '__table_cls__'): + table_cls = util.unbound_method_to_callable(cls.__table_cls__) + else: + table_cls = Table + + if '__table__' not in dict_: + if tablename is not None: + + args, table_kw = (), {} + if table_args: + if isinstance(table_args, dict): + table_kw = table_args + elif isinstance(table_args, tuple): + if isinstance(table_args[-1], dict): + args, table_kw = table_args[0:-1], table_args[-1] + else: + args = table_args + + autoload = dict_.get('__autoload__') + if autoload: + table_kw['autoload'] = True + + cls.__table__ = table = table_cls( + tablename, cls.metadata, + *(tuple(declared_columns) + tuple(args)), + **table_kw) + else: + table = cls.__table__ + if declared_columns: + for c in declared_columns: + if not table.c.contains_column(c): + raise exc.ArgumentError( + "Can't add additional column %r when " + "specifying __table__" % c.key + ) + self.local_table = table + + def _setup_inheritance(self): + table = self.local_table + cls = self.cls + table_args = self.table_args + declared_columns = self.declared_columns + for c in cls.__bases__: + c = _resolve_for_abstract(c) + if c is None: + continue + if _declared_mapping_info(c) is not None and \ + not _get_immediate_cls_attr( + c, '_sa_decl_prepare_nocascade', strict=True): + self.inherits = c + break + else: + self.inherits = None + + if table is None and self.inherits is None and \ + not _get_immediate_cls_attr(cls, '__no_table__'): + + raise exc.InvalidRequestError( + "Class %r does not have a __table__ or __tablename__ " + "specified and does not inherit from an existing " + "table-mapped class." % cls + ) + elif self.inherits: + inherited_mapper = _declared_mapping_info(self.inherits) + inherited_table = inherited_mapper.local_table + inherited_mapped_table = inherited_mapper.mapped_table + + if table is None: + # single table inheritance. 
+ # ensure no table args + if table_args: + raise exc.ArgumentError( + "Can't place __table_args__ on an inherited class " + "with no table." + ) + # add any columns declared here to the inherited table. + for c in declared_columns: + if c.primary_key: + raise exc.ArgumentError( + "Can't place primary key columns on an inherited " + "class with no table." + ) + if c.name in inherited_table.c: + if inherited_table.c[c.name] is c: + continue + raise exc.ArgumentError( + "Column '%s' on class %s conflicts with " + "existing column '%s'" % + (c, cls, inherited_table.c[c.name]) + ) + inherited_table.append_column(c) + if inherited_mapped_table is not None and \ + inherited_mapped_table is not inherited_table: + inherited_mapped_table._refresh_for_new_column(c) def _prepare_mapper_arguments(self): properties = self.properties @@ -401,20 +513,31 @@ class _MapperConfig(object): properties[k] = [col] + p.columns result_mapper_args = mapper_args.copy() result_mapper_args['properties'] = properties - return result_mapper_args + self.mapper_args = result_mapper_args def map(self): - mapper_args = self._prepare_mapper_arguments() - self.cls.__mapper__ = self.mapper_cls( + self._prepare_mapper_arguments() + if hasattr(self.cls, '__mapper_cls__'): + mapper_cls = util.unbound_method_to_callable( + self.cls.__mapper_cls__) + else: + mapper_cls = mapper + + self.cls.__mapper__ = mp_ = mapper_cls( self.cls, self.local_table, - **mapper_args + **self.mapper_args ) + del self.cls._sa_declared_attr_reg + return mp_ class _DeferredMapperConfig(_MapperConfig): _configs = util.OrderedDict() + def _early_mapping(self): + pass + @property def cls(self): return self._cls() @@ -466,7 +589,7 @@ class _DeferredMapperConfig(_MapperConfig): def map(self): self._configs.pop(self._cls, None) - super(_DeferredMapperConfig, self).map() + return super(_DeferredMapperConfig, self).map() def _add_attribute(cls, key, value): diff --git a/lib/python3.4/site-packages/sqlalchemy/ext/declarative/clsregistry.py b/lib/python3.4/site-packages/sqlalchemy/ext/declarative/clsregistry.py index 4595b85..0d62bd2 100644 --- a/lib/python3.4/site-packages/sqlalchemy/ext/declarative/clsregistry.py +++ b/lib/python3.4/site-packages/sqlalchemy/ext/declarative/clsregistry.py @@ -1,5 +1,5 @@ # ext/declarative/clsregistry.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -71,6 +71,8 @@ class _MultipleClassMarker(object): """ + __slots__ = 'on_remove', 'contents', '__weakref__' + def __init__(self, classes, on_remove=None): self.on_remove = on_remove self.contents = set([ @@ -103,7 +105,12 @@ class _MultipleClassMarker(object): self.on_remove() def add_item(self, item): - modules = set([cls().__module__ for cls in self.contents]) + # protect against class registration race condition against + # asynchronous garbage collection calling _remove_item, + # [ticket:3208] + modules = set([ + cls.__module__ for cls in + [ref() for ref in self.contents] if cls is not None]) if item.__module__ in modules: util.warn( "This declarative base already contains a class with the " @@ -122,6 +129,8 @@ class _ModuleMarker(object): """ + __slots__ = 'parent', 'name', 'contents', 'mod_ns', 'path', '__weakref__' + def __init__(self, name, parent): self.parent = parent self.name = name @@ -167,6 +176,8 @@ class _ModuleMarker(object): class _ModNS(object): + __slots__ = '__parent', + def __init__(self, parent): self.__parent = 
parent
 
@@ -188,6 +199,8 @@ class _ModNS(object):
 
 
 class _GetColumns(object):
+    __slots__ = 'cls',
+
     def __init__(self, cls):
         self.cls = cls
 
@@ -216,6 +229,8 @@ inspection._inspects(_GetColumns)(
 
 
 class _GetTable(object):
+    __slots__ = 'key', 'metadata'
+
     def __init__(self, key, metadata):
         self.key = key
         self.metadata = metadata
 
@@ -306,7 +321,8 @@ def _deferred_relationship(cls, prop):
             key, kwargs = prop.backref
             for attr in ('primaryjoin', 'secondaryjoin', 'secondary',
                          'foreign_keys', 'remote_side', 'order_by'):
-                if attr in kwargs and isinstance(kwargs[attr], str):
+                if attr in kwargs and isinstance(kwargs[attr],
+                                                 util.string_types):
                     kwargs[attr] = resolve_arg(kwargs[attr])
 
     return prop
diff --git a/lib/python3.4/site-packages/sqlalchemy/ext/horizontal_shard.py b/lib/python3.4/site-packages/sqlalchemy/ext/horizontal_shard.py
index d311fb2..996e81f 100644
--- a/lib/python3.4/site-packages/sqlalchemy/ext/horizontal_shard.py
+++ b/lib/python3.4/site-packages/sqlalchemy/ext/horizontal_shard.py
@@ -1,5 +1,5 @@
 # ext/horizontal_shard.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 #
 #
 # This module is part of SQLAlchemy and is released under
diff --git a/lib/python3.4/site-packages/sqlalchemy/ext/hybrid.py b/lib/python3.4/site-packages/sqlalchemy/ext/hybrid.py
index 9f4e09e..bbf3867 100644
--- a/lib/python3.4/site-packages/sqlalchemy/ext/hybrid.py
+++ b/lib/python3.4/site-packages/sqlalchemy/ext/hybrid.py
@@ -1,5 +1,5 @@
 # ext/hybrid.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 #
 #
 # This module is part of SQLAlchemy and is released under
@@ -45,8 +45,8 @@ as the class itself::
             return self.end - self.start
 
         @hybrid_method
-        def contains(self,point):
-            return (self.start <= point) & (point < self.end)
+        def contains(self, point):
+            return (self.start <= point) & (point <= self.end)
 
         @hybrid_method
         def intersects(self, other):
@@ -145,7 +145,7 @@ usage of the absolute value function::
         return func.abs(cls.length) / 2
 
 Above the Python function ``abs()`` is used for instance-level
-operations, the SQL function ``ABS()`` is used via the :attr:`.func`
+operations, the SQL function ``ABS()`` is used via the :data:`.func`
 object for class-level expressions::
 
     >>> i1.radius
@@ -634,10 +634,10 @@ from .. import util
 from ..orm import attributes, interfaces
 
 HYBRID_METHOD = util.symbol('HYBRID_METHOD')
-"""Symbol indicating an :class:`_InspectionAttr` that's
+"""Symbol indicating an :class:`InspectionAttr` that's
 of type :class:`.hybrid_method`.
 
-   Is assigned to the :attr:`._InspectionAttr.extension_type`
+   Is assigned to the :attr:`.InspectionAttr.extension_type`
    attribute.
 
    .. seealso::
 
@@ -647,10 +647,10 @@ HYBRID_METHOD = util.symbol('HYBRID_METHOD')
 """
 
HYBRID_PROPERTY = util.symbol('HYBRID_PROPERTY')
-"""Symbol indicating an :class:`_InspectionAttr` that's
+"""Symbol indicating an :class:`InspectionAttr` that's
 of type :class:`.hybrid_property`.
 
-   Is assigned to the :attr:`._InspectionAttr.extension_type`
+   Is assigned to the :attr:`.InspectionAttr.extension_type`
    attribute.
 
    .. seealso::
 
@@ -660,7 +660,7 @@ HYBRID_PROPERTY = util.symbol('HYBRID_PROPERTY')
 """
 
 
-class hybrid_method(interfaces._InspectionAttr):
+class hybrid_method(interfaces.InspectionAttrInfo):
     """A decorator which allows definition of a Python object method
     with both instance-level and class-level behavior.
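For orientation, the ``contains()`` hybrid corrected in the hunk above
behaves as follows (a condensed sketch of the documented ``Interval``
example; the class and column names come from that docstring, the rest
is illustrative)::

    from sqlalchemy import Column, Integer
    from sqlalchemy.ext.hybrid import hybrid_method

    class Interval(Base):
        __tablename__ = 'interval'
        id = Column(Integer, primary_key=True)
        start = Column(Integer, nullable=False)
        end = Column(Integer, nullable=False)

        @hybrid_method
        def contains(self, point):
            # evaluates in Python when called on an instance, and renders
            # a SQL expression when called on the class, e.g.
            # session.query(Interval).filter(Interval.contains(12))
            return (self.start <= point) & (point <= self.end)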
@@ -703,7 +703,7 @@ class hybrid_method(interfaces._InspectionAttr): return self -class hybrid_property(interfaces._InspectionAttr): +class hybrid_property(interfaces.InspectionAttrInfo): """A decorator which allows definition of a Python descriptor with both instance-level and class-level behavior. diff --git a/lib/python3.4/site-packages/sqlalchemy/ext/instrumentation.py b/lib/python3.4/site-packages/sqlalchemy/ext/instrumentation.py index 0241366..30a0ab7 100644 --- a/lib/python3.4/site-packages/sqlalchemy/ext/instrumentation.py +++ b/lib/python3.4/site-packages/sqlalchemy/ext/instrumentation.py @@ -166,7 +166,13 @@ class ExtendedInstrumentationRegistry(InstrumentationFactory): def manager_of_class(self, cls): if cls is None: return None - return self._manager_finders.get(cls, _default_manager_getter)(cls) + try: + finder = self._manager_finders.get(cls, _default_manager_getter) + except TypeError: + # due to weakref lookup on invalid object + return None + else: + return finder(cls) def state_of(self, instance): if instance is None: @@ -392,6 +398,7 @@ def _reinstall_default_lookups(): manager_of_class=_default_manager_getter ) ) + _instrumentation_factory._extended = False def _install_lookups(lookups): diff --git a/lib/python3.4/site-packages/sqlalchemy/ext/mutable.py b/lib/python3.4/site-packages/sqlalchemy/ext/mutable.py index 7469bcb..97f720c 100644 --- a/lib/python3.4/site-packages/sqlalchemy/ext/mutable.py +++ b/lib/python3.4/site-packages/sqlalchemy/ext/mutable.py @@ -1,5 +1,5 @@ # ext/mutable.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -402,6 +402,27 @@ class MutableBase(object): msg = "Attribute '%s' does not accept objects of type %s" raise ValueError(msg % (key, type(value))) + @classmethod + def _get_listen_keys(cls, attribute): + """Given a descriptor attribute, return a ``set()`` of the attribute + keys which indicate a change in the state of this attribute. + + This is normally just ``set([attribute.key])``, but can be overridden + to provide for additional keys. E.g. a :class:`.MutableComposite` + augments this set with the attribute keys associated with the columns + that comprise the composite value. + + This collection is consulted in the case of intercepting the + :meth:`.InstanceEvents.refresh` and + :meth:`.InstanceEvents.refresh_flush` events, which pass along a list + of attribute names that have been refreshed; the list is compared + against this set to determine if action needs to be taken. + + .. versionadded:: 1.0.5 + + """ + return set([attribute.key]) + @classmethod def _listen_on_attribute(cls, attribute, coerce, parent_cls): """Establish this type as a mutation listener for the given @@ -415,6 +436,8 @@ class MutableBase(object): # rely on "propagate" here parent_cls = attribute.class_ + listen_keys = cls._get_listen_keys(attribute) + def load(state, *args): """Listen for objects loaded or refreshed. @@ -429,6 +452,10 @@ class MutableBase(object): state.dict[key] = val val._parents[state.obj()] = key + def load_attrs(state, ctx, attrs): + if not attrs or listen_keys.intersection(attrs): + load(state) + def set(target, value, oldvalue, initiator): """Listen for set/replace events on the target data member. 
@@ -463,7 +490,9 @@ class MutableBase(object):
 
         event.listen(parent_cls, 'load', load,
                      raw=True, propagate=True)
-        event.listen(parent_cls, 'refresh', load,
+        event.listen(parent_cls, 'refresh', load_attrs,
+                     raw=True, propagate=True)
+        event.listen(parent_cls, 'refresh_flush', load_attrs,
                      raw=True, propagate=True)
         event.listen(attribute, 'set', set,
                      raw=True, retval=True, propagate=True)
@@ -574,6 +603,10 @@ class MutableComposite(MutableBase):
 
     """
 
+    @classmethod
+    def _get_listen_keys(cls, attribute):
+        return set([attribute.key]).union(attribute.property._attribute_keys)
+
     def changed(self):
         """Subclasses should call this method whenever change events occur."""
 
@@ -602,6 +635,18 @@ _setup_composite_listener()
 class MutableDict(Mutable, dict):
     """A dictionary type that implements :class:`.Mutable`.
 
+    The :class:`.MutableDict` object implements a dictionary that will
+    emit change events to the underlying mapping when the contents of
+    the dictionary are altered, including when values are added or removed.
+
+    Note that :class:`.MutableDict` does **not** apply mutable tracking to the
+    *values themselves* inside the dictionary. Therefore it is not a sufficient
+    solution for the use case of tracking deep changes to a *recursive*
+    dictionary structure, such as a JSON structure.  To support this use case,
+    build a subclass of :class:`.MutableDict` that provides appropriate
+    coercion to the values placed in the dictionary so that they too are
+    "mutable", and emit events up to their parent structure.
+
     .. versionadded:: 0.8
 
     """
@@ -621,16 +666,30 @@ class MutableDict(Mutable, dict):
         dict.__delitem__(self, key)
         self.changed()
 
+    def update(self, *a, **kw):
+        dict.update(self, *a, **kw)
+        self.changed()
+
+    def pop(self, *arg):
+        result = dict.pop(self, *arg)
+        self.changed()
+        return result
+
+    def popitem(self):
+        result = dict.popitem(self)
+        self.changed()
+        return result
+
     def clear(self):
         dict.clear(self)
         self.changed()
 
     @classmethod
     def coerce(cls, key, value):
-        """Convert plain dictionary to MutableDict."""
-        if not isinstance(value, MutableDict):
+        """Convert plain dictionary to an instance of this class."""
+        if not isinstance(value, cls):
             if isinstance(value, dict):
-                return MutableDict(value)
+                return cls(value)
             return Mutable.coerce(key, value)
         else:
             return value
diff --git a/lib/python3.4/site-packages/sqlalchemy/ext/orderinglist.py b/lib/python3.4/site-packages/sqlalchemy/ext/orderinglist.py
index 67fda44..d060a4f 100644
--- a/lib/python3.4/site-packages/sqlalchemy/ext/orderinglist.py
+++ b/lib/python3.4/site-packages/sqlalchemy/ext/orderinglist.py
@@ -1,5 +1,5 @@
 # ext/orderinglist.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 #
 #
 # This module is part of SQLAlchemy and is released under
@@ -119,7 +119,7 @@ start numbering at 1 or some other integer, provide ``count_from=1``.
 
 """
-from ..orm.collections import collection
+from ..orm.collections import collection, collection_adapter
 from ..
import util __all__ = ['ordering_list'] @@ -319,7 +319,10 @@ class OrderingList(list): def remove(self, entity): super(OrderingList, self).remove(entity) - self._reorder() + + adapter = collection_adapter(self) + if adapter and adapter._referenced_by_owner: + self._reorder() def pop(self, index=-1): entity = super(OrderingList, self).pop(index) diff --git a/lib/python3.4/site-packages/sqlalchemy/ext/serializer.py b/lib/python3.4/site-packages/sqlalchemy/ext/serializer.py index bf8d67d..893f7be 100644 --- a/lib/python3.4/site-packages/sqlalchemy/ext/serializer.py +++ b/lib/python3.4/site-packages/sqlalchemy/ext/serializer.py @@ -1,5 +1,5 @@ # ext/serializer.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/python3.4/site-packages/sqlalchemy/inspection.py b/lib/python3.4/site-packages/sqlalchemy/inspection.py index ab9f2ae..5c16c45 100644 --- a/lib/python3.4/site-packages/sqlalchemy/inspection.py +++ b/lib/python3.4/site-packages/sqlalchemy/inspection.py @@ -1,5 +1,5 @@ # sqlalchemy/inspect.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/python3.4/site-packages/sqlalchemy/interfaces.py b/lib/python3.4/site-packages/sqlalchemy/interfaces.py index ae11d19..464ad9f 100644 --- a/lib/python3.4/site-packages/sqlalchemy/interfaces.py +++ b/lib/python3.4/site-packages/sqlalchemy/interfaces.py @@ -1,5 +1,5 @@ # sqlalchemy/interfaces.py -# Copyright (C) 2007-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2007-2016 the SQLAlchemy authors and contributors # # Copyright (C) 2007 Jason Kirtland jek@discorporate.us # diff --git a/lib/python3.4/site-packages/sqlalchemy/log.py b/lib/python3.4/site-packages/sqlalchemy/log.py index b3c9ae0..b23de90 100644 --- a/lib/python3.4/site-packages/sqlalchemy/log.py +++ b/lib/python3.4/site-packages/sqlalchemy/log.py @@ -1,5 +1,5 @@ # sqlalchemy/log.py -# Copyright (C) 2006-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2006-2016 the SQLAlchemy authors and contributors # # Includes alterations by Vinay Sajip vinay_sajip@yahoo.co.uk # diff --git a/lib/python3.4/site-packages/sqlalchemy/orm/__init__.py b/lib/python3.4/site-packages/sqlalchemy/orm/__init__.py index 741e79b..7425737 100644 --- a/lib/python3.4/site-packages/sqlalchemy/orm/__init__.py +++ b/lib/python3.4/site-packages/sqlalchemy/orm/__init__.py @@ -1,5 +1,5 @@ # orm/__init__.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -149,7 +149,12 @@ def backref(name, **kwargs): 'items':relationship( SomeItem, backref=backref('parent', lazy='subquery')) + .. 
seealso:: + + :ref:`relationships_backref` + """ + return (name, kwargs) diff --git a/lib/python3.4/site-packages/sqlalchemy/orm/attributes.py b/lib/python3.4/site-packages/sqlalchemy/orm/attributes.py index 9ffe5c8..16b3264 100644 --- a/lib/python3.4/site-packages/sqlalchemy/orm/attributes.py +++ b/lib/python3.4/site-packages/sqlalchemy/orm/attributes.py @@ -1,5 +1,5 @@ # orm/attributes.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -30,9 +30,8 @@ from .base import state_str, instance_str @inspection._self_inspects class QueryableAttribute(interfaces._MappedAttribute, - interfaces._InspectionAttr, + interfaces.InspectionAttr, interfaces.PropComparator): - """Base class for :term:`descriptor` objects that intercept attribute events on behalf of a :class:`.MapperProperty` object. The actual :class:`.MapperProperty` is accessible @@ -212,7 +211,6 @@ class QueryableAttribute(interfaces._MappedAttribute, class InstrumentedAttribute(QueryableAttribute): - """Class bound instrumented attribute which adds basic :term:`descriptor` methods. @@ -250,7 +248,6 @@ def create_proxied_attribute(descriptor): # function is removed from ext/hybrid.py class Proxy(QueryableAttribute): - """Presents the :class:`.QueryableAttribute` interface as a proxy on top of a Python descriptor / :class:`.PropComparator` combination. @@ -330,7 +327,6 @@ OP_REPLACE = util.symbol("REPLACE") class Event(object): - """A token propagated throughout the course of a chain of attribute events. @@ -349,23 +345,26 @@ class Event(object): .. versionadded:: 0.9.0 - """ + :var impl: The :class:`.AttributeImpl` which is the current event + initiator. - impl = None - """The :class:`.AttributeImpl` which is the current event initiator. - """ - - op = None - """The symbol :attr:`.OP_APPEND`, :attr:`.OP_REMOVE` or :attr:`.OP_REPLACE`, - indicating the source operation. + :var op: The symbol :attr:`.OP_APPEND`, :attr:`.OP_REMOVE` or + :attr:`.OP_REPLACE`, indicating the source operation. """ + __slots__ = 'impl', 'op', 'parent_token' + def __init__(self, attribute_impl, op): self.impl = attribute_impl self.op = op self.parent_token = self.impl.parent_token + def __eq__(self, other): + return isinstance(other, Event) and \ + other.impl is self.impl and \ + other.op == self.op + @property def key(self): return self.impl.key @@ -375,7 +374,6 @@ class Event(object): class AttributeImpl(object): - """internal implementation for instrumented attributes.""" def __init__(self, class_, key, @@ -455,6 +453,11 @@ class AttributeImpl(object): self.expire_missing = expire_missing + __slots__ = ( + 'class_', 'key', 'callable_', 'dispatch', 'trackparent', + 'parent_token', 'send_modified_events', 'is_equal', 'expire_missing' + ) + def __str__(self): return "%s.%s" % (self.class_.__name__, self.key) @@ -524,23 +527,6 @@ class AttributeImpl(object): state.parents[id_] = False - def set_callable(self, state, callable_): - """Set a callable function for this attribute on the given object. - - This callable will be executed when the attribute is next - accessed, and is assumed to construct part of the instances - previously stored state. When its value or values are loaded, - they will be established as part of the instance's *committed - state*. While *trackparent* information will be assembled for - these instances, attribute-level event handlers will not be - fired. 
- - The callable overrides the class level callable set in the - ``InstrumentedAttribute`` constructor. - - """ - state.callables[self.key] = callable_ - def get_history(self, state, dict_, passive=PASSIVE_OFF): raise NotImplementedError() @@ -565,7 +551,11 @@ class AttributeImpl(object): def initialize(self, state, dict_): """Initialize the given state's attribute with an empty value.""" - dict_[self.key] = None + # As of 1.0, we don't actually set a value in + # dict_. This is so that the state of the object does not get + # modified without emitting the appropriate events. + + return None def get(self, state, dict_, passive=PASSIVE_OFF): @@ -584,7 +574,9 @@ class AttributeImpl(object): if not passive & CALLABLES_OK: return PASSIVE_NO_RESULT - if key in state.callables: + if key in state.expired_attributes: + value = state._load_expired(state, passive) + elif key in state.callables: callable_ = state.callables[key] value = callable_(state, passive) elif self.callable_: @@ -632,7 +624,7 @@ class AttributeImpl(object): if self.key in state.committed_state: value = state.committed_state[self.key] - if value is NO_VALUE: + if value in (NO_VALUE, NEVER_SET): return None else: return value @@ -648,7 +640,6 @@ class AttributeImpl(object): class ScalarAttributeImpl(AttributeImpl): - """represents a scalar value-holding InstrumentedAttribute.""" accepts_scalar_loader = True @@ -656,6 +647,23 @@ class ScalarAttributeImpl(AttributeImpl): supports_population = True collection = False + __slots__ = '_replace_token', '_append_token', '_remove_token' + + def __init__(self, *arg, **kw): + super(ScalarAttributeImpl, self).__init__(*arg, **kw) + self._replace_token = self._append_token = None + self._remove_token = None + + def _init_append_token(self): + self._replace_token = self._append_token = Event(self, OP_REPLACE) + return self._replace_token + + _init_append_or_replace_token = _init_append_token + + def _init_remove_token(self): + self._remove_token = Event(self, OP_REMOVE) + return self._remove_token + def delete(self, state, dict_): # TODO: catch key errors, convert to attributeerror? @@ -694,27 +702,18 @@ class ScalarAttributeImpl(AttributeImpl): state._modified_event(dict_, self, old) dict_[self.key] = value - @util.memoized_property - def _replace_token(self): - return Event(self, OP_REPLACE) - - @util.memoized_property - def _append_token(self): - return Event(self, OP_REPLACE) - - @util.memoized_property - def _remove_token(self): - return Event(self, OP_REMOVE) - def fire_replace_event(self, state, dict_, value, previous, initiator): for fn in self.dispatch.set: value = fn( - state, value, previous, initiator or self._replace_token) + state, value, previous, + initiator or self._replace_token or + self._init_append_or_replace_token()) return value def fire_remove_event(self, state, dict_, value, initiator): for fn in self.dispatch.remove: - fn(state, value, initiator or self._remove_token) + fn(state, value, + initiator or self._remove_token or self._init_remove_token()) @property def type(self): @@ -722,7 +721,6 @@ class ScalarAttributeImpl(AttributeImpl): class ScalarObjectAttributeImpl(ScalarAttributeImpl): - """represents a scalar-holding InstrumentedAttribute, where the target object is also instrumented. 
@@ -735,9 +733,13 @@ class ScalarObjectAttributeImpl(ScalarAttributeImpl): supports_population = True collection = False + __slots__ = () + def delete(self, state, dict_): old = self.get(state, dict_) - self.fire_remove_event(state, dict_, old, self._remove_token) + self.fire_remove_event( + state, dict_, old, + self._remove_token or self._init_remove_token()) del dict_[self.key] def get_history(self, state, dict_, passive=PASSIVE_OFF): @@ -787,14 +789,7 @@ class ScalarObjectAttributeImpl(ScalarAttributeImpl): old = self.get( state, dict_, passive=PASSIVE_ONLY_PERSISTENT | NO_AUTOFLUSH) else: - # would like to call with PASSIVE_NO_FETCH ^ INIT_OK. However, - # we have a long-standing behavior that a "get()" on never set - # should implicitly set the value to None. Leaving INIT_OK - # set here means we are consistent whether or not we did a get - # first. - # see test_use_object_set_None vs. - # test_use_object_get_first_set_None in test_attributes.py - old = self.get(state, dict_, passive=PASSIVE_NO_FETCH) + old = self.get(state, dict_, passive=PASSIVE_NO_FETCH ^ INIT_OK) if check_old is not None and \ old is not PASSIVE_NO_RESULT and \ @@ -817,7 +812,8 @@ class ScalarObjectAttributeImpl(ScalarAttributeImpl): self.sethasparent(instance_state(value), state, False) for fn in self.dispatch.remove: - fn(state, value, initiator or self._remove_token) + fn(state, value, initiator or + self._remove_token or self._init_remove_token()) state._modified_event(dict_, self, value) @@ -829,7 +825,8 @@ class ScalarObjectAttributeImpl(ScalarAttributeImpl): for fn in self.dispatch.set: value = fn( - state, value, previous, initiator or self._replace_token) + state, value, previous, initiator or + self._replace_token or self._init_append_or_replace_token()) state._modified_event(dict_, self, previous) @@ -841,7 +838,6 @@ class ScalarObjectAttributeImpl(ScalarAttributeImpl): class CollectionAttributeImpl(AttributeImpl): - """A collection-holding attribute that instruments changes in membership. Only handles collections of instrumented objects. 
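The ``initiator`` argument threaded through ``fire_replace_event`` above is what user-level attribute listeners receive. A brief usage sketch, assuming a hypothetical mapped class ``User``::

    from sqlalchemy import Column, Integer, String, event
    from sqlalchemy.ext.declarative import declarative_base

    Base = declarative_base()

    class User(Base):
        __tablename__ = 'user'
        id = Column(Integer, primary_key=True)
        name = Column(String)

    @event.listens_for(User.name, 'set', retval=True)
    def uppercase_name(target, value, oldvalue, initiator):
        # 'initiator' is the Event token built lazily above; the new
        # Event.__eq__ lets listeners recognize re-fired backref events
        return value.upper() if isinstance(value, str) else value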
@@ -857,6 +853,8 @@ class CollectionAttributeImpl(AttributeImpl): supports_population = True collection = True + __slots__ = 'copy', 'collection_factory', '_append_token', '_remove_token' + def __init__(self, class_, key, callable_, dispatch, typecallable=None, trackparent=False, extension=None, copy_function=None, compare_function=None, **kwargs): @@ -873,6 +871,26 @@ class CollectionAttributeImpl(AttributeImpl): copy_function = self.__copy self.copy = copy_function self.collection_factory = typecallable + self._append_token = None + self._remove_token = None + + if getattr(self.collection_factory, "_sa_linker", None): + + @event.listens_for(self, "init_collection") + def link(target, collection, collection_adapter): + collection._sa_linker(collection_adapter) + + @event.listens_for(self, "dispose_collection") + def unlink(target, collection, collection_adapter): + collection._sa_linker(None) + + def _init_append_token(self): + self._append_token = Event(self, OP_APPEND) + return self._append_token + + def _init_remove_token(self): + self._remove_token = Event(self, OP_REMOVE) + return self._remove_token def __copy(self, item): return [y for y in collections.collection_adapter(item)] @@ -916,17 +934,11 @@ class CollectionAttributeImpl(AttributeImpl): return [(instance_state(o), o) for o in current] - @util.memoized_property - def _append_token(self): - return Event(self, OP_APPEND) - - @util.memoized_property - def _remove_token(self): - return Event(self, OP_REMOVE) - def fire_append_event(self, state, dict_, value, initiator): for fn in self.dispatch.append: - value = fn(state, value, initiator or self._append_token) + value = fn( + state, value, + initiator or self._append_token or self._init_append_token()) state._modified_event(dict_, self, NEVER_SET, True) @@ -943,7 +955,8 @@ class CollectionAttributeImpl(AttributeImpl): self.sethasparent(instance_state(value), state, False) for fn in self.dispatch.remove: - fn(state, value, initiator or self._remove_token) + fn(state, value, + initiator or self._remove_token or self._init_remove_token()) state._modified_event(dict_, self, NEVER_SET, True) @@ -966,9 +979,14 @@ class CollectionAttributeImpl(AttributeImpl): return user_data def _initialize_collection(self, state): - return state.manager.initialize_collection( + + adapter, collection = state.manager.initialize_collection( self.key, state, self.collection_factory) + self.dispatch.init_collection(state, collection, adapter) + + return adapter, collection + def append(self, state, dict_, value, initiator, passive=PASSIVE_OFF): collection = self.get_collection(state, dict_, passive=passive) if collection is PASSIVE_NO_RESULT: @@ -1037,12 +1055,14 @@ class CollectionAttributeImpl(AttributeImpl): # place a copy of "old" in state.committed_state state._modified_event(dict_, self, old, True) - old_collection = getattr(old, '_sa_adapter') + old_collection = old._sa_adapter dict_[self.key] = user_data collections.bulk_replace(new_values, old_collection, new_collection) - old_collection.unlink(old) + + del old._sa_adapter + self.dispatch.dispose_collection(state, old, old_collection) def _invalidate_collection(self, collection): adapter = getattr(collection, '_sa_adapter') @@ -1128,7 +1148,8 @@ def backref_listeners(attribute, key, uselist): impl.pop(old_state, old_dict, state.obj(), - parent_impl._append_token, + parent_impl._append_token or + parent_impl._init_append_token(), passive=PASSIVE_NO_FETCH) if child is not None: @@ -1208,7 +1229,6 @@ History = util.namedtuple("History", [ class 
History(History): - """A 3-tuple of added, unchanged and deleted values, representing the changes which have occurred on an instrumented attribute. diff --git a/lib/python3.4/site-packages/sqlalchemy/orm/base.py b/lib/python3.4/site-packages/sqlalchemy/orm/base.py index 61630b9..7947cd7 100644 --- a/lib/python3.4/site-packages/sqlalchemy/orm/base.py +++ b/lib/python3.4/site-packages/sqlalchemy/orm/base.py @@ -1,5 +1,5 @@ # orm/base.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -144,40 +144,50 @@ _INSTRUMENTOR = ('mapper', 'instrumentor') EXT_CONTINUE = util.symbol('EXT_CONTINUE') EXT_STOP = util.symbol('EXT_STOP') -ONETOMANY = util.symbol('ONETOMANY', - """Indicates the one-to-many direction for a :func:`.relationship`. +ONETOMANY = util.symbol( + 'ONETOMANY', + """Indicates the one-to-many direction for a :func:`.relationship`. -This symbol is typically used by the internals but may be exposed within -certain API features. + This symbol is typically used by the internals but may be exposed within + certain API features. -""") + """) -MANYTOONE = util.symbol('MANYTOONE', - """Indicates the many-to-one direction for a :func:`.relationship`. +MANYTOONE = util.symbol( + 'MANYTOONE', + """Indicates the many-to-one direction for a :func:`.relationship`. -This symbol is typically used by the internals but may be exposed within -certain API features. + This symbol is typically used by the internals but may be exposed within + certain API features. -""") + """) -MANYTOMANY = util.symbol('MANYTOMANY', - """Indicates the many-to-many direction for a :func:`.relationship`. +MANYTOMANY = util.symbol( + 'MANYTOMANY', + """Indicates the many-to-many direction for a :func:`.relationship`. -This symbol is typically used by the internals but may be exposed within -certain API features. + This symbol is typically used by the internals but may be exposed within + certain API features. -""") + """) -NOT_EXTENSION = util.symbol('NOT_EXTENSION', - """Symbol indicating an :class:`_InspectionAttr` that's - not part of sqlalchemy.ext. +NOT_EXTENSION = util.symbol( + 'NOT_EXTENSION', + """Symbol indicating an :class:`InspectionAttr` that's + not part of sqlalchemy.ext. - Is assigned to the :attr:`._InspectionAttr.extension_type` - attibute. + Is assigned to the :attr:`.InspectionAttr.extension_type` + attibute. -""") + """) -_none_set = frozenset([None]) +_never_set = frozenset([NEVER_SET]) + +_none_set = frozenset([None, NEVER_SET, PASSIVE_NO_RESULT]) + +_SET_DEFERRED_EXPIRED = util.symbol("SET_DEFERRED_EXPIRED") + +_DEFER_FOR_STATE = util.symbol("DEFER_FOR_STATE") def _generative(*assertions): @@ -319,10 +329,9 @@ def _is_mapped_class(entity): insp = inspection.inspect(entity, False) return insp is not None and \ - hasattr(insp, "mapper") and \ + not insp.is_clause_element and \ ( - insp.is_mapper - or insp.is_aliased_class + insp.is_mapper or insp.is_aliased_class ) @@ -419,7 +428,7 @@ def class_mapper(class_, configure=True): return mapper -class _InspectionAttr(object): +class InspectionAttr(object): """A base class applied to all ORM objects that can be returned by the :func:`.inspect` function. @@ -433,6 +442,7 @@ class _InspectionAttr(object): here intact for forwards-compatibility. 
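Since the renamed ``InspectionAttr`` hierarchy above is what :func:`.inspect` hands back, a short sketch of the ``.info`` behavior just documented, using a hypothetical ``Account`` mapping::

    from sqlalchemy import Column, Integer, String, inspect
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import column_property

    Base = declarative_base()

    class Account(Base):
        __tablename__ = 'account'
        id = Column(Integer, primary_key=True)
        name = Column(String)
        # info= is carried on the MapperProperty and surfaced through
        # InspectionAttrInfo.info, as described above
        label = column_property(name + "!", info={'display': 'Label'})

    insp = inspect(Account)          # a Mapper, itself an InspectionAttr
    assert insp.is_mapper
    print(insp.attrs['label'].info)  # {'display': 'Label'}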
""" + __slots__ = () is_selectable = False """Return True if this object is an instance of :class:`.Selectable`.""" @@ -456,7 +466,7 @@ class _InspectionAttr(object): :class:`.QueryableAttribute` which handles attributes events on behalf of a :class:`.MapperProperty`. But can also be an extension type such as :class:`.AssociationProxy` or :class:`.hybrid_property`. - The :attr:`._InspectionAttr.extension_type` will refer to a constant + The :attr:`.InspectionAttr.extension_type` will refer to a constant identifying the specific subtype. .. seealso:: @@ -485,8 +495,46 @@ class _InspectionAttr(object): """ +class InspectionAttrInfo(InspectionAttr): + """Adds the ``.info`` attribute to :class:`.InspectionAttr`. + + The rationale for :class:`.InspectionAttr` vs. :class:`.InspectionAttrInfo` + is that the former is compatible as a mixin for classes that specify + ``__slots__``; this is essentially an implementation artifact. + + """ + + @util.memoized_property + def info(self): + """Info dictionary associated with the object, allowing user-defined + data to be associated with this :class:`.InspectionAttr`. + + The dictionary is generated when first accessed. Alternatively, + it can be specified as a constructor argument to the + :func:`.column_property`, :func:`.relationship`, or :func:`.composite` + functions. + + .. versionadded:: 0.8 Added support for .info to all + :class:`.MapperProperty` subclasses. + + .. versionchanged:: 1.0.0 :attr:`.MapperProperty.info` is also + available on extension types via the + :attr:`.InspectionAttrInfo.info` attribute, so that it can apply + to a wider variety of ORM and extension constructs. + + .. seealso:: + + :attr:`.QueryableAttribute.info` + + :attr:`.SchemaItem.info` + + """ + return {} + + class _MappedAttribute(object): """Mixin for attributes which should be replaced by mapper-assigned attributes. """ + __slots__ = () diff --git a/lib/python3.4/site-packages/sqlalchemy/orm/collections.py b/lib/python3.4/site-packages/sqlalchemy/orm/collections.py index 698677a..f3c609f 100644 --- a/lib/python3.4/site-packages/sqlalchemy/orm/collections.py +++ b/lib/python3.4/site-packages/sqlalchemy/orm/collections.py @@ -1,5 +1,5 @@ # orm/collections.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -111,6 +111,7 @@ from ..sql import expression from .. import util, exc as sa_exc from . import base +from sqlalchemy.util.compat import inspect_getargspec __all__ = ['collection', 'collection_adapter', 'mapped_collection', 'column_mapped_collection', @@ -429,6 +430,10 @@ class collection(object): the instance. A single argument is passed: the collection adapter that has been linked, or None if unlinking. + .. deprecated:: 1.0.0 - the :meth:`.collection.linker` handler + is superseded by the :meth:`.AttributeEvents.init_collection` + and :meth:`.AttributeEvents.dispose_collection` handlers. + """ fn._sa_instrument_role = 'linker' return fn @@ -575,7 +580,7 @@ class CollectionAdapter(object): self._key = attr.key self._data = weakref.ref(data) self.owner_state = owner_state - self.link_to_self(data) + data._sa_adapter = self def _warn_invalidated(self): util.warn("This collection has been invalidated.") @@ -585,24 +590,20 @@ class CollectionAdapter(object): "The entity collection being adapted." 
return self._data() + @property + def _referenced_by_owner(self): + """return True if the owner state still refers to this collection. + + This will return False within a bulk replace operation, + where this collection is the one being replaced. + + """ + return self.owner_state.dict[self._key] is self._data() + @util.memoized_property def attr(self): return self.owner_state.manager[self._key].impl - def link_to_self(self, data): - """Link a collection to this adapter""" - - data._sa_adapter = self - if data._sa_linker: - data._sa_linker(self) - - def unlink(self, data): - """Unlink a collection from any adapter""" - - del data._sa_adapter - if data._sa_linker: - data._sa_linker(None) - def adapt_like_to_iterable(self, obj): """Converts collection-compatible objects to an iterable of values. @@ -861,11 +862,24 @@ def _instrument_class(cls): "Can not instrument a built-in type. Use a " "subclass, even a trivial one.") + roles, methods = _locate_roles_and_methods(cls) + + _setup_canned_roles(cls, roles, methods) + + _assert_required_roles(cls, roles, methods) + + _set_collection_attributes(cls, roles, methods) + + +def _locate_roles_and_methods(cls): + """search for _sa_instrument_role-decorated methods in + method resolution order, assign to roles. + + """ + roles = {} methods = {} - # search for _sa_instrument_role-decorated methods in - # method resolution order, assign to roles for supercls in cls.__mro__: for name, method in vars(supercls).items(): if not util.callable(method): @@ -890,14 +904,19 @@ def _instrument_class(cls): assert op in ('fire_append_event', 'fire_remove_event') after = op if before: - methods[name] = before[0], before[1], after + methods[name] = before + (after, ) elif after: methods[name] = None, None, after + return roles, methods - # see if this class has "canned" roles based on a known - # collection type (dict, set, list). Apply those roles - # as needed to the "roles" dictionary, and also - # prepare "decorator" methods + +def _setup_canned_roles(cls, roles, methods): + """see if this class has "canned" roles based on a known + collection type (dict, set, list). 
Apply those roles + as needed to the "roles" dictionary, and also + prepare "decorator" methods + + """ collection_type = util.duck_type_collection(cls) if collection_type in __interfaces: canned_roles, decorators = __interfaces[collection_type] @@ -911,8 +930,12 @@ def _instrument_class(cls): not hasattr(fn, '_sa_instrumented')): setattr(cls, method, decorator(fn)) - # ensure all roles are present, and apply implicit instrumentation if - # needed + +def _assert_required_roles(cls, roles, methods): + """ensure all roles are present, and apply implicit instrumentation if + needed + + """ if 'appender' not in roles or not hasattr(cls, roles['appender']): raise sa_exc.ArgumentError( "Type %s must elect an appender method to be " @@ -934,8 +957,12 @@ def _instrument_class(cls): "Type %s must elect an iterator method to be " "a collection class" % cls.__name__) - # apply ad-hoc instrumentation from decorators, class-level defaults - # and implicit role declarations + +def _set_collection_attributes(cls, roles, methods): + """apply ad-hoc instrumentation from decorators, class-level defaults + and implicit role declarations + + """ for method_name, (before, argument, after) in methods.items(): setattr(cls, method_name, _instrument_membership_mutator(getattr(cls, method_name), @@ -945,8 +972,7 @@ def _instrument_class(cls): setattr(cls, '_sa_%s' % role, getattr(cls, method_name)) cls._sa_adapter = None - if not hasattr(cls, '_sa_linker'): - cls._sa_linker = None + if not hasattr(cls, '_sa_converter'): cls._sa_converter = None cls._sa_instrumented = id(cls) @@ -957,7 +983,7 @@ def _instrument_membership_mutator(method, before, argument, after): adapter.""" # This isn't smart enough to handle @adds(1) for 'def fn(self, (a, b))' if before: - fn_args = list(util.flatten_iterator(inspect.getargspec(method)[0])) + fn_args = list(util.flatten_iterator(inspect_getargspec(method)[0])) if isinstance(argument, int): pos_arg = argument named_arg = len(fn_args) > argument and fn_args[argument] or None @@ -1482,8 +1508,8 @@ class MappedCollection(dict): def __init__(self, keyfunc): """Create a new collection with keying provided by keyfunc. - keyfunc may be any callable any callable that takes an object and - returns an object for use as a dictionary key. + keyfunc may be any callable that takes an object and returns an object + for use as a dictionary key. 
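A usage sketch of the keyfunc contract described here, via ``attribute_mapped_collection``, which builds a :class:`.MappedCollection` keyed on a named attribute; ``Item`` and ``Note`` are hypothetical mappings::

    from sqlalchemy import Column, ForeignKey, Integer, String
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import relationship
    from sqlalchemy.orm.collections import attribute_mapped_collection

    Base = declarative_base()

    class Note(Base):
        __tablename__ = 'note'
        id = Column(Integer, primary_key=True)
        item_id = Column(Integer, ForeignKey('item.id'))
        keyword = Column(String)
        text = Column(String)

    class Item(Base):
        __tablename__ = 'item'
        id = Column(Integer, primary_key=True)
        # the keyfunc reads .keyword from each Note; the ORM calls it
        # whenever it must add a member by value alone, e.g. on load
        notes = relationship(
            Note, collection_class=attribute_mapped_collection('keyword'))

    item = Item()
    item.notes['todo'] = Note(keyword='todo', text='buy milk')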
The keyfunc will be called every time the ORM needs to add a member by value-only (such as when loading instances from the database) or diff --git a/lib/python3.4/site-packages/sqlalchemy/orm/dependency.py b/lib/python3.4/site-packages/sqlalchemy/orm/dependency.py index c1cf66f..a3e5b12 100644 --- a/lib/python3.4/site-packages/sqlalchemy/orm/dependency.py +++ b/lib/python3.4/site-packages/sqlalchemy/orm/dependency.py @@ -1,5 +1,5 @@ # orm/dependency.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -303,9 +303,9 @@ class DependencyProcessor(object): set ) - def _post_update(self, state, uowcommit, related): + def _post_update(self, state, uowcommit, related, is_m2o_delete=False): for x in related: - if x is not None: + if not is_m2o_delete or x is not None: uowcommit.issue_post_update( state, [r for l, r in self.prop.synchronize_pairs] @@ -740,7 +740,9 @@ class ManyToOneDP(DependencyProcessor): self.key, self._passive_delete_flag) if history: - self._post_update(state, uowcommit, history.sum()) + self._post_update( + state, uowcommit, history.sum(), + is_m2o_delete=True) def process_saves(self, uowcommit, states): for state in states: @@ -1119,6 +1121,7 @@ class ManyToManyDP(DependencyProcessor): if c.key in associationrow ])) result = connection.execute(statement, secondary_update) + if result.supports_sane_multi_rowcount() and \ result.rowcount != len(secondary_update): raise exc.StaleDataError( diff --git a/lib/python3.4/site-packages/sqlalchemy/orm/deprecated_interfaces.py b/lib/python3.4/site-packages/sqlalchemy/orm/deprecated_interfaces.py index fa693c9..6477e82 100644 --- a/lib/python3.4/site-packages/sqlalchemy/orm/deprecated_interfaces.py +++ b/lib/python3.4/site-packages/sqlalchemy/orm/deprecated_interfaces.py @@ -1,5 +1,5 @@ # orm/deprecated_interfaces.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -67,10 +67,6 @@ class MapperExtension(object): ( 'init_instance', 'init_failed', - 'translate_row', - 'create_instance', - 'append_result', - 'populate_instance', 'reconstruct_instance', 'before_insert', 'after_insert', @@ -156,108 +152,6 @@ class MapperExtension(object): """ return EXT_CONTINUE - def translate_row(self, mapper, context, row): - """Perform pre-processing on the given result row and return a - new row instance. - - This is called when the mapper first receives a row, before - the object identity or the instance itself has been derived - from that row. The given row may or may not be a - ``RowProxy`` object - it will always be a dictionary-like - object which contains mapped columns as keys. The - returned object should also be a dictionary-like object - which recognizes mapped columns as keys. - - If the ultimate return value is EXT_CONTINUE, the row - is not translated. - - """ - return EXT_CONTINUE - - def create_instance(self, mapper, selectcontext, row, class_): - """Receive a row when a new object instance is about to be - created from that row. - - The method can choose to create the instance itself, or it can return - EXT_CONTINUE to indicate normal object creation should take place. - - mapper - The mapper doing the operation - - selectcontext - The QueryContext generated from the Query. 
- - row - The result row from the database - - class\_ - The class we are mapping. - - return value - A new object instance, or EXT_CONTINUE - - """ - return EXT_CONTINUE - - def append_result(self, mapper, selectcontext, row, instance, - result, **flags): - """Receive an object instance before that instance is appended - to a result list. - - If this method returns EXT_CONTINUE, result appending will proceed - normally. if this method returns any other value or None, - result appending will not proceed for this instance, giving - this extension an opportunity to do the appending itself, if - desired. - - mapper - The mapper doing the operation. - - selectcontext - The QueryContext generated from the Query. - - row - The result row from the database. - - instance - The object instance to be appended to the result. - - result - List to which results are being appended. - - \**flags - extra information about the row, same as criterion in - ``create_row_processor()`` method of - :class:`~sqlalchemy.orm.interfaces.MapperProperty` - """ - - return EXT_CONTINUE - - def populate_instance(self, mapper, selectcontext, row, - instance, **flags): - """Receive an instance before that instance has - its attributes populated. - - This usually corresponds to a newly loaded instance but may - also correspond to an already-loaded instance which has - unloaded attributes to be populated. The method may be called - many times for a single instance, as multiple result rows are - used to populate eagerly loaded collections. - - If this method returns EXT_CONTINUE, instance population will - proceed normally. If any other value or None is returned, - instance population will not proceed, giving this extension an - opportunity to populate the instance itself, if desired. - - .. deprecated:: 0.5 - Most usages of this hook are obsolete. For a - generic "object has been newly created from a row" hook, use - ``reconstruct_instance()``, or the ``@orm.reconstructor`` - decorator. - - """ - return EXT_CONTINUE - def reconstruct_instance(self, mapper, instance): """Receive an object instance after it has been created via ``__new__``, and after initial attribute population has diff --git a/lib/python3.4/site-packages/sqlalchemy/orm/descriptor_props.py b/lib/python3.4/site-packages/sqlalchemy/orm/descriptor_props.py index 5ed24b8..6c87ef9 100644 --- a/lib/python3.4/site-packages/sqlalchemy/orm/descriptor_props.py +++ b/lib/python3.4/site-packages/sqlalchemy/orm/descriptor_props.py @@ -1,5 +1,5 @@ # orm/descriptor_props.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -143,6 +143,7 @@ class CompositeProperty(DescriptorProperty): class. **Deprecated.** Please see :class:`.AttributeEvents`. 
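The :class:`.CompositeProperty` constructor documented above maps a composite class onto multiple columns; a condensed usage sketch following the standard pattern::

    from sqlalchemy import Column, Integer
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import composite

    Base = declarative_base()

    class Point(object):
        def __init__(self, x, y):
            self.x, self.y = x, y

        def __composite_values__(self):
            # CompositeProperty unpacks the object back into its
            # component columns through this method on flush
            return self.x, self.y

        def __eq__(self, other):
            return isinstance(other, Point) and \
                other.x == self.x and other.y == self.y

    class Vertex(Base):
        __tablename__ = 'vertex'
        id = Column(Integer, primary_key=True)
        x1 = Column(Integer)
        y1 = Column(Integer)
        start = composite(Point, x1, y1)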
""" + super(CompositeProperty, self).__init__() self.attrs = attrs self.composite_class = class_ @@ -372,9 +373,9 @@ class CompositeProperty(DescriptorProperty): property.key, *expr) def create_row_processor(self, query, procs, labels): - def proc(row, result): + def proc(row): return self.property.composite_class( - *[proc(row, result) for proc in procs]) + *[proc(row) for proc in procs]) return proc class Comparator(PropComparator): @@ -471,6 +472,7 @@ class ConcreteInheritedProperty(DescriptorProperty): return comparator_callable def __init__(self): + super(ConcreteInheritedProperty, self).__init__() def warn(): raise AttributeError("Concrete %s does not implement " "attribute %r at the instance level. Add " @@ -496,7 +498,7 @@ class SynonymProperty(DescriptorProperty): def __init__(self, name, map_column=None, descriptor=None, comparator_factory=None, - doc=None): + doc=None, info=None): """Denote an attribute name as a synonym to a mapped property, in that the attribute will mirror the value and expression behavior of another attribute. @@ -531,6 +533,11 @@ class SynonymProperty(DescriptorProperty): conjunction with the ``descriptor`` argument in order to link a user-defined descriptor as a "wrapper" for an existing column. + :param info: Optional data dictionary which will be populated into the + :attr:`.InspectionAttr.info` attribute of this object. + + .. versionadded:: 1.0.0 + :param comparator_factory: A subclass of :class:`.PropComparator` that will provide custom comparison behavior at the SQL expression level. @@ -550,12 +557,15 @@ class SynonymProperty(DescriptorProperty): more complicated attribute-wrapping schemes than synonyms. """ + super(SynonymProperty, self).__init__() self.name = name self.map_column = map_column self.descriptor = descriptor self.comparator_factory = comparator_factory self.doc = doc or (descriptor and descriptor.__doc__) or None + if info: + self.info = info util.set_creation_order(self) @@ -608,7 +618,8 @@ class SynonymProperty(DescriptorProperty): class ComparableProperty(DescriptorProperty): """Instruments a Python property for use in query expressions.""" - def __init__(self, comparator_factory, descriptor=None, doc=None): + def __init__( + self, comparator_factory, descriptor=None, doc=None, info=None): """Provides a method of applying a :class:`.PropComparator` to any Python descriptor attribute. @@ -670,10 +681,18 @@ class ComparableProperty(DescriptorProperty): The like-named descriptor will be automatically retrieved from the mapped class if left blank in a ``properties`` declaration. + :param info: Optional data dictionary which will be populated into the + :attr:`.InspectionAttr.info` attribute of this object. + + .. 
versionadded:: 1.0.0 + """ + super(ComparableProperty, self).__init__() self.descriptor = descriptor self.comparator_factory = comparator_factory self.doc = doc or (descriptor and descriptor.__doc__) or None + if info: + self.info = info util.set_creation_order(self) def _comparator_factory(self, mapper): diff --git a/lib/python3.4/site-packages/sqlalchemy/orm/dynamic.py b/lib/python3.4/site-packages/sqlalchemy/orm/dynamic.py index 51db1b1..88187cd 100644 --- a/lib/python3.4/site-packages/sqlalchemy/orm/dynamic.py +++ b/lib/python3.4/site-packages/sqlalchemy/orm/dynamic.py @@ -1,5 +1,5 @@ # orm/dynamic.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -221,10 +221,8 @@ class AppenderMixin(object): mapper = object_mapper(instance) prop = mapper._props[self.attr.key] - self._criterion = prop.compare( - operators.eq, + self._criterion = prop._with_parent( instance, - value_is_parent=True, alias_secondary=False) if self.attr.order_by: diff --git a/lib/python3.4/site-packages/sqlalchemy/orm/evaluator.py b/lib/python3.4/site-packages/sqlalchemy/orm/evaluator.py index efd2bfe..534e7fa 100644 --- a/lib/python3.4/site-packages/sqlalchemy/orm/evaluator.py +++ b/lib/python3.4/site-packages/sqlalchemy/orm/evaluator.py @@ -1,5 +1,5 @@ # orm/evaluator.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -7,7 +7,6 @@ import operator from ..sql import operators -from .. import util class UnevaluatableError(Exception): @@ -27,7 +26,6 @@ _notimplemented_ops = set(getattr(operators, op) class EvaluatorCompiler(object): - def __init__(self, target_cls=None): self.target_cls = target_cls @@ -55,14 +53,9 @@ class EvaluatorCompiler(object): parentmapper = clause._annotations['parentmapper'] if self.target_cls and not issubclass( self.target_cls, parentmapper.class_): - util.warn( - "Can't do in-Python evaluation of criteria against " - "alternate class %s; " - "expiration of objects will not be accurate " - "and/or may fail. synchronize_session should be set to " - "False or 'fetch'. " - "This warning will be an exception " - "in 1.0." % parentmapper.class_ + raise UnevaluatableError( + "Can't evaluate criteria against alternate class %s" % + parentmapper.class_ ) key = parentmapper._columntoproperty[clause].key else: diff --git a/lib/python3.4/site-packages/sqlalchemy/orm/events.py b/lib/python3.4/site-packages/sqlalchemy/orm/events.py index aa99673..67ce46e 100644 --- a/lib/python3.4/site-packages/sqlalchemy/orm/events.py +++ b/lib/python3.4/site-packages/sqlalchemy/orm/events.py @@ -1,5 +1,5 @@ # orm/events.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -17,7 +17,8 @@ from . import mapperlib, instrumentation from .session import Session, sessionmaker from .scoping import scoped_session from .attributes import QueryableAttribute - +from .query import Query +from sqlalchemy.util.compat import inspect_getargspec class InstrumentationEvents(event.Events): """Events related to class instrumentation events. 
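The ``info`` parameter added above to :class:`.SynonymProperty` and :class:`.ComparableProperty` can be exercised as follows; ``Job`` is a hypothetical mapping::

    from sqlalchemy import Column, Integer, String
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import synonym

    Base = declarative_base()

    class Job(Base):
        __tablename__ = 'job'
        id = Column(Integer, primary_key=True)
        _status = Column('status', String)
        # info= lands in InspectionAttrInfo.info (new in 1.0.0 per the
        # hunk above) and is retrievable via the ORM inspection system
        status = synonym('_status', info={'audit': True})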
@@ -61,7 +62,8 @@ class InstrumentationEvents(event.Events): @classmethod def _listen(cls, event_key, propagate=True, **kw): target, identifier, fn = \ - event_key.dispatch_target, event_key.identifier, event_key.fn + event_key.dispatch_target, event_key.identifier, \ + event_key._listen_fn def listen(target_cls, *arg): listen_cls = target() @@ -192,7 +194,8 @@ class InstanceEvents(event.Events): @classmethod def _listen(cls, event_key, raw=False, propagate=False, **kw): target, identifier, fn = \ - event_key.dispatch_target, event_key.identifier, event_key.fn + event_key.dispatch_target, event_key.identifier, \ + event_key._listen_fn if not raw: def wrap(state, *arg, **kw): @@ -214,14 +217,41 @@ class InstanceEvents(event.Events): def first_init(self, manager, cls): """Called when the first instance of a particular mapping is called. + This event is called when the ``__init__`` method of a class + is called the first time for that particular class. The event + invokes before ``__init__`` actually proceeds as well as before + the :meth:`.InstanceEvents.init` event is invoked. + """ def init(self, target, args, kwargs): """Receive an instance when its constructor is called. This method is only called during a userland construction of - an object. It is not called when an object is loaded from the - database. + an object, in conjunction with the object's constructor, e.g. + its ``__init__`` method. It is not called when an object is + loaded from the database; see the :meth:`.InstanceEvents.load` + event in order to intercept a database load. + + The event is called before the actual ``__init__`` constructor + of the object is called. The ``kwargs`` dictionary may be + modified in-place in order to affect what is passed to + ``__init__``. + + :param target: the mapped instance. If + the event is configured with ``raw=True``, this will + instead be the :class:`.InstanceState` state-management + object associated with the instance. + :param args: positional arguments passed to the ``__init__`` method. + This is passed as a tuple and is currently immutable. + :param kwargs: keyword arguments passed to the ``__init__`` method. + This structure *can* be altered in place. + + .. seealso:: + + :meth:`.InstanceEvents.init_failure` + + :meth:`.InstanceEvents.load` """ @@ -230,8 +260,31 @@ class InstanceEvents(event.Events): and raised an exception. This method is only called during a userland construction of - an object. It is not called when an object is loaded from the - database. + an object, in conjunction with the object's constructor, e.g. + its ``__init__`` method. It is not called when an object is loaded + from the database. + + The event is invoked after an exception raised by the ``__init__`` + method is caught. After the event + is invoked, the original exception is re-raised outwards, so that + the construction of the object still raises an exception. The + actual exception and stack trace raised should be present in + ``sys.exc_info()``. + + :param target: the mapped instance. If + the event is configured with ``raw=True``, this will + instead be the :class:`.InstanceState` state-management + object associated with the instance. + :param args: positional arguments that were passed to the ``__init__`` + method. + :param kwargs: keyword arguments that were passed to the ``__init__`` + method. + + .. 
seealso:: + + :meth:`.InstanceEvents.init` + + :meth:`.InstanceEvents.load` """ @@ -258,22 +311,58 @@ class InstanceEvents(event.Events): ``None`` if the load does not correspond to a :class:`.Query`, such as during :meth:`.Session.merge`. + .. seealso:: + + :meth:`.InstanceEvents.init` + + :meth:`.InstanceEvents.refresh` + """ def refresh(self, target, context, attrs): """Receive an object instance after one or more attributes have been refreshed from a query. + Contrast this to the :meth:`.InstanceEvents.load` method, which + is invoked when the object is first loaded from a query. + :param target: the mapped instance. If the event is configured with ``raw=True``, this will instead be the :class:`.InstanceState` state-management object associated with the instance. :param context: the :class:`.QueryContext` corresponding to the current :class:`.Query` in progress. - :param attrs: iterable collection of attribute names which + :param attrs: sequence of attribute names which were populated, or None if all column-mapped, non-deferred attributes were populated. + .. seealso:: + + :meth:`.InstanceEvents.load` + + """ + + def refresh_flush(self, target, flush_context, attrs): + """Receive an object instance after one or more attributes have + been refreshed within the persistence of the object. + + This event is the same as :meth:`.InstanceEvents.refresh` except + it is invoked within the unit of work flush process, and the values + here typically come from the process of handling an INSERT or + UPDATE, such as via the RETURNING clause or from Python-side default + values. + + .. versionadded:: 1.0.5 + + :param target: the mapped instance. If + the event is configured with ``raw=True``, this will + instead be the :class:`.InstanceState` state-management + object associated with the instance. + :param flush_context: Internal :class:`.UOWTransaction` object + which handles the details of the flush. + :param attrs: sequence of attribute names which + were populated. + """ def expire(self, target, attrs): @@ -287,24 +376,12 @@ class InstanceEvents(event.Events): the event is configured with ``raw=True``, this will instead be the :class:`.InstanceState` state-management object associated with the instance. - :param attrs: iterable collection of attribute + :param attrs: sequence of attribute names which were expired, or None if all attributes were expired. """ - def resurrect(self, target): - """Receive an object instance as it is 'resurrected' from - garbage collection, which occurs when a "dirty" state falls - out of scope. - - :param target: the mapped instance. If - the event is configured with ``raw=True``, this will - instead be the :class:`.InstanceState` state-management - object associated with the instance. - - """ - def pickle(self, target, state_dict): """Receive an object instance when its associated state is being pickled. 
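The expanded ``init`` / ``load`` contracts above, in a brief sketch against a hypothetical ``User`` mapping::

    from sqlalchemy import Column, Integer, String, event
    from sqlalchemy.ext.declarative import declarative_base

    Base = declarative_base()

    class User(Base):
        __tablename__ = 'user'
        id = Column(Integer, primary_key=True)
        name = Column(String)

    @event.listens_for(User, 'init')
    def on_init(target, args, kwargs):
        # fires for User(...); kwargs may be mutated in place, per the
        # docstring above, and the event never fires for database loads
        kwargs.setdefault('name', 'anonymous')

    @event.listens_for(User, 'load')
    def on_load(target, context):
        # fires instead of 'init' when the instance comes from a query
        pass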
@@ -510,7 +587,8 @@ class MapperEvents(event.Events): def _listen( cls, event_key, raw=False, retval=False, propagate=False, **kw): target, identifier, fn = \ - event_key.dispatch_target, event_key.identifier, event_key.fn + event_key.dispatch_target, event_key.identifier, \ + event_key._listen_fn if identifier in ("before_configured", "after_configured") and \ target is not mapperlib.Mapper: @@ -524,7 +602,7 @@ class MapperEvents(event.Events): meth = getattr(cls, identifier) try: target_index = \ - inspect.getargspec(meth)[0].index('target') - 1 + inspect_getargspec(meth)[0].index('target') - 1 except ValueError: target_index = None @@ -575,32 +653,67 @@ class MapperEvents(event.Events): """ def mapper_configured(self, mapper, class_): - """Called when the mapper for the class is fully configured. + """Called when a specific mapper has completed its own configuration + within the scope of the :func:`.configure_mappers` call. - This event is the latest phase of mapper construction, and - is invoked when the mapped classes are first used, so that - relationships between mappers can be resolved. When the event is - called, the mapper should be in its final state. + The :meth:`.MapperEvents.mapper_configured` event is invoked + for each mapper that is encountered when the + :func:`.orm.configure_mappers` function proceeds through the current + list of not-yet-configured mappers. + :func:`.orm.configure_mappers` is typically invoked + automatically as mappings are first used, as well as each time + new mappers have been made available and new mapper use is + detected. - While the configuration event normally occurs automatically, - it can be forced to occur ahead of time, in the case where the event - is needed before any actual mapper usage, by using the - :func:`.configure_mappers` function. + When the event is called, the mapper should be in its final + state, but **not including backrefs** that may be invoked from + other mappers; they might still be pending within the + configuration operation. Bidirectional relationships that + are instead configured via the + :paramref:`.orm.relationship.back_populates` argument + *will* be fully available, since this style of relationship does not + rely upon other possibly-not-configured mappers to know that they + exist. + For an event that is guaranteed to have **all** mappers ready + to go including backrefs that are defined only on other + mappings, use the :meth:`.MapperEvents.after_configured` + event; this event invokes only after all known mappings have been + fully configured. + + The :meth:`.MapperEvents.mapper_configured` event, unlike + :meth:`.MapperEvents.before_configured` or + :meth:`.MapperEvents.after_configured`, + is called for each mapper/class individually, and the mapper is + passed to the event itself. It also is called exactly once for + a particular mapper. The event is therefore useful for + configurational steps that benefit from being invoked just once + on a specific mapper basis, which don't require that "backref" + configurations are necessarily ready yet. :param mapper: the :class:`.Mapper` which is the target of this event. :param class\_: the mapped class. + .. seealso:: + + :meth:`.MapperEvents.before_configured` + + :meth:`.MapperEvents.after_configured` + """ # TODO: need coverage for this event def before_configured(self): """Called before a series of mappers have been configured. 
- This corresponds to the :func:`.orm.configure_mappers` call, which - note is usually called automatically as mappings are first - used. + The :meth:`.MapperEvents.before_configured` event is invoked + each time the :func:`.orm.configure_mappers` function is + invoked, before the function has done any of its work. + :func:`.orm.configure_mappers` is typically invoked + automatically as mappings are first used, as well as each time + new mappers have been made available and new mapper use is + detected. This event can **only** be applied to the :class:`.Mapper` class or :func:`.mapper` function, and not to individual mappings or @@ -612,11 +725,16 @@ class MapperEvents(event.Events): def go(): # ... + Constrast this event to :meth:`.MapperEvents.after_configured`, + which is invoked after the series of mappers has been configured, + as well as :meth:`.MapperEvents.mapper_configured`, which is invoked + on a per-mapper basis as each one is configured to the extent possible. + Theoretically this event is called once per application, but is actually called any time new mappers are to be affected by a :func:`.orm.configure_mappers` call. If new mappings are constructed after existing ones have - already been used, this event can be called again. To ensure + already been used, this event will likely be called again. To ensure that a particular event is only called once and no further, the ``once=True`` argument (new in 0.9.4) can be applied:: @@ -629,14 +747,33 @@ class MapperEvents(event.Events): .. versionadded:: 0.9.3 + + .. seealso:: + + :meth:`.MapperEvents.mapper_configured` + + :meth:`.MapperEvents.after_configured` + """ def after_configured(self): """Called after a series of mappers have been configured. - This corresponds to the :func:`.orm.configure_mappers` call, which - note is usually called automatically as mappings are first - used. + The :meth:`.MapperEvents.after_configured` event is invoked + each time the :func:`.orm.configure_mappers` function is + invoked, after the function has completed its work. + :func:`.orm.configure_mappers` is typically invoked + automatically as mappings are first used, as well as each time + new mappers have been made available and new mapper use is + detected. + + Contrast this event to the :meth:`.MapperEvents.mapper_configured` + event, which is called on a per-mapper basis while the configuration + operation proceeds; unlike that event, when this event is invoked, + all cross-configurations (e.g. backrefs) will also have been made + available for any mappers that were pending. + Also constrast to :meth:`.MapperEvents.before_configured`, + which is invoked before the series of mappers has been configured. This event can **only** be applied to the :class:`.Mapper` class or :func:`.mapper` function, and not to individual mappings or @@ -652,7 +789,7 @@ class MapperEvents(event.Events): application, but is actually called any time new mappers have been affected by a :func:`.orm.configure_mappers` call. If new mappings are constructed after existing ones have - already been used, this event can be called again. To ensure + already been used, this event will likely be called again. To ensure that a particular event is only called once and no further, the ``once=True`` argument (new in 0.9.4) can be applied:: @@ -662,144 +799,11 @@ class MapperEvents(event.Events): def go(): # ... - """ + .. seealso:: - def translate_row(self, mapper, context, row): - """Perform pre-processing on the given result row and return a - new row instance. 
+ :meth:`.MapperEvents.mapper_configured` - .. deprecated:: 0.9 the :meth:`.translate_row` event should - be considered as legacy. The row as delivered in a mapper - load operation typically requires that highly technical - details be accommodated in order to identity the correct - column keys are present in the row, rendering this particular - event hook as difficult to use and unreliable. - - This listener is typically registered with ``retval=True``. - It is called when the mapper first receives a row, before - the object identity or the instance itself has been derived - from that row. The given row may or may not be a - :class:`.RowProxy` object - it will always be a dictionary-like - object which contains mapped columns as keys. The - returned object should also be a dictionary-like object - which recognizes mapped columns as keys. - - :param mapper: the :class:`.Mapper` which is the target - of this event. - :param context: the :class:`.QueryContext`, which includes - a handle to the current :class:`.Query` in progress as well - as additional state information. - :param row: the result row being handled. This may be - an actual :class:`.RowProxy` or may be a dictionary containing - :class:`.Column` objects as keys. - :return: When configured with ``retval=True``, the function - should return a dictionary-like row object, or ``EXT_CONTINUE``, - indicating the original row should be used. - - - """ - - def create_instance(self, mapper, context, row, class_): - """Receive a row when a new object instance is about to be - created from that row. - - .. deprecated:: 0.9 the :meth:`.create_instance` event should - be considered as legacy. Manipulation of the object construction - mechanics during a load should not be necessary. - - The method can choose to create the instance itself, or it can return - EXT_CONTINUE to indicate normal object creation should take place. - This listener is typically registered with ``retval=True``. - - :param mapper: the :class:`.Mapper` which is the target - of this event. - :param context: the :class:`.QueryContext`, which includes - a handle to the current :class:`.Query` in progress as well - as additional state information. - :param row: the result row being handled. This may be - an actual :class:`.RowProxy` or may be a dictionary containing - :class:`.Column` objects as keys. - :param class\_: the mapped class. - :return: When configured with ``retval=True``, the return value - should be a newly created instance of the mapped class, - or ``EXT_CONTINUE`` indicating that default object construction - should take place. - - """ - - def append_result(self, mapper, context, row, target, - result, **flags): - """Receive an object instance before that instance is appended - to a result list. - - .. deprecated:: 0.9 the :meth:`.append_result` event should - be considered as legacy. It is a difficult to use method - whose original purpose is better suited by custom collection - classes. - - This is a rarely used hook which can be used to alter - the construction of a result list returned by :class:`.Query`. - - :param mapper: the :class:`.Mapper` which is the target - of this event. - :param context: the :class:`.QueryContext`, which includes - a handle to the current :class:`.Query` in progress as well - as additional state information. - :param row: the result row being handled. This may be - an actual :class:`.RowProxy` or may be a dictionary containing - :class:`.Column` objects as keys. - :param target: the mapped instance being populated. 
If - the event is configured with ``raw=True``, this will - instead be the :class:`.InstanceState` state-management - object associated with the instance. - :param result: a list-like object where results are being - appended. - :param \**flags: Additional state information about the - current handling of the row. - :return: If this method is registered with ``retval=True``, - a return value of ``EXT_STOP`` will prevent the instance - from being appended to the given result list, whereas a - return value of ``EXT_CONTINUE`` will result in the default - behavior of appending the value to the result list. - - """ - - def populate_instance(self, mapper, context, row, - target, **flags): - """Receive an instance before that instance has - its attributes populated. - - .. deprecated:: 0.9 the :meth:`.populate_instance` event should - be considered as legacy. The mechanics of instance population - should not need modification; special "on load" rules can as always - be accommodated by the :class:`.InstanceEvents.load` event. - - This usually corresponds to a newly loaded instance but may - also correspond to an already-loaded instance which has - unloaded attributes to be populated. The method may be called - many times for a single instance, as multiple result rows are - used to populate eagerly loaded collections. - - Most usages of this hook are obsolete. For a - generic "object has been newly created from a row" hook, use - :meth:`.InstanceEvents.load`. - - :param mapper: the :class:`.Mapper` which is the target - of this event. - :param context: the :class:`.QueryContext`, which includes - a handle to the current :class:`.Query` in progress as well - as additional state information. - :param row: the result row being handled. This may be - an actual :class:`.RowProxy` or may be a dictionary containing - :class:`.Column` objects as keys. - :param target: the mapped instance. If - the event is configured with ``raw=True``, this will - instead be the :class:`.InstanceState` state-management - object associated with the instance. - :return: When configured with ``retval=True``, a return - value of ``EXT_STOP`` will bypass instance population by - the mapper. A value of ``EXT_CONTINUE`` indicates that - default instance population should take place. + :meth:`.MapperEvents.before_configured` """ @@ -822,30 +826,14 @@ class MapperEvents(event.Events): steps. .. warning:: - Mapper-level flush events are designed to operate **on attributes - local to the immediate object being handled - and via SQL operations with the given** - :class:`.Connection` **only.** Handlers here should **not** make - alterations to the state of the :class:`.Session` overall, and - in general should not affect any :func:`.relationship` -mapped - attributes, as session cascade rules will not function properly, - nor is it always known if the related class has already been - handled. Operations that **are not supported in mapper - events** include: - * :meth:`.Session.add` - * :meth:`.Session.delete` - * Mapped collection append, add, remove, delete, discard, etc. - * Mapped relationship attribute set/del events, - i.e. ``someobject.related = someotherobject`` - - Operations which manipulate the state of the object - relative to other objects are better handled: - - * In the ``__init__()`` method of the mapped object itself, or - another method designed to establish some particular state. - * In a ``@validates`` handler, see :ref:`simple_validators` - * Within the :meth:`.SessionEvents.before_flush` event. 
+ Mapper-level flush events only allow **very limited operations**, + on attributes local to the row being operated upon only, + as well as allowing any SQL to be emitted on the given + :class:`.Connection`. **Please read fully** the notes + at :ref:`session_persistence_mapper` for guidelines on using + these methods; generally, the :meth:`.SessionEvents.before_flush` + method should be preferred for general on-flush changes. :param mapper: the :class:`.Mapper` which is the target of this event. @@ -859,6 +847,10 @@ class MapperEvents(event.Events): object associated with the instance. :return: No return value is supported by this event. + .. seealso:: + + :ref:`session_persistence_events` + """ def after_insert(self, mapper, connection, target): @@ -880,30 +872,14 @@ class MapperEvents(event.Events): event->persist->event steps. .. warning:: - Mapper-level flush events are designed to operate **on attributes - local to the immediate object being handled - and via SQL operations with the given** - :class:`.Connection` **only.** Handlers here should **not** make - alterations to the state of the :class:`.Session` overall, and in - general should not affect any :func:`.relationship` -mapped - attributes, as session cascade rules will not function properly, - nor is it always known if the related class has already been - handled. Operations that **are not supported in mapper - events** include: - * :meth:`.Session.add` - * :meth:`.Session.delete` - * Mapped collection append, add, remove, delete, discard, etc. - * Mapped relationship attribute set/del events, - i.e. ``someobject.related = someotherobject`` - - Operations which manipulate the state of the object - relative to other objects are better handled: - - * In the ``__init__()`` method of the mapped object itself, - or another method designed to establish some particular state. - * In a ``@validates`` handler, see :ref:`simple_validators` - * Within the :meth:`.SessionEvents.before_flush` event. + Mapper-level flush events only allow **very limited operations**, + on attributes local to the row being operated upon only, + as well as allowing any SQL to be emitted on the given + :class:`.Connection`. **Please read fully** the notes + at :ref:`session_persistence_mapper` for guidelines on using + these methods; generally, the :meth:`.SessionEvents.before_flush` + method should be preferred for general on-flush changes. :param mapper: the :class:`.Mapper` which is the target of this event. @@ -917,6 +893,10 @@ class MapperEvents(event.Events): object associated with the instance. :return: No return value is supported by this event. + .. seealso:: + + :ref:`session_persistence_events` + """ def before_update(self, mapper, connection, target): @@ -957,29 +937,14 @@ class MapperEvents(event.Events): steps. .. warning:: - Mapper-level flush events are designed to operate **on attributes - local to the immediate object being handled - and via SQL operations with the given** :class:`.Connection` - **only.** Handlers here should **not** make alterations to the - state of the :class:`.Session` overall, and in general should not - affect any :func:`.relationship` -mapped attributes, as - session cascade rules will not function properly, nor is it - always known if the related class has already been handled. - Operations that **are not supported in mapper events** include: - * :meth:`.Session.add` - * :meth:`.Session.delete` - * Mapped collection append, add, remove, delete, discard, etc. - * Mapped relationship attribute set/del events, - i.e. 
``someobject.related = someotherobject`` - - Operations which manipulate the state of the object - relative to other objects are better handled: - - * In the ``__init__()`` method of the mapped object itself, - or another method designed to establish some particular state. - * In a ``@validates`` handler, see :ref:`simple_validators` - * Within the :meth:`.SessionEvents.before_flush` event. + Mapper-level flush events only allow **very limited operations**, + on attributes local to the row being operated upon only, + as well as allowing any SQL to be emitted on the given + :class:`.Connection`. **Please read fully** the notes + at :ref:`session_persistence_mapper` for guidelines on using + these methods; generally, the :meth:`.SessionEvents.before_flush` + method should be preferred for general on-flush changes. :param mapper: the :class:`.Mapper` which is the target of this event. @@ -992,6 +957,11 @@ class MapperEvents(event.Events): instead be the :class:`.InstanceState` state-management object associated with the instance. :return: No return value is supported by this event. + + .. seealso:: + + :ref:`session_persistence_events` + """ def after_update(self, mapper, connection, target): @@ -1031,29 +1001,14 @@ class MapperEvents(event.Events): steps. .. warning:: - Mapper-level flush events are designed to operate **on attributes - local to the immediate object being handled - and via SQL operations with the given** :class:`.Connection` - **only.** Handlers here should **not** make alterations to the - state of the :class:`.Session` overall, and in general should not - affect any :func:`.relationship` -mapped attributes, as - session cascade rules will not function properly, nor is it - always known if the related class has already been handled. - Operations that **are not supported in mapper events** include: - * :meth:`.Session.add` - * :meth:`.Session.delete` - * Mapped collection append, add, remove, delete, discard, etc. - * Mapped relationship attribute set/del events, - i.e. ``someobject.related = someotherobject`` - - Operations which manipulate the state of the object - relative to other objects are better handled: - - * In the ``__init__()`` method of the mapped object itself, - or another method designed to establish some particular state. - * In a ``@validates`` handler, see :ref:`simple_validators` - * Within the :meth:`.SessionEvents.before_flush` event. + Mapper-level flush events only allow **very limited operations**, + on attributes local to the row being operated upon only, + as well as allowing any SQL to be emitted on the given + :class:`.Connection`. **Please read fully** the notes + at :ref:`session_persistence_mapper` for guidelines on using + these methods; generally, the :meth:`.SessionEvents.before_flush` + method should be preferred for general on-flush changes. :param mapper: the :class:`.Mapper` which is the target of this event. @@ -1067,6 +1022,10 @@ class MapperEvents(event.Events): object associated with the instance. :return: No return value is supported by this event. + .. seealso:: + + :ref:`session_persistence_events` + """ def before_delete(self, mapper, connection, target): @@ -1082,29 +1041,14 @@ class MapperEvents(event.Events): once in a later step. .. 
warning:: - Mapper-level flush events are designed to operate **on attributes - local to the immediate object being handled - and via SQL operations with the given** :class:`.Connection` - **only.** Handlers here should **not** make alterations to the - state of the :class:`.Session` overall, and in general should not - affect any :func:`.relationship` -mapped attributes, as - session cascade rules will not function properly, nor is it - always known if the related class has already been handled. - Operations that **are not supported in mapper events** include: - * :meth:`.Session.add` - * :meth:`.Session.delete` - * Mapped collection append, add, remove, delete, discard, etc. - * Mapped relationship attribute set/del events, - i.e. ``someobject.related = someotherobject`` - - Operations which manipulate the state of the object - relative to other objects are better handled: - - * In the ``__init__()`` method of the mapped object itself, - or another method designed to establish some particular state. - * In a ``@validates`` handler, see :ref:`simple_validators` - * Within the :meth:`.SessionEvents.before_flush` event. + Mapper-level flush events only allow **very limited operations**, + on attributes local to the row being operated upon only, + as well as allowing any SQL to be emitted on the given + :class:`.Connection`. **Please read fully** the notes + at :ref:`session_persistence_mapper` for guidelines on using + these methods; generally, the :meth:`.SessionEvents.before_flush` + method should be preferred for general on-flush changes. :param mapper: the :class:`.Mapper` which is the target of this event. @@ -1118,6 +1062,10 @@ class MapperEvents(event.Events): object associated with the instance. :return: No return value is supported by this event. + .. seealso:: + + :ref:`session_persistence_events` + """ def after_delete(self, mapper, connection, target): @@ -1133,29 +1081,14 @@ class MapperEvents(event.Events): once in a previous step. .. warning:: - Mapper-level flush events are designed to operate **on attributes - local to the immediate object being handled - and via SQL operations with the given** :class:`.Connection` - **only.** Handlers here should **not** make alterations to the - state of the :class:`.Session` overall, and in general should not - affect any :func:`.relationship` -mapped attributes, as - session cascade rules will not function properly, nor is it - always known if the related class has already been handled. - Operations that **are not supported in mapper events** include: - * :meth:`.Session.add` - * :meth:`.Session.delete` - * Mapped collection append, add, remove, delete, discard, etc. - * Mapped relationship attribute set/del events, - i.e. ``someobject.related = someotherobject`` - - Operations which manipulate the state of the object - relative to other objects are better handled: - - * In the ``__init__()`` method of the mapped object itself, - or another method designed to establish some particular state. - * In a ``@validates`` handler, see :ref:`simple_validators` - * Within the :meth:`.SessionEvents.before_flush` event. + Mapper-level flush events only allow **very limited operations**, + on attributes local to the row being operated upon only, + as well as allowing any SQL to be emitted on the given + :class:`.Connection`. **Please read fully** the notes + at :ref:`session_persistence_mapper` for guidelines on using + these methods; generally, the :meth:`.SessionEvents.before_flush` + method should be preferred for general on-flush changes. 
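A sketch of a flush-safe listener under the rules in the warnings above: it touches only a local, non-relationship column attribute of the row being flushed (``Document`` is hypothetical)::

    import datetime

    from sqlalchemy import Column, DateTime, Integer, String, event
    from sqlalchemy.ext.declarative import declarative_base

    Base = declarative_base()

    class Document(Base):
        __tablename__ = 'document'
        id = Column(Integer, primary_key=True)
        body = Column(String)
        updated_at = Column(DateTime)

    @event.listens_for(Document, 'before_update')
    def set_updated_at(mapper, connection, target):
        # stays within the warning's limits: no Session.add(), no
        # collection mutation, no relationship attribute changes
        target.updated_at = datetime.datetime.utcnow()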
:param mapper: the :class:`.Mapper` which is the target of this event. @@ -1169,6 +1102,10 @@ object associated with the instance. :return: No return value is supported by this event. + .. seealso:: + + :ref:`session_persistence_events` + """ @@ -1409,6 +1346,8 @@ class SessionEvents(event.Events): :meth:`~.SessionEvents.after_flush_postexec` + :ref:`session_persistence_events` + """ def after_flush(self, session, flush_context): @@ -1429,6 +1368,8 @@ class SessionEvents(event.Events): :meth:`~.SessionEvents.after_flush_postexec` + :ref:`session_persistence_events` + """ def after_flush_postexec(self, session, flush_context): @@ -1451,6 +1392,8 @@ class SessionEvents(event.Events): :meth:`~.SessionEvents.after_flush` + :ref:`session_persistence_events` + """ def after_begin(self, session, transaction, connection): @@ -1488,6 +1431,8 @@ class SessionEvents(event.Events): :meth:`~.SessionEvents.after_attach` + :ref:`session_lifecycle_events` + """ def after_attach(self, session, instance): @@ -1510,6 +1455,8 @@ class SessionEvents(event.Events): :meth:`~.SessionEvents.before_attach` + :ref:`session_lifecycle_events` + """ @event._legacy_signature("0.9", @@ -1627,8 +1574,9 @@ class AttributeEvents(event.Events): @staticmethod def _set_dispatch(cls, dispatch_cls): - event.Events._set_dispatch(cls, dispatch_cls) + dispatch = event.Events._set_dispatch(cls, dispatch_cls) dispatch_cls._active_history = False + return dispatch @classmethod def _accept_with(cls, target): @@ -1644,7 +1592,8 @@ class AttributeEvents(event.Events): propagate=False): target, identifier, fn = \ - event_key.dispatch_target, event_key.identifier, event_key.fn + event_key.dispatch_target, event_key.identifier, \ + event_key._listen_fn if active_history: target.dispatch._active_history = True @@ -1744,3 +1693,109 @@ class AttributeEvents(event.Events): the given value, or a new effective value, should be returned. """ + + def init_collection(self, target, collection, collection_adapter): + """Receive a 'collection init' event. + + This event is triggered for a collection-based attribute, when + the initial "empty collection" is first generated for a blank + attribute, as well as for when the collection is replaced with + a new one, such as via a set event. + + E.g., given that ``User.addresses`` is a relationship-based + collection, the event is triggered here:: + + u1 = User() + u1.addresses.append(a1) # <- new collection + + and also during replace operations:: + + u1.addresses = [a2, a3] # <- new collection + + :param target: the object instance receiving the event. + If the listener is registered with ``raw=True``, this will + be the :class:`.InstanceState` object. + :param collection: the new collection. This will always be generated + from what was specified as + :paramref:`.RelationshipProperty.collection_class`, and will always + be empty. + :param collection_adapter: the :class:`.CollectionAdapter` that will + mediate internal access to the collection. + + .. versionadded:: 1.0.0 the :meth:`.AttributeEvents.init_collection` + and :meth:`.AttributeEvents.dispose_collection` events supersede + the :class:`.collection.linker` hook. + + """ + + def dispose_collection(self, target, collection, collection_adapter): + """Receive a 'collection dispose' event.
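A brief sketch of listening for these two new hooks, reusing the ``User.addresses`` collection from the example above; the listener bodies are illustrative only::

    import logging

    from sqlalchemy import event

    log = logging.getLogger(__name__)

    @event.listens_for(User.addresses, "init_collection")
    def receive_init_collection(target, collection, collection_adapter):
        # a new, empty collection has just been associated with target
        log.debug("new collection for %r", target)

    @event.listens_for(User.addresses, "dispose_collection")
    def receive_dispose_collection(target, collection, collection_adapter):
        # the old collection is detached; it has typically been emptied
        log.debug("collection discarded for %r", target)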
+ + This event is triggered for a collection-based attribute when + a collection is replaced, that is:: + + u1.addresses.append(a1) + + u1.addresses = [a2, a3] # <- old collection is disposed + + The mechanics of the event will typically include that the given + collection is empty, even if it stored objects while being replaced. + + .. versionadded:: 1.0.0 the :meth:`.AttributeEvents.init_collection` + and :meth:`.AttributeEvents.dispose_collection` events supersede + the :class:`.collection.linker` hook. + + """ + + +class QueryEvents(event.Events): + """Represent events within the construction of a :class:`.Query` object. + + The events here are intended to be used with an as-yet-unreleased + inspection system for :class:`.Query`. Some very basic operations + are possible now, however the inspection system is intended to allow + complex query manipulations to be automated. + + .. versionadded:: 1.0.0 + + """ + + _target_class_doc = "SomeQuery" + _dispatch_target = Query + + def before_compile(self, query): + """Receive the :class:`.Query` object before it is composed into a + core :class:`.Select` object. + + This event is intended to allow changes to the query given:: + + @event.listens_for(Query, "before_compile", retval=True) + def no_deleted(query): + for desc in query.column_descriptions: + if desc['type'] is User: + entity = desc['entity'] + query = query.filter(entity.deleted == False) + return query + + The event should normally be listened with the ``retval=True`` + parameter set, so that the modified query may be returned. + + + """ + + @classmethod + def _listen( + cls, event_key, retval=False, **kw): + fn = event_key._listen_fn + + if not retval: + def wrap(*arg, **kw): + if not retval: + query = arg[0] + fn(*arg, **kw) + return query + else: + return fn(*arg, **kw) + event_key = event_key.with_wrapper(wrap) + + event_key.base_listen(**kw) diff --git a/lib/python3.4/site-packages/sqlalchemy/orm/exc.py b/lib/python3.4/site-packages/sqlalchemy/orm/exc.py index ff0ece4..db99322 100644 --- a/lib/python3.4/site-packages/sqlalchemy/orm/exc.py +++ b/lib/python3.4/site-packages/sqlalchemy/orm/exc.py @@ -1,5 +1,5 @@ # orm/exc.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under diff --git a/lib/python3.4/site-packages/sqlalchemy/orm/identity.py b/lib/python3.4/site-packages/sqlalchemy/orm/identity.py index e48203d..5646732 100644 --- a/lib/python3.4/site-packages/sqlalchemy/orm/identity.py +++ b/lib/python3.4/site-packages/sqlalchemy/orm/identity.py @@ -1,5 +1,5 @@ # orm/identity.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -10,18 +10,26 @@ from . import attributes from .. 
import util -class IdentityMap(dict): - +class IdentityMap(object): def __init__(self): + self._dict = {} self._modified = set() self._wr = weakref.ref(self) + def keys(self): + return self._dict.keys() + def replace(self, state): raise NotImplementedError() def add(self, state): raise NotImplementedError() + def _add_unpresent(self, state, key): + """optional inlined form of add() which can assume item isn't present + in the map""" + self.add(state) + def update(self, dict): raise NotImplementedError("IdentityMap uses add() to insert data") @@ -36,7 +44,8 @@ class IdentityMap(dict): def _manage_removed_state(self, state): del state._instance_dict - self._modified.discard(state) + if state.modified: + self._modified.discard(state) def _dirty_states(self): return self._modified @@ -60,6 +69,9 @@ class IdentityMap(dict): def setdefault(self, key, default=None): raise NotImplementedError("IdentityMap uses add() to insert data") + def __len__(self): + return len(self._dict) + def copy(self): raise NotImplementedError() @@ -72,11 +84,8 @@ class IdentityMap(dict): class WeakInstanceDict(IdentityMap): - def __init__(self): - IdentityMap.__init__(self) - def __getitem__(self, key): - state = dict.__getitem__(self, key) + state = self._dict[key] o = state.obj() if o is None: raise KeyError(key) @@ -84,8 +93,8 @@ class WeakInstanceDict(IdentityMap): def __contains__(self, key): try: - if dict.__contains__(self, key): - state = dict.__getitem__(self, key) + if key in self._dict: + state = self._dict[key] o = state.obj() else: return False @@ -95,25 +104,25 @@ class WeakInstanceDict(IdentityMap): return o is not None def contains_state(self, state): - return dict.get(self, state.key) is state + return state.key in self._dict and self._dict[state.key] is state def replace(self, state): - if dict.__contains__(self, state.key): - existing = dict.__getitem__(self, state.key) + if state.key in self._dict: + existing = self._dict[state.key] if existing is not state: self._manage_removed_state(existing) else: return - dict.__setitem__(self, state.key, state) + self._dict[state.key] = state self._manage_incoming_state(state) def add(self, state): key = state.key # inline of self.__contains__ - if dict.__contains__(self, key): + if key in self._dict: try: - existing_state = dict.__getitem__(self, key) + existing_state = self._dict[key] if existing_state is not state: o = existing_state.obj() if o is not None: @@ -125,19 +134,24 @@ class WeakInstanceDict(IdentityMap): return except KeyError: pass - dict.__setitem__(self, key, state) + self._dict[key] = state self._manage_incoming_state(state) + def _add_unpresent(self, state, key): + # inlined form of add() called by loading.py + self._dict[key] = state + state._instance_dict = self._wr + def get(self, key, default=None): - state = dict.get(self, key, default) - if state is default: + if key not in self._dict: return default + state = self._dict[key] o = state.obj() if o is None: return default return o - def _items(self): + def items(self): values = self.all_states() result = [] for state in values: @@ -146,7 +160,7 @@ class WeakInstanceDict(IdentityMap): result.append((state.key, value)) return result - def _values(self): + def values(self): values = self.all_states() result = [] for state in values: @@ -156,39 +170,80 @@ class WeakInstanceDict(IdentityMap): return result + def __iter__(self): + return iter(self.keys()) + if util.py2k: - items = _items - values = _values def iteritems(self): return iter(self.items()) def itervalues(self): return 
iter(self.values()) - else: - def items(self): - return iter(self._items()) - - def values(self): - return iter(self._values()) def all_states(self): if util.py2k: - return dict.values(self) + return self._dict.values() else: - return list(dict.values(self)) + return list(self._dict.values()) + + def _fast_discard(self, state): + self._dict.pop(state.key, None) def discard(self, state): - st = dict.get(self, state.key, None) - if st is state: - dict.pop(self, state.key, None) + st = self._dict.pop(state.key, None) + if st: + assert st is state self._manage_removed_state(state) + def safe_discard(self, state): + if state.key in self._dict: + st = self._dict[state.key] + if st is state: + self._dict.pop(state.key, None) + self._manage_removed_state(state) + def prune(self): return 0 class StrongInstanceDict(IdentityMap): + """A 'strong-referencing' version of the identity map. + + .. deprecated:: this object is present in order to fulfill + the ``weak_identity_map=False`` option of the Session. + This option is present to allow compatibility with older applications, + but it is recommended that strong references to objects + be maintained by the calling application + externally to the :class:`.Session` itself, to the degree + that is needed by the application. + + """ + + if util.py2k: + def itervalues(self): + return self._dict.itervalues() + + def iteritems(self): + return self._dict.iteritems() + + def __iter__(self): + return iter(self._dict) + + def __getitem__(self, key): + return self._dict[key] + + def __contains__(self, key): + return key in self._dict + + def get(self, key, default=None): + return self._dict.get(key, default) + + def values(self): + return self._dict.values() + + def items(self): + return self._dict.items() def all_states(self): return [attributes.instance_state(o) for o in self.values()] @@ -199,36 +254,48 @@ class StrongInstanceDict(IdentityMap): def contains_state(self, state): return (state.key in self and attributes.instance_state(self[state.key]) is state) def replace(self, state): - if dict.__contains__(self, state.key): - existing = dict.__getitem__(self, state.key) + if state.key in self._dict: + existing = self._dict[state.key] existing = attributes.instance_state(existing) if existing is not state: self._manage_removed_state(existing) else: return - dict.__setitem__(self, state.key, state.obj()) + self._dict[state.key] = state.obj() self._manage_incoming_state(state) def add(self, state): if state.key in self: - if attributes.instance_state( - dict.__getitem__( - self, - state.key)) is not state: + if attributes.instance_state(self._dict[state.key]) is not state: raise AssertionError('A conflicting state is already ' 'present in the identity map for key %r' % (state.key, )) else: - dict.__setitem__(self, state.key, state.obj()) + self._dict[state.key] = state.obj() self._manage_incoming_state(state) + def _add_unpresent(self, state, key): + # inlined form of add() called by loading.py + self._dict[key] = state.obj() + state._instance_dict = self._wr + + def _fast_discard(self, state): + self._dict.pop(state.key, None) + def discard(self, state): - obj = dict.get(self, state.key, None) + obj = self._dict.pop(state.key, None) if obj is not None: + self._manage_removed_state(state) + st = attributes.instance_state(obj) + assert st is state + + def safe_discard(self, state): + if state.key in self._dict: + obj = self._dict[state.key] st = attributes.instance_state(obj) if st is state: - dict.pop(self, state.key, None) + self._dict.pop(state.key, None) self._manage_removed_state(state) def prune(self): @@ -241,7 +308,7
@@ class StrongInstanceDict(IdentityMap): keepers = weakref.WeakValueDictionary() keepers.update(self) - dict.clear(self) - dict.update(self, keepers) + self._dict.clear() + self._dict.update(keepers) self.modified = bool(dirty) return ref_count - len(self) diff --git a/lib/python3.4/site-packages/sqlalchemy/orm/instrumentation.py b/lib/python3.4/site-packages/sqlalchemy/orm/instrumentation.py index cc390a1..d41ee59 100644 --- a/lib/python3.4/site-packages/sqlalchemy/orm/instrumentation.py +++ b/lib/python3.4/site-packages/sqlalchemy/orm/instrumentation.py @@ -1,5 +1,5 @@ # orm/instrumentation.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -35,13 +35,17 @@ from .. import util from . import base -class ClassManager(dict): +_memoized_key_collection = util.group_expirable_memoized_property() + +class ClassManager(dict): """tracks state information at the class level.""" MANAGER_ATTR = base.DEFAULT_MANAGER_ATTR STATE_ATTR = base.DEFAULT_STATE_ATTR + _state_setter = staticmethod(util.attrsetter(STATE_ATTR)) + deferred_scalar_loader = None original_init = object.__init__ @@ -91,6 +95,21 @@ class ClassManager(dict): def is_mapped(self): return 'mapper' in self.__dict__ + @_memoized_key_collection + def _all_key_set(self): + return frozenset(self) + + @_memoized_key_collection + def _collection_impl_keys(self): + return frozenset([ + attr.key for attr in self.values() if attr.impl.collection]) + + @_memoized_key_collection + def _scalar_loader_impls(self): + return frozenset([ + attr.impl for attr in + self.values() if attr.impl.accepts_scalar_loader]) + @util.memoized_property def mapper(self): # raises unless self.mapper has been assigned @@ -98,7 +117,7 @@ class ClassManager(dict): def _all_sqla_attributes(self, exclude=None): """return an iterator of all classbound attributes that - implement :class:`._InspectionAttr`. + implement :class:`.InspectionAttr`.
This includes :class:`.QueryableAttribute` as well as extension types such as :class:`.hybrid_property` and @@ -111,7 +130,7 @@ class ClassManager(dict): for key in set(supercls.__dict__).difference(exclude): exclude.add(key) val = supercls.__dict__[key] - if isinstance(val, interfaces._InspectionAttr): + if isinstance(val, interfaces.InspectionAttr): yield key, val def _attr_has_impl(self, key): @@ -194,6 +213,7 @@ class ClassManager(dict): else: self.local_attrs[key] = inst self.install_descriptor(key, inst) + _memoized_key_collection.expire_instance(self) self[key] = inst for cls in self.class_.__subclasses__(): @@ -222,6 +242,7 @@ class ClassManager(dict): else: del self.local_attrs[key] self.uninstall_descriptor(key) + _memoized_key_collection.expire_instance(self) del self[key] for cls in self.class_.__subclasses__(): manager = manager_of_class(cls) @@ -289,13 +310,15 @@ class ClassManager(dict): def new_instance(self, state=None): instance = self.class_.__new__(self.class_) - setattr(instance, self.STATE_ATTR, - state or self._state_constructor(instance, self)) + if state is None: + state = self._state_constructor(instance, self) + self._state_setter(instance, state) return instance def setup_instance(self, instance, state=None): - setattr(instance, self.STATE_ATTR, - state or self._state_constructor(instance, self)) + if state is None: + state = self._state_constructor(instance, self) + self._state_setter(instance, state) def teardown_instance(self, instance): delattr(instance, self.STATE_ATTR) @@ -322,7 +345,7 @@ class ClassManager(dict): _new_state_if_none(instance) else: state = self._state_constructor(instance, self) - setattr(instance, self.STATE_ATTR, state) + self._state_setter(instance, state) return state def has_state(self, instance): @@ -344,7 +367,6 @@ class ClassManager(dict): class _SerializeManager(object): - """Provide serialization of a :class:`.ClassManager`. The :class:`.InstanceState` uses ``__init__()`` on serialize @@ -379,7 +401,6 @@ class _SerializeManager(object): class InstrumentationFactory(object): - """Factory for new ClassManager instances.""" def create_manager_for_cls(self, class_): diff --git a/lib/python3.4/site-packages/sqlalchemy/orm/interfaces.py b/lib/python3.4/site-packages/sqlalchemy/orm/interfaces.py index 9bc1c3d..2ff00ae 100644 --- a/lib/python3.4/site-packages/sqlalchemy/orm/interfaces.py +++ b/lib/python3.4/site-packages/sqlalchemy/orm/interfaces.py @@ -1,5 +1,5 @@ # orm/interfaces.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -9,25 +9,28 @@ Contains various base classes used throughout the ORM. -Defines the now deprecated ORM extension classes as well -as ORM internals. +Defines some key base classes prominent within the internals, +as well as the now-deprecated ORM extension classes. Other than the deprecated extensions, this module and the -classes within should be considered mostly private. +classes within are mostly private, though some attributes +are exposed when inspecting mappings. """ from __future__ import absolute_import -from .. import exc as sa_exc, util, inspect +from .. 
import util from ..sql import operators -from collections import deque from .base import (ONETOMANY, MANYTOONE, MANYTOMANY, EXT_CONTINUE, EXT_STOP, NOT_EXTENSION) -from .base import _InspectionAttr, _MappedAttribute -from .path_registry import PathRegistry +from .base import (InspectionAttr, + InspectionAttrInfo, _MappedAttribute) import collections +from .. import inspect +# imported later +MapperExtension = SessionExtension = AttributeExtension = None __all__ = ( 'AttributeExtension', @@ -47,11 +50,8 @@ __all__ = ( ) -class MapperProperty(_MappedAttribute, _InspectionAttr): - """Manage the relationship of a ``Mapper`` to a single class - attribute, as well as that attribute as it appears on individual - instances of the class, including attribute instrumentation, - attribute access, loading behavior, and dependency calculations. +class MapperProperty(_MappedAttribute, InspectionAttr, util.MemoizedSlots): + """Represent a particular class attribute mapped by :class:`.Mapper`. The most common occurrences of :class:`.MapperProperty` are the mapped :class:`.Column`, which is represented in a mapping as @@ -62,14 +62,51 @@ class MapperProperty(_MappedAttribute, _InspectionAttr): """ + __slots__ = ( + '_configure_started', '_configure_finished', 'parent', 'key', + 'info' + ) + cascade = frozenset() """The set of 'cascade' attribute names. This collection is checked before the 'cascade_iterator' method is called. + The collection typically only applies to a RelationshipProperty. + """ is_property = True + """Part of the InspectionAttr interface; states this object is a + mapper property. + + """ + + def _memoized_attr_info(self): + """Info dictionary associated with the object, allowing user-defined + data to be associated with this :class:`.InspectionAttr`. + + The dictionary is generated when first accessed. Alternatively, + it can be specified as a constructor argument to the + :func:`.column_property`, :func:`.relationship`, or :func:`.composite` + functions. + + .. versionadded:: 0.8 Added support for .info to all + :class:`.MapperProperty` subclasses. + + .. versionchanged:: 1.0.0 :attr:`.MapperProperty.info` is also + available on extension types via the + :attr:`.InspectionAttrInfo.info` attribute, so that it can apply + to a wider variety of ORM and extension constructs. + + .. seealso:: + + :attr:`.QueryableAttribute.info` + + :attr:`.SchemaItem.info` + + """ + return {} def setup(self, context, entity, path, adapter, **kwargs): """Called by Query for the purposes of constructing a SQL statement. @@ -77,16 +114,15 @@ class MapperProperty(_MappedAttribute, _InspectionAttr): Each MapperProperty associated with the target mapper processes the statement referenced by the query context, adding columns and/or criterion as appropriate. - """ - pass + """ def create_row_processor(self, context, path, - mapper, row, adapter): - """Return a 3-tuple consisting of three row processing functions. + mapper, result, adapter, populators): + """Produce row processing functions and append to the given + set of populators lists. """ - return None, None, None def cascade_iterator(self, type_, state, visited_instances=None, halt_on=None): @@ -98,41 +134,44 @@ class MapperProperty(_MappedAttribute, _InspectionAttr): Note that the 'cascade' collection on this MapperProperty is checked first for the given type before cascade_iterator is called. - See PropertyLoader for the related instance implementation. + This method typically only applies to RelationshipProperty.
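As a quick illustration of the ``info`` dictionary described in ``_memoized_attr_info()`` above, a sketch assuming a typical declarative ``Base`` and an ``Address`` class, both of which are assumed rather than shown::

    from sqlalchemy import Column, Integer, inspect
    from sqlalchemy.orm import relationship

    class User(Base):  # Base is an assumed declarative base
        __tablename__ = 'user'
        id = Column(Integer, primary_key=True)
        # attach arbitrary user-defined metadata at construction time
        addresses = relationship("Address", info={"audited": True})

    # the same dictionary is reachable through mapper inspection
    assert inspect(User).attrs["addresses"].info["audited"] is True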
+ """ return iter(()) def set_parent(self, parent, init): - self.parent = parent + """Set the parent mapper that references this MapperProperty. - def instrument_class(self, mapper): # pragma: no-coverage - raise NotImplementedError() - - @util.memoized_property - def info(self): - """Info dictionary associated with the object, allowing user-defined - data to be associated with this :class:`.MapperProperty`. - - The dictionary is generated when first accessed. Alternatively, - it can be specified as a constructor argument to the - :func:`.column_property`, :func:`.relationship`, or :func:`.composite` - functions. - - .. versionadded:: 0.8 Added support for .info to all - :class:`.MapperProperty` subclasses. - - .. seealso:: - - :attr:`.QueryableAttribute.info` - - :attr:`.SchemaItem.info` + This method is overridden by some subclasses to perform extra + setup when the mapper is first known. """ - return {} + self.parent = parent - _configure_started = False - _configure_finished = False + def instrument_class(self, mapper): + """Hook called by the Mapper to the property to initiate + instrumentation of the class attribute managed by this + MapperProperty. + + The MapperProperty here will typically call out to the + attributes module to set up an InstrumentedAttribute. + + This step is the first of two steps to set up an InstrumentedAttribute, + and is called early in the mapper setup process. + + The second step is typically the init_class_attribute step, + called from StrategizedProperty via the post_instrument_class() + hook. This step assigns additional state to the InstrumentedAttribute + (specifically the "impl") which has been determined after the + MapperProperty has determined what kind of persistence + management it needs to do (e.g. scalar, object, collection, etc). + + """ + + def __init__(self): + self._configure_started = False + self._configure_finished = False def init(self): """Called after all mappers are created to assemble @@ -179,45 +218,28 @@ class MapperProperty(_MappedAttribute, _InspectionAttr): """ - pass - def post_instrument_class(self, mapper): """Perform instrumentation adjustments that need to occur after init() has completed. + The given Mapper is the Mapper invoking the operation, which + may not be the same Mapper as self.parent in an inheritance + scenario; however, Mapper will always at least be a sub-mapper of + self.parent. + + This method is typically used by StrategizedProperty, which delegates + it to LoaderStrategy.init_class_attribute() to perform final setup + on the class-bound InstrumentedAttribute. + """ - pass - - def is_primary(self): - """Return True if this ``MapperProperty``'s mapper is the - primary mapper for its class. - - This flag is used to indicate that the ``MapperProperty`` can - define attribute instrumentation for the class at the class - level (as opposed to the individual instance level). - """ - - return not self.parent.non_primary def merge(self, session, source_state, source_dict, dest_state, dest_dict, load, _recursive): """Merge the attribute represented by this ``MapperProperty`` - from source to destination object""" + from source to destination object. - pass - - def compare(self, operator, value, **kw): - """Return a compare operation for the columns represented by - this ``MapperProperty`` to the given value, which may be a - column value or an instance. 'operator' is an operator from - the operators module, or from sql.Comparator. 
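For context on the comparator machinery referenced here, the public route for redefining operators is the ``comparator_factory`` argument; a hedged sketch, with ``Base`` and the case-insensitive behavior purely illustrative::

    from sqlalchemy import Column, Integer, String, func
    from sqlalchemy.orm import column_property
    from sqlalchemy.orm.properties import ColumnProperty

    class CaseInsensitiveComparator(ColumnProperty.Comparator):
        def __eq__(self, other):
            # compare lower-cased values on both sides
            return func.lower(self.__clause_element__()) == func.lower(other)

    class User(Base):  # Base is an assumed declarative base
        __tablename__ = 'user'
        id = Column(Integer, primary_key=True)
        email = column_property(
            Column(String(64)),
            comparator_factory=CaseInsensitiveComparator)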
- - By default uses the PropComparator attached to this MapperProperty - under the attribute name "comparator". """ - return operator(self.comparator, value) - def __repr__(self): return '<%s at 0x%x; %s>' % ( self.__class__.__name__, @@ -225,8 +247,7 @@ class MapperProperty(_MappedAttribute, _InspectionAttr): class PropComparator(operators.ColumnOperators): - """Defines boolean, comparison, and other operators for - :class:`.MapperProperty` objects. + """Defines SQL operators for :class:`.MapperProperty` objects. SQLAlchemy allows for operators to be redefined at both the Core and ORM level. :class:`.PropComparator` @@ -313,9 +334,11 @@ class PropComparator(operators.ColumnOperators): """ + __slots__ = 'prop', 'property', '_parententity', '_adapt_to_entity' + def __init__(self, prop, parentmapper, adapt_to_entity=None): self.prop = self.property = prop - self._parentmapper = parentmapper + self._parententity = adapt_to_entity or parentmapper self._adapt_to_entity = adapt_to_entity def __clause_element__(self): @@ -328,7 +351,13 @@ class PropComparator(operators.ColumnOperators): """Return a copy of this PropComparator which will use the given :class:`.AliasedInsp` to produce corresponding expressions. """ - return self.__class__(self.prop, self._parentmapper, adapt_to_entity) + return self.__class__(self.prop, self._parententity, adapt_to_entity) + + @property + def _parentmapper(self): + """legacy; this is renamed to _parententity to be + compatible with QueryableAttribute.""" + return inspect(self._parententity).mapper @property def adapter(self): @@ -341,7 +370,7 @@ class PropComparator(operators.ColumnOperators): else: return self._adapt_to_entity._adapt_element - @util.memoized_property + @property def info(self): return self.property.info @@ -421,8 +450,17 @@ class StrategizedProperty(MapperProperty): strategies can be selected at Query time through the usage of ``StrategizedOption`` objects via the Query.options() method. + The mechanics of StrategizedProperty are used for every Query + invocation for every mapped attribute participating in that Query, + to determine first how the attribute will be rendered in SQL + and secondly how the attribute will retrieve a value from a result + row and apply it to a mapped object. The routines here are very + performance-critical. 
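At the public API level, the per-``Query`` strategy selection described above is driven by loader options; a minimal sketch, assuming a configured ``session`` and a ``User.addresses`` relationship::

    from sqlalchemy.orm import joinedload, subqueryload

    # per-Query strategy override; the mapping's default (e.g.
    # lazy="select") still applies to queries without options
    users = session.query(User).options(joinedload(User.addresses)).all()

    # or batch the collection load into a second query
    users = session.query(User).options(subqueryload(User.addresses)).all()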
+ """ + __slots__ = '_strategies', 'strategy' + strategy_wildcard_key = None def _get_context_loader(self, context, path): @@ -457,7 +495,8 @@ class StrategizedProperty(MapperProperty): def _get_strategy_by_cls(self, cls): return self._get_strategy(cls._strategy_keys[0]) - def setup(self, context, entity, path, adapter, **kwargs): + def setup( + self, context, entity, path, adapter, **kwargs): loader = self._get_context_loader(context, path) if loader and loader.strategy: strat = self._get_strategy(loader.strategy) @@ -465,32 +504,38 @@ class StrategizedProperty(MapperProperty): strat = self.strategy strat.setup_query(context, entity, path, loader, adapter, **kwargs) - def create_row_processor(self, context, path, mapper, row, adapter): + def create_row_processor( + self, context, path, mapper, + result, adapter, populators): loader = self._get_context_loader(context, path) if loader and loader.strategy: strat = self._get_strategy(loader.strategy) else: strat = self.strategy - return strat.create_row_processor(context, path, loader, - mapper, row, adapter) + strat.create_row_processor( + context, path, loader, + mapper, result, adapter, populators) def do_init(self): self._strategies = {} self.strategy = self._get_strategy_by_cls(self.strategy_class) def post_instrument_class(self, mapper): - if self.is_primary() and \ + if not self.parent.non_primary and \ not mapper.class_manager._attr_has_impl(self.key): self.strategy.init_class_attribute(mapper) - _strategies = collections.defaultdict(dict) + _all_strategies = collections.defaultdict(dict) @classmethod def strategy_for(cls, **kw): def decorate(dec_cls): - dec_cls._strategy_keys = [] + # ensure each subclass of the strategy has its + # own _strategy_keys collection + if '_strategy_keys' not in dec_cls.__dict__: + dec_cls._strategy_keys = [] key = tuple(sorted(kw.items())) - cls._strategies[cls][key] = dec_cls + cls._all_strategies[cls][key] = dec_cls dec_cls._strategy_keys.append(key) return dec_cls return decorate @@ -498,8 +543,8 @@ class StrategizedProperty(MapperProperty): @classmethod def _strategy_lookup(cls, *key): for prop_cls in cls.__mro__: - if prop_cls in cls._strategies: - strategies = cls._strategies[prop_cls] + if prop_cls in cls._all_strategies: + strategies = cls._all_strategies[prop_cls] try: return strategies[key] except KeyError: @@ -512,18 +557,24 @@ class MapperOption(object): propagate_to_loaders = False """if True, indicate this option should be carried along - Query object generated by scalar or object lazy loaders. + to "secondary" Query objects produced during lazy loads + or refresh operations. + """ def process_query(self, query): - pass + """Apply a modification to the given :class:`.Query`.""" def process_query_conditionally(self, query): """same as process_query(), except that this option may not apply to the given query. - Used when secondary loaders resend existing options to a new - Query.""" + This is typically used during a lazy load or scalar refresh + operation to propagate options stated in the original Query to the + new Query being used for the load. It occurs for those options that + specify propagate_to_loaders=True. + + """ self.process_query(query) @@ -542,9 +593,9 @@ class LoaderStrategy(object): * it processes the ``QueryContext`` at statement construction time, where it can modify the SQL statement that is being produced. 
- Simple column attributes may add their represented column to the - list of selected columns, *eager loading* properties may add - ``LEFT OUTER JOIN`` clauses to the statement. + For example, simple column attributes will add their represented + column to the list of selected columns, a joined eager loader + may establish join clauses to add to the statement. * It produces "row processor" functions at result fetching time. These "row processor" functions populate a particular attribute @@ -552,6 +603,8 @@ class LoaderStrategy(object): """ + __slots__ = 'parent_property', 'is_class_level', 'parent', 'key' + def __init__(self, parent): self.parent_property = parent self.is_class_level = False @@ -562,17 +615,26 @@ class LoaderStrategy(object): pass def setup_query(self, context, entity, path, loadopt, adapter, **kwargs): - pass + """Establish column and other state for a given QueryContext. + + This method fulfills the contract specified by MapperProperty.setup(). + + StrategizedProperty delegates its setup() method + directly to this method. + + """ def create_row_processor(self, context, path, loadopt, mapper, - row, adapter): - """Return row processing functions which fulfill the contract - specified by MapperProperty.create_row_processor. + result, adapter, populators): + """Establish row processing functions for a given QueryContext. - StrategizedProperty delegates its create_row_processor method - directly to this method. """ + This method fulfills the contract specified by + MapperProperty.create_row_processor(). - return None, None, None + StrategizedProperty delegates its create_row_processor() method + directly to this method. + + """ def __str__(self): return str(self.parent_property) diff --git a/lib/python3.4/site-packages/sqlalchemy/orm/loading.py b/lib/python3.4/site-packages/sqlalchemy/orm/loading.py index 923ab22..d3e719d 100644 --- a/lib/python3.4/site-packages/sqlalchemy/orm/loading.py +++ b/lib/python3.4/site-packages/sqlalchemy/orm/loading.py @@ -1,5 +1,5 @@ # orm/loading.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -12,26 +12,27 @@ the functions here are called primarily by Query, Mapper, as well as some of the attribute loading strategies. """ - +from __future__ import absolute_import from .. import util -from . import attributes, exc as orm_exc, state as statelib -from .interfaces import EXT_CONTINUE +from . import attributes, exc as orm_exc from ..sql import util as sql_util +from . import strategy_options + from .util import _none_set, state_str +from .base import _SET_DEFERRED_EXPIRED, _DEFER_FOR_STATE from .. 
import exc as sa_exc +import collections _new_runid = util.counter() def instances(query, cursor, context): """Return an ORM result as an iterator.""" - session = query.session context.runid = _new_runid() - filter_fns = [ent.filter_fn - for ent in query._entities] + filter_fns = [ent.filter_fn for ent in query._entities] filtered = id in filter_fns single_entity = len(query._entities) == 1 and \ @@ -44,59 +45,45 @@ def instances(query, cursor, context): def filter_fn(row): return tuple(fn(x) for x, fn in zip(row, filter_fns)) - custom_rows = single_entity and \ - query._entities[0].custom_rows + try: + (process, labels) = \ + list(zip(*[ + query_entity.row_processor(query, + context, cursor) + for query_entity in query._entities + ])) - (process, labels) = \ - list(zip(*[ - query_entity.row_processor(query, - context, custom_rows) - for query_entity in query._entities - ])) + if not single_entity: + keyed_tuple = util.lightweight_named_tuple('result', labels) - while True: - context.progress = {} - context.partials = {} + while True: + context.partials = {} - if query._yield_per: - fetch = cursor.fetchmany(query._yield_per) - if not fetch: + if query._yield_per: + fetch = cursor.fetchmany(query._yield_per) + if not fetch: + break + else: + fetch = cursor.fetchall() + + if single_entity: + proc = process[0] + rows = [proc(row) for row in fetch] + else: + rows = [keyed_tuple([proc(row) for proc in process]) + for row in fetch] + + if filtered: + rows = util.unique_list(rows, filter_fn) + + for row in rows: + yield row + + if not query._yield_per: break - else: - fetch = cursor.fetchall() - - if custom_rows: - rows = [] - for row in fetch: - process[0](row, rows) - elif single_entity: - rows = [process[0](row, None) for row in fetch] - else: - rows = [util.KeyedTuple([proc(row, None) for proc in process], - labels) for row in fetch] - - if filtered: - rows = util.unique_list(rows, filter_fn) - - if context.refresh_state and query._only_load_props \ - and context.refresh_state in context.progress: - context.refresh_state._commit( - context.refresh_state.dict, query._only_load_props) - context.progress.pop(context.refresh_state) - - statelib.InstanceState._commit_all_states( - list(context.progress.items()), - session.identity_map - ) - - for state, (dict_, attrs) in context.partials.items(): - state._commit(dict_, attrs) - - for row in rows: - yield row - - if not query._yield_per: - break + except Exception as err: + cursor.close() + util.raise_from_cause(err) @util.dependencies("sqlalchemy.orm.query") @@ -126,6 +113,7 @@ def merge_result(querylib, query, iterator, load=True): if isinstance(e, querylib._MapperEntity)] result = [] keys = [ent._label_name for ent in query._entities] + keyed_tuple = util.lightweight_named_tuple('result', keys) for row in iterator: newrow = list(row) for i in mapped_entities: @@ -134,7 +122,7 @@ def merge_result(querylib, query, iterator, load=True): attributes.instance_state(newrow[i]), attributes.instance_dict(newrow[i]), load=load, _recursive={}) - result.append(util.KeyedTuple(newrow, keys)) + result.append(keyed_tuple(newrow)) return iter(result) finally: @@ -161,7 +149,7 @@ def get_from_identity(session, key, passive): # expired state will be checked soon enough, if necessary return instance try: - state(state, passive) + state._load_expired(state, passive) except orm_exc.ObjectDeletedError: session._remove_newly_deleted([state]) return None @@ -233,11 +221,56 @@ def load_on_ident(query, key, return None -def instance_processor(mapper, context, path, 
adapter, - polymorphic_from=None, - only_load_props=None, - refresh_state=None, - polymorphic_discriminator=None): +def _setup_entity_query( + context, mapper, query_entity, + path, adapter, column_collection, + with_polymorphic=None, only_load_props=None, + polymorphic_discriminator=None, **kw): + + if with_polymorphic: + poly_properties = mapper._iterate_polymorphic_properties( + with_polymorphic) + else: + poly_properties = mapper._polymorphic_properties + + quick_populators = {} + + path.set( + context.attributes, + "memoized_setups", + quick_populators) + + for value in poly_properties: + if only_load_props and \ + value.key not in only_load_props: + continue + value.setup( + context, + query_entity, + path, + adapter, + only_load_props=only_load_props, + column_collection=column_collection, + memoized_populators=quick_populators, + **kw + ) + + if polymorphic_discriminator is not None and \ + polymorphic_discriminator \ + is not mapper.polymorphic_on: + + if adapter: + pd = adapter.columns[polymorphic_discriminator] + else: + pd = polymorphic_discriminator + column_collection.append(pd) + + +def _instance_processor( + mapper, context, result, path, adapter, + only_load_props=None, refresh_state=None, + polymorphic_discriminator=None, + _polymorphic_from=None): """Produce a mapper level row processor callable which processes rows into mapped instances.""" @@ -249,288 +282,292 @@ def instance_processor(mapper, context, path, adapter, pk_cols = mapper.primary_key - if polymorphic_from or refresh_state: - polymorphic_on = None - else: - if polymorphic_discriminator is not None: - polymorphic_on = polymorphic_discriminator - else: - polymorphic_on = mapper.polymorphic_on - polymorphic_instances = util.PopulateDict( - _configure_subclass_mapper( - mapper, - context, path, adapter) - ) - - version_id_col = mapper.version_id_col - if adapter: pk_cols = [adapter.columns[c] for c in pk_cols] - if polymorphic_on is not None: - polymorphic_on = adapter.columns[polymorphic_on] - if version_id_col is not None: - version_id_col = adapter.columns[version_id_col] identity_class = mapper._identity_class - new_populators = [] - existing_populators = [] - eager_populators = [] + populators = collections.defaultdict(list) - load_path = context.query._current_path + path \ - if context.query._current_path.path \ - else path + props = mapper._prop_set + if only_load_props is not None: + props = props.intersection( + mapper._props[k] for k in only_load_props) - def populate_state(state, dict_, row, isnew, only_load_props): - if isnew: - if context.propagate_options: - state.load_options = context.propagate_options - if state.load_options: - state.load_path = load_path + quick_populators = path.get( + context.attributes, "memoized_setups", _none_set) - if not new_populators: - _populators(mapper, context, path, row, adapter, - new_populators, - existing_populators, - eager_populators - ) - - if isnew: - populators = new_populators + for prop in props: + if prop in quick_populators: + # this is an inlined path just for column-based attributes. + col = quick_populators[prop] + if col is _DEFER_FOR_STATE: + populators["new"].append( + (prop.key, prop._deferred_column_loader)) + elif col is _SET_DEFERRED_EXPIRED: + # note that in this path, we are no longer + # searching in the result to see if the column might + # be present in some unexpected way. 
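The deferred-column branches above are, roughly, what a ``deferred()`` mapping flows through at load time; a sketch in public API terms, with ``Report``, its columns, ``Base`` and ``session`` assumed::

    from sqlalchemy import Column, Integer, LargeBinary
    from sqlalchemy.orm import deferred, undefer

    class Report(Base):  # Base is an assumed declarative base
        __tablename__ = 'report'
        id = Column(Integer, primary_key=True)
        payload = deferred(Column(LargeBinary))

    # payload is normally omitted from the SELECT and handled by the
    # deferred paths; undefer() opts it back into eager column fetch
    reports = session.query(Report).options(undefer(Report.payload)).all()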
+ populators["expire"].append((prop.key, False)) + else: + if adapter: + col = adapter.columns[col] + getter = result._getter(col) + if getter: + populators["quick"].append((prop.key, getter)) + else: + # fall back to the ColumnProperty itself, which + # will iterate through all of its columns + # to see if one fits + prop.create_row_processor( + context, path, mapper, result, adapter, populators) else: - populators = existing_populators + prop.create_row_processor( + context, path, mapper, result, adapter, populators) - if only_load_props is None: - for key, populator in populators: - populator(state, dict_, row) - elif only_load_props: - for key, populator in populators: - if key in only_load_props: - populator(state, dict_, row) + propagate_options = context.propagate_options + if propagate_options: + load_path = context.query._current_path + path \ + if context.query._current_path.path else path session_identity_map = context.session.identity_map - listeners = mapper.dispatch - - # legacy events - I'd very much like to yank these totally - translate_row = listeners.translate_row or None - create_instance = listeners.create_instance or None - populate_instance = listeners.populate_instance or None - append_result = listeners.append_result or None - #### - populate_existing = context.populate_existing or mapper.always_refresh - invoke_all_eagers = context.invoke_all_eagers + load_evt = bool(mapper.class_manager.dispatch.load) + refresh_evt = bool(mapper.class_manager.dispatch.refresh) + instance_state = attributes.instance_state + instance_dict = attributes.instance_dict + session_id = context.session.hash_key + version_check = context.version_check + runid = context.runid + + if refresh_state: + refresh_identity_key = refresh_state.key + if refresh_identity_key is None: + # super-rare condition; a refresh is being called + # on a non-instance-key instance; this is meant to only + # occur within a flush() + refresh_identity_key = \ + mapper._identity_key_from_state(refresh_state) + else: + refresh_identity_key = None if mapper.allow_partial_pks: is_not_primary_key = _none_set.issuperset else: - is_not_primary_key = _none_set.issubset + is_not_primary_key = _none_set.intersection - def _instance(row, result): - if not new_populators and invoke_all_eagers: - _populators(mapper, context, path, row, adapter, - new_populators, - existing_populators, - eager_populators - ) + def _instance(row): - if translate_row: - for fn in translate_row: - ret = fn(mapper, context, row) - if ret is not EXT_CONTINUE: - row = ret - break - - if polymorphic_on is not None: - discriminator = row[polymorphic_on] - if discriminator is not None: - _instance = polymorphic_instances[discriminator] - if _instance: - return _instance(row, result) - - # determine identity key - if refresh_state: - identitykey = refresh_state.key - if identitykey is None: - # super-rare condition; a refresh is being called - # on a non-instance-key instance; this is meant to only - # occur within a flush() - identitykey = mapper._identity_key_from_state(refresh_state) + # determine the state that we'll be populating + if refresh_identity_key: + # fixed state that we're refreshing + state = refresh_state + instance = state.obj() + dict_ = instance_dict(instance) + isnew = state.runid != runid + currentload = True + loaded_instance = False else: + # look at the row, see if that identity is in the + # session, or we have to create a new one identitykey = ( identity_class, tuple([row[column] for column in pk_cols]) ) - instance = 
session_identity_map.get(identitykey) + instance = session_identity_map.get(identitykey) - if instance is not None: - state = attributes.instance_state(instance) - dict_ = attributes.instance_dict(instance) + if instance is not None: + # existing instance + state = instance_state(instance) + dict_ = instance_dict(instance) - isnew = state.runid != context.runid - currentload = not isnew - loaded_instance = False + isnew = state.runid != runid + currentload = not isnew + loaded_instance = False - if not currentload and \ - version_id_col is not None and \ - context.version_check and \ - mapper._get_state_attr_by_column( - state, - dict_, - mapper.version_id_col) != \ - row[version_id_col]: + if version_check and not currentload: + _validate_version_id(mapper, state, dict_, row, adapter) - raise orm_exc.StaleDataError( - "Instance '%s' has version id '%s' which " - "does not match database-loaded version id '%s'." - % (state_str(state), - mapper._get_state_attr_by_column( - state, dict_, - mapper.version_id_col), - row[version_id_col])) - elif refresh_state: - # out of band refresh_state detected (i.e. its not in the - # session.identity_map) honor it anyway. this can happen - # if a _get() occurs within save_obj(), such as - # when eager_defaults is True. - state = refresh_state - instance = state.obj() - dict_ = attributes.instance_dict(instance) - isnew = state.runid != context.runid - currentload = True - loaded_instance = False - else: - # check for non-NULL values in the primary key columns, - # else no entity is returned for the row - if is_not_primary_key(identitykey[1]): - return None - - isnew = True - currentload = True - loaded_instance = True - - if create_instance: - for fn in create_instance: - instance = fn(mapper, context, - row, mapper.class_) - if instance is not EXT_CONTINUE: - manager = attributes.manager_of_class( - instance.__class__) - # TODO: if manager is None, raise a friendly error - # about returning instances of unmapped types - manager.setup_instance(instance) - break - else: - instance = mapper.class_manager.new_instance() else: + # create a new instance + + # check for non-NULL values in the primary key columns, + # else no entity is returned for the row + if is_not_primary_key(identitykey[1]): + return None + + isnew = True + currentload = True + loaded_instance = True + instance = mapper.class_manager.new_instance() - dict_ = attributes.instance_dict(instance) - state = attributes.instance_state(instance) - state.key = identitykey + dict_ = instance_dict(instance) + state = instance_state(instance) + state.key = identitykey - # attach instance to session. - state.session_id = context.session.hash_key - session_identity_map.add(state) + # attach instance to session. + state.session_id = session_id + session_identity_map._add_unpresent(state, identitykey) + # populate. this looks at whether this state is new + # for this load or was existing, and whether or not this + # row is the first row with this identity. if currentload or populate_existing: - # state is being fully loaded, so populate. - # add to the "context.progress" collection. - if isnew: - state.runid = context.runid - context.progress[state] = dict_ + # full population routines. 
Objects here are either + # just created, or we are doing a populate_existing - if populate_instance: - for fn in populate_instance: - ret = fn(mapper, context, row, state, - only_load_props=only_load_props, - instancekey=identitykey, isnew=isnew) - if ret is not EXT_CONTINUE: - break - else: - populate_state(state, dict_, row, isnew, only_load_props) - else: - populate_state(state, dict_, row, isnew, only_load_props) + if isnew and propagate_options: + state.load_options = propagate_options + state.load_path = load_path - if loaded_instance: - state.manager.dispatch.load(state, context) - elif isnew: - state.manager.dispatch.refresh(state, context, only_load_props) - - elif state in context.partials or state.unloaded or eager_populators: - # state is having a partial set of its attributes - # refreshed. Populate those attributes, - # and add to the "context.partials" collection. - if state in context.partials: - isnew = False - (d_, attrs) = context.partials[state] - else: - isnew = True - attrs = state.unloaded - context.partials[state] = (dict_, attrs) - - if populate_instance: - for fn in populate_instance: - ret = fn(mapper, context, row, state, - only_load_props=attrs, - instancekey=identitykey, isnew=isnew) - if ret is not EXT_CONTINUE: - break - else: - populate_state(state, dict_, row, isnew, attrs) - else: - populate_state(state, dict_, row, isnew, attrs) - - for key, pop in eager_populators: - if key not in state.unloaded: - pop(state, dict_, row) + _populate_full( + context, row, state, dict_, isnew, + loaded_instance, populate_existing, populators) if isnew: - state.manager.dispatch.refresh(state, context, attrs) + if loaded_instance and load_evt: + state.manager.dispatch.load(state, context) + elif refresh_evt: + state.manager.dispatch.refresh( + state, context, only_load_props) - if result is not None: - if append_result: - for fn in append_result: - if fn(mapper, context, row, state, - result, instancekey=identitykey, - isnew=isnew) is not EXT_CONTINUE: - break - else: - result.append(instance) - else: - result.append(instance) + if populate_existing or state.modified: + if refresh_state and only_load_props: + state._commit(dict_, only_load_props) + else: + state._commit_all(dict_, session_identity_map) + + else: + # partial population routines, for objects that were already + # in the Session, but a row matches them; apply eager loaders + # on existing objects, etc. + unloaded = state.unloaded + isnew = state not in context.partials + + if not isnew or unloaded or populators["eager"]: + # state is having a partial set of its attributes + # refreshed. Populate those attributes, + # and add to the "context.partials" collection. 
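The ``populate_existing`` flag consulted in ``_populate_full()`` corresponds to the public ``Query.populate_existing()`` method; a brief sketch, assuming a configured ``session`` and mapped ``User``::

    # re-run full population on instances already present in the
    # identity map, overwriting pending (unflushed) in-memory changes
    users = session.query(User).populate_existing().all()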
+ + to_load = _populate_partial( + context, row, state, dict_, isnew, + unloaded, populators) + + if isnew: + if refresh_evt: + state.manager.dispatch.refresh( + state, context, to_load) + + state._commit(dict_, to_load) return instance + + if mapper.polymorphic_map and not _polymorphic_from and not refresh_state: + # if we are doing polymorphic, dispatch to a different _instance() + # method specific to the subclass mapper + _instance = _decorate_polymorphic_switch( + _instance, context, mapper, result, path, + polymorphic_discriminator, adapter) + return _instance -def _populators(mapper, context, path, row, adapter, - new_populators, existing_populators, eager_populators): - """Produce a collection of attribute level row processor - callables.""" +def _populate_full( + context, row, state, dict_, isnew, + loaded_instance, populate_existing, populators): + if isnew: + # first time we are seeing a row with this identity. + state.runid = context.runid - delayed_populators = [] - pops = (new_populators, existing_populators, delayed_populators, - eager_populators) - - for prop in mapper._props.values(): - - for i, pop in enumerate(prop.create_row_processor( - context, - path, - mapper, row, adapter)): - if pop is not None: - pops[i].append((prop.key, pop)) - - if delayed_populators: - new_populators.extend(delayed_populators) + for key, getter in populators["quick"]: + dict_[key] = getter(row) + if populate_existing: + for key, set_callable in populators["expire"]: + dict_.pop(key, None) + if set_callable: + state.expired_attributes.add(key) + else: + for key, set_callable in populators["expire"]: + if set_callable: + state.expired_attributes.add(key) + for key, populator in populators["new"]: + populator(state, dict_, row) + for key, populator in populators["delayed"]: + populator(state, dict_, row) + else: + # have already seen rows with this identity. + for key, populator in populators["existing"]: + populator(state, dict_, row) -def _configure_subclass_mapper(mapper, context, path, adapter): - """Produce a mapper level row processor callable factory for mappers - inheriting this one.""" +def _populate_partial( + context, row, state, dict_, isnew, + unloaded, populators): + if not isnew: + to_load = context.partials[state] + for key, populator in populators["existing"]: + if key in to_load: + populator(state, dict_, row) + else: + to_load = unloaded + context.partials[state] = to_load + + for key, getter in populators["quick"]: + if key in to_load: + dict_[key] = getter(row) + for key, set_callable in populators["expire"]: + if key in to_load: + dict_.pop(key, None) + if set_callable: + state.expired_attributes.add(key) + for key, populator in populators["new"]: + if key in to_load: + populator(state, dict_, row) + for key, populator in populators["delayed"]: + if key in to_load: + populator(state, dict_, row) + for key, populator in populators["eager"]: + if key not in unloaded: + populator(state, dict_, row) + + return to_load + + +def _validate_version_id(mapper, state, dict_, row, adapter): + + version_id_col = mapper.version_id_col + + if version_id_col is None: + return + + if adapter: + version_id_col = adapter.columns[version_id_col] + + if mapper._get_state_attr_by_column( + state, dict_, mapper.version_id_col) != row[version_id_col]: + raise orm_exc.StaleDataError( + "Instance '%s' has version id '%s' which " + "does not match database-loaded version id '%s'." 
+ % (state_str(state), mapper._get_state_attr_by_column( + state, dict_, mapper.version_id_col), + row[version_id_col])) + + +def _decorate_polymorphic_switch( + instance_fn, context, mapper, result, path, + polymorphic_discriminator, adapter): + if polymorphic_discriminator is not None: + polymorphic_on = polymorphic_discriminator + else: + polymorphic_on = mapper.polymorphic_on + if polymorphic_on is None: + return instance_fn + + if adapter: + polymorphic_on = adapter.columns[polymorphic_on] def configure_subclass_mapper(discriminator): try: @@ -539,16 +576,26 @@ def _configure_subclass_mapper(mapper, context, path, adapter): raise AssertionError( "No such polymorphic_identity %r is defined" % discriminator) - if sub_mapper is mapper: - return None + else: + if sub_mapper is mapper: + return None - return instance_processor( - sub_mapper, - context, - path, - adapter, - polymorphic_from=mapper) - return configure_subclass_mapper + return _instance_processor( + sub_mapper, context, result, + path, adapter, _polymorphic_from=mapper) + + polymorphic_instances = util.PopulateDict( + configure_subclass_mapper + ) + + def polymorphic_instance(row): + discriminator = row[polymorphic_on] + if discriminator is not None: + _instance = polymorphic_instances[discriminator] + if _instance: + return _instance(row) + return instance_fn(row) + return polymorphic_instance def load_scalar_attributes(mapper, state, attribute_names): @@ -567,10 +614,17 @@ def load_scalar_attributes(mapper, state, attribute_names): result = False if mapper.inherits and not mapper.concrete: + # because we are using Core to produce a select() that we + # pass to the Query, we aren't calling setup() for mapped + # attributes; in 1.0 this means deferred attrs won't get loaded + # by default statement = mapper._optimized_get_statement(state, attribute_names) if statement is not None: result = load_on_ident( - session.query(mapper).from_statement(statement), + session.query(mapper). + options( + strategy_options.Load(mapper).undefer("*") + ).from_statement(statement), None, only_load_props=attribute_names, refresh_state=state @@ -596,10 +650,11 @@ def load_scalar_attributes(mapper, state, attribute_names): if (_none_set.issubset(identity_key) and not mapper.allow_partial_pks) or \ _none_set.issuperset(identity_key): - util.warn("Instance %s to be refreshed doesn't " - "contain a full primary key - can't be refreshed " - "(and shouldn't be expired, either)." - % state_str(state)) + util.warn_limited( + "Instance %s to be refreshed doesn't " + "contain a full primary key - can't be refreshed " + "(and shouldn't be expired, either).", + state_str(state)) return result = load_on_ident( diff --git a/lib/python3.4/site-packages/sqlalchemy/orm/mapper.py b/lib/python3.4/site-packages/sqlalchemy/orm/mapper.py index 8fa2443..97e4638 100644 --- a/lib/python3.4/site-packages/sqlalchemy/orm/mapper.py +++ b/lib/python3.4/site-packages/sqlalchemy/orm/mapper.py @@ -1,5 +1,5 @@ # orm/mapper.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -25,7 +25,8 @@ from .. import sql, util, log, exc as sa_exc, event, schema, inspection from ..sql import expression, visitors, operators, util as sql_util from . import instrumentation, attributes, exc as orm_exc, loading from . import properties -from .interfaces import MapperProperty, _InspectionAttr, _MappedAttribute +from . 
import util as orm_util +from .interfaces import MapperProperty, InspectionAttr, _MappedAttribute from .base import _class_to_mapper, _state_mapper, class_mapper, \ state_str, _INSTRUMENTOR @@ -51,8 +52,7 @@ _CONFIGURE_MUTEX = util.threading.RLock() @inspection._self_inspects @log.class_logger -class Mapper(_InspectionAttr): - +class Mapper(InspectionAttr): """Define the correlation of class attributes to database table columns. @@ -426,6 +426,12 @@ class Mapper(_InspectionAttr): thus persisting the value to the ``discriminator`` column in the database. + .. warning:: + + Currently, **only one discriminator column may be set**, typically + on the base-most class in the hierarchy. "Cascading" polymorphic + columns are not yet supported. + .. seealso:: :ref:`inheritance_toplevel` @@ -968,6 +974,15 @@ class Mapper(_InspectionAttr): self._all_tables = self.inherits._all_tables if self.polymorphic_identity is not None: + if self.polymorphic_identity in self.polymorphic_map: + util.warn( + "Reassigning polymorphic association for identity %r " + "from %r to %r: Check for duplicate use of %r as " + "value for polymorphic_identity." % + (self.polymorphic_identity, + self.polymorphic_map[self.polymorphic_identity], + self, self.polymorphic_identity) + ) self.polymorphic_map[self.polymorphic_identity] = self else: @@ -1080,6 +1095,7 @@ class Mapper(_InspectionAttr): auto-session attachment logic. """ + manager = attributes.manager_of_class(self.class_) if self.non_primary: @@ -1109,6 +1125,8 @@ class Mapper(_InspectionAttr): _mapper_registry[self] = True + # note: this *must be called before instrumentation.register_class* + # to maintain the documented behavior of instrument_class self.dispatch.instrument_class(self, self.class_) if manager is None: @@ -1127,7 +1145,6 @@ class Mapper(_InspectionAttr): event.listen(manager, 'first_init', _event_on_first_init, raw=True) event.listen(manager, 'init', _event_on_init, raw=True) - event.listen(manager, 'resurrect', _event_on_resurrect, raw=True) for key, method in util.iterate_attributes(self.class_): if isinstance(method, types.FunctionType): @@ -1189,14 +1206,6 @@ class Mapper(_InspectionAttr): util.ordered_column_set(t.c).\ intersection(all_cols) - # determine cols that aren't expressed within our tables; mark these - # as "read only" properties which are refreshed upon INSERT/UPDATE - self._readonly_props = set( - self._columntoproperty[col] - for col in self._columntoproperty - if not hasattr(col, 'table') or - col.table not in self._cols_by_table) - # if explicit PK argument sent, add those columns to the # primary key mappings if self._primary_key_argument: @@ -1247,6 +1256,15 @@ class Mapper(_InspectionAttr): self.primary_key = tuple(primary_key) self._log("Identified primary key columns: %s", primary_key) + # determine cols that aren't expressed within our tables; mark these + # as "read only" properties which are refreshed upon INSERT/UPDATE + self._readonly_props = set( + self._columntoproperty[col] + for col in self._columntoproperty + if self._columntoproperty[col] not in self._identity_key_props and + (not hasattr(col, 'table') or + col.table not in self._cols_by_table)) + def _configure_properties(self): # Column and other ClauseElement objects which are mapped @@ -1452,13 +1470,11 @@ class Mapper(_InspectionAttr): if polymorphic_key in dict_ and \ dict_[polymorphic_key] not in \ mapper._acceptable_polymorphic_identities: - util.warn( + util.warn_limited( "Flushing object %s with " "incompatible polymorphic identity %r; the " - "object may 
not refresh and/or load correctly" % ( - state_str(state), - dict_[polymorphic_key] - ) + "object may not refresh and/or load correctly", + (state_str(state), dict_[polymorphic_key]) ) self._set_polymorphic_identity = _set_polymorphic_identity @@ -1489,6 +1505,10 @@ class Mapper(_InspectionAttr): return identities + @_memoized_configured_property + def _prop_set(self): + return frozenset(self._props.values()) + def _adapt_inherited_property(self, key, prop, init): if not self.concrete: self._configure_property(key, prop, init=False, setparent=False) @@ -1578,6 +1598,8 @@ class Mapper(_InspectionAttr): self, prop, )) + oldprop = self._props[key] + self._path_registry.pop(oldprop, None) self._props[key] = prop @@ -1892,6 +1914,66 @@ class Mapper(_InspectionAttr): """ + @_memoized_configured_property + def _insert_cols_as_none(self): + return dict( + ( + table, + frozenset( + col.key for col in columns + if not col.primary_key and + not col.server_default and not col.default) + ) + for table, columns in self._cols_by_table.items() + ) + + @_memoized_configured_property + def _propkey_to_col(self): + return dict( + ( + table, + dict( + (self._columntoproperty[col].key, col) + for col in columns + ) + ) + for table, columns in self._cols_by_table.items() + ) + + @_memoized_configured_property + def _pk_keys_by_table(self): + return dict( + ( + table, + frozenset([col.key for col in pks]) + ) + for table, pks in self._pks_by_table.items() + ) + + @_memoized_configured_property + def _server_default_cols(self): + return dict( + ( + table, + frozenset([ + col.key for col in columns + if col.server_default is not None]) + ) + for table, columns in self._cols_by_table.items() + ) + + @_memoized_configured_property + def _server_onupdate_default_cols(self): + return dict( + ( + table, + frozenset([ + col.key for col in columns + if col.server_onupdate is not None]) + ) + for table, columns in self._cols_by_table.items() + ) + @property def selectable(self): """The :func:`.select` construct this :class:`.Mapper` selects from @@ -1968,6 +2050,17 @@ class Mapper(_InspectionAttr): returned, including :attr:`.synonyms`, :attr:`.column_attrs`, :attr:`.relationships`, and :attr:`.composites`. + .. warning:: + + The :attr:`.Mapper.attrs` accessor namespace is an + instance of :class:`.OrderedProperties`. This is + a dictionary-like object which includes a small number of + named methods such as :meth:`.OrderedProperties.items` + and :meth:`.OrderedProperties.values`. When + accessing attributes dynamically, favor using the dict-access + scheme, e.g. ``mapper.attrs[somename]`` over + ``getattr(mapper.attrs, somename)`` to avoid name collisions. + .. seealso:: :attr:`.Mapper.all_orm_descriptors` @@ -1979,7 +2072,7 @@ class Mapper(_InspectionAttr): @util.memoized_property def all_orm_descriptors(self): - """A namespace of all :class:`._InspectionAttr` attributes associated + """A namespace of all :class:`.InspectionAttr` attributes associated with the mapped class. These attributes are in all cases Python :term:`descriptors` associated with the mapped class or its superclasses. This namespace includes attributes that are mapped to the class as well as attributes declared by extension modules. It includes any Python descriptor type that inherits from - :class:`._InspectionAttr`. This includes + :class:`.InspectionAttr`. This includes :class:`.QueryableAttribute`, as well as extension types such as :class:`.hybrid_property`, :class:`.hybrid_method` and :class:`.AssociationProxy`.
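The warning repeated in these accessor docstrings is easiest to see concretely. A minimal sketch of the name collision it describes, using a hypothetical mapped class whose column is named ``items``; none of these names come from the patch itself::

    from sqlalchemy import Column, Integer, String, inspect
    from sqlalchemy.ext.declarative import declarative_base

    Base = declarative_base()

    class Order(Base):
        __tablename__ = 'orders'
        id = Column(Integer, primary_key=True)
        items = Column(String)  # collides with OrderedProperties.items()

    mapper = inspect(Order)

    # dict-style access returns the mapped ColumnProperty, as intended
    prop = mapper.attrs['items']

    # attribute-style access silently returns the namespace's own
    # items() method rather than the mapped attribute
    method = getattr(mapper.attrs, 'items')

    assert prop is not method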
To distinguish between mapped attributes and extension attributes, - the attribute :attr:`._InspectionAttr.extension_type` will refer + the attribute :attr:`.InspectionAttr.extension_type` will refer to a constant that distinguishes between different extension types. When dealing with a :class:`.QueryableAttribute`, the @@ -2003,6 +2096,17 @@ class Mapper(_InspectionAttr): referring to the collection of mapped properties via :attr:`.Mapper.attrs`. + .. warning:: + + The :attr:`.Mapper.all_orm_descriptors` accessor namespace is an + instance of :class:`.OrderedProperties`. This is + a dictionary-like object which includes a small number of + named methods such as :meth:`.OrderedProperties.items` + and :meth:`.OrderedProperties.values`. When + accessing attributes dynamically, favor using the dict-access + scheme, e.g. ``mapper.attrs[somename]`` over + ``getattr(mapper.attrs, somename)`` to avoid name collisions. + .. versionadded:: 0.8.0 .. seealso:: @@ -2044,6 +2148,17 @@ class Mapper(_InspectionAttr): """Return a namespace of all :class:`.RelationshipProperty` properties maintained by this :class:`.Mapper`. + .. warning:: + + The :attr:`.Mapper.relationships` accessor namespace is an + instance of :class:`.OrderedProperties`. This is + a dictionary-like object which includes a small number of + named methods such as :meth:`.OrderedProperties.items` + and :meth:`.OrderedProperties.values`. When + accessing attributes dynamically, favor using the dict-access + scheme, e.g. ``mapper.attrs[somename]`` over + ``getattr(mapper.attrs, somename)`` to avoid name collisions. + .. seealso:: :attr:`.Mapper.attrs` - namespace of all :class:`.MapperProperty` @@ -2238,6 +2353,16 @@ class Mapper(_InspectionAttr): def primary_base_mapper(self): return self.class_manager.mapper.base_mapper + def _result_has_identity_key(self, result, adapter=None): + pk_cols = self.primary_key + if adapter: + pk_cols = [adapter.columns[c] for c in pk_cols] + for col in pk_cols: + if not result._has_key(col): + return False + else: + return True + def identity_key_from_row(self, row, adapter=None): """Return an identity-map key for use in storing/retrieving an item from the identity map. @@ -2286,7 +2411,7 @@ class Mapper(_InspectionAttr): manager = state.manager return self._identity_class, tuple([ manager[self._columntoproperty[col].key]. - impl.get(state, dict_, attributes.PASSIVE_OFF) + impl.get(state, dict_, attributes.PASSIVE_RETURN_NEVER_SET) for col in self.primary_key ]) @@ -2301,22 +2426,50 @@ class Mapper(_InspectionAttr): """ state = attributes.instance_state(instance) - return self._primary_key_from_state(state) + return self._primary_key_from_state(state, attributes.PASSIVE_OFF) - def _primary_key_from_state(self, state): + def _primary_key_from_state( + self, state, passive=attributes.PASSIVE_RETURN_NEVER_SET): dict_ = state.dict manager = state.manager return [ - manager[self._columntoproperty[col].key]. - impl.get(state, dict_, attributes.PASSIVE_OFF) - for col in self.primary_key + manager[prop.key].
+ impl.get(state, dict_, passive) + for prop in self._identity_key_props ] - def _get_state_attr_by_column(self, state, dict_, column, - passive=attributes.PASSIVE_OFF): + @_memoized_configured_property + def _identity_key_props(self): + return [self._columntoproperty[col] for col in self.primary_key] + + @_memoized_configured_property + def _all_pk_props(self): + collection = set() + for table in self.tables: + collection.update(self._pks_by_table[table]) + return collection + + @_memoized_configured_property + def _should_undefer_in_wildcard(self): + cols = set(self.primary_key) + if self.polymorphic_on is not None: + cols.add(self.polymorphic_on) + return cols + + @_memoized_configured_property + def _primary_key_propkeys(self): + return set([prop.key for prop in self._all_pk_props]) + + def _get_state_attr_by_column( + self, state, dict_, column, + passive=attributes.PASSIVE_RETURN_NEVER_SET): prop = self._columntoproperty[column] return state.manager[prop.key].impl.get(state, dict_, passive=passive) + def _set_committed_state_attr_by_column(self, state, dict_, column, value): + prop = self._columntoproperty[column] + state.manager[prop.key].impl.set_committed_value(state, dict_, value) + def _set_state_attr_by_column(self, state, dict_, column, value): prop = self._columntoproperty[column] state.manager[prop.key].impl.set(state, dict_, value, None) @@ -2324,14 +2477,12 @@ class Mapper(_InspectionAttr): def _get_committed_attr_by_column(self, obj, column): state = attributes.instance_state(obj) dict_ = attributes.instance_dict(obj) - return self._get_committed_state_attr_by_column(state, dict_, column) + return self._get_committed_state_attr_by_column( + state, dict_, column, passive=attributes.PASSIVE_OFF) def _get_committed_state_attr_by_column( - self, - state, - dict_, - column, - passive=attributes.PASSIVE_OFF): + self, state, dict_, column, + passive=attributes.PASSIVE_RETURN_NEVER_SET): prop = self._columntoproperty[column] return state.manager[prop.key].impl.\ @@ -2372,7 +2523,7 @@ class Mapper(_InspectionAttr): state, state.dict, leftcol, passive=attributes.PASSIVE_NO_INITIALIZE) - if leftval is attributes.PASSIVE_NO_RESULT or leftval is None: + if leftval in orm_util._none_set: raise ColumnsNotAvailable() binary.left = sql.bindparam(None, leftval, type_=binary.right.type) @@ -2381,8 +2532,7 @@ class Mapper(_InspectionAttr): state, state.dict, rightcol, passive=attributes.PASSIVE_NO_INITIALIZE) - if rightval is attributes.PASSIVE_NO_RESULT or \ - rightval is None: + if rightval in orm_util._none_set: raise ColumnsNotAvailable() binary.right = sql.bindparam(None, rightval, type_=binary.right.type) @@ -2419,15 +2569,24 @@ class Mapper(_InspectionAttr): for all relationships that meet the given cascade rule. :param type_: - The name of the cascade rule (i.e. save-update, delete, - etc.) + The name of the cascade rule (i.e. ``"save-update"``, ``"delete"``, + etc.). + + .. note:: the ``"all"`` cascade is not accepted here. For a generic + object traversal function, see :ref:`faq_walk_objects`. :param state: The lead InstanceState. Child items will be processed per the relationships defined for this object's mapper. - the return value are object instances; this provides a strong - reference so that they don't fall out of scope immediately. + :return: the method yields individual object instances. + + .. seealso:: + + :ref:`unitofwork_cascades` + + :ref:`faq_walk_objects` - illustrates a generic function to + traverse all objects without relying on cascades.
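The :ref:`faq_walk_objects` recipe referenced above amounts to a traversal along :attr:`.Mapper.relationships` rather than along cascade settings; a rough sketch only, with illustrative names::

    from sqlalchemy import inspect

    def walk(obj, _seen=None):
        """Yield obj plus every object reachable via its relationships."""
        if _seen is None:
            _seen = set()
        if id(obj) in _seen:
            return
        _seen.add(id(obj))
        yield obj
        for rel in inspect(obj).mapper.relationships:
            related = getattr(obj, rel.key)
            if related is None:
                continue
            # scalar and collection relationships are walked uniformly
            for item in (related if rel.uselist else [related]):
                for descendant in walk(item, _seen):
                    yield descendant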
""" visited_states = set() @@ -2544,7 +2703,33 @@ def configure_mappers(): have been constructed thus far. This function can be called any number of times, but in - most cases is handled internally. + most cases is invoked automatically, the first time mappings are used, + as well as whenever mappings are used and additional not-yet-configured + mappers have been constructed. + + Points at which this occur include when a mapped class is instantiated + into an instance, as well as when the :meth:`.Session.query` method + is used. + + The :func:`.configure_mappers` function provides several event hooks + that can be used to augment its functionality. These methods include: + + * :meth:`.MapperEvents.before_configured` - called once before + :func:`.configure_mappers` does any work; this can be used to establish + additional options, properties, or related mappings before the operation + proceeds. + + * :meth:`.MapperEvents.mapper_configured` - called as each indivudal + :class:`.Mapper` is configured within the process; will include all + mapper state except for backrefs set up by other mappers that are still + to be configured. + + * :meth:`.MapperEvents.after_configured` - called once after + :func:`.configure_mappers` is complete; at this stage, all + :class:`.Mapper` objects that are known to SQLAlchemy will be fully + configured. Note that the calling application may still have other + mappings that haven't been produced yet, such as if they are in modules + as yet unimported. """ @@ -2563,7 +2748,7 @@ def configure_mappers(): if not Mapper._new_mappers: return - Mapper.dispatch(Mapper).before_configured() + Mapper.dispatch._for_class(Mapper).before_configured() # initialize properties on all mappers # note that _mapper_registry is unordered, which # may randomly conceal/reveal issues related to @@ -2584,7 +2769,7 @@ def configure_mappers(): mapper._expire_memoizations() mapper.dispatch.mapper_configured( mapper, mapper.class_) - except: + except Exception: exc = sys.exc_info()[1] if not hasattr(exc, '_configure_failed'): mapper._configure_failed = exc @@ -2595,7 +2780,7 @@ def configure_mappers(): _already_compiling = False finally: _CONFIGURE_MUTEX.release() - Mapper.dispatch(Mapper).after_configured() + Mapper.dispatch._for_class(Mapper).after_configured() def reconstructor(fn): @@ -2704,20 +2889,11 @@ def _event_on_init(state, args, kwargs): instrumenting_mapper._set_polymorphic_identity(state) -def _event_on_resurrect(state): - # re-populate the primary key elements - # of the dict based on the mapping. - instrumenting_mapper = state.manager.info.get(_INSTRUMENTOR) - if instrumenting_mapper: - for col, val in zip(instrumenting_mapper.primary_key, state.key[1]): - instrumenting_mapper._set_state_attr_by_column( - state, state.dict, col, val) - - class _ColumnMapping(dict): - """Error reporting helper for mapper._columntoproperty.""" + __slots__ = 'mapper', + def __init__(self, mapper): self.mapper = mapper diff --git a/lib/python3.4/site-packages/sqlalchemy/orm/path_registry.py b/lib/python3.4/site-packages/sqlalchemy/orm/path_registry.py index f10a125..cf18465 100644 --- a/lib/python3.4/site-packages/sqlalchemy/orm/path_registry.py +++ b/lib/python3.4/site-packages/sqlalchemy/orm/path_registry.py @@ -1,5 +1,5 @@ # orm/path_registry.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -13,6 +13,9 @@ from .. import util from .. 
import exc from itertools import chain from .base import class_mapper +import logging + +log = logging.getLogger(__name__) def _unreduce_path(path): @@ -49,14 +52,19 @@ class PathRegistry(object): """ + is_token = False + is_root = False + def __eq__(self, other): return other is not None and \ self.path == other.path def set(self, attributes, key, value): + log.debug("set '%s' on path '%s' to '%s'", key, self, value) attributes[(key, self.path)] = value def setdefault(self, attributes, key, value): + log.debug("setdefault '%s' on path '%s' to '%s'", key, self, value) attributes.setdefault((key, self.path), value) def get(self, attributes, key, value=None): @@ -148,6 +156,8 @@ class RootRegistry(PathRegistry): """ path = () has_entity = False + is_aliased_class = False + is_root = True def __getitem__(self, entity): return entity._path_registry @@ -163,6 +173,15 @@ class TokenRegistry(PathRegistry): has_entity = False + is_token = True + + def generate_for_superclasses(self): + if not self.parent.is_aliased_class and not self.parent.is_root: + for ent in self.parent.mapper.iterate_to_root(): + yield TokenRegistry(self.parent.parent[ent], self.token) + else: + yield self + def __getitem__(self, entity): raise NotImplementedError() @@ -184,6 +203,11 @@ class PropRegistry(PathRegistry): self.parent = parent self.path = parent.path + (prop,) + def __str__(self): + return " -> ".join( + str(elem) for elem in self.path + ) + @util.memoized_property def has_entity(self): return hasattr(self.prop, "mapper") diff --git a/lib/python3.4/site-packages/sqlalchemy/orm/persistence.py b/lib/python3.4/site-packages/sqlalchemy/orm/persistence.py index f3c6959..5d69f51 100644 --- a/lib/python3.4/site-packages/sqlalchemy/orm/persistence.py +++ b/lib/python3.4/site-packages/sqlalchemy/orm/persistence.py @@ -1,5 +1,5 @@ # orm/persistence.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -15,15 +15,114 @@ in unitofwork.py. """ import operator -from itertools import groupby -from .. import sql, util, exc as sa_exc, schema +from itertools import groupby, chain +from .. import sql, util, exc as sa_exc from . import attributes, sync, exc as orm_exc, evaluator -from .base import _state_mapper, state_str, _attr_as_key +from .base import state_str, _attr_as_key, _entity_descriptor from ..sql import expression +from ..sql.base import _from_objects from . 
import loading -def save_obj(base_mapper, states, uowtransaction, single=False): +def _bulk_insert( + mapper, mappings, session_transaction, isstates, return_defaults): + base_mapper = mapper.base_mapper + + cached_connections = _cached_connection_dict(base_mapper) + + if session_transaction.session.connection_callable: + raise NotImplementedError( + "connection_callable / per-instance sharding " + "not supported in bulk_insert()") + + if isstates: + if return_defaults: + states = [(state, state.dict) for state in mappings] + mappings = [dict_ for (state, dict_) in states] + else: + mappings = [state.dict for state in mappings] + else: + mappings = list(mappings) + + connection = session_transaction.connection(base_mapper) + for table, super_mapper in base_mapper._sorted_tables.items(): + if not mapper.isa(super_mapper): + continue + + records = ( + (None, state_dict, params, mapper, + connection, value_params, has_all_pks, has_all_defaults) + for + state, state_dict, params, mp, + conn, value_params, has_all_pks, + has_all_defaults in _collect_insert_commands(table, ( + (None, mapping, mapper, connection) + for mapping in mappings), + bulk=True, return_defaults=return_defaults + ) + ) + _emit_insert_statements(base_mapper, None, + cached_connections, + super_mapper, table, records, + bookkeeping=return_defaults) + + if return_defaults and isstates: + identity_cls = mapper._identity_class + identity_props = [p.key for p in mapper._identity_key_props] + for state, dict_ in states: + state.key = ( + identity_cls, + tuple([dict_[key] for key in identity_props]) + ) + + +def _bulk_update(mapper, mappings, session_transaction, + isstates, update_changed_only): + base_mapper = mapper.base_mapper + + cached_connections = _cached_connection_dict(base_mapper) + + def _changed_dict(mapper, state): + return dict( + (k, v) + for k, v in state.dict.items() if k in state.committed_state or k + in mapper._primary_key_propkeys + ) + + if isstates: + if update_changed_only: + mappings = [_changed_dict(mapper, state) for state in mappings] + else: + mappings = [state.dict for state in mappings] + else: + mappings = list(mappings) + + if session_transaction.session.connection_callable: + raise NotImplementedError( + "connection_callable / per-instance sharding " + "not supported in bulk_update()") + + connection = session_transaction.connection(base_mapper) + + for table, super_mapper in base_mapper._sorted_tables.items(): + if not mapper.isa(super_mapper): + continue + + records = _collect_update_commands(None, table, ( + (None, mapping, mapper, connection, + (mapping[mapper._version_id_prop.key] + if mapper._version_id_prop else None)) + for mapping in mappings + ), bulk=True) + + _emit_update_statements(base_mapper, None, + cached_connections, + super_mapper, table, records, + bookkeeping=False) + + +def save_obj( + base_mapper, states, uowtransaction, single=False): """Issue ``INSERT`` and/or ``UPDATE`` statements for a list of objects. 
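The ``_bulk_insert()`` and ``_bulk_update()`` helpers added above are the engine behind the ``Session.bulk_insert_mappings()`` and ``Session.bulk_update_mappings()`` methods new in 1.0. A minimal usage sketch, assuming a configured ``Session`` named ``session`` and a mapped ``User`` class with ``id`` and ``name`` columns::

    session.bulk_insert_mappings(
        User,
        [{"name": "user %d" % i} for i in range(10000)],
        return_defaults=False,  # skip per-row primary key bookkeeping for speed
    )

    # for UPDATE, each dictionary must carry the primary key; the other
    # keys present become the SET clause
    session.bulk_update_mappings(User, [{"id": 1, "name": "renamed"}])

    session.commit()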
@@ -40,32 +139,54 @@ def save_obj(base_mapper, states, uowtransaction, single=False): save_obj(base_mapper, [state], uowtransaction, single=True) return - states_to_insert, states_to_update = _organize_states_for_save( - base_mapper, - states, - uowtransaction) - + states_to_update = [] + states_to_insert = [] cached_connections = _cached_connection_dict(base_mapper) + for (state, dict_, mapper, connection, + has_identity, + row_switch, update_version_id) in _organize_states_for_save( + base_mapper, states, uowtransaction + ): + if has_identity or row_switch: + states_to_update.append( + (state, dict_, mapper, connection, update_version_id) + ) + else: + states_to_insert.append( + (state, dict_, mapper, connection) + ) + for table, mapper in base_mapper._sorted_tables.items(): - insert = _collect_insert_commands(base_mapper, uowtransaction, - table, states_to_insert) + if table not in mapper._pks_by_table: + continue + insert = _collect_insert_commands(table, states_to_insert) - update = _collect_update_commands(base_mapper, uowtransaction, - table, states_to_update) + update = _collect_update_commands( + uowtransaction, table, states_to_update) - if update: - _emit_update_statements(base_mapper, uowtransaction, - cached_connections, - mapper, table, update) + _emit_update_statements(base_mapper, uowtransaction, + cached_connections, + mapper, table, update) - if insert: - _emit_insert_statements(base_mapper, uowtransaction, - cached_connections, - mapper, table, insert) + _emit_insert_statements(base_mapper, uowtransaction, + cached_connections, + mapper, table, insert) - _finalize_insert_update_commands(base_mapper, uowtransaction, - states_to_insert, states_to_update) + _finalize_insert_update_commands( + base_mapper, uowtransaction, + chain( + ( + (state, state_dict, mapper, connection, False) + for state, state_dict, mapper, connection in states_to_insert + ), + ( + (state, state_dict, mapper, connection, True) + for state, state_dict, mapper, connection, + update_version_id in states_to_update + ) + ) + ) def post_update(base_mapper, states, uowtransaction, post_update_cols): @@ -75,19 +196,28 @@ def post_update(base_mapper, states, uowtransaction, post_update_cols): """ cached_connections = _cached_connection_dict(base_mapper) - states_to_update = _organize_states_for_post_update( + states_to_update = list(_organize_states_for_post_update( base_mapper, - states, uowtransaction) + states, uowtransaction)) for table, mapper in base_mapper._sorted_tables.items(): + if table not in mapper._pks_by_table: + continue + + update = ( + (state, state_dict, sub_mapper, connection) + for + state, state_dict, sub_mapper, connection in states_to_update + if table in sub_mapper._pks_by_table + ) + update = _collect_post_update_commands(base_mapper, uowtransaction, - table, states_to_update, + table, update, post_update_cols) - if update: - _emit_post_update_statements(base_mapper, uowtransaction, - cached_connections, - mapper, table, update) + _emit_post_update_statements(base_mapper, uowtransaction, + cached_connections, + mapper, table, update) def delete_obj(base_mapper, states, uowtransaction): @@ -100,24 +230,26 @@ def delete_obj(base_mapper, states, uowtransaction): cached_connections = _cached_connection_dict(base_mapper) - states_to_delete = _organize_states_for_delete( + states_to_delete = list(_organize_states_for_delete( base_mapper, states, - uowtransaction) + uowtransaction)) table_to_mapper = base_mapper._sorted_tables for table in reversed(list(table_to_mapper.keys())): + mapper = 
table_to_mapper[table] + if table not in mapper._pks_by_table: + continue + delete = _collect_delete_commands(base_mapper, uowtransaction, table, states_to_delete) - mapper = table_to_mapper[table] - _emit_delete_statements(base_mapper, uowtransaction, cached_connections, mapper, table, delete) - for state, state_dict, mapper, has_identity, connection \ - in states_to_delete: + for state, state_dict, mapper, connection, \ + update_version_id in states_to_delete: mapper.dispatch.after_delete(mapper, connection, state) @@ -133,17 +265,15 @@ def _organize_states_for_save(base_mapper, states, uowtransaction): """ - states_to_insert = [] - states_to_update = [] - for state, dict_, mapper, connection in _connections_for_states( base_mapper, uowtransaction, states): has_identity = bool(state.key) + instance_key = state.key or mapper._identity_key_from_state(state) - row_switch = None + row_switch = update_version_id = None # call before_XXX extensions if not has_identity: @@ -180,18 +310,14 @@ def _organize_states_for_save(base_mapper, states, uowtransaction): uowtransaction.remove_state_actions(existing) row_switch = existing - if not has_identity and not row_switch: - states_to_insert.append( - (state, dict_, mapper, connection, - has_identity, instance_key, row_switch) - ) - else: - states_to_update.append( - (state, dict_, mapper, connection, - has_identity, instance_key, row_switch) - ) + if (has_identity or row_switch) and mapper.version_id_col is not None: + update_version_id = mapper._get_committed_state_attr_by_column( + row_switch if row_switch else state, + row_switch.dict if row_switch else dict_, + mapper.version_id_col) - return states_to_insert, states_to_update + yield (state, dict_, mapper, connection, + has_identity, row_switch, update_version_id) def _organize_states_for_post_update(base_mapper, states, @@ -204,8 +330,7 @@ def _organize_states_for_post_update(base_mapper, states, the execution per state. """ - return list(_connections_for_states(base_mapper, uowtransaction, - states)) + return _connections_for_states(base_mapper, uowtransaction, states) def _organize_states_for_delete(base_mapper, states, uowtransaction): @@ -216,72 +341,81 @@ def _organize_states_for_delete(base_mapper, states, uowtransaction): mapper, the connection to use for the execution per state. """ - states_to_delete = [] - for state, dict_, mapper, connection in _connections_for_states( base_mapper, uowtransaction, states): mapper.dispatch.before_delete(mapper, connection, state) - states_to_delete.append((state, dict_, mapper, - bool(state.key), connection)) - return states_to_delete + if mapper.version_id_col is not None: + update_version_id = \ + mapper._get_committed_state_attr_by_column( + state, dict_, + mapper.version_id_col) + else: + update_version_id = None + + yield ( + state, dict_, mapper, connection, update_version_id) -def _collect_insert_commands(base_mapper, uowtransaction, table, - states_to_insert): +def _collect_insert_commands( + table, states_to_insert, + bulk=False, return_defaults=False): """Identify sets of values to use in INSERT statements for a list of states. 
""" - insert = [] - for state, state_dict, mapper, connection, has_identity, \ - instance_key, row_switch in states_to_insert: + for state, state_dict, mapper, connection in states_to_insert: if table not in mapper._pks_by_table: continue - pks = mapper._pks_by_table[table] - params = {} value_params = {} - has_all_pks = True - has_all_defaults = True - for col in mapper._cols_by_table[table]: - if col is mapper.version_id_col and \ - mapper.version_id_generator is not False: - val = mapper.version_id_generator(None) - params[col.key] = val + propkey_to_col = mapper._propkey_to_col[table] + + for propkey in set(propkey_to_col).intersection(state_dict): + value = state_dict[propkey] + col = propkey_to_col[propkey] + if value is None: + continue + elif not bulk and isinstance(value, sql.ClauseElement): + value_params[col.key] = value else: - # pull straight from the dict for - # pending objects - prop = mapper._columntoproperty[col] - value = state_dict.get(prop.key, None) + params[col.key] = value - if value is None: - if col in pks: - has_all_pks = False - elif col.default is None and \ - col.server_default is None: - params[col.key] = value - elif col.server_default is not None and \ - mapper.base_mapper.eager_defaults: - has_all_defaults = False + if not bulk: + for colkey in mapper._insert_cols_as_none[table].\ + difference(params).difference(value_params): + params[colkey] = None - elif isinstance(value, sql.ClauseElement): - value_params[col] = value - else: - params[col.key] = value + if not bulk or return_defaults: + has_all_pks = mapper._pk_keys_by_table[table].issubset(params) - insert.append((state, state_dict, params, mapper, - connection, value_params, has_all_pks, - has_all_defaults)) - return insert + if mapper.base_mapper.eager_defaults: + has_all_defaults = mapper._server_default_cols[table].\ + issubset(params) + else: + has_all_defaults = True + else: + has_all_defaults = has_all_pks = True + + if mapper.version_id_generator is not False \ + and mapper.version_id_col is not None and \ + mapper.version_id_col in mapper._cols_by_table[table]: + params[mapper.version_id_col.key] = \ + mapper.version_id_generator(None) + + yield ( + state, state_dict, params, mapper, + connection, value_params, has_all_pks, + has_all_defaults) -def _collect_update_commands(base_mapper, uowtransaction, - table, states_to_update): +def _collect_update_commands( + uowtransaction, table, states_to_update, + bulk=False): """Identify sets of values to use in UPDATE statements for a list of states. 
@@ -293,114 +427,114 @@ def _collect_update_commands(base_mapper, uowtransaction, """ - update = [] - for state, state_dict, mapper, connection, has_identity, \ - instance_key, row_switch in states_to_update: + for state, state_dict, mapper, connection, \ + update_version_id in states_to_update: + if table not in mapper._pks_by_table: continue pks = mapper._pks_by_table[table] - params = {} value_params = {} - hasdata = hasnull = False - for col in mapper._cols_by_table[table]: - if col is mapper.version_id_col: - params[col._label] = \ - mapper._get_committed_state_attr_by_column( - row_switch or state, - row_switch and row_switch.dict - or state_dict, - col) + propkey_to_col = mapper._propkey_to_col[table] - prop = mapper._columntoproperty[col] - history = state.manager[prop.key].impl.get_history( - state, state_dict, attributes.PASSIVE_NO_INITIALIZE - ) - if history.added: - params[col.key] = history.added[0] - hasdata = True - else: - if mapper.version_id_generator is not False: - val = mapper.version_id_generator(params[col._label]) - params[col.key] = val + if bulk: + params = dict( + (propkey_to_col[propkey].key, state_dict[propkey]) + for propkey in + set(propkey_to_col).intersection(state_dict).difference( + mapper._pk_keys_by_table[table]) + ) + has_all_defaults = True + else: + params = {} + for propkey in set(propkey_to_col).intersection( + state.committed_state): + value = state_dict[propkey] + col = propkey_to_col[propkey] - # HACK: check for history, in case the - # history is only - # in a different table than the one - # where the version_id_col is. - for prop in mapper._columntoproperty.values(): - history = ( - state.manager[prop.key].impl.get_history( - state, state_dict, - attributes.PASSIVE_NO_INITIALIZE)) - if history.added: - hasdata = True + if isinstance(value, sql.ClauseElement): + value_params[col] = value + # guard against values that generate non-__nonzero__ + # objects for __eq__() + elif state.manager[propkey].impl.is_equal( + value, state.committed_state[propkey]) is not True: + params[col.key] = value + + if mapper.base_mapper.eager_defaults: + has_all_defaults = mapper._server_onupdate_default_cols[table].\ + issubset(params) else: - prop = mapper._columntoproperty[col] - history = state.manager[prop.key].impl.get_history( - state, state_dict, - attributes.PASSIVE_NO_INITIALIZE) + has_all_defaults = True + + if update_version_id is not None and \ + mapper.version_id_col in mapper._cols_by_table[table]: + + if not bulk and not (params or value_params): + # HACK: check for history in other tables, in case the + # history is only in a different table than the one + # where the version_id_col is. This logic was lost + # from 0.9 -> 1.0.0 and restored in 1.0.6. + for prop in mapper._columntoproperty.values(): + history = ( + state.manager[prop.key].impl.get_history( + state, state_dict, + attributes.PASSIVE_NO_INITIALIZE)) + if history.added: + break + else: + # no net change, break + continue + + col = mapper.version_id_col + params[col._label] = update_version_id + + if (bulk or col.key not in params) and \ + mapper.version_id_generator is not False: + val = mapper.version_id_generator(update_version_id) + params[col.key] = val + + elif not (params or value_params): + continue + + if bulk: + pk_params = dict( + (propkey_to_col[propkey]._label, state_dict.get(propkey)) + for propkey in + set(propkey_to_col). 
+ intersection(mapper._pk_keys_by_table[table]) + ) + else: + pk_params = {} + for col in pks: + propkey = mapper._columntoproperty[col].key + + history = state.manager[propkey].impl.get_history( + state, state_dict, attributes.PASSIVE_OFF) + if history.added: - if isinstance(history.added[0], - sql.ClauseElement): - value_params[col] = history.added[0] + if not history.deleted or \ + ("pk_cascaded", state, col) in \ + uowtransaction.attributes: + pk_params[col._label] = history.added[0] + params.pop(col.key, None) else: - value = history.added[0] - params[col.key] = value + # else, use the old value to locate the row + pk_params[col._label] = history.deleted[0] + params[col.key] = history.added[0] + else: + pk_params[col._label] = history.unchanged[0] + if pk_params[col._label] is None: + raise orm_exc.FlushError( + "Can't update table %s using NULL for primary " + "key value on column %s" % (table, col)) - if col in pks: - if history.deleted and \ - not row_switch: - # if passive_updates and sync detected - # this was a pk->pk sync, use the new - # value to locate the row, since the - # DB would already have set this - if ("pk_cascaded", state, col) in \ - uowtransaction.attributes: - value = history.added[0] - params[col._label] = value - else: - # use the old value to - # locate the row - value = history.deleted[0] - params[col._label] = value - hasdata = True - else: - # row switch logic can reach us here - # remove the pk from the update params - # so the update doesn't - # attempt to include the pk in the - # update statement - del params[col.key] - value = history.added[0] - params[col._label] = value - if value is None: - hasnull = True - else: - hasdata = True - elif col in pks: - value = state.manager[prop.key].impl.get( - state, state_dict) - if value is None: - hasnull = True - params[col._label] = value - - # see #3060. Need to consider an "unchanged" None - # as potentially history for now. 
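For orientation, the ``update_version_id`` branches just above implement the standard optimistic-versioning check, configured in the usual way (model is illustrative, reusing the imports and ``Base`` from the earlier sketch)::

    class Widget(Base):
        __tablename__ = 'widgets'
        id = Column(Integer, primary_key=True)
        name = Column(String)
        version_id = Column(Integer, nullable=False)
        __mapper_args__ = {'version_id_col': version_id}

    # each flush of a dirty Widget then renders roughly
    #   UPDATE widgets SET name=?, version_id=?            -- new counter
    #   WHERE widgets.id = ? AND widgets.version_id = ?    -- counter as loaded
    # and a zero rowcount raises StaleDataError, per the check shown below.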
- elif row_switch and history.unchanged == [None]: - params[col.key] = None - hasdata = True - if hasdata: - if hasnull: - raise orm_exc.FlushError( - "Can't update table " - "using NULL for primary " - "key value") - update.append((state, state_dict, params, mapper, - connection, value_params)) - return update + if params or value_params: + params.update(pk_params) + yield ( + state, state_dict, params, mapper, + connection, value_params, has_all_defaults) def _collect_post_update_commands(base_mapper, uowtransaction, table, @@ -410,10 +544,10 @@ def _collect_post_update_commands(base_mapper, uowtransaction, table, """ - update = [] for state, state_dict, mapper, connection in states_to_update: - if table not in mapper._pks_by_table: - continue + + # assert table in mapper._pks_by_table + pks = mapper._pks_by_table[table] params = {} hasdata = False @@ -422,8 +556,8 @@ def _collect_post_update_commands(base_mapper, uowtransaction, table, if col in pks: params[col._label] = \ mapper._get_state_attr_by_column( - state, - state_dict, col) + state, + state_dict, col, passive=attributes.PASSIVE_OFF) elif col in post_update_cols: prop = mapper._columntoproperty[col] @@ -435,9 +569,7 @@ def _collect_post_update_commands(base_mapper, uowtransaction, table, params[col.key] = value hasdata = True if hasdata: - update.append((state, state_dict, params, mapper, - connection)) - return update + yield params, connection def _collect_delete_commands(base_mapper, uowtransaction, table, @@ -445,42 +577,38 @@ def _collect_delete_commands(base_mapper, uowtransaction, table, """Identify values to use in DELETE statements for a list of states to be deleted.""" - delete = util.defaultdict(list) + for state, state_dict, mapper, connection, \ + update_version_id in states_to_delete: - for state, state_dict, mapper, has_identity, connection \ - in states_to_delete: - if not has_identity or table not in mapper._pks_by_table: + if table not in mapper._pks_by_table: continue params = {} - delete[connection].append(params) for col in mapper._pks_by_table[table]: params[col.key] = \ value = \ mapper._get_committed_state_attr_by_column( - state, state_dict, col) + state, state_dict, col) if value is None: raise orm_exc.FlushError( - "Can't delete from table " + "Can't delete from table %s " "using NULL for primary " - "key value") + "key value on column %s" % (table, col)) - if mapper.version_id_col is not None and \ - table.c.contains_column(mapper.version_id_col): - params[mapper.version_id_col.key] = \ - mapper._get_committed_state_attr_by_column( - state, state_dict, - mapper.version_id_col) - return delete + if update_version_id is not None and \ + mapper.version_id_col in mapper._cols_by_table[table]: + params[mapper.version_id_col.key] = update_version_id + yield params, connection def _emit_update_statements(base_mapper, uowtransaction, - cached_connections, mapper, table, update): + cached_connections, mapper, table, update, + bookkeeping=True): """Emit UPDATE statements corresponding to value lists collected by _collect_update_commands().""" needs_version_id = mapper.version_id_col is not None and \ - table.c.contains_column(mapper.version_id_col) + mapper.version_id_col in mapper._cols_by_table[table] def update_stmt(): clause = sql.and_() @@ -496,67 +624,136 @@ def _emit_update_statements(base_mapper, uowtransaction, type_=mapper.version_id_col.type)) stmt = table.update(clause) - if mapper.base_mapper.eager_defaults: - stmt = stmt.return_defaults() - elif mapper.version_id_col is not None: - stmt = 
stmt.return_defaults(mapper.version_id_col) - return stmt - statement = base_mapper._memo(('update', table), update_stmt) + cached_stmt = base_mapper._memo(('update', table), update_stmt) - rows = 0 - for state, state_dict, params, mapper, \ - connection, value_params in update: + for (connection, paramkeys, hasvalue, has_all_defaults), \ + records in groupby( + update, + lambda rec: ( + rec[4], # connection + set(rec[2]), # set of parameter keys + bool(rec[5]), # whether or not we have "value" parameters + rec[6] # has_all_defaults + ) + ): + rows = 0 + records = list(records) - if value_params: - c = connection.execute( - statement.values(value_params), - params) + statement = cached_stmt + + # TODO: would be super-nice to not have to determine this boolean + # inside the loop here, in the 99.9999% of the time there's only + # one connection in use + assert_singlerow = connection.dialect.supports_sane_rowcount + assert_multirow = assert_singlerow and \ + connection.dialect.supports_sane_multi_rowcount + allow_multirow = has_all_defaults and not needs_version_id + + if bookkeeping and not has_all_defaults and \ + mapper.base_mapper.eager_defaults: + statement = statement.return_defaults() + elif mapper.version_id_col is not None: + statement = statement.return_defaults(mapper.version_id_col) + + if hasvalue: + for state, state_dict, params, mapper, \ + connection, value_params, has_all_defaults in records: + c = connection.execute( + statement.values(value_params), + params) + if bookkeeping: + _postfetch( + mapper, + uowtransaction, + table, + state, + state_dict, + c, + c.context.compiled_parameters[0], + value_params) + rows += c.rowcount + check_rowcount = True else: - c = cached_connections[connection].\ - execute(statement, params) + if not allow_multirow: + check_rowcount = assert_singlerow + for state, state_dict, params, mapper, \ + connection, value_params, has_all_defaults in records: + c = cached_connections[connection].\ + execute(statement, params) - _postfetch( - mapper, - uowtransaction, - table, - state, - state_dict, - c, - c.context.compiled_parameters[0], - value_params) - rows += c.rowcount + # TODO: why with bookkeeping=False? + _postfetch( + mapper, + uowtransaction, + table, + state, + state_dict, + c, + c.context.compiled_parameters[0], + value_params) + rows += c.rowcount + else: + multiparams = [rec[2] for rec in records] - if connection.dialect.supports_sane_rowcount: - if rows != len(update): - raise orm_exc.StaleDataError( - "UPDATE statement on table '%s' expected to " - "update %d row(s); %d were matched." % - (table.description, len(update), rows)) + check_rowcount = assert_multirow or ( + assert_singlerow and + len(multiparams) == 1 + ) - elif needs_version_id: - util.warn("Dialect %s does not support updated rowcount " - "- versioning cannot be verified." % - c.dialect.dialect_description, - stacklevel=12) + c = cached_connections[connection].\ + execute(statement, multiparams) + + rows += c.rowcount + + # TODO: why with bookkeeping=False? + for state, state_dict, params, mapper, \ + connection, value_params, has_all_defaults in records: + _postfetch( + mapper, + uowtransaction, + table, + state, + state_dict, + c, + c.context.compiled_parameters[0], + value_params) + + if check_rowcount: + if rows != len(records): + raise orm_exc.StaleDataError( + "UPDATE statement on table '%s' expected to " + "update %d row(s); %d were matched." 
% + (table.description, len(records), rows)) + + elif needs_version_id: + util.warn("Dialect %s does not support updated rowcount " + "- versioning cannot be verified." % + c.dialect.dialect_description) def _emit_insert_statements(base_mapper, uowtransaction, - cached_connections, mapper, table, insert): + cached_connections, mapper, table, insert, + bookkeeping=True): """Emit INSERT statements corresponding to value lists collected by _collect_insert_commands().""" - statement = base_mapper._memo(('insert', table), table.insert) + cached_stmt = base_mapper._memo(('insert', table), table.insert) for (connection, pkeys, hasvalue, has_all_pks, has_all_defaults), \ - records in groupby(insert, - lambda rec: (rec[4], - list(rec[2].keys()), - bool(rec[5]), - rec[6], rec[7]) - ): - if \ + records in groupby( + insert, + lambda rec: ( + rec[4], # connection + set(rec[2]), # parameter keys + bool(rec[5]), # whether we have "value" parameters + rec[6], + rec[7])): + + statement = cached_stmt + + if not bookkeeping or \ ( has_all_defaults or not base_mapper.eager_defaults @@ -569,19 +766,20 @@ def _emit_insert_statements(base_mapper, uowtransaction, c = cached_connections[connection].\ execute(statement, multiparams) - for (state, state_dict, params, mapper_rec, - conn, value_params, has_all_pks, has_all_defaults), \ - last_inserted_params in \ - zip(records, c.context.compiled_parameters): - _postfetch( - mapper_rec, - uowtransaction, - table, - state, - state_dict, - c, - last_inserted_params, - value_params) + if bookkeeping: + for (state, state_dict, params, mapper_rec, + conn, value_params, has_all_pks, has_all_defaults), \ + last_inserted_params in \ + zip(records, c.context.compiled_parameters): + _postfetch( + mapper_rec, + uowtransaction, + table, + state, + state_dict, + c, + last_inserted_params, + value_params) else: if not has_all_defaults and base_mapper.eager_defaults: @@ -609,13 +807,7 @@ def _emit_insert_statements(base_mapper, uowtransaction, mapper._pks_by_table[table]): prop = mapper_rec._columntoproperty[col] if state_dict.get(prop.key) is None: - # TODO: would rather say: - # state_dict[prop.key] = pk - mapper_rec._set_state_attr_by_column( - state, - state_dict, - col, pk) - + state_dict[prop.key] = pk _postfetch( mapper_rec, uowtransaction, @@ -648,11 +840,13 @@ def _emit_post_update_statements(base_mapper, uowtransaction, # also group them into common (connection, cols) sets # to support executemany(). 
for key, grouper in groupby( - update, lambda rec: (rec[4], list(rec[2].keys())) + update, lambda rec: ( + rec[1], # connection + set(rec[0]) # parameter keys + ) ): connection = key[0] - multiparams = [params for state, state_dict, - params, mapper, conn in grouper] + multiparams = [params for params, conn in grouper] cached_connections[connection].\ execute(statement, multiparams) @@ -663,7 +857,7 @@ def _emit_delete_statements(base_mapper, uowtransaction, cached_connections, by _collect_delete_commands().""" need_version_id = mapper.version_id_col is not None and \ - table.c.contains_column(mapper.version_id_col) + mapper.version_id_col in mapper._cols_by_table[table] def delete_stmt(): clause = sql.and_() @@ -682,8 +876,12 @@ def _emit_delete_statements(base_mapper, uowtransaction, cached_connections, return table.delete(clause) - for connection, del_objects in delete.items(): - statement = base_mapper._memo(('delete', table), delete_stmt) + statement = base_mapper._memo(('delete', table), delete_stmt) + for connection, recs in groupby( + delete, + lambda rec: rec[1] # connection + ): + del_objects = [params for params, connection in recs] connection = cached_connections[connection] @@ -736,15 +934,12 @@ def _emit_delete_statements(base_mapper, uowtransaction, cached_connections, ) -def _finalize_insert_update_commands(base_mapper, uowtransaction, - states_to_insert, states_to_update): +def _finalize_insert_update_commands(base_mapper, uowtransaction, states): """finalize state on states that have been inserted or updated, including calling after_insert/after_update events. """ - for state, state_dict, mapper, connection, has_identity, \ - instance_key, row_switch in states_to_insert + \ - states_to_update: + for state, state_dict, mapper, connection, has_identity in states: if mapper._readonly_props: readonly = state.unmodified_intersection( @@ -763,9 +958,8 @@ def _finalize_insert_update_commands(base_mapper, uowtransaction, toload_now.extend(state._unloaded_non_object) elif mapper.version_id_col is not None and \ mapper.version_id_generator is False: - prop = mapper._columntoproperty[mapper.version_id_col] - if prop.key in state.unloaded: - toload_now.extend([prop.key]) + if mapper._version_id_prop.key in state.unloaded: + toload_now.extend([mapper._version_id_prop.key]) if toload_now: state.key = base_mapper._identity_key_from_state(state) @@ -782,31 +976,46 @@ def _finalize_insert_update_commands(base_mapper, uowtransaction, def _postfetch(mapper, uowtransaction, table, - state, dict_, result, params, value_params): + state, dict_, result, params, value_params, bulk=False): """Expire attributes in need of newly persisted database state, after an INSERT or UPDATE statement has proceeded for that state.""" - prefetch_cols = result.context.prefetch_cols - postfetch_cols = result.context.postfetch_cols - returning_cols = result.context.returning_cols + # TODO: bulk is never non-False, need to clean this up - if mapper.version_id_col is not None: + prefetch_cols = result.context.compiled.prefetch + postfetch_cols = result.context.compiled.postfetch + returning_cols = result.context.compiled.returning + + if mapper.version_id_col is not None and \ + mapper.version_id_col in mapper._cols_by_table[table]: prefetch_cols = list(prefetch_cols) + [mapper.version_id_col] + refresh_flush = bool(mapper.class_manager.dispatch.refresh_flush) + if refresh_flush: + load_evt_attrs = [] + if returning_cols: row = result.context.returned_defaults if row is not None: for col in returning_cols: if 
col.primary_key: continue - mapper._set_state_attr_by_column(state, dict_, col, row[col]) + dict_[mapper._columntoproperty[col].key] = row[col] + if refresh_flush: + load_evt_attrs.append(mapper._columntoproperty[col].key) for c in prefetch_cols: if c.key in params and c in mapper._columntoproperty: - mapper._set_state_attr_by_column(state, dict_, c, params[c.key]) + dict_[mapper._columntoproperty[c].key] = params[c.key] + if refresh_flush: + load_evt_attrs.append(mapper._columntoproperty[c].key) - if postfetch_cols: + if refresh_flush and load_evt_attrs: + mapper.class_manager.dispatch.refresh_flush( + state, uowtransaction, load_evt_attrs) + + if postfetch_cols and state: state._expire_attributes(state.dict, [mapper._columntoproperty[c].key for c in postfetch_cols if c in @@ -817,10 +1026,13 @@ def _postfetch(mapper, uowtransaction, table, # TODO: this still goes a little too often. would be nice to # have definitive list of "columns that changed" here for m, equated_pairs in mapper._table_to_equated[table]: - sync.populate(state, m, state, m, - equated_pairs, - uowtransaction, - mapper.passive_updates) + if state is None: + sync.bulk_populate_inherit_keys(dict_, m, equated_pairs) + else: + sync.populate(state, m, state, m, + equated_pairs, + uowtransaction, + mapper.passive_updates) def _connections_for_states(base_mapper, uowtransaction, states): @@ -838,17 +1050,14 @@ def _connections_for_states(base_mapper, uowtransaction, states): connection_callable = \ uowtransaction.session.connection_callable else: - connection = None + connection = uowtransaction.transaction.connection(base_mapper) connection_callable = None for state in _sort_states(states): if connection_callable: connection = connection_callable(base_mapper, state.obj()) - elif not connection: - connection = uowtransaction.transaction.connection( - base_mapper) - mapper = _state_mapper(state) + mapper = state.manager.mapper yield state, state.dict, mapper, connection @@ -874,6 +1083,27 @@ class BulkUD(object): def __init__(self, query): self.query = query.enable_eagerloads(False) + self.mapper = self.query._bind_mapper() + self._validate_query_state() + + def _validate_query_state(self): + for attr, methname, notset, op in ( + ('_limit', 'limit()', None, operator.is_), + ('_offset', 'offset()', None, operator.is_), + ('_order_by', 'order_by()', False, operator.is_), + ('_group_by', 'group_by()', False, operator.is_), + ('_distinct', 'distinct()', False, operator.is_), + ( + '_from_obj', + 'join(), outerjoin(), select_from(), or from_self()', + (), operator.eq) + ): + if not op(getattr(self.query, attr), notset): + raise sa_exc.InvalidRequestError( + "Can't call Query.update() or Query.delete() " + "when %s has been called" % + (methname, ) + ) @property def session(self): @@ -898,18 +1128,34 @@ class BulkUD(object): self._do_post_synchronize() self._do_post() - def _do_pre(self): + @util.dependencies("sqlalchemy.orm.query") + def _do_pre(self, querylib): query = self.query - self.context = context = query._compile_context() - if len(context.statement.froms) != 1 or \ - not isinstance(context.statement.froms[0], schema.Table): + self.context = querylib.QueryContext(query) + if isinstance(query._entities[0], querylib._ColumnEntity): + # check for special case of query(table) + tables = set() + for ent in query._entities: + if not isinstance(ent, querylib._ColumnEntity): + tables.clear() + break + else: + tables.update(_from_objects(ent.column)) + + if len(tables) != 1: + raise sa_exc.InvalidRequestError( + "This operation 
requires only one Table or " + "entity be specified as the target." + ) + else: + self.primary_table = tables.pop() + + else: self.primary_table = query._only_entity_zero( "This operation requires only one Table or " "entity be specified as the target." ).mapper.local_table - else: - self.primary_table = context.statement.froms[0] session = query.session @@ -964,35 +1210,76 @@ class BulkFetch(BulkUD): def _do_pre_synchronize(self): query = self.query session = query.session - select_stmt = self.context.statement.with_only_columns( + context = query._compile_context() + select_stmt = context.statement.with_only_columns( self.primary_table.primary_key) self.matched_rows = session.execute( select_stmt, + mapper=self.mapper, params=query._params).fetchall() class BulkUpdate(BulkUD): """BulkUD which handles UPDATEs.""" - def __init__(self, query, values): + def __init__(self, query, values, update_kwargs): super(BulkUpdate, self).__init__(query) - self.query._no_select_modifiers("update") self.values = values + self.update_kwargs = update_kwargs @classmethod - def factory(cls, query, synchronize_session, values): + def factory(cls, query, synchronize_session, values, update_kwargs): return BulkUD._factory({ "evaluate": BulkUpdateEvaluate, "fetch": BulkUpdateFetch, False: BulkUpdate - }, synchronize_session, query, values) + }, synchronize_session, query, values, update_kwargs) + + def _resolve_string_to_expr(self, key): + if self.mapper and isinstance(key, util.string_types): + attr = _entity_descriptor(self.mapper, key) + return attr.__clause_element__() + else: + return key + + def _resolve_key_to_attrname(self, key): + if self.mapper and isinstance(key, util.string_types): + attr = _entity_descriptor(self.mapper, key) + return attr.property.key + elif isinstance(key, attributes.InstrumentedAttribute): + return key.key + elif hasattr(key, '__clause_element__'): + key = key.__clause_element__() + + if self.mapper and isinstance(key, expression.ColumnElement): + try: + attr = self.mapper._columntoproperty[key] + except orm_exc.UnmappedColumnError: + return None + else: + return attr.key + else: + raise sa_exc.InvalidRequestError( + "Invalid expression type: %r" % key) def _do_exec(self): + + values = [ + (self._resolve_string_to_expr(k), v) + for k, v in ( + self.values.items() if hasattr(self.values, 'items') + else self.values) + ] + if not self.update_kwargs.get('preserve_parameter_order', False): + values = dict(values) + update_stmt = sql.update(self.primary_table, - self.context.whereclause, self.values) + self.context.whereclause, values, + **self.update_kwargs) self.result = self.query.session.execute( - update_stmt, params=self.query._params) + update_stmt, params=self.query._params, + mapper=self.mapper) self.rowcount = self.result.rowcount def _do_post(self): @@ -1005,7 +1292,6 @@ class BulkDelete(BulkUD): def __init__(self, query): super(BulkDelete, self).__init__(query) - self.query._no_select_modifiers("delete") @classmethod def factory(cls, query, synchronize_session): @@ -1019,8 +1305,10 @@ class BulkDelete(BulkUD): delete_stmt = sql.delete(self.primary_table, self.context.whereclause) - self.result = self.query.session.execute(delete_stmt, - params=self.query._params) + self.result = self.query.session.execute( + delete_stmt, + params=self.query._params, + mapper=self.mapper) self.rowcount = self.result.rowcount def _do_post(self): @@ -1034,10 +1322,13 @@ class BulkUpdateEvaluate(BulkEvaluate, BulkUpdate): def _additional_evaluators(self, evaluator_compiler): 
self.value_evaluators = {} - for key, value in self.values.items(): - key = _attr_as_key(key) - self.value_evaluators[key] = evaluator_compiler.process( - expression._literal_as_binds(value)) + values = (self.values.items() if hasattr(self.values, 'items') + else self.values) + for key, value in values: + key = self._resolve_key_to_attrname(key) + if key is not None: + self.value_evaluators[key] = evaluator_compiler.process( + expression._literal_as_binds(value)) def _do_post_synchronize(self): session = self.query.session diff --git a/lib/python3.4/site-packages/sqlalchemy/orm/properties.py b/lib/python3.4/site-packages/sqlalchemy/orm/properties.py index 62ea93f..f8a3532 100644 --- a/lib/python3.4/site-packages/sqlalchemy/orm/properties.py +++ b/lib/python3.4/site-packages/sqlalchemy/orm/properties.py @@ -1,5 +1,5 @@ # orm/properties.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -34,6 +34,13 @@ class ColumnProperty(StrategizedProperty): strategy_wildcard_key = 'column' + __slots__ = ( + '_orig_columns', 'columns', 'group', 'deferred', + 'instrument', 'comparator_factory', 'descriptor', 'extension', + 'active_history', 'expire_on_flush', 'info', 'doc', + 'strategy_class', '_creation_order', '_is_polymorphic_discriminator', + '_mapped_by_synonym', '_deferred_column_loader') + def __init__(self, *columns, **kwargs): """Provide a column-level property for use with a Mapper. @@ -109,6 +116,7 @@ class ColumnProperty(StrategizedProperty): **Deprecated.** Please see :class:`.AttributeEvents`. """ + super(ColumnProperty, self).__init__() self._orig_columns = [expression._labeled(c) for c in columns] self.columns = [expression._labeled(_orm_full_deannotate(c)) for c in columns] @@ -149,6 +157,12 @@ class ColumnProperty(StrategizedProperty): ("instrument", self.instrument) ) + @util.dependencies("sqlalchemy.orm.state", "sqlalchemy.orm.strategies") + def _memoized_attr__deferred_column_loader(self, state, strategies): + return state.InstanceState._instance_level_callable_processor( + self.parent.class_manager, + strategies.LoadDeferredColumns(self.key), self.key) + @property def expression(self): """Return the primary column or expression for this ColumnProperty. @@ -206,7 +220,7 @@ class ColumnProperty(StrategizedProperty): elif dest_state.has_identity and self.key not in dest_dict: dest_state._expire_attributes(dest_dict, [self.key]) - class Comparator(PropComparator): + class Comparator(util.MemoizedSlots, PropComparator): """Produce boolean, comparison, and other operators for :class:`.ColumnProperty` attributes. 
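The ``util.MemoizedSlots`` mixin introduced for ``Comparator`` here replaces the ``@util.memoized_property`` / ``@util.memoized_instancemethod`` decorators, which store into ``obj.__dict__`` and therefore cannot work on ``__slots__``-based classes. A minimal standalone sketch of the naming convention (not the actual ``sqlalchemy.util`` implementation; the real mixin also resolves ``_memoized_method_<name>`` creators, as the next hunk shows)::

    class MemoizedSlots(object):
        """Sketch: a missing attribute ``name`` is produced by
        ``_memoized_attr_<name>()`` and cached in its slot, so
        ``__getattr__`` fires only on first access."""

        __slots__ = ()

        def _fallback_getattr(self, key):
            raise AttributeError(key)

        def __getattr__(self, key):
            if key.startswith('_memoized'):
                raise AttributeError(key)
            elif hasattr(self, '_memoized_attr_%s' % key):
                value = getattr(self, '_memoized_attr_%s' % key)()
                setattr(self, key, value)  # fill the slot; next access is direct
                return value
            else:
                return self._fallback_getattr(key)

    class Comparator(MemoizedSlots):
        __slots__ = ('info',)  # one slot per memoized name

        def _memoized_attr_info(self):
            print("computed exactly once")
            return {'docs': 'per-comparator info'}

    c = Comparator()
    assert c.info is c.info  # second access reads the slot directly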
@@ -224,24 +238,27 @@ class ColumnProperty(StrategizedProperty): :attr:`.TypeEngine.comparator_factory` """ - @util.memoized_instancemethod - def __clause_element__(self): + + __slots__ = '__clause_element__', 'info' + + def _memoized_method___clause_element__(self): if self.adapter: return self.adapter(self.prop.columns[0]) else: + # no adapter, so we aren't aliased + # assert self._parententity is self._parentmapper return self.prop.columns[0]._annotate({ - "parententity": self._parentmapper, - "parentmapper": self._parentmapper}) + "parententity": self._parententity, + "parentmapper": self._parententity}) - @util.memoized_property - def info(self): + def _memoized_attr_info(self): ce = self.__clause_element__() try: return ce.info except AttributeError: return self.prop.info - def __getattr__(self, key): + def _fallback_getattr(self, key): """proxy attribute access down to the mapped column. this allows user-defined comparison methods to be accessed. diff --git a/lib/python3.4/site-packages/sqlalchemy/orm/query.py b/lib/python3.4/site-packages/sqlalchemy/orm/query.py index 12e11b2..335832d 100644 --- a/lib/python3.4/site-packages/sqlalchemy/orm/query.py +++ b/lib/python3.4/site-packages/sqlalchemy/orm/query.py @@ -1,5 +1,5 @@ # orm/query.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -26,7 +26,7 @@ from . import ( exc as orm_exc, loading ) from .base import _entity_descriptor, _is_aliased_class, \ - _is_mapped_class, _orm_columns, _generative + _is_mapped_class, _orm_columns, _generative, InspectionAttr from .path_registry import PathRegistry from .util import ( AliasedClass, ORMAdapter, join as orm_join, with_parent, aliased @@ -75,6 +75,7 @@ class Query(object): _having = None _distinct = False _prefixes = None + _suffixes = None _offset = None _limit = None _for_update_arg = None @@ -99,7 +100,8 @@ class Query(object): _with_options = () _with_hints = () _enable_single_crit = True - + _orm_only_adapt = True + _orm_only_from_obj_alias = True _current_path = _path_registry def __init__(self, entities, session=None): @@ -137,10 +139,7 @@ class Query(object): ) aliased_adapter = None elif ext_info.is_aliased_class: - aliased_adapter = sql_util.ColumnAdapter( - ext_info.selectable, - ext_info.mapper._equivalent_columns - ) + aliased_adapter = ext_info._adapter else: aliased_adapter = None @@ -162,7 +161,6 @@ class Query(object): for from_obj in obj: info = inspect(from_obj) - if hasattr(info, 'mapper') and \ (info.is_mapper or info.is_aliased_class): self._select_from_entity = from_obj @@ -218,7 +216,7 @@ class Query(object): def _adapt_col_list(self, cols): return [ self._adapt_clause( - expression._literal_as_text(o), + expression._literal_as_label_reference(o), True, True) for o in cols ] @@ -234,7 +232,8 @@ class Query(object): adapters = [] # do we adapt all expression elements or only those # tagged as 'ORM' constructs ? - orm_only = getattr(self, '_orm_only_adapt', orm_only) + if not self._orm_only_adapt: + orm_only = False if as_filter and self._filter_aliases: for fa in self._filter_aliases._visitor_iterator: @@ -251,7 +250,7 @@ class Query(object): # to all SQL constructs. 
adapters.append( ( - getattr(self, '_orm_only_from_obj_alias', orm_only), + orm_only if self._orm_only_from_obj_alias else False, self._from_obj_alias.replace ) ) @@ -288,8 +287,11 @@ class Query(object): return self._entities[0] def _mapper_zero(self): - return self._select_from_entity or \ - self._entity_zero().entity_zero + # TODO: self._select_from_entity is not a mapper + # so this method is misnamed + return self._select_from_entity \ + if self._select_from_entity is not None \ + else self._entity_zero().entity_zero @property def _mapper_entities(self): @@ -303,11 +305,14 @@ class Query(object): self._mapper_zero() ) - def _mapper_zero_or_none(self): - if self._primary_entity: - return self._primary_entity.mapper - else: - return None + def _bind_mapper(self): + ezero = self._mapper_zero() + if ezero is not None: + insp = inspect(ezero) + if not insp.is_clause_element: + return insp.mapper + + return None def _only_mapper_zero(self, rationale=None): if len(self._entities) > 1: @@ -396,22 +401,6 @@ class Query(object): % (meth, meth) ) - def _no_select_modifiers(self, meth): - if not self._enable_assertions: - return - for attr, methname, notset in ( - ('_limit', 'limit()', None), - ('_offset', 'offset()', None), - ('_order_by', 'order_by()', False), - ('_group_by', 'group_by()', False), - ('_distinct', 'distinct()', False), - ): - if getattr(self, attr) is not notset: - raise sa_exc.InvalidRequestError( - "Can't call Query.%s() when %s has been called" % - (meth, methname) - ) - def _get_options(self, populate_existing=None, version_check=None, only_load_props=None, @@ -595,11 +584,19 @@ class Query(object): This is used primarily when nesting the Query's statement into a subquery or other - selectable. + selectable, or when using :meth:`.Query.yield_per`. """ self._enable_eagerloads = value + def _no_yield_per(self, message): + raise sa_exc.InvalidRequestError( + "The yield_per Query option is currently not " + "compatible with %s eager loading. Please " + "specify lazyload('*') or query.enable_eagerloads(False) in " + "order to " + "proceed with query.yield_per()." % message) + @_generative() def with_labels(self): """Apply column labels to the return value of Query.statement. @@ -613,6 +610,16 @@ class Query(object): When the `Query` actually issues SQL to load rows, it always uses column labeling. + .. note:: The :meth:`.Query.with_labels` method *only* applies + the output of :attr:`.Query.statement`, and *not* to any of + the result-row invoking systems of :class:`.Query` itself, e.g. + :meth:`.Query.first`, :meth:`.Query.all`, etc. To execute + a query using :meth:`.Query.with_labels`, invoke the + :attr:`.Query.statement` using :meth:`.Session.execute`:: + + result = session.execute(query.with_labels().statement) + + """ self._with_labels = True @@ -705,29 +712,64 @@ class Query(object): def yield_per(self, count): """Yield only ``count`` rows at a time. - WARNING: use this method with caution; if the same instance is present - in more than one batch of rows, end-user changes to attributes will be - overwritten. + The purpose of this method is when fetching very large result sets + (> 10K rows), to batch results in sub-collections and yield them + out partially, so that the Python interpreter doesn't need to declare + very large areas of memory which is both time consuming and leads + to excessive memory use. The performance from fetching hundreds of + thousands of rows can often double when a suitable yield-per setting + (e.g. 
approximately 1000) is used, even with DBAPIs that buffer + rows (which are most). - In particular, it's usually impossible to use this setting with - eagerly loaded collections (i.e. any lazy='joined' or 'subquery') - since those collections will be cleared for a new load when - encountered in a subsequent result batch. In the case of 'subquery' - loading, the full result for all rows is fetched which generally - defeats the purpose of :meth:`~sqlalchemy.orm.query.Query.yield_per`. + The :meth:`.Query.yield_per` method **is not compatible with most + eager loading schemes, including subqueryload and joinedload with + collections**. For this reason, it may be helpful to disable + eager loads, either unconditionally with + :meth:`.Query.enable_eagerloads`:: - Also note that while :meth:`~sqlalchemy.orm.query.Query.yield_per` - will set the ``stream_results`` execution option to True, currently - this is only understood by - :mod:`~sqlalchemy.dialects.postgresql.psycopg2` dialect which will - stream results using server side cursors instead of pre-buffer all - rows for this query. Other DBAPIs pre-buffer all rows before making - them available. + q = sess.query(Object).yield_per(100).enable_eagerloads(False) + + Or more selectively using :func:`.lazyload`; such as with + an asterisk to specify the default loader scheme:: + + q = sess.query(Object).yield_per(100).\\ + options(lazyload('*'), joinedload(Object.some_related)) + + .. warning:: + + Use this method with caution; if the same instance is + present in more than one batch of rows, end-user changes + to attributes will be overwritten. + + In particular, it's usually impossible to use this setting + with eagerly loaded collections (i.e. any lazy='joined' or + 'subquery') since those collections will be cleared for a + new load when encountered in a subsequent result batch. + In the case of 'subquery' loading, the full result for all + rows is fetched which generally defeats the purpose of + :meth:`~sqlalchemy.orm.query.Query.yield_per`. + + Also note that while + :meth:`~sqlalchemy.orm.query.Query.yield_per` will set the + ``stream_results`` execution option to True, currently + this is only understood by + :mod:`~sqlalchemy.dialects.postgresql.psycopg2` dialect + which will stream results using server side cursors + instead of pre-buffer all rows for this query. Other + DBAPIs **pre-buffer all rows** before making them + available. The memory use of raw database rows is much less + than that of an ORM-mapped object, but should still be taken into + consideration when benchmarking. + + .. seealso:: + + :meth:`.Query.enable_eagerloads` """ self._yield_per = count self._execution_options = self._execution_options.union( - {"stream_results": True}) + {"stream_results": True, + "max_row_buffer": count}) def get(self, ident): """Return an instance based on the given primary key identifier, @@ -771,7 +813,7 @@ class Query(object): foreign-key-to-primary-key criterion, will also use an operation equivalent to :meth:`~.Query.get` in order to retrieve the target value from the local identity map - before querying the database. See :doc:`/orm/loading` + before querying the database. See :doc:`/orm/loading_relationships` for further details on relationship loading. :param ident: A scalar or tuple value representing @@ -786,7 +828,9 @@ class Query(object): :return: The object instance, or ``None``. 
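A minimal runnable sketch of the batching pattern the revised ``yield_per()`` docstring recommends, assuming only an in-memory SQLite database; the ``User`` mapping below is illustrative, not part of the diff::

    from sqlalchemy import Column, Integer, String, create_engine
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import Session, lazyload

    Base = declarative_base()

    class User(Base):
        __tablename__ = 'user'
        id = Column(Integer, primary_key=True)
        name = Column(String)

    engine = create_engine('sqlite://')
    Base.metadata.create_all(engine)
    session = Session(engine)
    session.add_all([User(name='u%d' % i) for i in range(5000)])
    session.commit()

    # lazyload('*') switches off any configured eager loaders, as the
    # revised docstring recommends; yield_per(1000) batches the result
    # rows and also sets the stream_results / max_row_buffer execution
    # options shown in the diff.
    q = session.query(User).options(lazyload('*')).yield_per(1000)

    count = 0
    for user in q:          # rows arrive in batches of ~1000
        count += 1
    assert count == 5000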
""" + return self._get_impl(ident, loading.load_on_ident) + def _get_impl(self, ident, fallback_fn): # convert composite types to individual args if hasattr(ident, '__composite_values__'): ident = ident.__composite_values__() @@ -817,7 +861,7 @@ class Query(object): return None return instance - return loading.load_on_ident(self, key) + return fallback_fn(self, key) @_generative() def correlate(self, *args): @@ -898,11 +942,13 @@ class Query(object): """ if property is None: + mapper_zero = inspect(self._mapper_zero()).mapper + mapper = object_mapper(instance) for prop in mapper.iterate_properties: if isinstance(prop, properties.RelationshipProperty) and \ - prop.mapper is self._mapper_zero(): + prop.mapper is mapper_zero: property = prop break else: @@ -940,14 +986,176 @@ class Query(object): """return a Query that selects from this Query's SELECT statement. - \*entities - optional list of entities which will replace - those being selected. + :meth:`.Query.from_self` essentially turns the SELECT statement + into a SELECT of itself. Given a query such as:: + + q = session.query(User).filter(User.name.like('e%')) + + Given the :meth:`.Query.from_self` version:: + + q = session.query(User).filter(User.name.like('e%')).from_self() + + This query renders as: + + .. sourcecode:: sql + + SELECT anon_1.user_id AS anon_1_user_id, + anon_1.user_name AS anon_1_user_name + FROM (SELECT "user".id AS user_id, "user".name AS user_name + FROM "user" + WHERE "user".name LIKE :name_1) AS anon_1 + + There are lots of cases where :meth:`.Query.from_self` may be useful. + A simple one is where above, we may want to apply a row LIMIT to + the set of user objects we query against, and then apply additional + joins against that row-limited set:: + + q = session.query(User).filter(User.name.like('e%')).\\ + limit(5).from_self().\\ + join(User.addresses).filter(Address.email.like('q%')) + + The above query joins to the ``Address`` entity but only against the + first five results of the ``User`` query: + + .. sourcecode:: sql + + SELECT anon_1.user_id AS anon_1_user_id, + anon_1.user_name AS anon_1_user_name + FROM (SELECT "user".id AS user_id, "user".name AS user_name + FROM "user" + WHERE "user".name LIKE :name_1 + LIMIT :param_1) AS anon_1 + JOIN address ON anon_1.user_id = address.user_id + WHERE address.email LIKE :email_1 + + **Automatic Aliasing** + + Another key behavior of :meth:`.Query.from_self` is that it applies + **automatic aliasing** to the entities inside the subquery, when + they are referenced on the outside. Above, if we continue to + refer to the ``User`` entity without any additional aliasing applied + to it, those references wil be in terms of the subquery:: + + q = session.query(User).filter(User.name.like('e%')).\\ + limit(5).from_self().\\ + join(User.addresses).filter(Address.email.like('q%')).\\ + order_by(User.name) + + The ORDER BY against ``User.name`` is aliased to be in terms of the + inner subquery: + + .. sourcecode:: sql + + SELECT anon_1.user_id AS anon_1_user_id, + anon_1.user_name AS anon_1_user_name + FROM (SELECT "user".id AS user_id, "user".name AS user_name + FROM "user" + WHERE "user".name LIKE :name_1 + LIMIT :param_1) AS anon_1 + JOIN address ON anon_1.user_id = address.user_id + WHERE address.email LIKE :email_1 ORDER BY anon_1.user_name + + The automatic aliasing feature only works in a **limited** way, + for simple filters and orderings. 
More ambitious constructions + such as referring to the entity in joins should prefer to use + explicit subquery objects, typically making use of the + :meth:`.Query.subquery` method to produce an explicit subquery object. + Always test the structure of queries by viewing the SQL to ensure + a particular structure does what's expected! + + **Changing the Entities** + + :meth:`.Query.from_self` also includes the ability to modify what + columns are being queried. In our example, we want ``User.id`` + to be queried by the inner query, so that we can join to the + ``Address`` entity on the outside, but we only wanted the outer + query to return the ``Address.email`` column:: + + q = session.query(User).filter(User.name.like('e%')).\\ + limit(5).from_self(Address.email).\\ + join(User.addresses).filter(Address.email.like('q%')) + + yielding: + + .. sourcecode:: sql + + SELECT address.email AS address_email + FROM (SELECT "user".id AS user_id, "user".name AS user_name + FROM "user" + WHERE "user".name LIKE :name_1 + LIMIT :param_1) AS anon_1 + JOIN address ON anon_1.user_id = address.user_id + WHERE address.email LIKE :email_1 + + **Looking out for Inner / Outer Columns** + + Keep in mind that when referring to columns that originate from + inside the subquery, we need to ensure they are present in the + columns clause of the subquery itself; this is an ordinary aspect of + SQL. For example, if we wanted to load from a joined entity inside + the subquery using :func:`.contains_eager`, we need to add those + columns. Below illustrates a join of ``Address`` to ``User``, + then a subquery, and then we'd like :func:`.contains_eager` to access + the ``User`` columns:: + + q = session.query(Address).join(Address.user).\\ + filter(User.name.like('e%')) + + q = q.add_entity(User).from_self().\\ + options(contains_eager(Address.user)) + + We use :meth:`.Query.add_entity` above **before** we call + :meth:`.Query.from_self` so that the ``User`` columns are present + in the inner subquery, so that they are available to the + :func:`.contains_eager` modifier we are using on the outside, + producing: + + .. sourcecode:: sql + + SELECT anon_1.address_id AS anon_1_address_id, + anon_1.address_email AS anon_1_address_email, + anon_1.address_user_id AS anon_1_address_user_id, + anon_1.user_id AS anon_1_user_id, + anon_1.user_name AS anon_1_user_name + FROM ( + SELECT address.id AS address_id, + address.email AS address_email, + address.user_id AS address_user_id, + "user".id AS user_id, + "user".name AS user_name + FROM address JOIN "user" ON "user".id = address.user_id + WHERE "user".name LIKE :name_1) AS anon_1 + + If we didn't call ``add_entity(User)``, but still asked + :func:`.contains_eager` to load the ``User`` entity, it would be + forced to add the table on the outside without the correct + join criteria - note the ``anon1, "user"`` phrase at + the end: + + .. sourcecode:: sql + + -- incorrect query + SELECT anon_1.address_id AS anon_1_address_id, + anon_1.address_email AS anon_1_address_email, + anon_1.address_user_id AS anon_1_address_user_id, + "user".id AS user_id, + "user".name AS user_name + FROM ( + SELECT address.id AS address_id, + address.email AS address_email, + address.user_id AS address_user_id + FROM address JOIN "user" ON "user".id = address.user_id + WHERE "user".name LIKE :name_1) AS anon_1, "user" + + :param \*entities: optional list of entities which will replace + those being selected. 
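A runnable sketch consolidating the ``from_self()`` patterns described above; the ``User`` / ``Address`` mappings are illustrative stand-ins for the ones the docstring assumes::

    from sqlalchemy import Column, ForeignKey, Integer, String, create_engine
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import Session, contains_eager, relationship

    Base = declarative_base()

    class User(Base):
        __tablename__ = 'user'
        id = Column(Integer, primary_key=True)
        name = Column(String)
        addresses = relationship('Address', backref='user')

    class Address(Base):
        __tablename__ = 'address'
        id = Column(Integer, primary_key=True)
        user_id = Column(Integer, ForeignKey('user.id'))
        email = Column(String)

    engine = create_engine('sqlite://')
    Base.metadata.create_all(engine)
    session = Session(engine)

    # LIMIT the User rows first, then join outward from the subquery
    q = (session.query(User).filter(User.name.like('e%'))
         .limit(5).from_self()
         .join(User.addresses).filter(Address.email.like('q%')))
    print(q)   # renders the nested anon_1 subquery shown above

    # add_entity(User) *before* from_self() so the inner SELECT carries
    # the User columns that contains_eager() needs on the outside
    q2 = (session.query(Address).join(Address.user)
          .filter(User.name.like('e%'))
          .add_entity(User).from_self()
          .options(contains_eager(Address.user)))
    print(q2)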
""" fromclause = self.with_labels().enable_eagerloads(False).\ - _set_enable_single_crit(False).\ statement.correlate(None) q = self._from_selectable(fromclause) + q._enable_single_crit = False + q._select_from_entity = self._mapper_zero() if entities: q._set_entities(entities) return q @@ -964,7 +1172,7 @@ class Query(object): '_limit', '_offset', '_joinpath', '_joinpoint', '_distinct', '_having', - '_prefixes', + '_prefixes', '_suffixes' ): self.__dict__.pop(attr, None) self._set_select_from([fromclause], True) @@ -1060,7 +1268,7 @@ class Query(object): Most supplied options regard changing how column- and relationship-mapped attributes are loaded. See the sections - :ref:`deferred` and :doc:`/orm/loading` for reference + :ref:`deferred` and :doc:`/orm/loading_relationships` for reference documentation. """ @@ -1106,7 +1314,8 @@ class Query(object): @_generative() def with_hint(self, selectable, text, dialect_name='*'): - """Add an indexing hint for the given entity or selectable to + """Add an indexing or other executional context + hint for the given entity or selectable to this :class:`.Query`. Functionality is passed straight through to @@ -1114,11 +1323,35 @@ class Query(object): with the addition that ``selectable`` can be a :class:`.Table`, :class:`.Alias`, or ORM entity / mapped class /etc. + + .. seealso:: + + :meth:`.Query.with_statement_hint` + """ - selectable = inspect(selectable).selectable + if selectable is not None: + selectable = inspect(selectable).selectable self._with_hints += ((selectable, text, dialect_name),) + def with_statement_hint(self, text, dialect_name='*'): + """add a statement hint to this :class:`.Select`. + + This method is similar to :meth:`.Select.with_hint` except that + it does not require an individual table, and instead applies to the + statement as a whole. + + This feature calls down into :meth:`.Select.with_statement_hint`. + + .. versionadded:: 1.0.0 + + .. seealso:: + + :meth:`.Query.with_hint` + + """ + return self.with_hint(None, text, dialect_name) + @_generative() def execution_options(self, **kwargs): """ Set non-SQL options which take effect during execution. @@ -1222,7 +1455,9 @@ class Query(object): session.query(MyClass).filter(MyClass.name == 'some name') - Multiple criteria are joined together by AND:: + Multiple criteria may be specified as comma separated; the effect + is that they will be joined together using the :func:`.and_` + function:: session.query(MyClass).\\ filter(MyClass.name == 'some name', MyClass.id > 5) @@ -1231,16 +1466,13 @@ class Query(object): WHERE clause of a select. String expressions are coerced into SQL expression constructs via the :func:`.text` construct. - .. versionchanged:: 0.7.5 - Multiple criteria joined by AND. - .. seealso:: :meth:`.Query.filter_by` - filter on keyword expressions. 
""" for criterion in list(criterion): - criterion = expression._literal_as_text(criterion) + criterion = expression._expression_literal_as_text(criterion) criterion = self._adapt_clause(criterion, True, True) @@ -1257,7 +1489,9 @@ class Query(object): session.query(MyClass).filter_by(name = 'some name') - Multiple criteria are joined together by AND:: + Multiple criteria may be specified as comma separated; the effect + is that they will be joined together using the :func:`.and_` + function:: session.query(MyClass).\\ filter_by(name = 'some name', id = 5) @@ -1339,8 +1573,7 @@ class Query(object): """ - if isinstance(criterion, util.string_types): - criterion = sql.text(criterion) + criterion = expression._expression_literal_as_text(criterion) if criterion is not None and \ not isinstance(criterion, sql.ClauseElement): @@ -1677,6 +1910,14 @@ class Query(object): anonymously aliased. Subsequent calls to :meth:`~.Query.filter` and similar will adapt the incoming criterion to the target alias, until :meth:`~.Query.reset_joinpoint` is called. + :param isouter=False: If True, the join used will be a left outer join, + just as if the :meth:`.Query.outerjoin` method were called. This + flag is here to maintain consistency with the same flag as accepted + by :meth:`.FromClause.join` and other Core constructs. + + + .. versionadded:: 1.0.0 + :param from_joinpoint=False: When using ``aliased=True``, a setting of True here will cause the join to be from the most recent joined target, rather than starting back from the original @@ -1694,13 +1935,14 @@ class Query(object): SQLAlchemy versions was the primary ORM-level joining interface. """ - aliased, from_joinpoint = kwargs.pop('aliased', False),\ - kwargs.pop('from_joinpoint', False) + aliased, from_joinpoint, isouter = kwargs.pop('aliased', False),\ + kwargs.pop('from_joinpoint', False),\ + kwargs.pop('isouter', False) if kwargs: raise TypeError("unknown arguments: %s" % - ','.join(kwargs.keys)) + ', '.join(sorted(kwargs))) return self._join(props, - outerjoin=False, create_aliases=aliased, + outerjoin=isouter, create_aliases=aliased, from_joinpoint=from_joinpoint) def outerjoin(self, *props, **kwargs): @@ -1714,7 +1956,7 @@ class Query(object): kwargs.pop('from_joinpoint', False) if kwargs: raise TypeError("unknown arguments: %s" % - ','.join(kwargs)) + ', '.join(sorted(kwargs))) return self._join(props, outerjoin=True, create_aliases=aliased, from_joinpoint=from_joinpoint) @@ -1750,7 +1992,8 @@ class Query(object): # convert to a tuple. keys = (keys,) - for arg1 in util.to_list(keys): + keylist = util.to_list(keys) + for idx, arg1 in enumerate(keylist): if isinstance(arg1, tuple): # "tuple" form of join, multiple # tuples are accepted as well. 
The simpler @@ -1772,6 +2015,11 @@ class Query(object): left_entity = prop = None + if isinstance(onclause, interfaces.PropComparator): + of_type = getattr(onclause, '_of_type', None) + else: + of_type = None + if isinstance(onclause, util.string_types): left_entity = self._joinpoint_zero() @@ -1798,8 +2046,6 @@ class Query(object): if isinstance(onclause, interfaces.PropComparator): if right_entity is None: - right_entity = onclause.property.mapper - of_type = getattr(onclause, '_of_type', None) if of_type: right_entity = of_type else: @@ -1826,6 +2072,11 @@ class Query(object): jp = self._joinpoint[edge].copy() jp['prev'] = (edge, self._joinpoint) self._update_joinpoint(jp) + + if idx == len(keylist) - 1: + util.warn( + "Pathed join target %s has already " + "been joined to; skipping" % prop) continue elif onclause is not None and right_entity is None: @@ -1881,11 +2132,9 @@ class Query(object): from_obj, r_info.selectable): overlap = True break - elif sql_util.selectables_overlap(l_info.selectable, - r_info.selectable): - overlap = True - if overlap and l_info.selectable is r_info.selectable: + if (overlap or not create_aliases) and \ + l_info.selectable is r_info.selectable: raise sa_exc.InvalidRequestError( "Can't join table/selectable '%s' to itself" % l_info.selectable) @@ -2250,6 +2499,19 @@ class Query(object): """Apply a ``DISTINCT`` to the query and return the newly resulting ``Query``. + + .. note:: + + The :meth:`.distinct` call includes logic that will automatically + add columns from the ORDER BY of the query to the columns + clause of the SELECT statement, to satisfy the common need + of the database backend that ORDER BY columns be part of the + SELECT list when DISTINCT is used. These columns *are not* + added to the list of columns actually fetched by the + :class:`.Query`, however, so would not affect results. + The columns are passed through when using the + :attr:`.Query.statement` accessor, however. + :param \*expr: optional column expressions. When present, the Postgresql dialect will render a ``DISTINCT ON (>)`` construct. @@ -2285,12 +2547,38 @@ class Query(object): .. versionadded:: 0.7.7 + .. seealso:: + + :meth:`.HasPrefixes.prefix_with` + """ if self._prefixes: self._prefixes += prefixes else: self._prefixes = prefixes + @_generative() + def suffix_with(self, *suffixes): + """Apply the suffix to the query and return the newly resulting + ``Query``. + + :param \*suffixes: optional suffixes, typically strings, + not using any commas. + + .. versionadded:: 1.0.0 + + .. seealso:: + + :meth:`.Query.prefix_with` + + :meth:`.HasSuffixes.suffix_with` + + """ + if self._suffixes: + self._suffixes += suffixes + else: + self._suffixes = suffixes + def all(self): """Return the results represented by this ``Query`` as a list. @@ -2306,13 +2594,18 @@ class Query(object): This method bypasses all internal statement compilation, and the statement is executed without modification. - The statement argument is either a string, a ``select()`` construct, - or a ``text()`` construct, and should return the set of columns - appropriate to the entity class represented by this ``Query``. + The statement is typically either a :func:`~.expression.text` + or :func:`~.expression.select` construct, and should return the set + of columns + appropriate to the entity class represented by this :class:`.Query`. + + .. 
seealso:: + + :ref:`orm_tutorial_literal_sql` - usage examples in the + ORM tutorial """ - if isinstance(statement, util.string_types): - statement = sql.text(statement) + statement = expression._expression_literal_as_text(statement) if not isinstance(statement, (expression.TextClause, @@ -2332,7 +2625,7 @@ class Query(object): (note this may consist of multiple result rows if join-loaded collections are present). - Calling ``first()`` results in an execution of the underlying query. + Calling :meth:`.Query.first` results in an execution of the underlying query. """ if self._statement is not None: @@ -2344,26 +2637,57 @@ class Query(object): else: return None + def one_or_none(self): + """Return at most one result or raise an exception. + + Returns ``None`` if the query selects + no rows. Raises ``sqlalchemy.orm.exc.MultipleResultsFound`` + if multiple object identities are returned, or if multiple + rows are returned for a query that returns only scalar values + as opposed to full identity-mapped entities. + + Calling :meth:`.Query.one_or_none` results in an execution of the underlying + query. + + .. versionadded:: 1.0.9 + + Added :meth:`.Query.one_or_none` + + .. seealso:: + + :meth:`.Query.first` + + :meth:`.Query.one` + + + """ + ret = list(self) + + l = len(ret) + if l == 1: + return ret[0] + elif l == 0: + return None + else: + raise orm_exc.MultipleResultsFound( + "Multiple rows were found for one_or_none()") + def one(self): """Return exactly one result or raise an exception. Raises ``sqlalchemy.orm.exc.NoResultFound`` if the query selects no rows. Raises ``sqlalchemy.orm.exc.MultipleResultsFound`` if multiple object identities are returned, or if multiple - rows are returned for a query that does not return object - identities. + rows are returned for a query that returns only scalar values + as opposed to full identity-mapped entities. - Note that an entity query, that is, one which selects one or - more mapped classes as opposed to individual column attributes, - may ultimately represent many rows but only one row of - unique entity or entities - this is a successful result for one(). + Calling :meth:`.one` results in an execution of the underlying query. - Calling ``one()`` results in an execution of the underlying query. + .. seealso:: - .. versionchanged:: 0.6 - ``one()`` fully fetches all results instead of applying - any kind of limit, so that the "unique"-ing of entities does not - conceal multiple object identities. 
+ :meth:`.Query.first` + + :meth:`.Query.one_or_none` """ ret = list(self) @@ -2420,12 +2744,12 @@ class Query(object): def _execute_and_instances(self, querycontext): conn = self._connection_from_session( - mapper=self._mapper_zero_or_none(), + mapper=self._bind_mapper(), clause=querycontext.statement, close_with_result=True) result = conn.execute(querycontext.statement, self._params) - return loading.instances(self, result, querycontext) + return loading.instances(querycontext.query, result, querycontext) @property def column_descriptions(self): @@ -2447,30 +2771,46 @@ class Query(object): 'type':User, 'aliased':False, 'expr':User, + 'entity': User }, { 'name':'id', 'type':Integer(), 'aliased':False, 'expr':User.id, + 'entity': User }, { 'name':'user2', 'type':User, 'aliased':True, - 'expr':user_alias + 'expr':user_alias, + 'entity': user_alias } ] """ + return [ { 'name': ent._label_name, 'type': ent.type, - 'aliased': getattr(ent, 'is_aliased_class', False), - 'expr': ent.expr + 'aliased': getattr(insp_ent, 'is_aliased_class', False), + 'expr': ent.expr, + 'entity': + getattr(insp_ent, "entity", None) + if ent.entity_zero is not None + and not insp_ent.is_clause_element + else None } - for ent in self._entities + for ent, insp_ent in [ + ( + _ent, + (inspect(_ent.entity_zero) + if _ent.entity_zero is not None else None) + ) + for _ent in self._entities + ] ] def instances(self, cursor, __context=None): @@ -2522,6 +2862,7 @@ class Query(object): 'offset': self._offset, 'distinct': self._distinct, 'prefixes': self._prefixes, + 'suffixes': self._suffixes, 'group_by': self._group_by or None, 'having': self._having } @@ -2548,6 +2889,19 @@ class Query(object): SELECT 1 FROM users WHERE users.name = :name_1 ) AS anon_1 + The EXISTS construct is usually used in the WHERE clause:: + + session.query(User.id).filter(q.exists()).scalar() + + Note that some databases such as SQL Server don't allow an + EXISTS expression to be present in the columns clause of a + SELECT. To select a simple boolean value based on the exists + as a WHERE, use :func:`.literal`:: + + from sqlalchemy import literal + + session.query(literal(True)).filter(q.exists()).scalar() + .. versionadded:: 0.8.1 """ @@ -2558,7 +2912,7 @@ class Query(object): # .with_only_columns() after we have a core select() so that # we get just "SELECT 1" without any entities. return sql.exists(self.add_columns('1').with_labels(). - statement.with_only_columns(['1'])) + statement.with_only_columns([1])) def count(self): """Return a count of rows this Query would return. @@ -2605,6 +2959,18 @@ class Query(object): Deletes rows matched by this query from the database. + E.g.:: + + sess.query(User).filter(User.age == 25).\\ + delete(synchronize_session=False) + + sess.query(User).filter(User.age == 25).\\ + delete(synchronize_session='evaluate') + + .. warning:: The :meth:`.Query.delete` method is a "bulk" operation, + which bypasses ORM unit-of-work automation in favor of greater + performance. **Please read all caveats and warnings below.** + :param synchronize_session: chooses the strategy for the removal of matched objects from the session. Valid values are: @@ -2623,8 +2989,7 @@ class Query(object): ``'evaluate'`` - Evaluate the query's criteria in Python straight on the objects in the session. If evaluation of the criteria isn't - implemented, an error is raised. In that case you probably - want to use the 'fetch' strategy as a fallback. + implemented, an error is raised. 
           The expression evaluator currently doesn't account for differing
           string collations between the database and Python.

        :return: the count of rows matched as returned by the database's
          "row count" feature.

-        This method has several key caveats:
+        .. warning:: **Additional Caveats for bulk query deletes**

-        * The method does **not** offer in-Python cascading of relationships
-          - it is assumed that ON DELETE CASCADE/SET NULL/etc. is configured
-          for any foreign key references which require it, otherwise the
-          database may emit an integrity violation if foreign key references
-          are being enforced.
+            * The method does **not** offer in-Python cascading of
+              relationships - it is assumed that ON DELETE CASCADE/SET
+              NULL/etc. is configured for any foreign key references
+              which require it, otherwise the database may emit an
+              integrity violation if foreign key references are being
+              enforced.

-          After the DELETE, dependent objects in the :class:`.Session` which
-          were impacted by an ON DELETE may not contain the current
-          state, or may have been deleted. This issue is resolved once the
-          :class:`.Session` is expired,
-          which normally occurs upon :meth:`.Session.commit` or can be forced
-          by using :meth:`.Session.expire_all`. Accessing an expired object
-          whose row has been deleted will invoke a SELECT to locate the
-          row; when the row is not found, an
-          :class:`~sqlalchemy.orm.exc.ObjectDeletedError` is raised.
+              After the DELETE, dependent objects in the
+              :class:`.Session` which were impacted by an ON DELETE
+              may not contain the current state, or may have been
+              deleted.  This issue is resolved once the
+              :class:`.Session` is expired, which normally occurs upon
+              :meth:`.Session.commit` or can be forced by using
+              :meth:`.Session.expire_all`.  Accessing an expired
+              object whose row has been deleted will invoke a SELECT
+              to locate the row; when the row is not found, an
+              :class:`~sqlalchemy.orm.exc.ObjectDeletedError` is
+              raised.

-        * The :meth:`.MapperEvents.before_delete` and
-          :meth:`.MapperEvents.after_delete`
-          events are **not** invoked from this method. Instead, the
-          :meth:`.SessionEvents.after_bulk_delete` method is provided to act
-          upon a mass DELETE of entity rows.
+            * The ``'fetch'`` strategy results in an additional
+              SELECT statement emitted and will significantly reduce
+              performance.
+
+            * The ``'evaluate'`` strategy performs a scan of
+              all matching objects within the :class:`.Session`; if the
+              contents of the :class:`.Session` are expired, such as
+              via a preceding :meth:`.Session.commit` call, **this will
+              result in SELECT queries emitted for every matching object**.
+
+            * The :meth:`.MapperEvents.before_delete` and
+              :meth:`.MapperEvents.after_delete`
+              events **are not invoked** from this method.  Instead, the
+              :meth:`.SessionEvents.after_bulk_delete` method is provided to
+              act upon a mass DELETE of entity rows.

        .. seealso::

@@ -2670,16 +3048,41 @@

        delete_op.exec_()
        return delete_op.rowcount

-    def update(self, values, synchronize_session='evaluate'):
+    def update(self, values, synchronize_session='evaluate', update_args=None):
        """Perform a bulk update query.

        Updates rows matched by this query in the database.

-        :param values: a dictionary with attributes names as keys and literal
-          values or sql expressions as values.
+        E.g.::
+
+            sess.query(User).filter(User.age == 25).\\
+                update({User.age: User.age - 10}, synchronize_session=False)
+
+            sess.query(User).filter(User.age == 25).\\
+                update({"age": User.age - 10}, synchronize_session='evaluate')
+
+
+        .. warning:: The :meth:`.Query.update` method is a "bulk" operation,
+           which bypasses ORM unit-of-work automation in favor of greater
+           performance.  **Please read all caveats and warnings below.**
+
+
+        :param values: a dictionary with attribute names, or alternatively
+          mapped attributes or SQL expressions, as keys, and literal
+          values or sql expressions as values.  If :ref:`parameter-ordered
+          mode <updates_order_parameters>` is desired, the values can be
+          passed as a list of 2-tuples;
+          this requires that the :paramref:`~sqlalchemy.sql.expression.update.preserve_parameter_order`
+          flag is passed to the :paramref:`.Query.update.update_args` dictionary
+          as well.
+
+          .. versionchanged:: 1.0.0 - string names in the values dictionary
+             are now resolved against the mapped entity; previously, these
+             strings were passed as literal column names with no mapper-level
+             translation.

        :param synchronize_session: chooses the strategy to update the
-          attributes on objects in the session. Valid values are:
+         attributes on objects in the session. Valid values are:

            ``False`` - don't synchronize the session. This option is the most
            efficient and is reliable once the session is expired, which

@@ -2699,44 +3102,65 @@

            The expression evaluator currently doesn't account for differing
            string collations between the database and Python.

+        :param update_args: Optional dictionary, if present will be passed
+         to the underlying :func:`.update` construct as the ``**kw`` for
+         the object.  May be used to pass dialect-specific arguments such
+         as ``mysql_limit``, as well as other special arguments such as
+         :paramref:`~sqlalchemy.sql.expression.update.preserve_parameter_order`.
+
+         .. versionadded:: 1.0.0
+
+
        :return: the count of rows matched as returned by the database's
-          "row count" feature.
+         "row count" feature.

-        This method has several key caveats:
+        .. warning:: **Additional Caveats for bulk query updates**

-        * The method does **not** offer in-Python cascading of relationships
-          - it is assumed that ON UPDATE CASCADE is configured for any foreign
-          key references which require it, otherwise the database may emit an
-          integrity violation if foreign key references are being enforced.
+            * The method does **not** offer in-Python cascading of
+              relationships - it is assumed that ON UPDATE CASCADE is
+              configured for any foreign key references which require
+              it, otherwise the database may emit an integrity
+              violation if foreign key references are being enforced.

-          After the UPDATE, dependent objects in the :class:`.Session` which
-          were impacted by an ON UPDATE CASCADE may not contain the current
-          state; this issue is resolved once the :class:`.Session` is expired,
-          which normally occurs upon :meth:`.Session.commit` or can be forced
-          by using :meth:`.Session.expire_all`.
+              After the UPDATE, dependent objects in the
+              :class:`.Session` which were impacted by an ON UPDATE
+              CASCADE may not contain the current state; this issue is
+              resolved once the :class:`.Session` is expired, which
+              normally occurs upon :meth:`.Session.commit` or can be
+              forced by using :meth:`.Session.expire_all`.
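A sketch of the parameter-ordered mode and ``update_args`` pass-through documented above, reusing the ``User`` mapping assumed in the earlier sketches; the 2-tuple form keeps SET clause order, which matters e.g. on MySQL when one SET clause references another column::

    # ordered SET clauses, requiring preserve_parameter_order to be
    # forwarded to the underlying update() construct via update_args
    session.query(User).filter(User.id == 5).update(
        [('name', 'ed'), ('id', User.id + 100)],
        synchronize_session=False,
        update_args={'preserve_parameter_order': True})

    # dialect-specific keywords pass through the same dictionary:
    session.query(User).filter(User.name == 'x').update(
        {'name': 'y'},
        synchronize_session=False,
        update_args={'mysql_limit': 10})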
-        * As of 0.8, this method will support multiple table updates, as
-          detailed in :ref:`multi_table_updates`, and this behavior does
-          extend to support updates of joined-inheritance and other multiple
-          table mappings.  However, the **join condition of an inheritance
-          mapper is currently not automatically rendered**.
-          Care must be taken in any multiple-table update to explicitly
-          include the joining condition between those tables, even in mappings
-          where this is normally automatic.
-          E.g. if a class ``Engineer`` subclasses ``Employee``, an UPDATE of
-          the ``Engineer`` local table using criteria against the ``Employee``
-          local table might look like::
+            * The ``'fetch'`` strategy results in an additional
+              SELECT statement emitted and will significantly reduce
+              performance.

-            session.query(Engineer).\\
-                filter(Engineer.id == Employee.id).\\
-                filter(Employee.name == 'dilbert').\\
-                update({"engineer_type": "programmer"})
+            * The ``'evaluate'`` strategy performs a scan of
+              all matching objects within the :class:`.Session`; if the
+              contents of the :class:`.Session` are expired, such as
+              via a preceding :meth:`.Session.commit` call, **this will
+              result in SELECT queries emitted for every matching object**.

-        * The :meth:`.MapperEvents.before_update` and
-          :meth:`.MapperEvents.after_update`
-          events are **not** invoked from this method. Instead, the
-          :meth:`.SessionEvents.after_bulk_update` method is provided to act
-          upon a mass UPDATE of entity rows.
+            * The method supports multiple table updates, as detailed
+              in :ref:`multi_table_updates`, and this behavior does
+              extend to support updates of joined-inheritance and
+              other multiple table mappings.  However, the **join
+              condition of an inheritance mapper is not
+              automatically rendered**.  Care must be taken in any
+              multiple-table update to explicitly include the joining
+              condition between those tables, even in mappings where
+              this is normally automatic.  E.g. if a class ``Engineer``
+              subclasses ``Employee``, an UPDATE of the ``Engineer``
+              local table using criteria against the ``Employee``
+              local table might look like::
+
+                  session.query(Engineer).\\
+                      filter(Engineer.id == Employee.id).\\
+                      filter(Employee.name == 'dilbert').\\
+                      update({"engineer_type": "programmer"})
+
+            * The :meth:`.MapperEvents.before_update` and
+              :meth:`.MapperEvents.after_update`
+              events **are not invoked from this method**.  Instead, the
+              :meth:`.SessionEvents.after_bulk_update` method is provided to
+              act upon a mass UPDATE of entity rows.

        .. seealso::

@@ -2746,18 +3170,19 @@

        """

-        # TODO: value keys need to be mapped to corresponding sql cols and
-        # instr.attr.s to string keys
-        # TODO: updates of manytoone relationships need to be converted to
-        # fk assignments
-        # TODO: cascades need handling.
-
+        update_args = update_args or {}
         update_op = persistence.BulkUpdate.factory(
-            self, synchronize_session, values)
+            self, synchronize_session, values, update_args)
         update_op.exec_()
        return update_op.rowcount

    def _compile_context(self, labels=True):
+        if self.dispatch.before_compile:
+            for fn in self.dispatch.before_compile:
+                new_query = fn(self)
+                if new_query is not None:
+                    self = new_query
+
         context = QueryContext(self)

        if context.statement is not None:
@@ -2778,10 +3203,8 @@
            # "load from explicit FROMs" mode,
            # i.e. when select_from() or join() is used
            context.froms = list(context.from_clause)
-        else:
-            # "load from discrete FROMs" mode,
-            # i.e. 
when each _MappedEntity has its own FROM - context.froms = context.froms + # else "load from discrete FROMs" mode, + # i.e. when each _MappedEntity has its own FROM if self._enable_single_crit: self._adjust_for_single_inheritance(context) @@ -2801,6 +3224,7 @@ class Query(object): context.statement = self._compound_eager_statement(context) else: context.statement = self._simple_statement(context) + return context def _compound_eager_statement(self, context): @@ -3003,7 +3427,6 @@ class _MapperEntity(_QueryEntity): else: self._label_name = self.mapper.class_.__name__ self.path = self.entity_zero._path_registry - self.custom_rows = bool(self.mapper.dispatch.append_result) def set_with_polymorphic(self, query, cls_or_mappers, selectable, polymorphic_on): @@ -3082,7 +3505,7 @@ class _MapperEntity(_QueryEntity): return ret - def row_processor(self, query, context, custom_rows): + def row_processor(self, query, context, result): adapter = self._get_entity_clauses(query, context) if context.adapter and adapter: @@ -3099,23 +3522,21 @@ class _MapperEntity(_QueryEntity): self.mapper._equivalent_columns) if query._primary_entity is self: - _instance = loading.instance_processor( - self.mapper, - context, - self.path, - adapter, - only_load_props=query._only_load_props, - refresh_state=context.refresh_state, - polymorphic_discriminator=self._polymorphic_discriminator - ) + only_load_props = query._only_load_props + refresh_state = context.refresh_state else: - _instance = loading.instance_processor( - self.mapper, - context, - self.path, - adapter, - polymorphic_discriminator=self._polymorphic_discriminator - ) + only_load_props = refresh_state = None + + _instance = loading._instance_processor( + self.mapper, + context, + result, + self.path, + adapter, + only_load_props=only_load_props, + refresh_state=refresh_state, + polymorphic_discriminator=self._polymorphic_discriminator + ) return _instance, self._label_name @@ -3136,41 +3557,19 @@ class _MapperEntity(_QueryEntity): ) ) - if self._with_polymorphic: - poly_properties = self.mapper._iterate_polymorphic_properties( - self._with_polymorphic) - else: - poly_properties = self.mapper._polymorphic_properties - - for value in poly_properties: - if query._only_load_props and \ - value.key not in query._only_load_props: - continue - value.setup( - context, - self, - self.path, - adapter, - only_load_props=query._only_load_props, - column_collection=context.primary_columns - ) - - if self._polymorphic_discriminator is not None and \ - self._polymorphic_discriminator \ - is not self.mapper.polymorphic_on: - - if adapter: - pd = adapter.columns[self._polymorphic_discriminator] - else: - pd = self._polymorphic_discriminator - context.primary_columns.append(pd) + loading._setup_entity_query( + context, self.mapper, self, + self.path, adapter, context.primary_columns, + with_polymorphic=self._with_polymorphic, + only_load_props=query._only_load_props, + polymorphic_discriminator=self._polymorphic_discriminator) def __str__(self): return str(self.mapper) @inspection._self_inspects -class Bundle(object): +class Bundle(InspectionAttr): """A grouping of SQL expressions that are returned by a :class:`.Query` under one namespace. @@ -3193,6 +3592,12 @@ class Bundle(object): """If True, queries for a single Bundle will be returned as a single entity, rather than an element within a keyed tuple.""" + is_clause_element = False + + is_mapper = False + + is_aliased_class = False + def __init__(self, name, *exprs, **kw): """Construct a new :class:`.Bundle`. 
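A sketch of subclassing :class:`.Bundle`, matching the revised ``create_row_processor()`` in the hunk that follows - note the 1.0-style row processor takes ``proc(row)`` rather than 0.9's ``proc(row, result)``; ``User`` and ``session`` are assumed from the earlier sketches::

    from sqlalchemy.orm import Bundle

    class DictBundle(Bundle):
        def create_row_processor(self, query, procs, labels):
            """Return dicts instead of the default lightweight named tuples."""
            def proc(row):
                return dict(zip(labels, (p(row) for p in procs)))
            return proc

    bn = DictBundle('mybundle', User.id, User.name)
    for row in session.query(bn).filter(bn.c.name.like('e%')):
        print(row.mybundle['id'], row.mybundle['name'])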
@@ -3275,9 +3680,10 @@ class Bundle(object): :ref:`bundles` - includes an example of subclassing. """ - def proc(row, result): - return util.KeyedTuple( - [proc(row, None) for proc in procs], labels) + keyed_tuple = util.lightweight_named_tuple('result', labels) + + def proc(row): + return keyed_tuple([proc(row) for proc in procs]) return proc @@ -3302,8 +3708,6 @@ class _BundleEntity(_QueryEntity): self.supports_single_entity = self.bundle.single_entity - custom_rows = False - @property def entity_zero(self): for ent in self._entities: @@ -3344,9 +3748,9 @@ class _BundleEntity(_QueryEntity): for ent in self._entities: ent.setup_context(query, context) - def row_processor(self, query, context, custom_rows): + def row_processor(self, query, context, result): procs, labels = zip( - *[ent.row_processor(query, context, custom_rows) + *[ent.row_processor(query, context, result) for ent in self._entities] ) @@ -3361,36 +3765,47 @@ class _ColumnEntity(_QueryEntity): def __init__(self, query, column, namespace=None): self.expr = column self.namespace = namespace + search_entities = True + check_column = False if isinstance(column, util.string_types): column = sql.literal_column(column) self._label_name = column.name + search_entities = False + check_column = True + _entity = None elif isinstance(column, ( attributes.QueryableAttribute, interfaces.PropComparator )): + _entity = getattr(column, '_parententity', None) + if _entity is not None: + search_entities = False self._label_name = column.key column = column._query_clause_element() - else: - self._label_name = getattr(column, 'key', None) - - if not isinstance(column, expression.ColumnElement) and \ - hasattr(column, '_select_iterable'): - for c in column._select_iterable: - if c is column: - break - _ColumnEntity(query, c, namespace=column) - else: + check_column = True + if isinstance(column, Bundle): + _BundleEntity(query, column) return - elif isinstance(column, Bundle): - _BundleEntity(query, column) - return if not isinstance(column, sql.ColumnElement): + if hasattr(column, '_select_iterable'): + # break out an object like Table into + # individual columns + for c in column._select_iterable: + if c is column: + break + _ColumnEntity(query, c, namespace=column) + else: + return + raise sa_exc.InvalidRequestError( "SQL expression, column, or mapped entity " "expected - got '%r'" % (column, ) ) + elif not check_column: + self._label_name = getattr(column, 'key', None) + search_entities = True self.type = type_ = column.type if type_.hashable: @@ -3421,22 +3836,39 @@ class _ColumnEntity(_QueryEntity): # leaking out their entities into the main select construct self.actual_froms = actual_froms = set(column._from_objects) - self.entities = util.OrderedSet( - elem._annotations['parententity'] - for elem in visitors.iterate(column, {}) - if 'parententity' in elem._annotations - and actual_froms.intersection(elem._from_objects) - ) - - if self.entities: - self.entity_zero = list(self.entities)[0] - elif self.namespace is not None: - self.entity_zero = self.namespace + if not search_entities: + self.entity_zero = _entity + if _entity: + self.entities = [_entity] + else: + self.entities = [] + self._from_entities = set(self.entities) else: - self.entity_zero = None + all_elements = [ + elem for elem in visitors.iterate(column, {}) + if 'parententity' in elem._annotations + ] + + self.entities = util.unique_list([ + elem._annotations['parententity'] + for elem in all_elements + if 'parententity' in elem._annotations + ]) + + self._from_entities = 
set([ + elem._annotations['parententity'] + for elem in all_elements + if 'parententity' in elem._annotations + and actual_froms.intersection(elem._from_objects) + ]) + if self.entities: + self.entity_zero = self.entities[0] + elif self.namespace is not None: + self.entity_zero = self.namespace + else: + self.entity_zero = None supports_single_entity = False - custom_rows = False @property def entity_zero_or_selectable(self): @@ -3456,7 +3888,9 @@ class _ColumnEntity(_QueryEntity): def setup_entity(self, ext_info, aliased_adapter): if 'selectable' not in self.__dict__: self.selectable = ext_info.selectable - self.froms.add(ext_info.selectable) + + if self.actual_froms.intersection(ext_info.selectable._from_objects): + self.froms.add(ext_info.selectable) def corresponds_to(self, entity): # TODO: just returning False here, @@ -3470,36 +3904,39 @@ class _ColumnEntity(_QueryEntity): return not _is_aliased_class(self.entity_zero) and \ entity.common_parent(self.entity_zero) - def _resolve_expr_against_query_aliases(self, query, expr, context): - return query._adapt_clause(expr, False, True) - - def row_processor(self, query, context, custom_rows): - column = self._resolve_expr_against_query_aliases( - query, self.column, context) + def row_processor(self, query, context, result): + if ('fetch_column', self) in context.attributes: + column = context.attributes[('fetch_column', self)] + else: + column = query._adapt_clause(self.column, False, True) if context.adapter: column = context.adapter.columns[column] - def proc(row, result): - return row[column] - - return proc, self._label_name + getter = result._getter(column) + return getter, self._label_name def setup_context(self, query, context): - column = self._resolve_expr_against_query_aliases( - query, self.column, context) + column = query._adapt_clause(self.column, False, True) context.froms += tuple(self.froms) context.primary_columns.append(column) + context.attributes[('fetch_column', self)] = column + def __str__(self): return str(self.column) class QueryContext(object): - multi_row_eager_loaders = False - adapter = None - froms = () - for_update = None + __slots__ = ( + 'multi_row_eager_loaders', 'adapter', 'froms', 'for_update', + 'query', 'session', 'autoflush', 'populate_existing', + 'invoke_all_eagers', 'version_check', 'refresh_state', + 'primary_columns', 'secondary_columns', 'eager_order_by', + 'eager_joins', 'create_eager_joins', 'propagate_options', + 'attributes', 'statement', 'from_clause', 'whereclause', + 'order_by', 'labels', '_for_update_arg', 'runid', 'partials' + ) def __init__(self, query): @@ -3516,8 +3953,13 @@ class QueryContext(object): self.whereclause = query._criterion self.order_by = query._order_by + self.multi_row_eager_loaders = False + self.adapter = None + self.froms = () + self.for_update = None self.query = query self.session = query.session + self.autoflush = query._autoflush self.populate_existing = query._populate_existing self.invoke_all_eagers = query._invoke_all_eagers self.version_check = query._version_check diff --git a/lib/python3.4/site-packages/sqlalchemy/orm/relationships.py b/lib/python3.4/site-packages/sqlalchemy/orm/relationships.py index c2debda..c58dd98 100644 --- a/lib/python3.4/site-packages/sqlalchemy/orm/relationships.py +++ b/lib/python3.4/site-packages/sqlalchemy/orm/relationships.py @@ -1,5 +1,5 @@ # orm/relationships.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of 
SQLAlchemy and is released under @@ -16,13 +16,14 @@ and `secondaryjoin` aspects of :func:`.relationship`. from __future__ import absolute_import from .. import sql, util, exc as sa_exc, schema, log +import weakref from .util import CascadeOptions, _orm_annotate, _orm_deannotate from . import dependency from . import attributes from ..sql.util import ( ClauseAdapter, join_condition, _shallow_annotate, visit_binary_product, - _deep_deannotate, selectables_overlap + _deep_deannotate, selectables_overlap, adapt_criterion_to_null ) from ..sql import operators, expression, visitors from .interfaces import (MANYTOMANY, MANYTOONE, ONETOMANY, @@ -112,6 +113,7 @@ class RelationshipProperty(StrategizedProperty): active_history=False, cascade_backrefs=True, load_on_pending=False, + bake_queries=True, strategy_class=None, _local_remote_pairs=None, query_class=None, info=None): @@ -193,7 +195,7 @@ class RelationshipProperty(StrategizedProperty): The :paramref:`~.relationship.secondary` keyword argument is typically applied in the case where the intermediary :class:`.Table` - is not otherwise exprssed in any direct class mapping. If the + is not otherwise expressed in any direct class mapping. If the "secondary" table is also explicitly mapped elsewhere (e.g. as in :ref:`association_pattern`), one should consider applying the :paramref:`~.relationship.viewonly` flag so that this @@ -273,6 +275,31 @@ class RelationshipProperty(StrategizedProperty): :paramref:`~.relationship.backref` - alternative form of backref specification. + :param bake_queries=True: + Use the :class:`.BakedQuery` cache to cache the construction of SQL + used in lazy loads, when the :func:`.bake_lazy_loaders` function has + first been called. Defaults to True and is intended to provide an + "opt out" flag per-relationship when the baked query cache system is + in use. + + .. warning:: + + This flag **only** has an effect when the application-wide + :func:`.bake_lazy_loaders` function has been called. It + defaults to True so is an "opt out" flag. + + Setting this flag to False when baked queries are otherwise in + use might be to reduce + ORM memory use for this :func:`.relationship`, or to work around + unresolved stability issues observed within the baked query + cache system. + + .. versionadded:: 1.0.0 + + .. seealso:: + + :ref:`baked_toplevel` + :param cascade: a comma-separated list of cascade rules which determines how Session operations should be "cascaded" from parent to child. @@ -459,22 +486,18 @@ class RelationshipProperty(StrategizedProperty): nullable, or when the reference is one-to-one or a collection that is guaranteed to have one or at least one entry. - If the joined-eager load is chained onto an existing LEFT OUTER - JOIN, ``innerjoin=True`` will be bypassed and the join will continue - to chain as LEFT OUTER JOIN so that the results don't change. As an - alternative, specify the value ``"nested"``. This will instead nest - the join on the right side, e.g. using the form "a LEFT OUTER JOIN - (b JOIN c)". - - .. versionadded:: 0.9.4 Added ``innerjoin="nested"`` option to - support nesting of eager "inner" joins. + The option supports the same "nested" and "unnested" options as + that of :paramref:`.joinedload.innerjoin`. See that flag + for details on nested / unnested behaviors. .. seealso:: + :paramref:`.joinedload.innerjoin` - the option as specified by + loader option, including detail on nesting behavior. + :ref:`what_kind_of_loading` - Discussion of some details of various loader options. 
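A sketch of the new ``bake_queries`` opt-out flag described above; per the docstring it only has an effect once the application-wide :func:`.bake_lazy_loaders` hook from ``sqlalchemy.ext.baked`` has been invoked, and the ``Parent`` / ``Child`` mappings here are illustrative::

    from sqlalchemy import Column, ForeignKey, Integer
    from sqlalchemy.ext import baked
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import relationship

    baked.bake_lazy_loaders()      # application-wide opt-in to baked lazy loads

    Base = declarative_base()

    class Parent(Base):
        __tablename__ = 'parent'
        id = Column(Integer, primary_key=True)
        # opt this one relationship back out of the baked-query cache,
        # e.g. to reduce memory use or sidestep a cache issue
        children = relationship('Child', bake_queries=False)

    class Child(Base):
        __tablename__ = 'child'
        id = Column(Integer, primary_key=True)
        parent_id = Column(Integer, ForeignKey('parent.id'))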
- :paramref:`.joinedload.innerjoin` - loader option version :param join_depth: when non-``None``, an integer value indicating how many levels @@ -531,7 +554,7 @@ class RelationshipProperty(StrategizedProperty): .. seealso:: - :doc:`/orm/loading` - Full documentation on relationship loader + :doc:`/orm/loading_relationships` - Full documentation on relationship loader configuration. :ref:`dynamic_relationship` - detail on the ``dynamic`` option. @@ -597,30 +620,26 @@ class RelationshipProperty(StrategizedProperty): and examples. :param passive_updates=True: - Indicates loading and INSERT/UPDATE/DELETE behavior when the - source of a foreign key value changes (i.e. an "on update" - cascade), which are typically the primary key columns of the - source row. + Indicates the persistence behavior to take when a referenced + primary key value changes in place, indicating that the referencing + foreign key columns will also need their value changed. - When True, it is assumed that ON UPDATE CASCADE is configured on + When True, it is assumed that ``ON UPDATE CASCADE`` is configured on the foreign key in the database, and that the database will handle propagation of an UPDATE from a source column to - dependent rows. Note that with databases which enforce - referential integrity (i.e. PostgreSQL, MySQL with InnoDB tables), - ON UPDATE CASCADE is required for this operation. The - relationship() will update the value of the attribute on related - items which are locally present in the session during a flush. + dependent rows. When False, the SQLAlchemy :func:`.relationship` + construct will attempt to emit its own UPDATE statements to + modify related targets. However note that SQLAlchemy **cannot** + emit an UPDATE for more than one level of cascade. Also, + setting this flag to False is not compatible in the case where + the database is in fact enforcing referential integrity, unless + those constraints are explicitly "deferred", if the target backend + supports it. - When False, it is assumed that the database does not enforce - referential integrity and will not be issuing its own CASCADE - operation for an update. The relationship() will issue the - appropriate UPDATE statements to the database in response to the - change of a referenced key, and items locally present in the - session during a flush will also be refreshed. - - This flag should probably be set to False if primary key changes - are expected and the database in use doesn't support CASCADE - (i.e. SQLite, MySQL MyISAM tables). + It is highly advised that an application which is employing + mutable primary keys keeps ``passive_updates`` set to True, + and instead uses the referential integrity features of the database + itself in order to handle the change efficiently and fully. .. 
seealso:: @@ -778,6 +797,7 @@ class RelationshipProperty(StrategizedProperty): """ + super(RelationshipProperty, self).__init__() self.uselist = uselist self.argument = argument @@ -804,6 +824,7 @@ class RelationshipProperty(StrategizedProperty): self.join_depth = join_depth self.local_remote_pairs = _local_remote_pairs self.extension = extension + self.bake_queries = bake_queries self.load_on_pending = load_on_pending self.comparator_factory = comparator_factory or \ RelationshipProperty.Comparator @@ -875,13 +896,13 @@ class RelationshipProperty(StrategizedProperty): """ self.prop = prop - self._parentmapper = parentmapper + self._parententity = parentmapper self._adapt_to_entity = adapt_to_entity if of_type: self._of_type = of_type def adapt_to_entity(self, adapt_to_entity): - return self.__class__(self.property, self._parentmapper, + return self.__class__(self.property, self._parententity, adapt_to_entity=adapt_to_entity, of_type=self._of_type) @@ -933,7 +954,7 @@ class RelationshipProperty(StrategizedProperty): """ return RelationshipProperty.Comparator( self.property, - self._parentmapper, + self._parententity, adapt_to_entity=self._adapt_to_entity, of_type=cls) @@ -1224,11 +1245,15 @@ class RelationshipProperty(StrategizedProperty): state = attributes.instance_state(other) def state_bindparam(x, state, col): - o = state.obj() # strong ref + dict_ = state.dict return sql.bindparam( - x, unique=True, callable_=lambda: - self.property.mapper. - _get_committed_attr_by_column(o, col)) + x, unique=True, + callable_=self.property._get_attr_w_warn_on_none( + col, + self.property.mapper._get_state_attr_by_column, + state, dict_, col, passive=attributes.PASSIVE_OFF + ) + ) def adapt(col): if self.adapter: @@ -1243,13 +1268,14 @@ class RelationshipProperty(StrategizedProperty): adapt(x) == None) for (x, y) in self.property.local_remote_pairs]) - criterion = sql.and_(*[x == y for (x, y) in - zip( - self.property.mapper.primary_key, - self.property. - mapper. 
- primary_key_from_instance(other)) + criterion = sql.and_(*[ + x == y for (x, y) in + zip( + self.property.mapper.primary_key, + self.property.mapper.primary_key_from_instance(other) + ) ]) + return ~self._criterion_exists(criterion) def __ne__(self, other): @@ -1293,8 +1319,9 @@ class RelationshipProperty(StrategizedProperty): """ if isinstance(other, (util.NoneType, expression.Null)): if self.property.direction == MANYTOONE: - return sql.or_(*[x != None for x in - self.property._calculated_foreign_keys]) + return _orm_annotate(~self.property._optimized_compare( + None, adapt_source=self.adapter)) + else: return self._criterion_exists() elif self.property.uselist: @@ -1303,7 +1330,7 @@ class RelationshipProperty(StrategizedProperty): " to an object or collection; use " "contains() to test for membership.") else: - return self.__negated_contains_or_equals(other) + return _orm_annotate(self.__negated_contains_or_equals(other)) @util.memoized_property def property(self): @@ -1311,36 +1338,88 @@ class RelationshipProperty(StrategizedProperty): mapperlib.Mapper._configure_all() return self.prop - def compare(self, op, value, - value_is_parent=False, - alias_secondary=True): - if op == operators.eq: - if value is None: - if self.uselist: - return ~sql.exists([1], self.primaryjoin) - else: - return self._optimized_compare( - None, - value_is_parent=value_is_parent, - alias_secondary=alias_secondary) - else: - return self._optimized_compare( - value, - value_is_parent=value_is_parent, - alias_secondary=alias_secondary) - else: - return op(self.comparator, value) + def _with_parent(self, instance, alias_secondary=True): + assert instance is not None + return self._optimized_compare( + instance, value_is_parent=True, alias_secondary=alias_secondary) - def _optimized_compare(self, value, value_is_parent=False, + def _optimized_compare(self, state, value_is_parent=False, adapt_source=None, alias_secondary=True): - if value is not None: - value = attributes.instance_state(value) - return self._lazy_strategy.lazy_clause( - value, - reverse_direction=not value_is_parent, - alias_secondary=alias_secondary, - adapt_source=adapt_source) + if state is not None: + state = attributes.instance_state(state) + + reverse_direction = not value_is_parent + + if state is None: + return self._lazy_none_clause( + reverse_direction, + adapt_source=adapt_source) + + if not reverse_direction: + criterion, bind_to_col = \ + self._lazy_strategy._lazywhere, \ + self._lazy_strategy._bind_to_col + else: + criterion, bind_to_col = \ + self._lazy_strategy._rev_lazywhere, \ + self._lazy_strategy._rev_bind_to_col + + if reverse_direction: + mapper = self.mapper + else: + mapper = self.parent + + dict_ = attributes.instance_dict(state.obj()) + + def visit_bindparam(bindparam): + if bindparam._identifying_key in bind_to_col: + bindparam.callable = self._get_attr_w_warn_on_none( + bind_to_col[bindparam._identifying_key], + mapper._get_state_attr_by_column, + state, dict_, + bind_to_col[bindparam._identifying_key], + passive=attributes.PASSIVE_OFF) + + if self.secondary is not None and alias_secondary: + criterion = ClauseAdapter( + self.secondary.alias()).\ + traverse(criterion) + + criterion = visitors.cloned_traverse( + criterion, {}, {'bindparam': visit_bindparam}) + + if adapt_source: + criterion = adapt_source(criterion) + return criterion + + def _get_attr_w_warn_on_none(self, column, fn, *arg, **kw): + def _go(): + value = fn(*arg, **kw) + if value is None: + util.warn( + "Got None for value of column %s; this is unsupported 
" + "for a relationship comparison and will not " + "currently produce an IS comparison " + "(but may in a future release)" % column) + return value + return _go + + def _lazy_none_clause(self, reverse_direction=False, adapt_source=None): + if not reverse_direction: + criterion, bind_to_col = \ + self._lazy_strategy._lazywhere, \ + self._lazy_strategy._bind_to_col + else: + criterion, bind_to_col = \ + self._lazy_strategy._rev_lazywhere, \ + self._lazy_strategy._rev_bind_to_col + + criterion = adapt_criterion_to_null(criterion, bind_to_col) + + if adapt_source: + criterion = adapt_source(criterion) + return criterion def __str__(self): return str(self.parent.class_.__name__) + "." + self.key @@ -1551,6 +1630,7 @@ class RelationshipProperty(StrategizedProperty): self._check_cascade_settings(self._cascade) self._post_init() self._generate_backref() + self._join_condition._warn_for_conflicting_sync_targets() super(RelationshipProperty, self).do_init() self._lazy_strategy = self._get_strategy((("lazy", "select"),)) @@ -1637,7 +1717,7 @@ class RelationshipProperty(StrategizedProperty): """Test that this relationship is legal, warn about inheritance conflicts.""" - if not self.is_primary() and not mapperlib.class_mapper( + if self.parent.non_primary and not mapperlib.class_mapper( self.parent.class_, configure=False).has_property(self.key): raise sa_exc.ArgumentError( @@ -1723,7 +1803,7 @@ class RelationshipProperty(StrategizedProperty): """Interpret the 'backref' instruction to create a :func:`.relationship` complementary to this one.""" - if not self.is_primary(): + if self.parent.non_primary: return if self.backref is not None and not self.back_populates: if isinstance(self.backref, util.string_types): @@ -2200,7 +2280,7 @@ class JoinCondition(object): elif self._local_remote_pairs or self._remote_side: self._annotate_remote_from_args() elif self._refers_to_parent_table(): - self._annotate_selfref(lambda col: "foreign" in col._annotations) + self._annotate_selfref(lambda col: "foreign" in col._annotations, False) elif self._tables_overlap(): self._annotate_remote_with_overlap() else: @@ -2219,7 +2299,7 @@ class JoinCondition(object): self.secondaryjoin = visitors.replacement_traverse( self.secondaryjoin, {}, repl) - def _annotate_selfref(self, fn): + def _annotate_selfref(self, fn, remote_side_given): """annotate 'remote' in primaryjoin, secondaryjoin when the relationship is detected as self-referential. 
@@ -2234,7 +2314,7 @@ class JoinCondition(object):
                 if fn(binary.right) and not equated:
                     binary.right = binary.right._annotate(
                         {"remote": True})
-            else:
+            elif not remote_side_given:
                 self._warn_non_column_elements()
 
         self.primaryjoin = visitors.cloned_traverse(
@@ -2259,7 +2339,7 @@ class JoinCondition(object):
             remote_side = self._remote_side
 
         if self._refers_to_parent_table():
-            self._annotate_selfref(lambda col: col in remote_side)
+            self._annotate_selfref(lambda col: col in remote_side, True)
         else:
             def repl(element):
                 if element in remote_side:
@@ -2280,12 +2360,21 @@ class JoinCondition(object):
             binary.right, binary.left = proc_left_right(binary.right,
                                                         binary.left)
 
+        check_entities = self.prop is not None and \
+            self.prop.mapper is not self.prop.parent
+
         def proc_left_right(left, right):
             if isinstance(left, expression.ColumnClause) and \
                     isinstance(right, expression.ColumnClause):
                 if self.child_selectable.c.contains_column(right) and \
                         self.parent_selectable.c.contains_column(left):
                     right = right._annotate({"remote": True})
+            elif check_entities and \
+                    right._annotations.get('parentmapper') is self.prop.mapper:
+                right = right._annotate({"remote": True})
+            elif check_entities and \
+                    left._annotations.get('parentmapper') is self.prop.mapper:
+                left = left._annotate({"remote": True})
             else:
                 self._warn_non_column_elements()
 
@@ -2538,6 +2627,60 @@ class JoinCondition(object):
         self.secondary_synchronize_pairs = \
             self._deannotate_pairs(secondary_sync_pairs)
 
+    _track_overlapping_sync_targets = weakref.WeakKeyDictionary()
+
+    def _warn_for_conflicting_sync_targets(self):
+        if not self.support_sync:
+            return
+
+        # we would like to detect if we are synchronizing any column
+        # pairs in conflict with another relationship that wishes to sync
+        # an entirely different column to the same target. This is a
+        # very rare edge case so we will try to minimize the memory/overhead
+        # impact of this check
+        for from_, to_ in [
+            (from_, to_) for (from_, to_) in self.synchronize_pairs
+        ] + [
+            (from_, to_) for (from_, to_) in self.secondary_synchronize_pairs
+        ]:
+            # save ourselves a ton of memory and overhead by only
+            # considering columns that are subject to overlapping
+            # FK constraints at the core level. This condition can arise
+            # if multiple relationships overlap foreign() directly, but
+            # we're going to assume it's typically a ForeignKeyConstraint-
+            # level configuration that benefits from this warning.
+            if len(to_.foreign_keys) < 2:
+                continue
+
+            if to_ not in self._track_overlapping_sync_targets:
+                self._track_overlapping_sync_targets[to_] = \
+                    weakref.WeakKeyDictionary({self.prop: from_})
+            else:
+                other_props = []
+                prop_to_from = self._track_overlapping_sync_targets[to_]
+                for pr, fr_ in prop_to_from.items():
+                    if pr.mapper in mapperlib._mapper_registry and \
+                            fr_ is not from_ and \
+                            pr not in self.prop._reverse_property:
+                        other_props.append((pr, fr_))
+
+                if other_props:
+                    util.warn(
+                        "relationship '%s' will copy column %s to column %s, "
+                        "which conflicts with relationship(s): %s. "
+                        "Consider applying "
+                        "viewonly=True to read-only relationships, or provide "
+                        "a primaryjoin condition marking writable columns "
+                        "with the foreign() annotation."
% ( + self.prop, + from_, to_, + ", ".join( + "'%s' (copies %s to %s)" % (pr, fr_, to_) + for (pr, fr_) in other_props) + ) + ) + self._track_overlapping_sync_targets[to_][self.prop] = from_ + @util.memoized_property def remote_columns(self): return self._gather_join_annotations("remote") @@ -2654,27 +2797,31 @@ class JoinCondition(object): def create_lazy_clause(self, reverse_direction=False): binds = util.column_dict() - lookup = collections.defaultdict(list) equated_columns = util.column_dict() - if reverse_direction and self.secondaryjoin is None: - for l, r in self.local_remote_pairs: - lookup[r].append((r, l)) - equated_columns[l] = r - else: - # replace all "local side" columns, which is - # anything that isn't marked "remote" + has_secondary = self.secondaryjoin is not None + + if has_secondary: + lookup = collections.defaultdict(list) for l, r in self.local_remote_pairs: lookup[l].append((l, r)) equated_columns[r] = l + elif not reverse_direction: + for l, r in self.local_remote_pairs: + equated_columns[r] = l + else: + for l, r in self.local_remote_pairs: + equated_columns[l] = r def col_to_bind(col): - if (reverse_direction and col in lookup) or \ - (not reverse_direction and "local" in col._annotations): - if col in lookup: - for tobind, equated in lookup[col]: - if equated in binds: - return None + + if ( + (not reverse_direction and 'local' in col._annotations) or + reverse_direction and ( + (has_secondary and col in lookup) or + (not has_secondary and 'remote' in col._annotations) + ) + ): if col not in binds: binds[col] = sql.bindparam( None, None, type_=col.type, unique=True) diff --git a/lib/python3.4/site-packages/sqlalchemy/orm/scoping.py b/lib/python3.4/site-packages/sqlalchemy/orm/scoping.py index 71648d1..6306514 100644 --- a/lib/python3.4/site-packages/sqlalchemy/orm/scoping.py +++ b/lib/python3.4/site-packages/sqlalchemy/orm/scoping.py @@ -1,5 +1,5 @@ # orm/scoping.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -21,6 +21,12 @@ class scoped_session(object): """ + session_factory = None + """The `session_factory` provided to `__init__` is stored in this + attribute and may be accessed at a later time. This can be useful when + a new non-scoped :class:`.Session` or :class:`.Connection` to the + database is needed.""" + def __init__(self, session_factory, scopefunc=None): """Construct a new :class:`.scoped_session`. @@ -38,6 +44,7 @@ class scoped_session(object): """ self.session_factory = session_factory + if scopefunc: self.registry = ScopedRegistry(session_factory, scopefunc) else: @@ -45,12 +52,12 @@ class scoped_session(object): def __call__(self, **kw): """Return the current :class:`.Session`, creating it - using the session factory if not present. + using the :attr:`.scoped_session.session_factory` if not present. :param \**kw: Keyword arguments will be passed to the - session factory callable, if an existing :class:`.Session` - is not present. If the :class:`.Session` is present and - keyword arguments have been passed, + :attr:`.scoped_session.session_factory` callable, if an existing + :class:`.Session` is not present. If the :class:`.Session` is present + and keyword arguments have been passed, :exc:`~sqlalchemy.exc.InvalidRequestError` is raised. 
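# Sketch of the ``session_factory`` attribute documented above, assuming
# ``engine`` is an already-configured Engine: the factory handed to
# scoped_session() is retained, so a one-off, non-scoped Session can be
# created from the same configuration.
from sqlalchemy.orm import scoped_session, sessionmaker

Session = scoped_session(sessionmaker(bind=engine))

scoped = Session()                      # registry-managed, thread-local
standalone = Session.session_factory()  # plain Session, caller-managed
standalone.close()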
""" diff --git a/lib/python3.4/site-packages/sqlalchemy/orm/session.py b/lib/python3.4/site-packages/sqlalchemy/orm/session.py index dd82fa0..000441f 100644 --- a/lib/python3.4/site-packages/sqlalchemy/orm/session.py +++ b/lib/python3.4/site-packages/sqlalchemy/orm/session.py @@ -1,5 +1,5 @@ # orm/session.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -20,6 +20,8 @@ from .base import ( _class_to_mapper, _state_mapper, object_state, _none_set, state_str, instance_str ) +import itertools +from . import persistence from .unitofwork import UOWTransaction from . import state as statelib import sys @@ -45,7 +47,6 @@ def _state_session(state): class _SessionClassMethods(object): - """Class-level methods for :class:`.Session`, :class:`.sessionmaker`.""" @classmethod @@ -84,7 +85,6 @@ CLOSED = util.symbol('CLOSED') class SessionTransaction(object): - """A :class:`.Session`-level transaction. :class:`.SessionTransaction` is a mostly behind-the-scenes object @@ -226,10 +226,10 @@ class SessionTransaction(object): def _is_transaction_boundary(self): return self.nested or not self._parent - def connection(self, bindkey, **kwargs): + def connection(self, bindkey, execution_options=None, **kwargs): self._assert_active() bind = self.session.get_bind(bindkey, **kwargs) - return self._connection_for_bind(bind) + return self._connection_for_bind(bind, execution_options) def _begin(self, nested=False): self._assert_active() @@ -237,14 +237,21 @@ class SessionTransaction(object): self.session, self, nested=nested) def _iterate_parents(self, upto=None): - if self._parent is upto: - return (self,) - else: - if self._parent is None: + + current = self + result = () + while current: + result += (current, ) + if current._parent is upto: + break + elif current._parent is None: raise sa_exc.InvalidRequestError( "Transaction %s is not on the active transaction list" % ( upto)) - return (self,) + self._parent._iterate_parents(upto) + else: + current = current._parent + + return result def _take_snapshot(self): if not self._is_transaction_boundary: @@ -271,7 +278,7 @@ class SessionTransaction(object): del s.key for s, (oldkey, newkey) in self._key_switches.items(): - self.session.identity_map.discard(s) + self.session.identity_map.safe_discard(s) s.key = oldkey self.session.identity_map.replace(s) @@ -293,22 +300,27 @@ class SessionTransaction(object): if not self.nested and self.session.expire_on_commit: for s in self.session.identity_map.all_states(): s._expire(s.dict, self.session.identity_map._modified) - for s in self._deleted: - s.session_id = None + for s in list(self._deleted): + s._detach() self._deleted.clear() elif self.nested: self._parent._new.update(self._new) + self._parent._dirty.update(self._dirty) self._parent._deleted.update(self._deleted) self._parent._key_switches.update(self._key_switches) - def _connection_for_bind(self, bind): + def _connection_for_bind(self, bind, execution_options): self._assert_active() if bind in self._connections: + if execution_options: + util.warn( + "Connection is already established for the " + "given bind; execution_options ignored") return self._connections[bind][0] if self._parent: - conn = self._parent._connection_for_bind(bind) + conn = self._parent._connection_for_bind(bind, execution_options) if not self.nested: return conn else: @@ -321,6 +333,9 @@ class SessionTransaction(object): else: conn = 
bind.contextual_connect() + if execution_options: + conn = conn.execution_options(**execution_options) + if self.session.twophase and self._parent is None: transaction = conn.begin_twophase() elif self.nested: @@ -397,26 +412,29 @@ class SessionTransaction(object): for subtransaction in stx._iterate_parents(upto=self): subtransaction.close() + boundary = self if self._state in (ACTIVE, PREPARED): for transaction in self._iterate_parents(): if transaction._parent is None or transaction.nested: transaction._rollback_impl() transaction._state = DEACTIVE + boundary = transaction break else: transaction._state = DEACTIVE sess = self.session - if self.session._enable_transaction_accounting and \ + if sess._enable_transaction_accounting and \ not sess._is_clean(): + # if items were added, deleted, or mutated # here, we need to re-restore the snapshot util.warn( "Session's state has been changed on " "a non-active transaction - this state " "will be discarded.") - self._restore_snapshot(dirty_only=self.nested) + boundary._restore_snapshot(dirty_only=boundary.nested) self.close() if self._parent and _capture_exception: @@ -435,11 +453,13 @@ class SessionTransaction(object): self.session.dispatch.after_rollback(self.session) - def close(self): + def close(self, invalidate=False): self.session.transaction = self._parent if self._parent is None: for connection, transaction, autoclose in \ set(self._connections.values()): + if invalidate: + connection.invalidate() if autoclose: connection.close() else: @@ -473,7 +493,6 @@ class SessionTransaction(object): class Session(_SessionClassMethods): - """Manages persistence operations for ORM-mapped objects. The Session's usage paradigm is described at :doc:`/orm/session`. @@ -485,7 +504,8 @@ class Session(_SessionClassMethods): '__contains__', '__iter__', 'add', 'add_all', 'begin', 'begin_nested', 'close', 'commit', 'connection', 'delete', 'execute', 'expire', 'expire_all', 'expunge', 'expunge_all', 'flush', 'get_bind', - 'is_modified', + 'is_modified', 'bulk_save_objects', 'bulk_insert_mappings', + 'bulk_update_mappings', 'merge', 'query', 'refresh', 'rollback', 'scalar') @@ -529,8 +549,8 @@ class Session(_SessionClassMethods): :meth:`~.Session.flush` call to this ``Session`` before proceeding. This is a convenience feature so that :meth:`~.Session.flush` need not be called repeatedly in order for database queries to retrieve - results. It's typical that ``autoflush`` is used in conjunction with - ``autocommit=False``. In this scenario, explicit calls to + results. It's typical that ``autoflush`` is used in conjunction + with ``autocommit=False``. In this scenario, explicit calls to :meth:`~.Session.flush` are rarely needed; you usually only need to call :meth:`~.Session.commit` (which flushes) to finalize changes. @@ -546,8 +566,8 @@ class Session(_SessionClassMethods): :class:`.Engine` or :class:`.Connection` objects. Operations which proceed relative to a particular :class:`.Mapper` will consult this dictionary for the direct :class:`.Mapper` instance as - well as the mapper's ``mapped_table`` attribute in order to locate a - connectable to use. The full resolution is described in the + well as the mapper's ``mapped_table`` attribute in order to locate + a connectable to use. The full resolution is described in the :meth:`.Session.get_bind`. Usage looks like:: @@ -594,8 +614,8 @@ class Session(_SessionClassMethods): .. 
versionadded:: 0.9.0 :param query_cls: Class which should be used to create new Query - objects, as returned by the :meth:`~.Session.query` method. Defaults - to :class:`.Query`. + objects, as returned by the :meth:`~.Session.query` method. + Defaults to :class:`.Query`. :param twophase: When ``True``, all transactions will be started as a "two phase" transaction, i.e. using the "two phase" semantics @@ -610,15 +630,26 @@ class Session(_SessionClassMethods): ``False``, objects placed in the :class:`.Session` will be strongly referenced until explicitly removed or the :class:`.Session` is closed. **Deprecated** - this option - is obsolete. + is present to allow compatibility with older applications, but + it is recommended that strong references to objects + be maintained by the calling application + externally to the :class:`.Session` itself, + to the extent that is required by the application. """ if weak_identity_map: self._identity_cls = identity.WeakInstanceDict else: - util.warn_deprecated("weak_identity_map=False is deprecated. " - "This feature is not needed.") + util.warn_deprecated( + "weak_identity_map=False is deprecated. " + "It is present to allow compatibility with older " + "applications, but " + "it is recommended that strong references to " + "objects be maintained by the calling application " + "externally to the :class:`.Session` itself, " + "to the extent that is required by the application.") + self._identity_cls = identity.StrongInstanceDict self.identity_map = self._identity_cls() @@ -644,14 +675,8 @@ class Session(_SessionClassMethods): SessionExtension._adapt_listener(self, ext) if binds is not None: - for mapperortable, bind in binds.items(): - insp = inspect(mapperortable) - if insp.is_selectable: - self.bind_table(mapperortable, bind) - elif insp.is_mapper: - self.bind_mapper(mapperortable, bind) - else: - assert False + for key, bind in binds.items(): + self._add_bind(key, bind) if not self.autocommit: self.begin() @@ -666,7 +691,7 @@ class Session(_SessionClassMethods): def info(self): """A user-modifiable dictionary. - The initial value of this dictioanry can be populated using the + The initial value of this dictionary can be populated using the ``info`` argument to the :class:`.Session` constructor or :class:`.sessionmaker` constructor or factory methods. The dictionary here is always local to this :class:`.Session` and can be modified @@ -797,6 +822,7 @@ class Session(_SessionClassMethods): def connection(self, mapper=None, clause=None, bind=None, close_with_result=False, + execution_options=None, **kw): """Return a :class:`.Connection` object corresponding to this :class:`.Session` object's transactional state. @@ -841,6 +867,18 @@ class Session(_SessionClassMethods): configured with ``autocommit=True`` and does not already have a transaction in progress. + :param execution_options: a dictionary of execution options that will + be passed to :meth:`.Connection.execution_options`, **when the + connection is first procured only**. If the connection is already + present within the :class:`.Session`, a warning is emitted and + the arguments are ignored. + + .. versionadded:: 0.9.9 + + .. 
seealso:: + + :ref:`session_transaction_isolation` + :param \**kw: Additional keyword arguments are sent to :meth:`get_bind()`, allowing additional arguments to be passed to custom @@ -851,13 +889,18 @@ class Session(_SessionClassMethods): bind = self.get_bind(mapper, clause=clause, **kw) return self._connection_for_bind(bind, - close_with_result=close_with_result) + close_with_result=close_with_result, + execution_options=execution_options) - def _connection_for_bind(self, engine, **kwargs): + def _connection_for_bind(self, engine, execution_options=None, **kw): if self.transaction is not None: - return self.transaction._connection_for_bind(engine) + return self.transaction._connection_for_bind( + engine, execution_options) else: - return engine.contextual_connect(**kwargs) + conn = engine.contextual_connect(**kw) + if execution_options: + conn = conn.execution_options(**execution_options) + return conn def execute(self, clause, params=None, mapper=None, bind=None, **kw): """Execute a SQL expression construct or string statement within @@ -1006,10 +1049,46 @@ class Session(_SessionClassMethods): not use any connection resources until they are first needed. """ + self._close_impl(invalidate=False) + + def invalidate(self): + """Close this Session, using connection invalidation. + + This is a variant of :meth:`.Session.close` that will additionally + ensure that the :meth:`.Connection.invalidate` method will be called + on all :class:`.Connection` objects. This can be called when + the database is known to be in a state where the connections are + no longer safe to be used. + + E.g.:: + + try: + sess = Session() + sess.add(User()) + sess.commit() + except gevent.Timeout: + sess.invalidate() + raise + except: + sess.rollback() + raise + + This clears all items and ends any transaction in progress. + + If this session were created with ``autocommit=False``, a new + transaction is immediately begun. Note that this new transaction does + not use any connection resources until they are first needed. + + .. versionadded:: 0.9.9 + + """ + self._close_impl(invalidate=True) + + def _close_impl(self, invalidate): self.expunge_all() if self.transaction is not None: for transaction in self.transaction._iterate_parents(): - transaction.close() + transaction.close(invalidate) def expunge_all(self): """Remove all object instances from this ``Session``. @@ -1029,40 +1108,47 @@ class Session(_SessionClassMethods): # TODO: + crystallize + document resolution order # vis. bind_mapper/bind_table + def _add_bind(self, key, bind): + try: + insp = inspect(key) + except sa_exc.NoInspectionAvailable: + if not isinstance(key, type): + raise exc.ArgumentError( + "Not acceptable bind target: %s" % + key) + else: + self.__binds[key] = bind + else: + if insp.is_selectable: + self.__binds[insp] = bind + elif insp.is_mapper: + self.__binds[insp.class_] = bind + for selectable in insp._all_tables: + self.__binds[selectable] = bind + else: + raise exc.ArgumentError( + "Not acceptable bind target: %s" % + key) + def bind_mapper(self, mapper, bind): - """Bind operations for a mapper to a Connectable. + """Associate a :class:`.Mapper` with a "bind", e.g. a :class:`.Engine` + or :class:`.Connection`. - mapper - A mapper instance or mapped class - - bind - Any Connectable: a :class:`.Engine` or :class:`.Connection`. - - All subsequent operations involving this mapper will use the given - `bind`. + The given mapper is added to a lookup used by the + :meth:`.Session.get_bind` method. 
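# Sketch of the bind resolution described above, assuming two
# hypothetical engines and the Parent/Child mapping from earlier: binds
# may be keyed to classes or tables, and are consulted by get_bind().
from sqlalchemy.orm import Session

session = Session(binds={
    Parent: engine_one,           # Parent and its subclasses
    Child.__table__: engine_two,  # anything touching this table
})

# equivalent post-construction forms:
session.bind_mapper(Parent, engine_one)
session.bind_table(Child.__table__, engine_two)

# the 0.9.9 execution_options hook, applied only when the connection
# is first procured for the current transaction:
conn = session.connection(
    execution_options={'isolation_level': 'SERIALIZABLE'})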
""" - if isinstance(mapper, type): - mapper = class_mapper(mapper) - - self.__binds[mapper.base_mapper] = bind - for t in mapper._all_tables: - self.__binds[t] = bind + self._add_bind(mapper, bind) def bind_table(self, table, bind): - """Bind operations on a Table to a Connectable. + """Associate a :class:`.Table` with a "bind", e.g. a :class:`.Engine` + or :class:`.Connection`. - table - A :class:`.Table` instance - - bind - Any Connectable: a :class:`.Engine` or :class:`.Connection`. - - All subsequent operations involving this :class:`.Table` will use the - given `bind`. + The given mapper is added to a lookup used by the + :meth:`.Session.get_bind` method. """ - self.__binds[table] = bind + self._add_bind(table, bind) def get_bind(self, mapper=None, clause=None): """Return a "bind" to which this :class:`.Session` is bound. @@ -1116,6 +1202,7 @@ class Session(_SessionClassMethods): bound :class:`.MetaData`. """ + if mapper is clause is None: if self.bind: return self.bind @@ -1125,15 +1212,23 @@ class Session(_SessionClassMethods): "Connection, and no context was provided to locate " "a binding.") - c_mapper = mapper is not None and _class_to_mapper(mapper) or None + if mapper is not None: + try: + mapper = inspect(mapper) + except sa_exc.NoInspectionAvailable: + if isinstance(mapper, type): + raise exc.UnmappedClassError(mapper) + else: + raise - # manually bound? if self.__binds: - if c_mapper: - if c_mapper.base_mapper in self.__binds: - return self.__binds[c_mapper.base_mapper] - elif c_mapper.mapped_table in self.__binds: - return self.__binds[c_mapper.mapped_table] + if mapper: + for cls in mapper.class_.__mro__: + if cls in self.__binds: + return self.__binds[cls] + if clause is None: + clause = mapper.mapped_table + if clause is not None: for t in sql_util.find_tables(clause, include_crud=True): if t in self.__binds: @@ -1145,12 +1240,12 @@ class Session(_SessionClassMethods): if isinstance(clause, sql.expression.ClauseElement) and clause.bind: return clause.bind - if c_mapper and c_mapper.mapped_table.bind: - return c_mapper.mapped_table.bind + if mapper and mapper.mapped_table.bind: + return mapper.mapped_table.bind context = [] if mapper is not None: - context.append('mapper %s' % c_mapper) + context.append('mapper %s' % mapper) if clause is not None: context.append('SQL expression') @@ -1397,11 +1492,12 @@ class Session(_SessionClassMethods): self._new.pop(state) state._detach() elif self.identity_map.contains_state(state): - self.identity_map.discard(state) + self.identity_map.safe_discard(state) self._deleted.pop(state, None) state._detach() elif self.transaction: self.transaction._deleted.pop(state, None) + state._detach() def _register_newly_persistent(self, states): for state in states: @@ -1413,7 +1509,7 @@ class Session(_SessionClassMethods): instance_key = mapper._identity_key_from_state(state) - if _none_set.issubset(instance_key[1]) and \ + if _none_set.intersection(instance_key[1]) and \ not mapper.allow_partial_pks or \ _none_set.issuperset(instance_key[1]): raise exc.FlushError( @@ -1430,10 +1526,10 @@ class Session(_SessionClassMethods): if state.key is None: state.key = instance_key elif state.key != instance_key: - # primary key switch. use discard() in case another + # primary key switch. 
use safe_discard() in case another # state has already replaced this one in the identity # map (see test/orm/test_naturalpks.py ReversePKsTest) - self.identity_map.discard(state) + self.identity_map.safe_discard(state) if state in self.transaction._key_switches: orig_key = self.transaction._key_switches[state][0] else: @@ -1467,7 +1563,7 @@ class Session(_SessionClassMethods): if self._enable_transaction_accounting and self.transaction: self.transaction._deleted[state] = True - self.identity_map.discard(state) + self.identity_map.safe_discard(state) self._deleted.pop(state, None) state.deleted = True @@ -1630,6 +1726,9 @@ class Session(_SessionClassMethods): "all changes on mapped instances before merging with " "load=False.") key = mapper._identity_key_from_state(state) + key_is_persistent = attributes.NEVER_SET not in key[1] + else: + key_is_persistent = True if key in self.identity_map: merged = self.identity_map[key] @@ -1646,9 +1745,10 @@ class Session(_SessionClassMethods): self._update_impl(merged_state) new_instance = True - elif not _none_set.issubset(key[1]) or \ + elif key_is_persistent and ( + not _none_set.intersection(key[1]) or (mapper.allow_partial_pks and - not _none_set.issuperset(key[1])): + not _none_set.issuperset(key[1]))): merged = self.query(mapper.class_).get(key[1]) else: merged = None @@ -1746,7 +1846,7 @@ class Session(_SessionClassMethods): "function to send this object back to the transient state." % state_str(state) ) - self._before_attach(state) + self._before_attach(state, check_identity_map=False) self._deleted.pop(state, None) if discard_existing: self.identity_map.replace(state) @@ -1826,13 +1926,12 @@ class Session(_SessionClassMethods): self._attach(state, include_before=True) state._load_pending = True - def _before_attach(self, state): + def _before_attach(self, state, check_identity_map=True): if state.session_id != self.hash_key and \ self.dispatch.before_attach: self.dispatch.before_attach(self, state.obj()) - def _attach(self, state, include_before=False): - if state.key and \ + if check_identity_map and state.key and \ state.key in self.identity_map and \ not self.identity_map.contains_state(state): raise sa_exc.InvalidRequestError( @@ -1848,10 +1947,11 @@ class Session(_SessionClassMethods): "(this is '%s')" % (state_str(state), state.session_id, self.hash_key)) + def _attach(self, state, include_before=False): + if state.session_id != self.hash_key: - if include_before and \ - self.dispatch.before_attach: - self.dispatch.before_attach(self, state.obj()) + if include_before: + self._before_attach(state) state.session_id = self.hash_key if state.modified and state._strong_obj is None: state._strong_obj = state.obj() @@ -1898,7 +1998,7 @@ class Session(_SessionClassMethods): For ``autocommit`` Sessions with no active manual transaction, flush() will create a transaction on the fly that surrounds the entire set of - operations int the flush. + operations into the flush. :param objects: Optional; restricts the flush operation to operate only on elements that are in the given collection. @@ -2036,6 +2136,226 @@ class Session(_SessionClassMethods): with util.safe_reraise(): transaction.rollback(_capture_exception=True) + def bulk_save_objects( + self, objects, return_defaults=False, update_changed_only=True): + """Perform a bulk save of the given list of objects. 
+ + The bulk save feature allows mapped objects to be used as the + source of simple INSERT and UPDATE operations which can be more easily + grouped together into higher performing "executemany" + operations; the extraction of data from the objects is also performed + using a lower-latency process that ignores whether or not attributes + have actually been modified in the case of UPDATEs, and also ignores + SQL expressions. + + The objects as given are not added to the session and no additional + state is established on them, unless the ``return_defaults`` flag + is also set, in which case primary key attributes and server-side + default values will be populated. + + .. versionadded:: 1.0.0 + + .. warning:: + + The bulk save feature allows for a lower-latency INSERT/UPDATE + of rows at the expense of most other unit-of-work features. + Features such as object management, relationship handling, + and SQL clause support are **silently omitted** in favor of raw + INSERT/UPDATES of records. + + **Please read the list of caveats at** :ref:`bulk_operations` + **before using this method, and fully test and confirm the + functionality of all code developed using these systems.** + + :param objects: a list of mapped object instances. The mapped + objects are persisted as is, and are **not** associated with the + :class:`.Session` afterwards. + + For each object, whether the object is sent as an INSERT or an + UPDATE is dependent on the same rules used by the :class:`.Session` + in traditional operation; if the object has the + :attr:`.InstanceState.key` + attribute set, then the object is assumed to be "detached" and + will result in an UPDATE. Otherwise, an INSERT is used. + + In the case of an UPDATE, statements are grouped based on which + attributes have changed, and are thus to be the subject of each + SET clause. If ``update_changed_only`` is False, then all + attributes present within each object are applied to the UPDATE + statement, which may help in allowing the statements to be grouped + together into a larger executemany(), and will also reduce the + overhead of checking history on attributes. + + :param return_defaults: when True, rows that are missing values which + generate defaults, namely integer primary key defaults and sequences, + will be inserted **one at a time**, so that the primary key value + is available. In particular this will allow joined-inheritance + and other multi-table mappings to insert correctly without the need + to provide primary key values ahead of time; however, + :paramref:`.Session.bulk_save_objects.return_defaults` **greatly + reduces the performance gains** of the method overall. + + :param update_changed_only: when True, UPDATE statements are rendered + based on those attributes in each state that have logged changes. + When False, all attributes present are rendered into the SET clause + with the exception of primary key attributes. + + .. seealso:: + + :ref:`bulk_operations` + + :meth:`.Session.bulk_insert_mappings` + + :meth:`.Session.bulk_update_mappings` + + """ + for (mapper, isupdate), states in itertools.groupby( + (attributes.instance_state(obj) for obj in objects), + lambda state: (state.mapper, state.key is not None) + ): + self._bulk_save_mappings( + mapper, states, isupdate, True, + return_defaults, update_changed_only) + + def bulk_insert_mappings(self, mapper, mappings, return_defaults=False): + """Perform a bulk insert of the given list of mapping dictionaries. 
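# A usage sketch for bulk_save_objects() as documented above, assuming a
# hypothetical mapped class ``User`` and an open ``session``: transient
# objects become a grouped INSERT, and nothing is attached to the
# session afterwards.
session.bulk_save_objects(
    [User(name="user_%d" % i) for i in range(10000)],
    return_defaults=False,  # keep the fast executemany path
)
session.commit()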
+ + The bulk insert feature allows plain Python dictionaries to be used as + the source of simple INSERT operations which can be more easily + grouped together into higher performing "executemany" + operations. Using dictionaries, there is no "history" or session + state management features in use, reducing latency when inserting + large numbers of simple rows. + + The values within the dictionaries as given are typically passed + without modification into Core :meth:`.Insert` constructs, after + organizing the values within them across the tables to which + the given mapper is mapped. + + .. versionadded:: 1.0.0 + + .. warning:: + + The bulk insert feature allows for a lower-latency INSERT + of rows at the expense of most other unit-of-work features. + Features such as object management, relationship handling, + and SQL clause support are **silently omitted** in favor of raw + INSERT of records. + + **Please read the list of caveats at** :ref:`bulk_operations` + **before using this method, and fully test and confirm the + functionality of all code developed using these systems.** + + :param mapper: a mapped class, or the actual :class:`.Mapper` object, + representing the single kind of object represented within the mapping + list. + + :param mappings: a list of dictionaries, each one containing the state + of the mapped row to be inserted, in terms of the attribute names + on the mapped class. If the mapping refers to multiple tables, + such as a joined-inheritance mapping, each dictionary must contain + all keys to be populated into all tables. + + :param return_defaults: when True, rows that are missing values which + generate defaults, namely integer primary key defaults and sequences, + will be inserted **one at a time**, so that the primary key value + is available. In particular this will allow joined-inheritance + and other multi-table mappings to insert correctly without the need + to provide primary + key values ahead of time; however, + :paramref:`.Session.bulk_insert_mappings.return_defaults` + **greatly reduces the performance gains** of the method overall. + If the rows + to be inserted only refer to a single table, then there is no + reason this flag should be set as the returned default information + is not used. + + + .. seealso:: + + :ref:`bulk_operations` + + :meth:`.Session.bulk_save_objects` + + :meth:`.Session.bulk_update_mappings` + + """ + self._bulk_save_mappings( + mapper, mappings, False, False, return_defaults, False) + + def bulk_update_mappings(self, mapper, mappings): + """Perform a bulk update of the given list of mapping dictionaries. + + The bulk update feature allows plain Python dictionaries to be used as + the source of simple UPDATE operations which can be more easily + grouped together into higher performing "executemany" + operations. Using dictionaries, there is no "history" or session + state management features in use, reducing latency when updating + large numbers of simple rows. + + .. versionadded:: 1.0.0 + + .. warning:: + + The bulk update feature allows for a lower-latency UPDATE + of rows at the expense of most other unit-of-work features. + Features such as object management, relationship handling, + and SQL clause support are **silently omitted** in favor of raw + UPDATES of records. 
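# Companion sketch for the dictionary-oriented bulk methods, same
# hypothetical ``User`` mapping: keys are mapped attribute names, and in
# the UPDATE form the primary-key entries drive the WHERE clause.
session.bulk_insert_mappings(
    User,
    [{"name": "user_%d" % i} for i in range(10000)],
)
session.bulk_update_mappings(
    User,
    [{"id": 1, "name": "renamed"},
     {"id": 2, "name": "also renamed"}],
)
session.commit()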
+ + **Please read the list of caveats at** :ref:`bulk_operations` + **before using this method, and fully test and confirm the + functionality of all code developed using these systems.** + + :param mapper: a mapped class, or the actual :class:`.Mapper` object, + representing the single kind of object represented within the mapping + list. + + :param mappings: a list of dictionaries, each one containing the state + of the mapped row to be updated, in terms of the attribute names + on the mapped class. If the mapping refers to multiple tables, + such as a joined-inheritance mapping, each dictionary may contain + keys corresponding to all tables. All those keys which are present + and are not part of the primary key are applied to the SET clause + of the UPDATE statement; the primary key values, which are required, + are applied to the WHERE clause. + + + .. seealso:: + + :ref:`bulk_operations` + + :meth:`.Session.bulk_insert_mappings` + + :meth:`.Session.bulk_save_objects` + + """ + self._bulk_save_mappings(mapper, mappings, True, False, False, False) + + def _bulk_save_mappings( + self, mapper, mappings, isupdate, isstates, + return_defaults, update_changed_only): + mapper = _class_to_mapper(mapper) + self._flushing = True + + transaction = self.begin( + subtransactions=True) + try: + if isupdate: + persistence._bulk_update( + mapper, mappings, transaction, + isstates, update_changed_only) + else: + persistence._bulk_insert( + mapper, mappings, transaction, isstates, return_defaults) + transaction.commit() + + except: + with util.safe_reraise(): + transaction.rollback(_capture_exception=True) + finally: + self._flushing = False + def is_modified(self, instance, include_collections=True, passive=True): """Return ``True`` if the given instance has locally @@ -2251,7 +2571,6 @@ class Session(_SessionClassMethods): class sessionmaker(_SessionClassMethods): - """A configurable :class:`.Session` factory. The :class:`.sessionmaker` factory generates new @@ -2379,18 +2698,49 @@ class sessionmaker(_SessionClassMethods): def make_transient(instance): - """Make the given instance 'transient'. + """Alter the state of the given instance so that it is :term:`transient`. - This will remove its association with any - session and additionally will remove its "identity key", - such that it's as though the object were newly constructed, - except retaining its values. It also resets the - "deleted" flag on the state if this object - had been explicitly deleted by its session. + .. note:: - Attributes which were "expired" or deferred at the - instance level are reverted to undefined, and - will not trigger any loads. + :func:`.make_transient` is a special-case function for + advanced use cases only. + + The given mapped instance is assumed to be in the :term:`persistent` or + :term:`detached` state. The function will remove its association with any + :class:`.Session` as well as its :attr:`.InstanceState.identity`. The + effect is that the object will behave as though it were newly constructed, + except retaining any attribute / collection values that were loaded at the + time of the call. The :attr:`.InstanceState.deleted` flag is also reset + if this object had been deleted as a result of using + :meth:`.Session.delete`. + + .. warning:: + + :func:`.make_transient` does **not** "unexpire" or otherwise eagerly + load ORM-mapped attributes that are not currently loaded at the time + the function is called. 
This includes attributes which: + + * were expired via :meth:`.Session.expire` + + * were expired as the natural effect of committing a session + transaction, e.g. :meth:`.Session.commit` + + * are normally :term:`lazy loaded` but are not currently loaded + + * are "deferred" via :ref:`deferred` and are not yet loaded + + * were not present in the query which loaded this object, such as that + which is common in joined table inheritance and other scenarios. + + After :func:`.make_transient` is called, unloaded attributes such + as those above will normally resolve to the value ``None`` when + accessed, or an empty collection for a collection-oriented attribute. + As the object is transient and un-associated with any database + identity, it will no longer retrieve these values. + + .. seealso:: + + :func:`.make_transient_to_detached` """ state = attributes.instance_state(instance) @@ -2398,9 +2748,13 @@ def make_transient(instance): if s: s._expunge_state(state) - # remove expired state and - # deferred callables - state.callables.clear() + # remove expired state + state.expired_attributes.clear() + + # remove deferred callables + if state.callables: + del state.callables + if state.key: del state.key if state.deleted: @@ -2408,7 +2762,12 @@ def make_transient(instance): def make_transient_to_detached(instance): - """Make the given transient instance 'detached'. + """Make the given transient instance :term:`detached`. + + .. note:: + + :func:`.make_transient_to_detached` is a special-case function for + advanced use cases only. All attribute history on the given instance will be reset as though the instance were freshly loaded @@ -2443,16 +2802,19 @@ def make_transient_to_detached(instance): def object_session(instance): - """Return the ``Session`` to which instance belongs. + """Return the :class:`.Session` to which the given instance belongs. - If the instance is not a mapped instance, an error is raised. + This is essentially the same as the :attr:`.InstanceState.session` + accessor. See that attribute for details. """ try: - return _state_session(attributes.instance_state(instance)) + state = attributes.instance_state(instance) except exc.NO_STATE: raise exc.UnmappedInstanceError(instance) + else: + return _state_session(state) _new_sessionid = util.counter() diff --git a/lib/python3.4/site-packages/sqlalchemy/orm/state.py b/lib/python3.4/site-packages/sqlalchemy/orm/state.py index b9aff7a..c66507d 100644 --- a/lib/python3.4/site-packages/sqlalchemy/orm/state.py +++ b/lib/python3.4/site-packages/sqlalchemy/orm/state.py @@ -1,5 +1,5 @@ # orm/state.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -21,7 +21,7 @@ from .base import PASSIVE_NO_RESULT, SQL_OK, NEVER_SET, ATTR_WAS_SET, \ from . import base -class InstanceState(interfaces._InspectionAttr): +class InstanceState(interfaces.InspectionAttr): """tracks state information at the instance level. The :class:`.InstanceState` is a key object used by the @@ -58,15 +58,35 @@ class InstanceState(interfaces._InspectionAttr): expired = False deleted = False _load_pending = False - is_instance = True + callables = () + """A namespace where a per-state loader callable can be associated. + + In SQLAlchemy 1.0, this is only used for lazy loaders / deferred + loaders that were set up via query option. 
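# Sketch of the make_transient() semantics documented above, with the
# hypothetical ``User`` mapping: loaded values survive, identity and
# session linkage do not, and unloaded attributes will no longer load.
from sqlalchemy.orm import make_transient

user = session.query(User).get(1)
make_transient(user)    # also expunges it from the session
user.id = None          # clear the old identity...
session.add(user)
session.commit()        # ...and re-INSERT it as a brand new row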
+ + Previously, callables was used also to indicate expired attributes + by storing a link to the InstanceState itself in this dictionary. + This role is now handled by the expired_attributes set. + + """ + def __init__(self, obj, manager): self.class_ = obj.__class__ self.manager = manager self.obj = weakref.ref(obj, self._cleanup) - self.callables = {} self.committed_state = {} + self.expired_attributes = set() + + expired_attributes = None + """The set of keys which are 'expired' to be loaded by + the manager's deferred scalar loader, assuming no pending + changes. + + see also the ``unmodified`` collection which is intersected + against this set when a refresh operation occurs.""" + @util.memoized_property def attrs(self): @@ -146,7 +166,16 @@ class InstanceState(interfaces._InspectionAttr): @util.dependencies("sqlalchemy.orm.session") def session(self, sessionlib): """Return the owning :class:`.Session` for this instance, - or ``None`` if none available.""" + or ``None`` if none available. + + Note that the result here can in some cases be *different* + from that of ``obj in session``; an object that's been deleted + will report as not ``in session``, however if the transaction is + still in progress, this attribute will still refer to that session. + Only when the transaction is completed does the object become + fully detached under normal circumstances. + + """ return sessionlib._state_session(self) @property @@ -165,7 +194,7 @@ class InstanceState(interfaces._InspectionAttr): Returns ``None`` if the object has no primary key identity. .. note:: - An object which is transient or pending + An object which is :term:`transient` or :term:`pending` does **not** have a mapped identity until it is flushed, even if its attributes include primary key values. @@ -220,11 +249,25 @@ class InstanceState(interfaces._InspectionAttr): del self.obj def _cleanup(self, ref): - instance_dict = self._instance_dict() - if instance_dict: - instance_dict.discard(self) + """Weakref callback cleanup. + + This callable cleans out the state when it is being garbage + collected. + + this _cleanup **assumes** that there are no strong refs to us! + Will not work otherwise! + + """ + instance_dict = self._instance_dict() + if instance_dict is not None: + instance_dict._fast_discard(self) + del self._instance_dict + + # we can't possibly be in instance_dict._modified + # b.c. this is weakref cleanup only, that set + # is strong referencing! 
+ # assert self not in instance_dict._modified - self.callables = {} self.session_id = self._strong_obj = None del self.obj @@ -251,7 +294,7 @@ class InstanceState(interfaces._InspectionAttr): return {} def _initialize_instance(*mixed, **kwargs): - self, instance, args = mixed[0], mixed[1], mixed[2:] + self, instance, args = mixed[0], mixed[1], mixed[2:] # noqa manager = self.manager manager.dispatch.init(self, args, kwargs) @@ -259,8 +302,8 @@ class InstanceState(interfaces._InspectionAttr): try: return manager.original_init(*mixed[1:], **kwargs) except: - manager.dispatch.init_failure(self, args, kwargs) - raise + with util.safe_reraise(): + manager.dispatch.init_failure(self, args, kwargs) def get_history(self, key, passive): return self.manager[key].impl.get_history(self, self.dict, passive) @@ -279,7 +322,7 @@ class InstanceState(interfaces._InspectionAttr): (k, self.__dict__[k]) for k in ( 'committed_state', '_pending_mutations', 'modified', 'expired', 'callables', 'key', 'parents', 'load_options', - 'class_', + 'class_', 'expired_attributes' ) if k in self.__dict__ ) if self.load_path: @@ -306,7 +349,18 @@ class InstanceState(interfaces._InspectionAttr): self.parents = state_dict.get('parents', {}) self.modified = state_dict.get('modified', False) self.expired = state_dict.get('expired', False) - self.callables = state_dict.get('callables', {}) + if 'callables' in state_dict: + self.callables = state_dict['callables'] + + try: + self.expired_attributes = state_dict['expired_attributes'] + except KeyError: + self.expired_attributes = set() + # 0.9 and earlier compat + for k in list(self.callables): + if self.callables[k] is self: + self.expired_attributes.add(k) + del self.callables[k] self.__dict__.update([ (k, state_dict[k]) for k in ( @@ -320,12 +374,6 @@ class InstanceState(interfaces._InspectionAttr): state_dict['manager'](self, inst, state_dict) - def _initialize(self, key): - """Set this attribute to an empty value or collection, - based on the AttributeImpl in use.""" - - self.manager.get_impl(key).initialize(self, self.dict) - def _reset(self, dict_, key): """Remove the given attribute and any callables associated with it.""" @@ -333,71 +381,73 @@ class InstanceState(interfaces._InspectionAttr): old = dict_.pop(key, None) if old is not None and self.manager[key].impl.collection: self.manager[key].impl._invalidate_collection(old) - self.callables.pop(key, None) - - def _expire_attribute_pre_commit(self, dict_, key): - """a fast expire that can be called by column loaders during a load. - - The additional bookkeeping is finished up in commit_all(). - - Should only be called for scalar attributes. - - This method is actually called a lot with joined-table - loading, when the second table isn't present in the result. 
- - """ - dict_.pop(key, None) - self.callables[key] = self + self.expired_attributes.discard(key) + if self.callables: + self.callables.pop(key, None) @classmethod - def _row_processor(cls, manager, fn, key): + def _instance_level_callable_processor(cls, manager, fn, key): impl = manager[key].impl if impl.collection: def _set_callable(state, dict_, row): + if 'callables' not in state.__dict__: + state.callables = {} old = dict_.pop(key, None) if old is not None: impl._invalidate_collection(old) state.callables[key] = fn else: def _set_callable(state, dict_, row): + if 'callables' not in state.__dict__: + state.callables = {} state.callables[key] = fn return _set_callable def _expire(self, dict_, modified_set): self.expired = True + if self.modified: modified_set.discard(self) + self.committed_state.clear() + self.modified = False - self.modified = False self._strong_obj = None - self.committed_state.clear() + if '_pending_mutations' in self.__dict__: + del self.__dict__['_pending_mutations'] - InstanceState._pending_mutations._reset(self) + if 'parents' in self.__dict__: + del self.__dict__['parents'] - # clear out 'parents' collection. not - # entirely clear how we can best determine - # which to remove, or not. - InstanceState.parents._reset(self) + self.expired_attributes.update( + [impl.key for impl in self.manager._scalar_loader_impls + if impl.expire_missing or impl.key in dict_] + ) - for key in self.manager: - impl = self.manager[key].impl - if impl.accepts_scalar_loader and \ - (impl.expire_missing or key in dict_): - self.callables[key] = self - old = dict_.pop(key, None) - if impl.collection and old is not None: - impl._invalidate_collection(old) + if self.callables: + for k in self.expired_attributes.intersection(self.callables): + del self.callables[k] + + for k in self.manager._collection_impl_keys.intersection(dict_): + collection = dict_.pop(k) + collection._sa_adapter.invalidated = True + + for key in self.manager._all_key_set.intersection(dict_): + del dict_[key] self.manager.dispatch.expire(self, None) def _expire_attributes(self, dict_, attribute_names): pending = self.__dict__.get('_pending_mutations', None) + callables = self.callables + for key in attribute_names: impl = self.manager[key].impl if impl.accepts_scalar_loader: - self.callables[key] = self + self.expired_attributes.add(key) + if callables and key in callables: + del callables[key] old = dict_.pop(key, None) if impl.collection and old is not None: impl._invalidate_collection(old) @@ -408,7 +458,7 @@ class InstanceState(interfaces._InspectionAttr): self.manager.dispatch.expire(self, attribute_names) - def __call__(self, state, passive): + def _load_expired(self, state, passive): """__call__ allows the InstanceState to act as a deferred callable for loading expired attributes, which is also serializable (picklable). @@ -427,8 +477,7 @@ class InstanceState(interfaces._InspectionAttr): # instance state didn't have an identity, # the attributes still might be in the callables # dict. ensure they are removed. - for k in toload.intersection(self.callables): - del self.callables[k] + self.expired_attributes.clear() return ATTR_WAS_SET @@ -463,18 +512,6 @@ class InstanceState(interfaces._InspectionAttr): if self.manager[attr].impl.accepts_scalar_loader ) - @property - def expired_attributes(self): - """Return the set of keys which are 'expired' to be loaded by - the manager's deferred scalar loader, assuming no pending - changes. 
- - see also the ``unmodified`` collection which is intersected - against this set when a refresh operation occurs. - - """ - return set([k for k, v in self.callables.items() if v is self]) - def _instance_dict(self): return None @@ -497,6 +534,7 @@ class InstanceState(interfaces._InspectionAttr): if (self.session_id and self._strong_obj is None) \ or not self.modified: + self.modified = True instance_dict = self._instance_dict() if instance_dict: instance_dict._modified.add(self) @@ -517,7 +555,6 @@ class InstanceState(interfaces._InspectionAttr): self.manager[attr.key], base.state_class_str(self) )) - self.modified = True def _commit(self, dict_, keys): """Commit attributes. @@ -534,10 +571,18 @@ class InstanceState(interfaces._InspectionAttr): self.expired = False - for key in set(self.callables).\ + self.expired_attributes.difference_update( + set(keys).intersection(dict_)) + + # the per-keys commit removes object-level callables, + # while that of commit_all does not. it's not clear + # if this behavior has a clear rationale, however tests do + # ensure this is what it does. + if self.callables: + for key in set(self.callables).\ intersection(keys).\ - intersection(dict_): - del self.callables[key] + intersection(dict_): + del self.callables[key] def _commit_all(self, dict_, instance_dict=None): """commit all attributes unconditionally. @@ -548,7 +593,8 @@ class InstanceState(interfaces._InspectionAttr): - all attributes are marked as "committed" - the "strong dirty reference" is removed - the "modified" flag is set to False - - any "expired" markers/callables for attributes loaded are removed. + - any "expired" markers for scalar attributes loaded are removed. + - lazy load callables for objects / collections *stay* Attributes marked as "expired" can potentially remain "expired" after this step if a value was not populated in state.dict. @@ -558,16 +604,17 @@ class InstanceState(interfaces._InspectionAttr): @classmethod def _commit_all_states(self, iter, instance_dict=None): - """Mass version of commit_all().""" + """Mass / highly inlined version of commit_all().""" for state, dict_ in iter: - state.committed_state.clear() - InstanceState._pending_mutations._reset(state) + state_dict = state.__dict__ - callables = state.callables - for key in list(callables): - if key in dict_ and callables[key] is state: - del callables[key] + state.committed_state.clear() + + if '_pending_mutations' in state_dict: + del state_dict['_pending_mutations'] + + state.expired_attributes.difference_update(dict_) if instance_dict and state.modified: instance_dict._modified.discard(state) diff --git a/lib/python3.4/site-packages/sqlalchemy/orm/strategies.py b/lib/python3.4/site-packages/sqlalchemy/orm/strategies.py index 33be868..7942b14 100644 --- a/lib/python3.4/site-packages/sqlalchemy/orm/strategies.py +++ b/lib/python3.4/site-packages/sqlalchemy/orm/strategies.py @@ -1,5 +1,5 @@ # orm/strategies.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -22,6 +22,7 @@ from . import properties from .interfaces import ( LoaderStrategy, StrategizedProperty ) +from .base import _SET_DEFERRED_EXPIRED, _DEFER_FOR_STATE from .session import _state_session import itertools @@ -105,6 +106,8 @@ class UninstrumentedColumnLoader(LoaderStrategy): if the argument is against the with_polymorphic selectable. 
""" + __slots__ = 'columns', + def __init__(self, parent): super(UninstrumentedColumnLoader, self).__init__(parent) self.columns = self.parent_property.columns @@ -119,8 +122,8 @@ class UninstrumentedColumnLoader(LoaderStrategy): def create_row_processor( self, context, path, loadopt, - mapper, row, adapter): - return None, None, None + mapper, result, adapter, populators): + pass @log.class_logger @@ -128,6 +131,8 @@ class UninstrumentedColumnLoader(LoaderStrategy): class ColumnLoader(LoaderStrategy): """Provide loading behavior for a :class:`.ColumnProperty`.""" + __slots__ = 'columns', 'is_composite' + def __init__(self, parent): super(ColumnLoader, self).__init__(parent) self.columns = self.parent_property.columns @@ -135,12 +140,18 @@ class ColumnLoader(LoaderStrategy): def setup_query( self, context, entity, path, loadopt, - adapter, column_collection, **kwargs): + adapter, column_collection, memoized_populators, **kwargs): + for c in self.columns: if adapter: c = adapter.columns[c] column_collection.append(c) + fetch = self.columns[0] + if adapter: + fetch = adapter.columns[fetch] + memoized_populators[self.parent_property] = fetch + def init_class_attribute(self, mapper): self.is_class_level = True coltype = self.columns[0].type @@ -157,21 +168,18 @@ class ColumnLoader(LoaderStrategy): def create_row_processor( self, context, path, - loadopt, mapper, row, adapter): - key = self.key + loadopt, mapper, result, adapter, populators): # look through list of columns represented here # to see which, if any, is present in the row. for col in self.columns: if adapter: col = adapter.columns[col] - if col is not None and col in row: - def fetch_col(state, dict_, row): - dict_[key] = row[col] - return fetch_col, None, None + getter = result._getter(col) + if getter: + populators["quick"].append((self.key, getter)) + break else: - def expire_for_non_present_col(state, dict_, row): - state._expire_attribute_pre_commit(dict_, key) - return expire_for_non_present_col, None, None + populators["expire"].append((self.key, True)) @log.class_logger @@ -179,6 +187,8 @@ class ColumnLoader(LoaderStrategy): class DeferredColumnLoader(LoaderStrategy): """Provide loading behavior for a deferred :class:`.ColumnProperty`.""" + __slots__ = 'columns', 'group' + def __init__(self, parent): super(DeferredColumnLoader, self).__init__(parent) if hasattr(self.parent_property, 'composite_class'): @@ -189,28 +199,18 @@ class DeferredColumnLoader(LoaderStrategy): def create_row_processor( self, context, path, loadopt, - mapper, row, adapter): - col = self.columns[0] - if adapter: - col = adapter.columns[col] + mapper, result, adapter, populators): - key = self.key - if col in row: - return self.parent_property._get_strategy_by_cls(ColumnLoader).\ - create_row_processor( - context, path, loadopt, mapper, row, adapter) - - elif not self.is_class_level: - set_deferred_for_local_state = InstanceState._row_processor( - mapper.class_manager, - LoadDeferredColumns(key), key) - return set_deferred_for_local_state, None, None + # this path currently does not check the result + # for the column; this is because in most cases we are + # working just with the setup_query() directive which does + # not support this, and the behavior here should be consistent. 
+ if not self.is_class_level: + set_deferred_for_local_state = \ + self.parent_property._deferred_column_loader + populators["new"].append((self.key, set_deferred_for_local_state)) else: - def reset_col_for_deferred(state, dict_, row): - # reset state on the key so that deferred callables - # fire off on next access. - state._reset(dict_, key) - return reset_col_for_deferred, None, None + populators["expire"].append((self.key, False)) def init_class_attribute(self, mapper): self.is_class_level = True @@ -223,20 +223,22 @@ class DeferredColumnLoader(LoaderStrategy): ) def setup_query( - self, context, entity, path, loadopt, adapter, - only_load_props=None, **kwargs): + self, context, entity, path, loadopt, + adapter, column_collection, memoized_populators, + only_load_props=None, **kw): if ( ( loadopt and 'undefer_pks' in loadopt.local_opts and - set(self.columns).intersection(self.parent.primary_key) + set(self.columns).intersection( + self.parent._should_undefer_in_wildcard) ) or ( loadopt and self.group and - loadopt.local_opts.get('undefer_group', False) == self.group + loadopt.local_opts.get('undefer_group_%s' % self.group, False) ) or ( @@ -245,7 +247,12 @@ class DeferredColumnLoader(LoaderStrategy): ): self.parent_property._get_strategy_by_cls(ColumnLoader).\ setup_query(context, entity, - path, loadopt, adapter, **kwargs) + path, loadopt, adapter, + column_collection, memoized_populators, **kw) + elif self.is_class_level: + memoized_populators[self.parent_property] = _SET_DEFERRED_EXPIRED + else: + memoized_populators[self.parent_property] = _DEFER_FOR_STATE def _load_for_state(self, state, passive): if not state.key: @@ -305,6 +312,8 @@ class LoadDeferredColumns(object): class AbstractRelationshipLoader(LoaderStrategy): """LoaderStratgies which deal with related objects.""" + __slots__ = 'mapper', 'target', 'uselist' + def __init__(self, parent): super(AbstractRelationshipLoader, self).__init__(parent) self.mapper = self.parent_property.mapper @@ -321,6 +330,8 @@ class NoLoader(AbstractRelationshipLoader): """ + __slots__ = () + def init_class_attribute(self, mapper): self.is_class_level = True @@ -333,21 +344,29 @@ class NoLoader(AbstractRelationshipLoader): def create_row_processor( self, context, path, loadopt, mapper, - row, adapter): + result, adapter, populators): def invoke_no_load(state, dict_, row): - state._initialize(self.key) - return invoke_no_load, None, None + if self.uselist: + state.manager.get_impl(self.key).initialize(state, dict_) + else: + dict_[self.key] = None + populators["new"].append((self.key, invoke_no_load)) @log.class_logger @properties.RelationshipProperty.strategy_for(lazy=True) @properties.RelationshipProperty.strategy_for(lazy="select") -class LazyLoader(AbstractRelationshipLoader): +class LazyLoader(AbstractRelationshipLoader, util.MemoizedSlots): """Provide loading behavior for a :class:`.RelationshipProperty` with "lazy=True", that is loads when first accessed. 
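As the docstring above says, this strategy backs ``lazy="select"``, the default loader for relationships. A hedged, runnable illustration; the ``Parent``/``Child`` mapping is an assumption::

    from sqlalchemy import Column, ForeignKey, Integer, create_engine
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import Session, relationship

    Base = declarative_base()

    class Parent(Base):
        __tablename__ = "parent"
        id = Column(Integer, primary_key=True)
        children = relationship("Child", lazy="select")

    class Child(Base):
        __tablename__ = "child"
        id = Column(Integer, primary_key=True)
        parent_id = Column(Integer, ForeignKey("parent.id"))

    engine = create_engine("sqlite://")
    Base.metadata.create_all(engine)
    session = Session(engine)
    session.add(Parent(children=[Child()]))
    session.commit()

    parent = session.query(Parent).first()   # children not yet loaded
    children = parent.children               # first access emits the SELECT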
""" + __slots__ = ( + '_lazywhere', '_rev_lazywhere', 'use_get', '_bind_to_col', + '_equated_columns', '_rev_bind_to_col', '_rev_equated_columns', + '_simple_lazy_clause') + def __init__(self, parent): super(LazyLoader, self).__init__(parent) join_condition = self.parent_property._join_condition @@ -378,7 +397,7 @@ class LazyLoader(AbstractRelationshipLoader): self._equated_columns[c] = self._equated_columns[col] self.logger.info("%s will use query.get() to " - "optimize instance loads" % self) + "optimize instance loads", self) def init_class_attribute(self, mapper): self.is_class_level = True @@ -406,78 +425,57 @@ class LazyLoader(AbstractRelationshipLoader): active_history=active_history ) - def lazy_clause( - self, state, reverse_direction=False, - alias_secondary=False, - adapt_source=None, - passive=None): + def _memoized_attr__simple_lazy_clause(self): + criterion, bind_to_col = ( + self._lazywhere, + self._bind_to_col + ) + + params = [] + + def visit_bindparam(bindparam): + bindparam.unique = False + if bindparam._identifying_key in bind_to_col: + params.append(( + bindparam.key, bind_to_col[bindparam._identifying_key], + None)) + else: + params.append((bindparam.key, None, bindparam.value)) + + criterion = visitors.cloned_traverse( + criterion, {}, {'bindparam': visit_bindparam} + ) + + return criterion, params + + def _generate_lazy_clause(self, state, passive): + criterion, param_keys = self._simple_lazy_clause + if state is None: - return self._lazy_none_clause( - reverse_direction, - adapt_source=adapt_source) + return sql_util.adapt_criterion_to_null( + criterion, [key for key, ident, value in param_keys]) - if not reverse_direction: - criterion, bind_to_col = \ - self._lazywhere, \ - self._bind_to_col - else: - criterion, bind_to_col = \ - self._rev_lazywhere, \ - self._rev_bind_to_col - - if reverse_direction: - mapper = self.parent_property.mapper - else: - mapper = self.parent_property.parent + mapper = self.parent_property.parent o = state.obj() # strong ref dict_ = attributes.instance_dict(o) - # use the "committed state" only if we're in a flush - # for this state. 
+ if passive & attributes.INIT_OK: + passive ^= attributes.INIT_OK - if passive and passive & attributes.LOAD_AGAINST_COMMITTED: - def visit_bindparam(bindparam): - if bindparam._identifying_key in bind_to_col: - bindparam.callable = \ - lambda: mapper._get_committed_state_attr_by_column( - state, dict_, - bind_to_col[bindparam._identifying_key]) - else: - def visit_bindparam(bindparam): - if bindparam._identifying_key in bind_to_col: - bindparam.callable = \ - lambda: mapper._get_state_attr_by_column( - state, dict_, - bind_to_col[bindparam._identifying_key]) + params = {} + for key, ident, value in param_keys: + if ident is not None: + if passive and passive & attributes.LOAD_AGAINST_COMMITTED: + value = mapper._get_committed_state_attr_by_column( + state, dict_, ident, passive) + else: + value = mapper._get_state_attr_by_column( + state, dict_, ident, passive) - if self.parent_property.secondary is not None and alias_secondary: - criterion = sql_util.ClauseAdapter( - self.parent_property.secondary.alias()).\ - traverse(criterion) + params[key] = value - criterion = visitors.cloned_traverse( - criterion, {}, {'bindparam': visit_bindparam}) - - if adapt_source: - criterion = adapt_source(criterion) - return criterion - - def _lazy_none_clause(self, reverse_direction=False, adapt_source=None): - if not reverse_direction: - criterion, bind_to_col = \ - self._lazywhere, \ - self._bind_to_col - else: - criterion, bind_to_col = \ - self._rev_lazywhere, \ - self._rev_bind_to_col - - criterion = sql_util.adapt_criterion_to_null(criterion, bind_to_col) - - if adapt_source: - criterion = adapt_source(criterion) - return criterion + return criterion, params def _load_for_state(self, state, passive): if not state.key and ( @@ -554,10 +552,9 @@ class LazyLoader(AbstractRelationshipLoader): @util.dependencies("sqlalchemy.orm.strategy_options") def _emit_lazyload( - self, strategy_options, session, state, - ident_key, passive): - q = session.query(self.mapper)._adapt_all_clauses() + self, strategy_options, session, state, ident_key, passive): + q = session.query(self.mapper)._adapt_all_clauses() if self.parent_property.secondary is not None: q = q.select_from(self.mapper, self.parent_property.secondary) @@ -588,17 +585,19 @@ class LazyLoader(AbstractRelationshipLoader): rev._use_get and \ not isinstance(rev.strategy, LazyLoader): q = q.options( - strategy_options.Load(rev.parent). - lazyload(rev.key)) + strategy_options.Load(rev.parent).lazyload(rev.key)) - lazy_clause = self.lazy_clause(state, passive=passive) + lazy_clause, params = self._generate_lazy_clause( + state, passive=passive) if pending: - bind_values = sql_util.bind_values(lazy_clause) - if None in bind_values: + if util.has_intersection( + orm_util._none_set, params.values()): return None + elif util.has_intersection(orm_util._never_set, params.values()): + return None - q = q.filter(lazy_clause) + q = q.filter(lazy_clause).params(params) result = q.all() if self.uselist: @@ -618,7 +617,7 @@ class LazyLoader(AbstractRelationshipLoader): def create_row_processor( self, context, path, loadopt, - mapper, row, adapter): + mapper, result, adapter, populators): key = self.key if not self.is_class_level: # we are not the primary manager for this attribute @@ -629,12 +628,12 @@ class LazyLoader(AbstractRelationshipLoader): # "lazyload" option on a "no load" # attribute - "eager" attributes always have a # class-level lazyloader installed. 
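The branch above handles the per-query case the comment describes, where an option such as ``lazyload()`` selects the lazy strategy for an attribute whose class-level default is something else. A hedged usage example, assuming the ``Parent.children`` relationship from before::

    from sqlalchemy.orm import lazyload

    parents = (
        session.query(Parent)
        .options(lazyload(Parent.children))   # force the "select" strategy
        .all()
    )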
- set_lazy_callable = InstanceState._row_processor( + set_lazy_callable = InstanceState._instance_level_callable_processor( mapper.class_manager, - LoadLazyAttribute(key), key) + LoadLazyAttribute(key, self._strategy_keys[0]), key) - return set_lazy_callable, None, None - else: + populators["new"].append((self.key, set_lazy_callable)) + elif context.populate_existing or mapper.always_refresh: def reset_for_lazy_callable(state, dict_, row): # we are the primary manager for this attribute on # this class - reset its @@ -646,26 +645,29 @@ class LazyLoader(AbstractRelationshipLoader): # any existing state. state._reset(dict_, key) - return reset_for_lazy_callable, None, None + populators["new"].append((self.key, reset_for_lazy_callable)) class LoadLazyAttribute(object): """serializable loader object used by LazyLoader""" - def __init__(self, key): + def __init__(self, key, strategy_key=(('lazy', 'select'),)): self.key = key + self.strategy_key = strategy_key def __call__(self, state, passive=attributes.PASSIVE_OFF): key = self.key instance_mapper = state.manager.mapper prop = instance_mapper._props[key] - strategy = prop._strategies[LazyLoader] + strategy = prop._strategies[self.strategy_key] return strategy._load_for_state(state, passive) @properties.RelationshipProperty.strategy_for(lazy="immediate") class ImmediateLoader(AbstractRelationshipLoader): + __slots__ = () + def init_class_attribute(self, mapper): self.parent_property.\ _get_strategy_by_cls(LazyLoader).\ @@ -679,16 +681,18 @@ class ImmediateLoader(AbstractRelationshipLoader): def create_row_processor( self, context, path, loadopt, - mapper, row, adapter): + mapper, result, adapter, populators): def load_immediate(state, dict_, row): state.get_impl(self.key).get(state, dict_) - return None, None, load_immediate + populators["delayed"].append((self.key, load_immediate)) @log.class_logger @properties.RelationshipProperty.strategy_for(lazy="subquery") class SubqueryLoader(AbstractRelationshipLoader): + __slots__ = 'join_depth', + def __init__(self, parent): super(SubqueryLoader, self).__init__(parent) self.join_depth = self.parent_property.join_depth @@ -706,6 +710,8 @@ class SubqueryLoader(AbstractRelationshipLoader): if not context.query._enable_eagerloads: return + elif context.query._yield_per: + context.query._no_yield_per("subquery") path = path[self.parent_property] @@ -994,7 +1000,7 @@ class SubqueryLoader(AbstractRelationshipLoader): def create_row_processor( self, context, path, loadopt, - mapper, row, adapter): + mapper, result, adapter, populators): if not self.parent.class_manager[self.key].impl.supports_population: raise sa_exc.InvalidRequestError( "'%s' does not support object " @@ -1006,7 +1012,13 @@ class SubqueryLoader(AbstractRelationshipLoader): subq = path.get(context.attributes, 'subquery') if subq is None: - return None, None, None + return + + assert subq.session is context.session, ( + "Subquery session doesn't refer to that of " + "our context. Are there broken context caching " + "schemes being used?" 
+ ) local_cols = self.parent_property.local_columns @@ -1022,11 +1034,14 @@ class SubqueryLoader(AbstractRelationshipLoader): local_cols = [adapter.columns[c] for c in local_cols] if self.uselist: - return self._create_collection_loader(collections, local_cols) + self._create_collection_loader( + context, collections, local_cols, populators) else: - return self._create_scalar_loader(collections, local_cols) + self._create_scalar_loader( + context, collections, local_cols, populators) - def _create_collection_loader(self, collections, local_cols): + def _create_collection_loader( + self, context, collections, local_cols, populators): def load_collection_from_subq(state, dict_, row): collection = collections.get( tuple([row[col] for col in local_cols]), @@ -1035,9 +1050,12 @@ class SubqueryLoader(AbstractRelationshipLoader): state.get_impl(self.key).\ set_committed_value(state, dict_, collection) - return load_collection_from_subq, None, None, collections.loader + populators["new"].append((self.key, load_collection_from_subq)) + if context.invoke_all_eagers: + populators["eager"].append((self.key, collections.loader)) - def _create_scalar_loader(self, collections, local_cols): + def _create_scalar_loader( + self, context, collections, local_cols, populators): def load_scalar_from_subq(state, dict_, row): collection = collections.get( tuple([row[col] for col in local_cols]), @@ -1053,7 +1071,9 @@ class SubqueryLoader(AbstractRelationshipLoader): state.get_impl(self.key).\ set_committed_value(state, dict_, scalar) - return load_scalar_from_subq, None, None, collections.loader + populators["new"].append((self.key, load_scalar_from_subq)) + if context.invoke_all_eagers: + populators["eager"].append((self.key, collections.loader)) @log.class_logger @@ -1064,6 +1084,9 @@ class JoinedLoader(AbstractRelationshipLoader): using joined eager loading. """ + + __slots__ = 'join_depth', + def __init__(self, parent): super(JoinedLoader, self).__init__(parent) self.join_depth = self.parent_property.join_depth @@ -1081,6 +1104,8 @@ class JoinedLoader(AbstractRelationshipLoader): if not context.query._enable_eagerloads: return + elif context.query._yield_per and self.uselist: + context.query._no_yield_per("joined collection") path = path[self.parent_property] @@ -1123,16 +1148,12 @@ class JoinedLoader(AbstractRelationshipLoader): path = path[self.mapper] - for value in self.mapper._iterate_polymorphic_properties( - mappers=with_polymorphic): - value.setup( - context, - entity, - path, - clauses, - parentmapper=self.mapper, - column_collection=add_to_collection, - chained_from_outerjoin=chained_from_outerjoin) + loading._setup_entity_query( + context, self.mapper, entity, + path, clauses, add_to_collection, + with_polymorphic=with_polymorphic, + parentmapper=self.mapper, + chained_from_outerjoin=chained_from_outerjoin) if with_poly_info is not None and \ None in set(context.secondary_columns): @@ -1235,10 +1256,11 @@ class JoinedLoader(AbstractRelationshipLoader): clauses = orm_util.ORMAdapter( to_adapt, equivalents=self.mapper._equivalent_columns, - adapt_required=True) + adapt_required=True, allow_label_resolve=False, + anonymize_labels=True) assert clauses.aliased_class is not None - if self.parent_property.direction != interfaces.MANYTOONE: + if self.parent_property.uselist: context.multi_row_eager_loaders = True innerjoin = ( @@ -1303,8 +1325,19 @@ class JoinedLoader(AbstractRelationshipLoader): if adapter: if getattr(adapter, 'aliased_class', None): + # joining from an adapted entity. 
The adapted entity + # might be a "with_polymorphic", so resolve that to our + # specific mapper's entity before looking for our attribute + # name on it. + efm = inspect(adapter.aliased_class).\ + _entity_for_mapper( + parentmapper + if parentmapper.isa(self.parent) else self.parent) + + # look for our attribute on the adapted entity, else fall back + # to our straight property onclause = getattr( - adapter.aliased_class, self.key, + efm.entity, self.key, self.parent_property) else: onclause = getattr( @@ -1321,40 +1354,31 @@ class JoinedLoader(AbstractRelationshipLoader): assert clauses.aliased_class is not None - join_to_outer = innerjoin and isinstance(towrap, sql.Join) and \ - towrap.isouter + attach_on_outside = ( + not chained_from_outerjoin or + not innerjoin or innerjoin == 'unnested') - if chained_from_outerjoin and join_to_outer and innerjoin == 'nested': - inner = orm_util.join( - towrap.right, - clauses.aliased_class, - onclause, - isouter=False - ) - - eagerjoin = orm_util.join( - towrap.left, - inner, - towrap.onclause, - isouter=True - ) - eagerjoin._target_adapter = inner._target_adapter - else: - if chained_from_outerjoin: - innerjoin = False - eagerjoin = orm_util.join( + if attach_on_outside: + # this is the "classic" eager join case. + eagerjoin = orm_util._ORMJoin( towrap, clauses.aliased_class, onclause, - isouter=not innerjoin + isouter=not innerjoin or ( + chained_from_outerjoin and isinstance(towrap, sql.Join) + ), _left_memo=self.parent, _right_memo=self.mapper ) + else: + # all other cases are innerjoin=='nested' approach + eagerjoin = self._splice_nested_inner_join( + path, towrap, clauses, onclause) + context.eager_joins[entity_key] = eagerjoin # send a hint to the Query as to where it may "splice" this join eagerjoin.stop_on = entity.selectable - if self.parent_property.secondary is None and \ - not parentmapper: + if not parentmapper: # for parentclause that is the non-eager end of the join, # ensure all the parent cols in the primaryjoin are actually # in the @@ -1377,7 +1401,67 @@ class JoinedLoader(AbstractRelationshipLoader): ) ) - def _create_eager_adapter(self, context, row, adapter, path, loadopt): + def _splice_nested_inner_join( + self, path, join_obj, clauses, onclause, splicing=False): + + if splicing is False: + # first call is always handed a join object + # from the outside + assert isinstance(join_obj, orm_util._ORMJoin) + elif isinstance(join_obj, sql.selectable.FromGrouping): + return self._splice_nested_inner_join( + path, join_obj.element, clauses, onclause, splicing + ) + elif not isinstance(join_obj, orm_util._ORMJoin): + if path[-2] is splicing: + return orm_util._ORMJoin( + join_obj, clauses.aliased_class, + onclause, isouter=False, + _left_memo=splicing, + _right_memo=path[-1].mapper + ) + else: + # only here if splicing == True + return None + + target_join = self._splice_nested_inner_join( + path, join_obj.right, clauses, + onclause, join_obj._right_memo) + if target_join is None: + right_splice = False + target_join = self._splice_nested_inner_join( + path, join_obj.left, clauses, + onclause, join_obj._left_memo) + if target_join is None: + # should only return None when recursively called, + # e.g. splicing==True + assert splicing is not False, \ + "assertion failed attempting to produce joined eager loads" + return None + else: + right_splice = True + + if right_splice: + # for a right splice, attempt to flatten out + # a JOIN b JOIN c JOIN .. 
to avoid needless + # parenthesis nesting + if not join_obj.isouter and not target_join.isouter: + eagerjoin = join_obj._splice_into_center(target_join) + else: + eagerjoin = orm_util._ORMJoin( + join_obj.left, target_join, + join_obj.onclause, isouter=join_obj.isouter, + _left_memo=join_obj._left_memo) + else: + eagerjoin = orm_util._ORMJoin( + target_join, join_obj.right, + join_obj.onclause, isouter=join_obj.isouter, + _right_memo=join_obj._right_memo) + + eagerjoin._target_adapter = target_join._target_adapter + return eagerjoin + + def _create_eager_adapter(self, context, result, adapter, path, loadopt): user_defined_adapter = self._init_user_defined_eager_proc( loadopt, context) if loadopt else False @@ -1395,17 +1479,16 @@ class JoinedLoader(AbstractRelationshipLoader): if decorator is None: return False - try: - self.mapper.identity_key_from_row(row, decorator) + if self.mapper._result_has_identity_key(result, decorator): return decorator - except KeyError: + else: # no identity key - don't return a row # processor, will cause a degrade to lazy return False def create_row_processor( self, context, path, loadopt, mapper, - row, adapter): + result, adapter, populators): if not self.parent.class_manager[self.key].impl.supports_population: raise sa_exc.InvalidRequestError( "'%s' does not support object " @@ -1417,36 +1500,40 @@ class JoinedLoader(AbstractRelationshipLoader): eager_adapter = self._create_eager_adapter( context, - row, + result, adapter, our_path, loadopt) if eager_adapter is not False: key = self.key - _instance = loading.instance_processor( + _instance = loading._instance_processor( self.mapper, context, + result, our_path[self.mapper], eager_adapter) if not self.uselist: - return self._create_scalar_loader(context, key, _instance) + self._create_scalar_loader(context, key, _instance, populators) else: - return self._create_collection_loader(context, key, _instance) + self._create_collection_loader( + context, key, _instance, populators) else: - return self.parent_property._get_strategy_by_cls(LazyLoader).\ + self.parent_property._get_strategy_by_cls(LazyLoader).\ create_row_processor( context, path, loadopt, - mapper, row, adapter) + mapper, result, adapter, populators) - def _create_collection_loader(self, context, key, _instance): + def _create_collection_loader(self, context, key, _instance, populators): def load_collection_from_joined_new_row(state, dict_, row): collection = attributes.init_state_collection( state, dict_, key) result_list = util.UniqueAppender(collection, 'append_without_event') context.attributes[(state, key)] = result_list - _instance(row, result_list) + inst = _instance(row) + if inst is not None: + result_list.append(inst) def load_collection_from_joined_existing_row(state, dict_, row): if (state, key) in context.attributes: @@ -1462,25 +1549,30 @@ class JoinedLoader(AbstractRelationshipLoader): collection, 'append_without_event') context.attributes[(state, key)] = result_list - _instance(row, result_list) + inst = _instance(row) + if inst is not None: + result_list.append(inst) def load_collection_from_joined_exec(state, dict_, row): - _instance(row, None) + _instance(row) - return load_collection_from_joined_new_row, \ - load_collection_from_joined_existing_row, \ - None, load_collection_from_joined_exec + populators["new"].append((self.key, load_collection_from_joined_new_row)) + populators["existing"].append( + (self.key, load_collection_from_joined_existing_row)) + if context.invoke_all_eagers: + populators["eager"].append( + (self.key, 
load_collection_from_joined_exec)) - def _create_scalar_loader(self, context, key, _instance): + def _create_scalar_loader(self, context, key, _instance, populators): def load_scalar_from_joined_new_row(state, dict_, row): # set a scalar object instance directly on the parent # object, bypassing InstrumentedAttribute event handlers. - dict_[key] = _instance(row, None) + dict_[key] = _instance(row) def load_scalar_from_joined_existing_row(state, dict_, row): # call _instance on the row, even though the object has # been created, so that we further descend into properties - existing = _instance(row, None) + existing = _instance(row) if existing is not None \ and key in dict_ \ and existing is not dict_[key]: @@ -1490,11 +1582,13 @@ class JoinedLoader(AbstractRelationshipLoader): % self) def load_scalar_from_joined_exec(state, dict_, row): - _instance(row, None) + _instance(row) - return load_scalar_from_joined_new_row, \ - load_scalar_from_joined_existing_row, \ - None, load_scalar_from_joined_exec + populators["new"].append((self.key, load_scalar_from_joined_new_row)) + populators["existing"].append( + (self.key, load_scalar_from_joined_existing_row)) + if context.invoke_all_eagers: + populators["eager"].append((self.key, load_scalar_from_joined_exec)) def single_parent_validator(desc, prop): diff --git a/lib/python3.4/site-packages/sqlalchemy/orm/strategy_options.py b/lib/python3.4/site-packages/sqlalchemy/orm/strategy_options.py index 392f7ce..141f867 100644 --- a/lib/python3.4/site-packages/sqlalchemy/orm/strategy_options.py +++ b/lib/python3.4/site-packages/sqlalchemy/orm/strategy_options.py @@ -1,5 +1,4 @@ -# orm/strategy_options.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -89,6 +88,7 @@ class Load(Generative, MapperOption): cloned.local_opts = {} return cloned + _merge_into_path = False strategy = None propagate_to_loaders = False @@ -162,11 +162,14 @@ class Load(Generative, MapperOption): ext_info = inspect(ac) path_element = ext_info.mapper + existing = path.entity_path[prop].get( + self.context, "path_with_polymorphic") if not ext_info.is_aliased_class: ac = orm_util.with_polymorphic( ext_info.mapper.base_mapper, ext_info.mapper, aliased=True, - _use_mapper_path=True) + _use_mapper_path=True, + _existing_alias=existing) path.entity_path[prop].set( self.context, "path_with_polymorphic", inspect(ac)) path = path[prop][path_element] @@ -177,6 +180,9 @@ class Load(Generative, MapperOption): path = path.entity_path return path + def __str__(self): + return "Load(strategy=%r)" % (self.strategy, ) + def _coerce_strat(self, strategy): if strategy is not None: strategy = tuple(sorted(strategy.items())) @@ -209,7 +215,15 @@ class Load(Generative, MapperOption): cloned._set_path_strategy() def _set_path_strategy(self): - if self.path.has_entity: + if self._merge_into_path: + # special helper for undefer_group + existing = self.path.get(self.context, "loader") + if existing: + existing.local_opts.update(self.local_opts) + else: + self.path.set(self.context, "loader", self) + + elif self.path.has_entity: self.path.parent.set(self.context, "loader", self) else: self.path.set(self.context, "loader", self) @@ -359,6 +373,7 @@ class _UnboundLoad(Load): return None token = start_path[0] + if isinstance(token, util.string_types): entity = self._find_entity_basestring(query, token, raiseerr) elif isinstance(token, PropComparator): @@ -402,10 
+417,27 @@ class _UnboundLoad(Load): # prioritize "first class" options over those # that were "links in the chain", e.g. "x" and "y" in # someload("x.y.z") versus someload("x") / someload("x.y") - if self._is_chain_link: - effective_path.setdefault(context, "loader", loader) + + if effective_path.is_token: + for path in effective_path.generate_for_superclasses(): + if self._merge_into_path: + # special helper for undefer_group + existing = path.get(context, "loader") + if existing: + existing.local_opts.update(self.local_opts) + else: + path.set(context, "loader", loader) + elif self._is_chain_link: + path.setdefault(context, "loader", loader) + else: + path.set(context, "loader", loader) else: - effective_path.set(context, "loader", loader) + # only supported for the undefer_group() wildcard opt + assert not self._merge_into_path + if self._is_chain_link: + effective_path.setdefault(context, "loader", loader) + else: + effective_path.set(context, "loader", loader) def _find_entity_prop_comparator(self, query, token, mapper, raiseerr): if _is_aliased_class(mapper): @@ -631,15 +663,47 @@ def joinedload(loadopt, attr, innerjoin=None): query(Order).options(joinedload(Order.user, innerjoin=True)) - If the joined-eager load is chained onto an existing LEFT OUTER JOIN, - ``innerjoin=True`` will be bypassed and the join will continue to - chain as LEFT OUTER JOIN so that the results don't change. As an - alternative, specify the value ``"nested"``. This will instead nest the - join on the right side, e.g. using the form "a LEFT OUTER JOIN - (b JOIN c)". + In order to chain multiple eager joins together where some may be + OUTER and others INNER, right-nested joins are used to link them:: - .. versionadded:: 0.9.4 Added ``innerjoin="nested"`` option to support - nesting of eager "inner" joins. + query(A).options( + joinedload(A.bs, innerjoin=False). + joinedload(B.cs, innerjoin=True) + ) + + The above query, linking A.bs via "outer" join and B.cs via "inner" join + would render the joins as "a LEFT OUTER JOIN (b JOIN c)". When using + SQLite, this form of JOIN is translated to use full subqueries as this + syntax is otherwise not directly supported. + + The ``innerjoin`` flag can also be stated with the term ``"unnested"``. + This will prevent joins from being right-nested, and will instead + link an "innerjoin" eagerload to an "outerjoin" eagerload by bypassing + the "inner" join. Using this form as follows:: + + query(A).options( + joinedload(A.bs, innerjoin=False). + joinedload(B.cs, innerjoin="unnested") + ) + + Joins will be rendered as "a LEFT OUTER JOIN b LEFT OUTER JOIN c", so that + all of "a" is matched rather than being incorrectly limited by a "b" that + does not contain a "c". + + .. note:: The "unnested" flag does **not** affect the JOIN rendered + from a many-to-many association table, e.g. a table configured + as :paramref:`.relationship.secondary`, to the target table; for + correctness of results, these joins are always INNER and are + therefore right-nested if linked to an OUTER join. + + .. versionadded:: 0.9.4 Added support for "nesting" of eager "inner" + joins. See :ref:`feature_2976`. + + .. versionchanged:: 1.0.0 ``innerjoin=True`` now implies + ``innerjoin="nested"``, whereas in 0.9 it implied + ``innerjoin="unnested"``. In order to achieve the pre-1.0 "unnested" + inner join behavior, use the value ``innerjoin="unnested"``. + See :ref:`migration_3008`. .. 
note:: @@ -979,10 +1043,11 @@ def undefer_group(loadopt, name): :func:`.orm.undefer` """ + loadopt._merge_into_path = True return loadopt.set_column_strategy( "*", None, - {"undefer_group": name} + {"undefer_group_%s" % name: True} ) diff --git a/lib/python3.4/site-packages/sqlalchemy/orm/sync.py b/lib/python3.4/site-packages/sqlalchemy/orm/sync.py index 2e897c4..ccca508 100644 --- a/lib/python3.4/site-packages/sqlalchemy/orm/sync.py +++ b/lib/python3.4/site-packages/sqlalchemy/orm/sync.py @@ -1,5 +1,5 @@ # orm/sync.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -45,11 +45,28 @@ def populate(source, source_mapper, dest, dest_mapper, uowcommit.attributes[("pk_cascaded", dest, r)] = True +def bulk_populate_inherit_keys( + source_dict, source_mapper, synchronize_pairs): + # a simplified version of populate() used by bulk insert mode + for l, r in synchronize_pairs: + try: + prop = source_mapper._columntoproperty[l] + value = source_dict[prop.key] + except exc.UnmappedColumnError: + _raise_col_to_prop(False, source_mapper, l, source_mapper, r) + + try: + prop = source_mapper._columntoproperty[r] + source_dict[prop.key] = value + except exc.UnmappedColumnError: + _raise_col_to_prop(True, source_mapper, l, source_mapper, r) + + def clear(dest, dest_mapper, synchronize_pairs): for l, r in synchronize_pairs: if r.primary_key and \ dest_mapper._get_state_attr_by_column( - dest, dest.dict, r) is not None: + dest, dest.dict, r) not in orm_util._none_set: raise AssertionError( "Dependency rule tried to blank-out primary key " @@ -68,7 +85,7 @@ def update(source, source_mapper, dest, old_prefix, synchronize_pairs): oldvalue = source_mapper._get_committed_attr_by_column( source.obj(), l) value = source_mapper._get_state_attr_by_column( - source, source.dict, l) + source, source.dict, l, passive=attributes.PASSIVE_OFF) except exc.UnmappedColumnError: _raise_col_to_prop(False, source_mapper, l, None, r) dest[r.key] = value @@ -79,7 +96,7 @@ def populate_dict(source, source_mapper, dict_, synchronize_pairs): for l, r in synchronize_pairs: try: value = source_mapper._get_state_attr_by_column( - source, source.dict, l) + source, source.dict, l, passive=attributes.PASSIVE_OFF) except exc.UnmappedColumnError: _raise_col_to_prop(False, source_mapper, l, None, r) diff --git a/lib/python3.4/site-packages/sqlalchemy/orm/unitofwork.py b/lib/python3.4/site-packages/sqlalchemy/orm/unitofwork.py index 71e6182..8b4ae64 100644 --- a/lib/python3.4/site-packages/sqlalchemy/orm/unitofwork.py +++ b/lib/python3.4/site-packages/sqlalchemy/orm/unitofwork.py @@ -1,5 +1,5 @@ # orm/unitofwork.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -16,6 +16,7 @@ organizes them in order of dependency, and executes. from .. import util, event from ..util import topological from . import attributes, persistence, util as orm_util +import itertools def track_cascade_events(descriptor, prop): @@ -379,14 +380,19 @@ class UOWTransaction(object): execute() method has succeeded and the transaction has been committed. 
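Among the strategy-options changes above, ``undefer_group()`` now records a distinct ``undefer_group_<name>`` key, so that options for several groups can coexist on one query. Hedged usage, assuming a mapping whose deferred columns share ``group="photos"``::

    from sqlalchemy.orm import undefer_group

    books = (
        session.query(Book)
        .options(undefer_group("photos"))   # undefer every column in group
        .all()
    )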
""" + if not self.states: + return + states = set(self.states) isdel = set( s for (s, (isdelete, listonly)) in self.states.items() if isdelete ) other = states.difference(isdel) - self.session._remove_newly_deleted(isdel) - self.session._register_newly_persistent(other) + if isdel: + self.session._remove_newly_deleted(isdel) + if other: + self.session._register_newly_persistent(other) class IterateMappersMixin(object): diff --git a/lib/python3.4/site-packages/sqlalchemy/orm/util.py b/lib/python3.4/site-packages/sqlalchemy/orm/util.py index 215de5f..42fadca 100644 --- a/lib/python3.4/site-packages/sqlalchemy/orm/util.py +++ b/lib/python3.4/site-packages/sqlalchemy/orm/util.py @@ -1,5 +1,5 @@ # orm/util.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -13,9 +13,9 @@ from . import attributes import re from .base import instance_str, state_str, state_class_str, attribute_str, \ - state_attribute_str, object_mapper, object_state, _none_set + state_attribute_str, object_mapper, object_state, _none_set, _never_set from .base import class_mapper, _class_to_mapper -from .base import _InspectionAttr +from .base import InspectionAttr from .path_registry import PathRegistry all_cascades = frozenset(("delete", "delete-orphan", "all", "merge", @@ -30,21 +30,19 @@ class CascadeOptions(frozenset): 'all', 'none', 'delete-orphan']) _allowed_cascades = all_cascades - def __new__(cls, arg): - values = set([ - c for c - in re.split('\s*,\s*', arg or "") - if c - ]) + __slots__ = ( + 'save_update', 'delete', 'refresh_expire', 'merge', + 'expunge', 'delete_orphan') + def __new__(cls, value_list): + if isinstance(value_list, util.string_types) or value_list is None: + return cls.from_string(value_list) + values = set(value_list) if values.difference(cls._allowed_cascades): raise sa_exc.ArgumentError( "Invalid cascade option(s): %s" % ", ".join([repr(x) for x in - sorted( - values.difference(cls._allowed_cascades) - )]) - ) + sorted(values.difference(cls._allowed_cascades))])) if "all" in values: values.update(cls._add_w_all_cascades) @@ -70,6 +68,15 @@ class CascadeOptions(frozenset): ",".join([x for x in sorted(self)]) ) + @classmethod + def from_string(cls, arg): + values = [ + c for c + in re.split('\s*,\s*', arg or "") + if c + ] + return cls(values) + def _validator_events( desc, key, validator, include_removes, include_backrefs): @@ -270,15 +277,14 @@ first() class ORMAdapter(sql_util.ColumnAdapter): - """Extends ColumnAdapter to accept ORM entities. - - The selectable is extracted from the given entity, - and the AliasedClass if any is referenced. + """ColumnAdapter subclass which excludes adaptation of entities from + non-matching mappers. 
""" def __init__(self, entity, equivalents=None, adapt_required=False, - chain_to=None): + chain_to=None, allow_label_resolve=True, + anonymize_labels=False): info = inspection.inspect(entity) self.mapper = info.mapper @@ -288,16 +294,18 @@ class ORMAdapter(sql_util.ColumnAdapter): self.aliased_class = entity else: self.aliased_class = None - sql_util.ColumnAdapter.__init__(self, selectable, - equivalents, chain_to, - adapt_required=adapt_required) - def replace(self, elem): + sql_util.ColumnAdapter.__init__( + self, selectable, equivalents, chain_to, + adapt_required=adapt_required, + allow_label_resolve=allow_label_resolve, + anonymize_labels=anonymize_labels, + include_fn=self._include_fn + ) + + def _include_fn(self, elem): entity = elem._annotations.get('parentmapper', None) - if not entity or entity.isa(self.mapper): - return sql_util.ColumnAdapter.replace(self, elem) - else: - return None + return not entity or entity.isa(self.mapper) class AliasedClass(object): @@ -354,6 +362,7 @@ class AliasedClass(object): if alias is None: alias = mapper._with_polymorphic_selectable.alias( name=name, flat=flat) + self._aliased_insp = AliasedInsp( self, mapper, @@ -412,7 +421,7 @@ class AliasedClass(object): id(self), self._aliased_insp._target.__name__) -class AliasedInsp(_InspectionAttr): +class AliasedInsp(InspectionAttr): """Provide an inspection interface for an :class:`.AliasedClass` object. @@ -460,9 +469,9 @@ class AliasedInsp(_InspectionAttr): self._base_alias = _base_alias or self self._use_mapper_path = _use_mapper_path - self._adapter = sql_util.ClauseAdapter( + self._adapter = sql_util.ColumnAdapter( selectable, equivalents=mapper._equivalent_columns, - adapt_on_names=adapt_on_names) + adapt_on_names=adapt_on_names, anonymize_labels=True) self._adapt_on_names = adapt_on_names self._target = mapper.class_ @@ -521,14 +530,18 @@ class AliasedInsp(_InspectionAttr): def _adapt_element(self, elem): return self._adapter.traverse(elem).\ _annotate({ - 'parententity': self.entity, + 'parententity': self, 'parentmapper': self.mapper} ) def _entity_for_mapper(self, mapper): self_poly = self.with_polymorphic_mappers if mapper in self_poly: - return getattr(self.entity, mapper.class_.__name__)._aliased_insp + if mapper is self.mapper: + return self + else: + return getattr( + self.entity, mapper.class_.__name__)._aliased_insp elif mapper.isa(self.mapper): return self else: @@ -536,8 +549,13 @@ class AliasedInsp(_InspectionAttr): mapper, self) def __repr__(self): - return '' % ( - id(self), self.class_.__name__) + if self.with_polymorphic_mappers: + with_poly = "(%s)" % ", ".join( + mp.class_.__name__ for mp in self.with_polymorphic_mappers) + else: + with_poly = "" + return '' % ( + id(self), self.class_.__name__, with_poly) inspection._inspects(AliasedClass)(lambda target: target._aliased_insp) @@ -641,7 +659,8 @@ def aliased(element, alias=None, name=None, flat=False, adapt_on_names=False): def with_polymorphic(base, classes, selectable=False, flat=False, polymorphic_on=None, aliased=False, - innerjoin=False, _use_mapper_path=False): + innerjoin=False, _use_mapper_path=False, + _existing_alias=None): """Produce an :class:`.AliasedClass` construct which specifies columns for descendant mappers of the given base. 
@@ -706,6 +725,16 @@ def with_polymorphic(base, classes, selectable=False, only be specified if querying for one specific subtype only """ primary_mapper = _class_to_mapper(base) + if _existing_alias: + assert _existing_alias.mapper is primary_mapper + classes = util.to_set(classes) + new_classes = set([ + mp.class_ for mp in + _existing_alias.with_polymorphic_mappers]) + if classes == new_classes: + return _existing_alias + else: + classes = classes.union(new_classes) mappers, selectable = primary_mapper.\ _with_polymorphic_args(classes, selectable, innerjoin=innerjoin) @@ -751,7 +780,10 @@ class _ORMJoin(expression.Join): __visit_name__ = expression.Join.__visit_name__ - def __init__(self, left, right, onclause=None, isouter=False): + def __init__( + self, + left, right, onclause=None, isouter=False, + _left_memo=None, _right_memo=None): left_info = inspection.inspect(left) left_orm_info = getattr(left, '_joined_from_info', left_info) @@ -761,6 +793,9 @@ class _ORMJoin(expression.Join): self._joined_from_info = right_info + self._left_memo = _left_memo + self._right_memo = _right_memo + if isinstance(onclause, util.string_types): onclause = getattr(left_orm_info.entity, onclause) @@ -802,6 +837,43 @@ class _ORMJoin(expression.Join): expression.Join.__init__(self, left, right, onclause, isouter) + if not prop and getattr(right_info, 'mapper', None) \ + and right_info.mapper.single: + # if single inheritance target and we are using a manual + # or implicit ON clause, augment it the same way we'd augment the + # WHERE. + single_crit = right_info.mapper._single_table_criterion + if single_crit is not None: + if right_info.is_aliased_class: + single_crit = right_info._adapter.traverse(single_crit) + self.onclause = self.onclause & single_crit + + def _splice_into_center(self, other): + """Splice a join into the center. + + Given join(a, b) and join(b, c), return join(a, b).join(c) + + """ + leftmost = other + while isinstance(leftmost, sql.Join): + leftmost = leftmost.left + + assert self.right is leftmost + + left = _ORMJoin( + self.left, other.left, + self.onclause, isouter=self.isouter, + _left_memo=self._left_memo, + _right_memo=other._left_memo + ) + + return _ORMJoin( + left, + other.right, + other.onclause, isouter=other.isouter, + _right_memo=other._right_memo + ) + def join(self, right, onclause=None, isouter=False, join_to_left=None): return _ORMJoin(self, right, onclause, isouter) @@ -894,9 +966,7 @@ def with_parent(instance, prop): elif isinstance(prop, attributes.QueryableAttribute): prop = prop.property - return prop.compare(operators.eq, - instance, - value_is_parent=True) + return prop._with_parent(instance) def has_identity(object): diff --git a/lib/python3.4/site-packages/sqlalchemy/pool.py b/lib/python3.4/site-packages/sqlalchemy/pool.py index d26bbf3..32b4736 100644 --- a/lib/python3.4/site-packages/sqlalchemy/pool.py +++ b/lib/python3.4/site-packages/sqlalchemy/pool.py @@ -1,5 +1,5 @@ # sqlalchemy/pool.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -186,6 +186,10 @@ class Pool(log.Identified): database that supports transactions, as it will lead to deadlocks and stale state. + * ``"none"`` - same as ``None`` + + .. versionadded:: 0.9.10 + * ``False`` - same as None, this is here for backwards compatibility. 
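The ``"none"`` string documented above behaves the same as passing ``None``; via ``create_engine()`` it is reachable through the ``pool_reset_on_return`` argument::

    from sqlalchemy import create_engine

    engine = create_engine(
        "sqlite://",
        pool_reset_on_return="none",   # no rollback/commit at check-in
    )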
@@ -220,7 +224,7 @@ class Pool(log.Identified): self._use_threadlocal = use_threadlocal if reset_on_return in ('rollback', True, reset_rollback): self._reset_on_return = reset_rollback - elif reset_on_return in (None, False, reset_none): + elif reset_on_return in ('none', None, False, reset_none): self._reset_on_return = reset_none elif reset_on_return in ('commit', reset_commit): self._reset_on_return = reset_commit @@ -230,6 +234,7 @@ class Pool(log.Identified): % reset_on_return) self.echo = echo + if _dispatch: self.dispatch._update(_dispatch, only_propagate=False) if _dialect: @@ -244,13 +249,46 @@ class Pool(log.Identified): for l in listeners: self.add_listener(l) + @property + def _creator(self): + return self.__dict__['_creator'] + + @_creator.setter + def _creator(self, creator): + self.__dict__['_creator'] = creator + self._invoke_creator = self._should_wrap_creator(creator) + + def _should_wrap_creator(self, creator): + """Detect if creator accepts a single argument, or is sent + as a legacy style no-arg function. + + """ + + try: + argspec = util.get_callable_argspec(self._creator, no_self=True) + except TypeError: + return lambda crec: creator() + + defaulted = argspec[3] is not None and len(argspec[3]) or 0 + positionals = len(argspec[0]) - defaulted + + # look for the exact arg signature that DefaultStrategy + # sends us + if (argspec[0], argspec[3]) == (['connection_record'], (None,)): + return creator + # or just a single positional + elif positionals == 1: + return creator + # all other cases, just wrap and assume legacy "creator" callable + # thing + else: + return lambda crec: creator() + def _close_connection(self, connection): self.logger.debug("Closing connection %r", connection) try: self._dialect.do_close(connection) - except (SystemExit, KeyboardInterrupt): - raise - except: + except Exception: self.logger.error("Exception closing connection %r", connection, exc_info=True) @@ -305,7 +343,7 @@ class Pool(log.Identified): """Return a new :class:`.Pool`, of the same class as this one and configured with identical creation arguments. - This method is used in conjunection with :meth:`dispose` + This method is used in conjunction with :meth:`dispose` to close out an entire :class:`.Pool` and create a new one in its place. @@ -425,6 +463,8 @@ class _ConnectionRecord(object): """ + _soft_invalidate_time = 0 + @util.memoized_property def info(self): """The ``.info`` dictionary associated with the DBAPI connection. @@ -441,18 +481,19 @@ class _ConnectionRecord(object): try: dbapi_connection = rec.get_connection() except: - rec.checkin() - raise - fairy = _ConnectionFairy(dbapi_connection, rec) + with util.safe_reraise(): + rec.checkin() + echo = pool._should_log_debug() + fairy = _ConnectionFairy(dbapi_connection, rec, echo) rec.fairy_ref = weakref.ref( fairy, lambda ref: _finalize_fairy and _finalize_fairy( dbapi_connection, - rec, pool, ref, pool._echo) + rec, pool, ref, echo) ) _refs.add(rec) - if pool._echo: + if echo: pool.logger.debug("Connection %r checked out from pool", dbapi_connection) return fairy @@ -472,7 +513,7 @@ class _ConnectionRecord(object): if self.connection is not None: self.__close() - def invalidate(self, e=None): + def invalidate(self, e=None, soft=False): """Invalidate the DBAPI connection held by this :class:`._ConnectionRecord`. 
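A hedged sketch of the ``soft`` invalidation flag introduced in this hunk: the pool-proxied DBAPI connection returned by ``Engine.raw_connection()`` exposes the same ``invalidate()`` signature, so a connection can be flagged for recycle without being closed out from under its current user (``engine`` is assumed)::

    raw = engine.raw_connection()   # a pool-managed connection wrapper
    raw.invalidate(soft=True)       # keep using it; recycle at next checkout
    raw.close()                     # check in; the recycle happens later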
This method is called for all connection invalidations, including @@ -480,6 +521,13 @@ class _ConnectionRecord(object): :meth:`.Connection.invalidate` methods are called, as well as when any so-called "automatic invalidation" condition occurs. + :param e: an exception object indicating a reason for the invalidation. + + :param soft: if True, the connection isn't closed; instead, this + connection will be recycled on next checkout. + + .. versionadded:: 1.0.3 + .. seealso:: :ref:`pool_connection_invalidation` @@ -488,22 +536,31 @@ class _ConnectionRecord(object): # already invalidated if self.connection is None: return - self.__pool.dispatch.invalidate(self.connection, self, e) + if soft: + self.__pool.dispatch.soft_invalidate(self.connection, self, e) + else: + self.__pool.dispatch.invalidate(self.connection, self, e) if e is not None: self.__pool.logger.info( - "Invalidate connection %r (reason: %s:%s)", + "%sInvalidate connection %r (reason: %s:%s)", + "Soft " if soft else "", self.connection, e.__class__.__name__, e) else: self.__pool.logger.info( - "Invalidate connection %r", self.connection) - self.__close() - self.connection = None + "%sInvalidate connection %r", + "Soft " if soft else "", + self.connection) + if soft: + self._soft_invalidate_time = time.time() + else: + self.__close() + self.connection = None def get_connection(self): recycle = False if self.connection is None: - self.connection = self.__connect() self.info.clear() + self.connection = self.__connect() if self.__pool.dispatch.connect: self.__pool.dispatch.connect(self.connection, self) elif self.__pool._recycle > -1 and \ @@ -519,22 +576,35 @@ class _ConnectionRecord(object): self.connection ) recycle = True + elif self._soft_invalidate_time > self.starttime: + self.__pool.logger.info( + "Connection %r invalidated due to local soft invalidation; " + + "recycling", + self.connection + ) + recycle = True if recycle: self.__close() - self.connection = self.__connect() self.info.clear() + + # ensure that if self.__connect() fails, + # we are not referring to the previous stale connection here + self.connection = None + self.connection = self.__connect() + if self.__pool.dispatch.connect: self.__pool.dispatch.connect(self.connection, self) return self.connection def __close(self): + self.finalize_callback.clear() self.__pool._close_connection(self.connection) def __connect(self): try: self.starttime = time.time() - connection = self.__pool._creator() + connection = self.__pool._invoke_creator(self) self.__pool.logger.debug("Created new connection %r", connection) return connection except Exception as e: @@ -560,19 +630,20 @@ def _finalize_fairy(connection, connection_record, connection) try: - fairy = fairy or _ConnectionFairy(connection, connection_record) + fairy = fairy or _ConnectionFairy( + connection, connection_record, echo) assert fairy.connection is connection - fairy._reset(pool, echo) + fairy._reset(pool) # Immediately close detached instances if not connection_record: pool._close_connection(connection) - except Exception as e: + except BaseException as e: pool.logger.error( "Exception during reset or similar", exc_info=True) if connection_record: connection_record.invalidate(e=e) - if isinstance(e, (SystemExit, KeyboardInterrupt)): + if not isinstance(e, Exception): raise if connection_record: @@ -603,9 +674,10 @@ class _ConnectionFairy(object): """ - def __init__(self, dbapi_connection, connection_record): + def __init__(self, dbapi_connection, connection_record, echo): self.connection = dbapi_connection 
self._connection_record = connection_record + self._echo = echo connection = None """A reference to the actual DBAPI connection being tracked.""" @@ -642,7 +714,6 @@ class _ConnectionFairy(object): fairy._pool = pool fairy._counter = 0 - fairy._echo = pool._should_log_debug() if threadconns is not None: threadconns.current = weakref.ref(fairy) @@ -666,7 +737,13 @@ class _ConnectionFairy(object): pool.logger.info( "Disconnection detected on checkout: %s", e) fairy._connection_record.invalidate(e) - fairy.connection = fairy._connection_record.get_connection() + try: + fairy.connection = \ + fairy._connection_record.get_connection() + except: + with util.safe_reraise(): + fairy._connection_record.checkin() + attempts -= 1 pool.logger.info("Reconnection attempts exhausted on checkout") @@ -684,11 +761,11 @@ class _ConnectionFairy(object): _close = _checkin - def _reset(self, pool, echo): + def _reset(self, pool): if pool.dispatch.reset: pool.dispatch.reset(self, self._connection_record) if pool._reset_on_return is reset_rollback: - if echo: + if self._echo: pool.logger.debug("Connection %s rollback-on-return%s", self.connection, ", via agent" @@ -698,7 +775,7 @@ class _ConnectionFairy(object): else: pool._dialect.do_rollback(self) elif pool._reset_on_return is reset_commit: - if echo: + if self._echo: pool.logger.debug("Connection %s commit-on-return%s", self.connection, ", via agent" @@ -734,7 +811,7 @@ class _ConnectionFairy(object): """ return self._connection_record.info - def invalidate(self, e=None): + def invalidate(self, e=None, soft=False): """Mark this connection as invalidated. This method can be called directly, and is also called as a result @@ -743,6 +820,13 @@ class _ConnectionFairy(object): further use by the pool. The invalidation mechanism proceeds via the :meth:`._ConnectionRecord.invalidate` internal method. + :param e: an exception object indicating a reason for the invalidation. + + :param soft: if True, the connection isn't closed; instead, this + connection will be recycled on next checkout. + + .. versionadded:: 1.0.3 + .. seealso:: :ref:`pool_connection_invalidation` @@ -753,9 +837,10 @@ class _ConnectionFairy(object): util.warn("Can't invalidate an already-closed connection.") return if self._connection_record: - self._connection_record.invalidate(e=e) - self.connection = None - self._checkin() + self._connection_record.invalidate(e=e, soft=soft) + if not soft: + self.connection = None + self._checkin() def cursor(self, *args, **kwargs): """Return a new DBAPI cursor for the underlying connection. @@ -804,6 +889,19 @@ class SingletonThreadPool(Pool): Maintains one connection per each thread, never moving a connection to a thread other than the one which it was created in. + .. warning:: the :class:`.SingletonThreadPool` will call ``.close()`` + on arbitrary connections that exist beyond the size setting of + ``pool_size``, e.g. if more unique **thread identities** + than what ``pool_size`` states are used. This cleanup is + non-deterministic and not sensitive to whether or not the connections + linked to those thread identities are currently in use. + + :class:`.SingletonThreadPool` may be improved in a future release, + however in its current status it is generally used only for test + scenarios using a SQLite ``:memory:`` database and is not recommended + for production use. 
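The test-oriented usage the warning above recommends, as a hedged example::

    from sqlalchemy import create_engine
    from sqlalchemy.pool import SingletonThreadPool

    engine = create_engine(
        "sqlite:///:memory:",
        poolclass=SingletonThreadPool,
        pool_size=5,   # at most five per-thread connections are kept
    )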
+ + Options are the same as those of :class:`.Pool`, as well as: :param pool_size: The number of threads in which to maintain connections @@ -840,9 +938,7 @@ class SingletonThreadPool(Pool): for conn in self._all_conns: try: conn.close() - except (SystemExit, KeyboardInterrupt): - raise - except: + except Exception: # pysqlite won't even let you close a conn from a thread # that didn't create it pass @@ -919,9 +1015,9 @@ class QueuePool(Pool): on returning a connection. Defaults to 30. :param \**kw: Other keyword arguments including - :paramref:`.Pool.recycle`, :paramref:`.Pool.echo`, - :paramref:`.Pool.reset_on_return` and others are passed to the - :class:`.Pool` constructor. + :paramref:`.Pool.recycle`, :paramref:`.Pool.echo`, + :paramref:`.Pool.reset_on_return` and others are passed to the + :class:`.Pool` constructor. """ Pool.__init__(self, creator, **kw) @@ -960,8 +1056,8 @@ class QueuePool(Pool): try: return self._create_connection() except: - self._dec_overflow() - raise + with util.safe_reraise(): + self._dec_overflow() else: return self._do_get() diff --git a/lib/python3.4/site-packages/sqlalchemy/processors.py b/lib/python3.4/site-packages/sqlalchemy/processors.py index 3794b01..b57e674 100644 --- a/lib/python3.4/site-packages/sqlalchemy/processors.py +++ b/lib/python3.4/site-packages/sqlalchemy/processors.py @@ -1,5 +1,5 @@ # sqlalchemy/processors.py -# Copyright (C) 2010-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2010-2016 the SQLAlchemy authors and contributors # # Copyright (C) 2010 Gaetan de Menten gdementen@gmail.com # diff --git a/lib/python3.4/site-packages/sqlalchemy/schema.py b/lib/python3.4/site-packages/sqlalchemy/schema.py index 4b6ad19..5b703f7 100644 --- a/lib/python3.4/site-packages/sqlalchemy/schema.py +++ b/lib/python3.4/site-packages/sqlalchemy/schema.py @@ -1,5 +1,5 @@ # schema.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -35,6 +35,7 @@ from .sql.schema import ( UniqueConstraint, _get_table_key, ColumnCollectionConstraint, + ColumnCollectionMixin ) @@ -58,5 +59,7 @@ from .sql.ddl import ( DDLBase, DDLElement, _CreateDropBase, - _DDLCompiles + _DDLCompiles, + sort_tables, + sort_tables_and_constraints ) diff --git a/lib/python3.4/site-packages/sqlalchemy/sql/__init__.py b/lib/python3.4/site-packages/sqlalchemy/sql/__init__.py index 4d01385..eb305a8 100644 --- a/lib/python3.4/site-packages/sqlalchemy/sql/__init__.py +++ b/lib/python3.4/site-packages/sqlalchemy/sql/__init__.py @@ -1,5 +1,5 @@ # sql/__init__.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -38,6 +38,7 @@ from .expression import ( false, False_, func, + funcfilter, insert, intersect, intersect_all, diff --git a/lib/python3.4/site-packages/sqlalchemy/sql/annotation.py b/lib/python3.4/site-packages/sqlalchemy/sql/annotation.py index 02f5c3c..6ad25ab 100644 --- a/lib/python3.4/site-packages/sqlalchemy/sql/annotation.py +++ b/lib/python3.4/site-packages/sqlalchemy/sql/annotation.py @@ -1,5 +1,5 @@ # sql/annotation.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -46,6 +46,7 @@ class Annotated(object): self.__dict__ = 
element.__dict__.copy() self.__element = element self._annotations = values + self._hash = hash(element) def _annotate(self, values): _values = self._annotations.copy() @@ -87,7 +88,7 @@ class Annotated(object): return self.__class__(clone, self._annotations) def __hash__(self): - return hash(self.__element) + return self._hash def __eq__(self, other): if isinstance(self.__element, operators.ColumnOperators): diff --git a/lib/python3.4/site-packages/sqlalchemy/sql/base.py b/lib/python3.4/site-packages/sqlalchemy/sql/base.py index 5358d95..cf7dcfd 100644 --- a/lib/python3.4/site-packages/sqlalchemy/sql/base.py +++ b/lib/python3.4/site-packages/sqlalchemy/sql/base.py @@ -1,5 +1,5 @@ # sql/base.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -449,10 +449,13 @@ class ColumnCollection(util.OrderedProperties): """ - def __init__(self): + __slots__ = '_all_columns' + + def __init__(self, *columns): super(ColumnCollection, self).__init__() - self.__dict__['_all_col_set'] = util.column_set() - self.__dict__['_all_columns'] = [] + object.__setattr__(self, '_all_columns', []) + for c in columns: + self.add(c) def __str__(self): return repr([str(c) for c in self]) @@ -478,14 +481,11 @@ class ColumnCollection(util.OrderedProperties): other = self[column.name] if other.name == other.key: remove_col = other - self._all_col_set.remove(other) del self._data[other.key] if column.key in self._data: remove_col = self._data[column.key] - self._all_col_set.remove(remove_col) - self._all_col_set.add(column) self._data[column.key] = column if remove_col is not None: self._all_columns[:] = [column if c is remove_col @@ -530,7 +530,6 @@ class ColumnCollection(util.OrderedProperties): # in a _make_proxy operation util.memoized_property.reset(value, "proxy_set") - self._all_col_set.add(value) self._all_columns.append(value) self._data[key] = value @@ -539,22 +538,20 @@ class ColumnCollection(util.OrderedProperties): def remove(self, column): del self._data[column.key] - self._all_col_set.remove(column) self._all_columns[:] = [ c for c in self._all_columns if c is not column] def update(self, iter): cols = list(iter) + all_col_set = set(self._all_columns) self._all_columns.extend( - c for label, c in cols if c not in self._all_col_set) - self._all_col_set.update(c for label, c in cols) + c for label, c in cols if c not in all_col_set) self._data.update((label, c) for label, c in cols) def extend(self, iter): cols = list(iter) - self._all_columns.extend(c for c in cols if c not in - self._all_col_set) - self._all_col_set.update(cols) + all_col_set = set(self._all_columns) + self._all_columns.extend(c for c in cols if c not in all_col_set) self._data.update((c.key, c) for c in cols) __hash__ = None @@ -574,28 +571,24 @@ class ColumnCollection(util.OrderedProperties): return util.OrderedProperties.__contains__(self, other) def __getstate__(self): - return {'_data': self.__dict__['_data'], - '_all_columns': self.__dict__['_all_columns']} + return {'_data': self._data, + '_all_columns': self._all_columns} def __setstate__(self, state): - self.__dict__['_data'] = state['_data'] - self.__dict__['_all_columns'] = state['_all_columns'] - self.__dict__['_all_col_set'] = util.column_set(state['_all_columns']) + object.__setattr__(self, '_data', state['_data']) + object.__setattr__(self, '_all_columns', state['_all_columns']) def contains_column(self, col): - # this has to be 
done via set() membership - return col in self._all_col_set + return col in set(self._all_columns) def as_immutable(self): - return ImmutableColumnCollection( - self._data, self._all_col_set, self._all_columns) + return ImmutableColumnCollection(self._data, self._all_columns) class ImmutableColumnCollection(util.ImmutableProperties, ColumnCollection): - def __init__(self, data, colset, all_columns): + def __init__(self, data, all_columns): util.ImmutableProperties.__init__(self, data) - self.__dict__['_all_col_set'] = colset - self.__dict__['_all_columns'] = all_columns + object.__setattr__(self, '_all_columns', all_columns) extend = remove = util.ImmutableProperties._immutable diff --git a/lib/python3.4/site-packages/sqlalchemy/sql/compiler.py b/lib/python3.4/site-packages/sqlalchemy/sql/compiler.py index ca03b62..722beb1 100644 --- a/lib/python3.4/site-packages/sqlalchemy/sql/compiler.py +++ b/lib/python3.4/site-packages/sqlalchemy/sql/compiler.py @@ -1,5 +1,5 @@ # sql/compiler.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -23,13 +23,12 @@ To generate user-defined SQL strings, see """ +import contextlib import re -from . import schema, sqltypes, operators, functions, \ - util as sql_util, visitors, elements, selectable, base +from . import schema, sqltypes, operators, functions, visitors, \ + elements, selectable, crud from .. import util, exc -import decimal import itertools -import operator RESERVED_WORDS = set([ 'all', 'analyse', 'analyze', 'and', 'any', 'array', @@ -54,7 +53,7 @@ LEGAL_CHARACTERS = re.compile(r'^[A-Z0-9_$]+$', re.I) ILLEGAL_INITIAL_CHARACTERS = set([str(x) for x in range(0, 10)]).union(['$']) BIND_PARAMS = re.compile(r'(? 
self.label_length: + if len(anonname) > self.label_length - 6: counter = self.truncated_names.get(ident_class, 1) truncname = anonname[0:max(self.label_length - 6, 0)] + \ "_" + hex(counter)[2:] @@ -1118,7 +1198,7 @@ class SQLCompiler(Compiled): if cte._cte_alias is not None: orig_cte = cte._cte_alias if orig_cte not in self.ctes: - self.visit_cte(orig_cte) + self.visit_cte(orig_cte, **kwargs) cte_alias_name = cte._cte_alias.name if isinstance(cte_alias_name, elements._truncated_label): cte_alias_name = self._truncated_identifier( @@ -1153,12 +1233,16 @@ class SQLCompiler(Compiled): self, asfrom=True, **kwargs ) + if cte._suffixes: + text += " " + self._generate_prefixes( + cte, cte._suffixes, **kwargs) + self.ctes[cte] = text if asfrom: if cte_alias_name: text = self.preparer.format_alias(cte, cte_alias_name) - text += " AS " + cte_name + text += self.get_render_as_alias_suffix(cte_name) else: return self.preparer.format_alias(cte, cte_name) return text @@ -1177,8 +1261,8 @@ class SQLCompiler(Compiled): elif asfrom: ret = alias.original._compiler_dispatch(self, asfrom=True, **kwargs) + \ - " AS " + \ - self.preparer.format_alias(alias, alias_name) + self.get_render_as_alias_suffix( + self.preparer.format_alias(alias, alias_name)) if fromhints and alias in fromhints: ret = self.format_from_hint_text(ret, alias, @@ -1188,19 +1272,11 @@ class SQLCompiler(Compiled): else: return alias.original._compiler_dispatch(self, **kwargs) - def _add_to_result_map(self, keyname, name, objects, type_): - if not self.dialect.case_sensitive: - keyname = keyname.lower() + def get_render_as_alias_suffix(self, alias_name_text): + return " AS " + alias_name_text - if keyname in self.result_map: - # conflicting keyname, just double up the list - # of objects. this will cause an "ambiguous name" - # error if an attempt is made by the result set to - # access. - e_name, e_obj, e_type = self.result_map[keyname] - self.result_map[keyname] = e_name, e_obj + objects, e_type - else: - self.result_map[keyname] = name, objects, type_ + def _add_to_result_map(self, keyname, name, objects, type_): + self._result_columns.append((keyname, name, objects, type_)) def _label_select_column(self, select, column, populate_result_map, @@ -1251,10 +1327,17 @@ class SQLCompiler(Compiled): result_expr = _CompileLabel(col_expr, elements._as_truncated(column.name), alt_names=(column.key,)) - elif not isinstance(column, - (elements.UnaryExpression, elements.TextClause)) \ - and (not hasattr(column, 'name') or - isinstance(column, functions.Function)): + elif ( + not isinstance(column, elements.TextClause) and + ( + not isinstance(column, elements.UnaryExpression) or + column.wraps_column_expression + ) and + ( + not hasattr(column, 'name') or + isinstance(column, functions.Function) + ) + ): result_expr = _CompileLabel(col_expr, column.anon_label) elif col_expr is not column: # TODO: are we sure "column" has a .name and .key here ? 
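For orientation, the kind of construct that reaches ``visit_cte()`` above can be built as follows; the table and column names are illustrative::

    from sqlalchemy import table, column, select

    orders = table("orders", column("region"), column("amount"))

    regional = select([orders.c.region, orders.c.amount]).cte("regional")

    # compiles to WITH regional AS (SELECT ...) SELECT ... FROM regional
    stmt = select([regional.c.region]).where(regional.c.amount > 100)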
@@ -1289,6 +1372,9 @@ class SQLCompiler(Compiled): def get_crud_hint_text(self, table, text): return None + def get_statement_hint_text(self, hint_texts): + return " ".join(hint_texts) + def _transform_select_for_nested_joins(self, select): """Rewrite any "a JOIN (b JOIN c)" expression as "a JOIN (select * from b JOIN c) AS anon", to support @@ -1387,12 +1473,13 @@ class SQLCompiler(Compiled): (inner_col[c._key_label], c) for c in select.inner_columns ) - for key, (name, objs, typ) in list(self.result_map.items()): - objs = tuple([d.get(col, col) for col in objs]) - self.result_map[key] = (name, objs, typ) + + self._result_columns = [ + (key, name, tuple([d.get(col, col) for col in objs]), typ) + for key, name, objs, typ in self._result_columns + ] _default_stack_entry = util.immutabledict([ - ('iswrapper', False), ('correlate_froms', frozenset()), ('asfrom_froms', frozenset()) ]) @@ -1420,10 +1507,10 @@ class SQLCompiler(Compiled): return froms def visit_select(self, select, asfrom=False, parens=True, - iswrapper=False, fromhints=None, + fromhints=None, compound_index=0, - force_result_map=False, nested_join_translation=False, + select_wraps_for=None, **kwargs): needs_nested_translation = \ @@ -1437,21 +1524,25 @@ class SQLCompiler(Compiled): select) text = self.visit_select( transformed_select, asfrom=asfrom, parens=parens, - iswrapper=iswrapper, fromhints=fromhints, + fromhints=fromhints, compound_index=compound_index, - force_result_map=force_result_map, nested_join_translation=True, **kwargs ) toplevel = not self.stack entry = self._default_stack_entry if toplevel else self.stack[-1] - populate_result_map = force_result_map or ( - compound_index == 0 and ( - toplevel or - entry['iswrapper'] - ) - ) + populate_result_map = toplevel or \ + ( + compound_index == 0 and entry.get( + 'need_result_map_for_compound', False) + ) or entry.get('need_result_map_for_nested', False) + + # this was first proposed as part of #3372; however, it is not + # reached in current tests and could possibly be an assertion + # instead. + if not populate_result_map and 'add_to_result_map' in kwargs: + del kwargs['add_to_result_map'] if needs_nested_translation: if populate_result_map: @@ -1459,6 +1550,114 @@ class SQLCompiler(Compiled): select, transformed_select) return text + froms = self._setup_select_stack(select, entry, asfrom) + + column_clause_args = kwargs.copy() + column_clause_args.update({ + 'within_label_clause': False, + 'within_columns_clause': False + }) + + text = "SELECT " # we're off to a good start ! + + if select._hints: + hint_text, byfrom = self._setup_select_hints(select) + if hint_text: + text += hint_text + " " + else: + byfrom = None + + if select._prefixes: + text += self._generate_prefixes( + select, select._prefixes, **kwargs) + + text += self.get_select_precolumns(select, **kwargs) + + # the actual list of columns to print in the SELECT column list. 
+ inner_columns = [ + c for c in [ + self._label_select_column( + select, + column, + populate_result_map, asfrom, + column_clause_args, + name=name) + for name, column in select._columns_plus_names + ] + if c is not None + ] + + if populate_result_map and select_wraps_for is not None: + # if this select is a compiler-generated wrapper, + # rewrite the targeted columns in the result map + wrapped_inner_columns = set(select_wraps_for.inner_columns) + translate = dict( + (outer, inner.pop()) for outer, inner in [ + ( + outer, + outer.proxy_set.intersection(wrapped_inner_columns)) + for outer in select.inner_columns + ] if inner + ) + self._result_columns = [ + (key, name, tuple(translate.get(o, o) for o in obj), type_) + for key, name, obj, type_ in self._result_columns + ] + + text = self._compose_select_body( + text, select, inner_columns, froms, byfrom, kwargs) + + if select._statement_hints: + per_dialect = [ + ht for (dialect_name, ht) + in select._statement_hints + if dialect_name in ('*', self.dialect.name) + ] + if per_dialect: + text += " " + self.get_statement_hint_text(per_dialect) + + if self.ctes and self._is_toplevel_select(select): + text = self._render_cte_clause() + text + + if select._suffixes: + text += " " + self._generate_prefixes( + select, select._suffixes, **kwargs) + + self.stack.pop(-1) + + if asfrom and parens: + return "(" + text + ")" + else: + return text + + def _is_toplevel_select(self, select): + """Return True if the stack is placed at the given select, and + is also the outermost SELECT, meaning there is either no stack + before this one, or the enclosing stack is a topmost INSERT. + + """ + return ( + self.stack[-1]['selectable'] is select and + ( + len(self.stack) == 1 or self.isinsert and len(self.stack) == 2 + and self.statement is self.stack[0]['selectable'] + ) + ) + + def _setup_select_hints(self, select): + byfrom = dict([ + (from_, hinttext % { + 'name': from_._compiler_dispatch( + self, ashint=True) + }) + for (from_, dialect), hinttext in + select._hints.items() + if dialect in ('*', self.dialect.name) + ]) + hint_text = self.get_select_hint_text(byfrom) + return hint_text, byfrom + + def _setup_select_stack(self, select, entry, asfrom): correlate_froms = entry['correlate_froms'] asfrom_froms = entry['asfrom_froms'] @@ -1477,52 +1676,14 @@ class SQLCompiler(Compiled): new_entry = { 'asfrom_froms': new_correlate_froms, - 'iswrapper': iswrapper, - 'correlate_froms': all_correlate_froms + 'correlate_froms': all_correlate_froms, + 'selectable': select, } self.stack.append(new_entry) + return froms - column_clause_args = kwargs.copy() - column_clause_args.update({ - 'within_label_clause': False, - 'within_columns_clause': False - }) - - text = "SELECT " # we're off to a good start ! - - if select._hints: - byfrom = dict([ - (from_, hinttext % { - 'name': from_._compiler_dispatch( - self, ashint=True) - }) - for (from_, dialect), hinttext in - select._hints.items() - if dialect in ('*', self.dialect.name) - ]) - hint_text = self.get_select_hint_text(byfrom) - if hint_text: - text += hint_text + " " - - if select._prefixes: - text += self._generate_prefixes( - select, select._prefixes, **kwargs) - - text += self.get_select_precolumns(select) - - # the actual list of columns to print in the SELECT column list. 
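The ``_statement_hints`` rendering above is fed by ``Select.with_statement_hint()``; a sketch, with the hint text and target dialect purely illustrative::

    from sqlalchemy import table, column, select

    t = table("t", column("x"))

    # the hint is appended after the statement text, and only when
    # compiling for the named dialect; dialect_name="*" applies always
    stmt = select([t.c.x]).with_statement_hint(
        "OPTION (MAXDOP 1)", dialect_name="mssql"
    )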
- inner_columns = [ - c for c in [ - self._label_select_column(select, - column, - populate_result_map, asfrom, - column_clause_args, - name=name) - for name, column in select._columns_plus_names - ] - if c is not None - ] - + def _compose_select_body( + self, text, select, inner_columns, froms, byfrom, kwargs): text += ', '.join(inner_columns) if froms: @@ -1557,30 +1718,16 @@ class SQLCompiler(Compiled): text += " \nHAVING " + t if select._order_by_clause.clauses: - if self.dialect.supports_simple_order_by_label: - order_by_select = select - else: - order_by_select = None + text += self.order_by_clause(select, **kwargs) - text += self.order_by_clause( - select, order_by_select=order_by_select, **kwargs) - - if select._limit is not None or select._offset is not None: - text += self.limit_clause(select) + if (select._limit_clause is not None or + select._offset_clause is not None): + text += self.limit_clause(select, **kwargs) if select._for_update_arg is not None: - text += self.for_update_clause(select) + text += self.for_update_clause(select, **kwargs) - if self.ctes and \ - compound_index == 0 and toplevel: - text = self._render_cte_clause() + text - - self.stack.pop(-1) - - if asfrom and parens: - return "(" + text + ")" - else: - return text + return text def _generate_prefixes(self, stmt, prefixes, **kw): clause = " ".join( @@ -1612,7 +1759,7 @@ class SQLCompiler(Compiled): else: return "WITH" - def get_select_precolumns(self, select): + def get_select_precolumns(self, select, **kw): """Called when building a ``SELECT`` statement, position is just before column list. @@ -1626,7 +1773,7 @@ class SQLCompiler(Compiled): else: return "" - def for_update_clause(self, select): + def for_update_clause(self, select, **kw): return " FOR UPDATE" def returning_clause(self, stmt, returning_cols): @@ -1634,20 +1781,20 @@ class SQLCompiler(Compiled): "RETURNING is not supported by this " "dialect's statement compiler.") - def limit_clause(self, select): + def limit_clause(self, select, **kw): text = "" - if select._limit is not None: - text += "\n LIMIT " + self.process(elements.literal(select._limit)) - if select._offset is not None: - if select._limit is None: + if select._limit_clause is not None: + text += "\n LIMIT " + self.process(select._limit_clause, **kw) + if select._offset_clause is not None: + if select._limit_clause is None: text += "\n LIMIT -1" - text += " OFFSET " + self.process(elements.literal(select._offset)) + text += " OFFSET " + self.process(select._offset_clause, **kw) return text def visit_table(self, table, asfrom=False, iscrud=False, ashint=False, - fromhints=None, **kwargs): + fromhints=None, use_schema=True, **kwargs): if asfrom or ashint: - if getattr(table, "schema", None): + if use_schema and getattr(table, "schema", None): ret = self.preparer.quote_schema(table.schema) + \ "." 
+ self.preparer.quote(table.name) else: @@ -1669,10 +1816,15 @@ class SQLCompiler(Compiled): ) def visit_insert(self, insert_stmt, **kw): - self.isinsert = True - colparams = self._get_colparams(insert_stmt, **kw) + self.stack.append( + {'correlate_froms': set(), + "asfrom_froms": set(), + "selectable": insert_stmt}) - if not colparams and \ + self.isinsert = True + crud_params = crud._get_crud_params(self, insert_stmt, **kw) + + if not crud_params and \ not self.dialect.supports_default_values and \ not self.dialect.supports_empty_insert: raise exc.CompileError("The '%s' dialect with current database " @@ -1687,9 +1839,9 @@ class SQLCompiler(Compiled): "version settings does not support " "in-place multirow inserts." % self.dialect.name) - colparams_single = colparams[0] + crud_params_single = crud_params[0] else: - colparams_single = colparams + crud_params_single = crud_params preparer = self.preparer supports_default_values = self.dialect.supports_default_values @@ -1720,9 +1872,9 @@ class SQLCompiler(Compiled): text += table_text - if colparams_single or not supports_default_values: + if crud_params_single or not supports_default_values: text += " (%s)" % ', '.join([preparer.format_column(c[0]) - for c in colparams_single]) + for c in crud_params_single]) if self.returning or insert_stmt._returning: self.returning = self.returning or insert_stmt._returning @@ -1733,25 +1885,27 @@ class SQLCompiler(Compiled): text += " " + returning_clause if insert_stmt.select is not None: - text += " %s" % self.process(insert_stmt.select, **kw) - elif not colparams and supports_default_values: + text += " %s" % self.process(self._insert_from_select, **kw) + elif not crud_params and supports_default_values: text += " DEFAULT VALUES" elif insert_stmt._has_multi_parameters: text += " VALUES %s" % ( ", ".join( "(%s)" % ( - ', '.join(c[1] for c in colparam_set) + ', '.join(c[1] for c in crud_param_set) ) - for colparam_set in colparams + for crud_param_set in crud_params ) ) else: text += " VALUES (%s)" % \ - ', '.join([c[1] for c in colparams]) + ', '.join([c[1] for c in crud_params]) if self.returning and not self.returning_precedes_values: text += " " + returning_clause + self.stack.pop(-1) + return text def update_limit_clause(self, update_stmt): @@ -1787,8 +1941,8 @@ class SQLCompiler(Compiled): def visit_update(self, update_stmt, **kw): self.stack.append( {'correlate_froms': set([update_stmt.table]), - "iswrapper": False, - "asfrom_froms": set([update_stmt.table])}) + "asfrom_froms": set([update_stmt.table]), + "selectable": update_stmt}) self.isupdate = True @@ -1803,7 +1957,7 @@ class SQLCompiler(Compiled): table_text = self.update_tables_clause(update_stmt, update_stmt.table, extra_froms, **kw) - colparams = self._get_colparams(update_stmt, **kw) + crud_params = crud._get_crud_params(self, update_stmt, **kw) if update_stmt._hints: dialect_hints = dict([ @@ -1830,7 +1984,7 @@ class SQLCompiler(Compiled): text += ', '.join( c[0]._compiler_dispatch(self, include_table=include_table) + - '=' + c[1] for c in colparams + '=' + c[1] for c in crud_params ) if self.returning or update_stmt._returning: @@ -1850,7 +2004,7 @@ class SQLCompiler(Compiled): text += " " + extra_from_text if update_stmt._whereclause is not None: - t = self.process(update_stmt._whereclause) + t = self.process(update_stmt._whereclause, **kw) if t: text += " WHERE " + t @@ -1866,384 +2020,14 @@ class SQLCompiler(Compiled): return text - def _create_crud_bind_param(self, col, value, required=False, name=None): - if name is None: - 
name = col.key - bindparam = elements.BindParameter(name, value, - type_=col.type, required=required) - bindparam._is_crud = True - return bindparam._compiler_dispatch(self) - @util.memoized_property def _key_getters_for_crud_column(self): - if self.isupdate and self.statement._extra_froms: - # when extra tables are present, refer to the columns - # in those extra tables as table-qualified, including in - # dictionaries and when rendering bind param names. - # the "main" table of the statement remains unqualified, - # allowing the most compatibility with a non-multi-table - # statement. - _et = set(self.statement._extra_froms) - - def _column_as_key(key): - str_key = elements._column_as_key(key) - if hasattr(key, 'table') and key.table in _et: - return (key.table.name, str_key) - else: - return str_key - - def _getattr_col_key(col): - if col.table in _et: - return (col.table.name, col.key) - else: - return col.key - - def _col_bind_name(col): - if col.table in _et: - return "%s_%s" % (col.table.name, col.key) - else: - return col.key - - else: - _column_as_key = elements._column_as_key - _getattr_col_key = _col_bind_name = operator.attrgetter("key") - - return _column_as_key, _getattr_col_key, _col_bind_name - - def _get_colparams(self, stmt, **kw): - """create a set of tuples representing column/string pairs for use - in an INSERT or UPDATE statement. - - Also generates the Compiled object's postfetch, prefetch, and - returning column collections, used for default handling and ultimately - populating the ResultProxy's prefetch_cols() and postfetch_cols() - collections. - - """ - - self.postfetch = [] - self.prefetch = [] - self.returning = [] - - # no parameters in the statement, no parameters in the - # compiled params - return binds for all columns - if self.column_keys is None and stmt.parameters is None: - return [ - (c, self._create_crud_bind_param(c, - None, required=True)) - for c in stmt.table.columns - ] - - if stmt._has_multi_parameters: - stmt_parameters = stmt.parameters[0] - else: - stmt_parameters = stmt.parameters - - # getters - these are normally just column.key, - # but in the case of mysql multi-table update, the rules for - # .key must conditionally take tablename into account - _column_as_key, _getattr_col_key, _col_bind_name = \ - self._key_getters_for_crud_column - - # if we have statement parameters - set defaults in the - # compiled params - if self.column_keys is None: - parameters = {} - else: - parameters = dict((_column_as_key(key), REQUIRED) - for key in self.column_keys - if not stmt_parameters or - key not in stmt_parameters) - - # create a list of column assignment clauses as tuples - values = [] - - if stmt_parameters is not None: - for k, v in stmt_parameters.items(): - colkey = _column_as_key(k) - if colkey is not None: - parameters.setdefault(colkey, v) - else: - # a non-Column expression on the left side; - # add it to values() in an "as-is" state, - # coercing right side to bound param - if elements._is_literal(v): - v = self.process( - elements.BindParameter(None, v, type_=k.type), - **kw) - else: - v = self.process(v.self_group(), **kw) - - values.append((k, v)) - - need_pks = self.isinsert and \ - not self.inline and \ - not stmt._returning - - implicit_returning = need_pks and \ - self.dialect.implicit_returning and \ - stmt.table.implicit_returning - - if self.isinsert: - implicit_return_defaults = (implicit_returning and - stmt._return_defaults) - elif self.isupdate: - implicit_return_defaults = (self.dialect.implicit_returning and - 
stmt.table.implicit_returning and - stmt._return_defaults) - else: - implicit_return_defaults = False - - if implicit_return_defaults: - if stmt._return_defaults is True: - implicit_return_defaults = set(stmt.table.c) - else: - implicit_return_defaults = set(stmt._return_defaults) - - postfetch_lastrowid = need_pks and self.dialect.postfetch_lastrowid - - check_columns = {} - - # special logic that only occurs for multi-table UPDATE - # statements - if self.isupdate and stmt._extra_froms and stmt_parameters: - normalized_params = dict( - (elements._clause_element_as_expr(c), param) - for c, param in stmt_parameters.items() - ) - affected_tables = set() - for t in stmt._extra_froms: - for c in t.c: - if c in normalized_params: - affected_tables.add(t) - check_columns[_getattr_col_key(c)] = c - value = normalized_params[c] - if elements._is_literal(value): - value = self._create_crud_bind_param( - c, value, required=value is REQUIRED, - name=_col_bind_name(c)) - else: - self.postfetch.append(c) - value = self.process(value.self_group(), **kw) - values.append((c, value)) - # determine tables which are actually - # to be updated - process onupdate and - # server_onupdate for these - for t in affected_tables: - for c in t.c: - if c in normalized_params: - continue - elif (c.onupdate is not None and not - c.onupdate.is_sequence): - if c.onupdate.is_clause_element: - values.append( - (c, self.process( - c.onupdate.arg.self_group(), - **kw) - ) - ) - self.postfetch.append(c) - else: - values.append( - (c, self._create_crud_bind_param( - c, None, name=_col_bind_name(c) - ) - ) - ) - self.prefetch.append(c) - elif c.server_onupdate is not None: - self.postfetch.append(c) - - if self.isinsert and stmt.select_names: - # for an insert from select, we can only use names that - # are given, so only select for those names. 
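The ``select_names`` collection consulted above originates from ``Insert.from_select()``; a sketch using illustrative tables::

    from sqlalchemy import table, column, select

    src = table("src", column("id"), column("data"))
    dest = table("dest", column("id"), column("data"))

    # ["id", "data"] becomes stmt.select_names; this compiles to
    # INSERT INTO dest (id, data) SELECT src.id, src.data FROM src
    stmt = dest.insert().from_select(
        ["id", "data"],
        select([src.c.id, src.c.data]),
    )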
- cols = (stmt.table.c[_column_as_key(name)] - for name in stmt.select_names) - else: - # iterate through all table columns to maintain - # ordering, even for those cols that aren't included - cols = stmt.table.columns - - for c in cols: - col_key = _getattr_col_key(c) - if col_key in parameters and col_key not in check_columns: - value = parameters.pop(col_key) - if elements._is_literal(value): - value = self._create_crud_bind_param( - c, value, required=value is REQUIRED, - name=_col_bind_name(c) - if not stmt._has_multi_parameters - else "%s_0" % _col_bind_name(c) - ) - else: - if isinstance(value, elements.BindParameter) and \ - value.type._isnull: - value = value._clone() - value.type = c.type - - if c.primary_key and implicit_returning: - self.returning.append(c) - value = self.process(value.self_group(), **kw) - elif implicit_return_defaults and \ - c in implicit_return_defaults: - self.returning.append(c) - value = self.process(value.self_group(), **kw) - else: - self.postfetch.append(c) - value = self.process(value.self_group(), **kw) - values.append((c, value)) - - elif self.isinsert: - if c.primary_key and \ - need_pks and \ - ( - implicit_returning or - not postfetch_lastrowid or - c is not stmt.table._autoincrement_column - ): - - if implicit_returning: - if c.default is not None: - if c.default.is_sequence: - if self.dialect.supports_sequences and \ - (not c.default.optional or - not self.dialect.sequences_optional): - proc = self.process(c.default, **kw) - values.append((c, proc)) - self.returning.append(c) - elif c.default.is_clause_element: - values.append( - (c, self.process( - c.default.arg.self_group(), **kw)) - ) - self.returning.append(c) - else: - values.append( - (c, self._create_crud_bind_param(c, None)) - ) - self.prefetch.append(c) - else: - self.returning.append(c) - else: - if ( - (c.default is not None and - (not c.default.is_sequence or - self.dialect.supports_sequences)) or - c is stmt.table._autoincrement_column and - (self.dialect.supports_sequences or - self.dialect. 
- preexecute_autoincrement_sequences) - ): - - values.append( - (c, self._create_crud_bind_param(c, None)) - ) - - self.prefetch.append(c) - - elif c.default is not None: - if c.default.is_sequence: - if self.dialect.supports_sequences and \ - (not c.default.optional or - not self.dialect.sequences_optional): - proc = self.process(c.default, **kw) - values.append((c, proc)) - if implicit_return_defaults and \ - c in implicit_return_defaults: - self.returning.append(c) - elif not c.primary_key: - self.postfetch.append(c) - elif c.default.is_clause_element: - values.append( - (c, self.process( - c.default.arg.self_group(), **kw)) - ) - - if implicit_return_defaults and \ - c in implicit_return_defaults: - self.returning.append(c) - elif not c.primary_key: - # don't add primary key column to postfetch - self.postfetch.append(c) - else: - values.append( - (c, self._create_crud_bind_param(c, None)) - ) - self.prefetch.append(c) - elif c.server_default is not None: - if implicit_return_defaults and \ - c in implicit_return_defaults: - self.returning.append(c) - elif not c.primary_key: - self.postfetch.append(c) - elif implicit_return_defaults and \ - c in implicit_return_defaults: - self.returning.append(c) - - elif self.isupdate: - if c.onupdate is not None and not c.onupdate.is_sequence: - if c.onupdate.is_clause_element: - values.append( - (c, self.process( - c.onupdate.arg.self_group(), **kw)) - ) - if implicit_return_defaults and \ - c in implicit_return_defaults: - self.returning.append(c) - else: - self.postfetch.append(c) - else: - values.append( - (c, self._create_crud_bind_param(c, None)) - ) - self.prefetch.append(c) - elif c.server_onupdate is not None: - if implicit_return_defaults and \ - c in implicit_return_defaults: - self.returning.append(c) - else: - self.postfetch.append(c) - elif implicit_return_defaults and \ - c in implicit_return_defaults: - self.returning.append(c) - - if parameters and stmt_parameters: - check = set(parameters).intersection( - _column_as_key(k) for k in stmt.parameters - ).difference(check_columns) - if check: - raise exc.CompileError( - "Unconsumed column names: %s" % - (", ".join("%s" % c for c in check)) - ) - - if stmt._has_multi_parameters: - values_0 = values - values = [values] - - values.extend( - [ - ( - c, - (self._create_crud_bind_param( - c, row[c.key], - name="%s_%d" % (c.key, i + 1) - ) if elements._is_literal(row[c.key]) - else self.process( - row[c.key].self_group(), **kw)) - if c.key in row else param - ) - for (c, param) in values_0 - ] - for i, row in enumerate(stmt.parameters[1:]) - ) - - return values + return crud._key_getters_for_crud_column(self) def visit_delete(self, delete_stmt, **kw): self.stack.append({'correlate_froms': set([delete_stmt.table]), - "iswrapper": False, - "asfrom_froms": set([delete_stmt.table])}) + "asfrom_froms": set([delete_stmt.table]), + "selectable": delete_stmt}) self.isdelete = True text = "DELETE " @@ -2283,7 +2067,7 @@ class SQLCompiler(Compiled): delete_stmt, delete_stmt._returning) if delete_stmt._whereclause is not None: - t = delete_stmt._whereclause._compiler_dispatch(self) + t = delete_stmt._whereclause._compiler_dispatch(self, **kw) if t: text += " WHERE " + t @@ -2386,9 +2170,11 @@ class DDLCompiler(Compiled): (table.description, column.name, ce.args[0]) )) - const = self.create_table_constraints(table) + const = self.create_table_constraints( + table, _include_foreign_key_constraints= + create.include_foreign_key_constraints) if const: - text += ", \n\t" + const + text += separator + "\t" + 
const text += "\n)%s\n\n" % self.post_create_table(table) return text @@ -2410,7 +2196,9 @@ class DDLCompiler(Compiled): return text - def create_table_constraints(self, table): + def create_table_constraints( + self, table, + _include_foreign_key_constraints=None): # On some DB order is significant: visit PK first, then the # other constraints (engine.ReflectionTest.testbasic failed on FB2) @@ -2418,20 +2206,28 @@ class DDLCompiler(Compiled): if table.primary_key: constraints.append(table.primary_key) - constraints.extend([c for c in table._sorted_constraints - if c is not table.primary_key]) + all_fkcs = table.foreign_key_constraints + if _include_foreign_key_constraints is not None: + omit_fkcs = all_fkcs.difference(_include_foreign_key_constraints) + else: + omit_fkcs = set() - return ", \n\t".join(p for p in - (self.process(constraint) - for constraint in constraints - if ( - constraint._create_rule is None or - constraint._create_rule(self)) - and ( - not self.dialect.supports_alter or - not getattr(constraint, 'use_alter', False) - )) if p is not None - ) + constraints.extend([c for c in table._sorted_constraints + if c is not table.primary_key and + c not in omit_fkcs]) + + return ", \n\t".join( + p for p in + (self.process(constraint) + for constraint in constraints + if ( + constraint._create_rule is None or + constraint._create_rule(self)) + and ( + not self.dialect.supports_alter or + not getattr(constraint, 'use_alter', False) + )) if p is not None + ) def visit_drop_table(self, drop): return "\nDROP TABLE " + self.preparer.format_table(drop.element) @@ -2506,6 +2302,16 @@ class DDLCompiler(Compiled): text += " INCREMENT BY %d" % create.element.increment if create.element.start is not None: text += " START WITH %d" % create.element.start + if create.element.minvalue is not None: + text += " MINVALUE %d" % create.element.minvalue + if create.element.maxvalue is not None: + text += " MAXVALUE %d" % create.element.maxvalue + if create.element.nominvalue is not None: + text += " NO MINVALUE" + if create.element.nomaxvalue is not None: + text += " NO MAXVALUE" + if create.element.cycle is not None: + text += " CYCLE" return text def visit_drop_sequence(self, drop): @@ -2513,15 +2319,26 @@ class DDLCompiler(Compiled): self.preparer.format_sequence(drop.element) def visit_drop_constraint(self, drop): + constraint = drop.element + if constraint.name is not None: + formatted_name = self.preparer.format_constraint(constraint) + else: + formatted_name = None + + if formatted_name is None: + raise exc.CompileError( + "Can't emit DROP CONSTRAINT for constraint %r; " + "it has no name" % drop.element) return "ALTER TABLE %s DROP CONSTRAINT %s%s" % ( self.preparer.format_table(drop.element.table), - self.preparer.format_constraint(drop.element), + formatted_name, drop.cascade and " CASCADE" or "" ) def get_column_specification(self, column, **kwargs): colspec = self.preparer.format_column(column) + " " + \ - self.dialect.type_compiler.process(column.type) + self.dialect.type_compiler.process( + column.type, type_expression=column) default = self.get_column_default_string(column) if default is not None: colspec += " DEFAULT " + default @@ -2538,7 +2355,8 @@ class DDLCompiler(Compiled): if isinstance(column.server_default.arg, util.string_types): return "'%s'" % column.server_default.arg else: - return self.sql_compiler.process(column.server_default.arg) + return self.sql_compiler.process( + column.server_default.arg, literal_binds=True) else: return None @@ -2585,14 +2403,14 @@ class 
DDLCompiler(Compiled): formatted_name = self.preparer.format_constraint(constraint) if formatted_name is not None: text += "CONSTRAINT %s " % formatted_name - remote_table = list(constraint._elements.values())[0].column.table + remote_table = list(constraint.elements)[0].column.table text += "FOREIGN KEY(%s) REFERENCES %s (%s)" % ( ', '.join(preparer.quote(f.parent.name) - for f in constraint._elements.values()), + for f in constraint.elements), self.define_constraint_remote_table( constraint, remote_table, preparer), ', '.join(preparer.quote(f.column.name) - for f in constraint._elements.values()) + for f in constraint.elements) ) text += self.define_constraint_match(constraint) text += self.define_constraint_cascades(constraint) @@ -2645,13 +2463,13 @@ class DDLCompiler(Compiled): class GenericTypeCompiler(TypeCompiler): - def visit_FLOAT(self, type_): + def visit_FLOAT(self, type_, **kw): return "FLOAT" - def visit_REAL(self, type_): + def visit_REAL(self, type_, **kw): return "REAL" - def visit_NUMERIC(self, type_): + def visit_NUMERIC(self, type_, **kw): if type_.precision is None: return "NUMERIC" elif type_.scale is None: @@ -2662,7 +2480,7 @@ class GenericTypeCompiler(TypeCompiler): {'precision': type_.precision, 'scale': type_.scale} - def visit_DECIMAL(self, type_): + def visit_DECIMAL(self, type_, **kw): if type_.precision is None: return "DECIMAL" elif type_.scale is None: @@ -2673,31 +2491,31 @@ class GenericTypeCompiler(TypeCompiler): {'precision': type_.precision, 'scale': type_.scale} - def visit_INTEGER(self, type_): + def visit_INTEGER(self, type_, **kw): return "INTEGER" - def visit_SMALLINT(self, type_): + def visit_SMALLINT(self, type_, **kw): return "SMALLINT" - def visit_BIGINT(self, type_): + def visit_BIGINT(self, type_, **kw): return "BIGINT" - def visit_TIMESTAMP(self, type_): + def visit_TIMESTAMP(self, type_, **kw): return 'TIMESTAMP' - def visit_DATETIME(self, type_): + def visit_DATETIME(self, type_, **kw): return "DATETIME" - def visit_DATE(self, type_): + def visit_DATE(self, type_, **kw): return "DATE" - def visit_TIME(self, type_): + def visit_TIME(self, type_, **kw): return "TIME" - def visit_CLOB(self, type_): + def visit_CLOB(self, type_, **kw): return "CLOB" - def visit_NCLOB(self, type_): + def visit_NCLOB(self, type_, **kw): return "NCLOB" def _render_string_type(self, type_, name): @@ -2709,91 +2527,91 @@ class GenericTypeCompiler(TypeCompiler): text += ' COLLATE "%s"' % type_.collation return text - def visit_CHAR(self, type_): + def visit_CHAR(self, type_, **kw): return self._render_string_type(type_, "CHAR") - def visit_NCHAR(self, type_): + def visit_NCHAR(self, type_, **kw): return self._render_string_type(type_, "NCHAR") - def visit_VARCHAR(self, type_): + def visit_VARCHAR(self, type_, **kw): return self._render_string_type(type_, "VARCHAR") - def visit_NVARCHAR(self, type_): + def visit_NVARCHAR(self, type_, **kw): return self._render_string_type(type_, "NVARCHAR") - def visit_TEXT(self, type_): + def visit_TEXT(self, type_, **kw): return self._render_string_type(type_, "TEXT") - def visit_BLOB(self, type_): + def visit_BLOB(self, type_, **kw): return "BLOB" - def visit_BINARY(self, type_): + def visit_BINARY(self, type_, **kw): return "BINARY" + (type_.length and "(%d)" % type_.length or "") - def visit_VARBINARY(self, type_): + def visit_VARBINARY(self, type_, **kw): return "VARBINARY" + (type_.length and "(%d)" % type_.length or "") - def visit_BOOLEAN(self, type_): + def visit_BOOLEAN(self, type_, **kw): return "BOOLEAN" - def 
visit_large_binary(self, type_): - return self.visit_BLOB(type_) + def visit_large_binary(self, type_, **kw): + return self.visit_BLOB(type_, **kw) - def visit_boolean(self, type_): - return self.visit_BOOLEAN(type_) + def visit_boolean(self, type_, **kw): + return self.visit_BOOLEAN(type_, **kw) - def visit_time(self, type_): - return self.visit_TIME(type_) + def visit_time(self, type_, **kw): + return self.visit_TIME(type_, **kw) - def visit_datetime(self, type_): - return self.visit_DATETIME(type_) + def visit_datetime(self, type_, **kw): + return self.visit_DATETIME(type_, **kw) - def visit_date(self, type_): - return self.visit_DATE(type_) + def visit_date(self, type_, **kw): + return self.visit_DATE(type_, **kw) - def visit_big_integer(self, type_): - return self.visit_BIGINT(type_) + def visit_big_integer(self, type_, **kw): + return self.visit_BIGINT(type_, **kw) - def visit_small_integer(self, type_): - return self.visit_SMALLINT(type_) + def visit_small_integer(self, type_, **kw): + return self.visit_SMALLINT(type_, **kw) - def visit_integer(self, type_): - return self.visit_INTEGER(type_) + def visit_integer(self, type_, **kw): + return self.visit_INTEGER(type_, **kw) - def visit_real(self, type_): - return self.visit_REAL(type_) + def visit_real(self, type_, **kw): + return self.visit_REAL(type_, **kw) - def visit_float(self, type_): - return self.visit_FLOAT(type_) + def visit_float(self, type_, **kw): + return self.visit_FLOAT(type_, **kw) - def visit_numeric(self, type_): - return self.visit_NUMERIC(type_) + def visit_numeric(self, type_, **kw): + return self.visit_NUMERIC(type_, **kw) - def visit_string(self, type_): - return self.visit_VARCHAR(type_) + def visit_string(self, type_, **kw): + return self.visit_VARCHAR(type_, **kw) - def visit_unicode(self, type_): - return self.visit_VARCHAR(type_) + def visit_unicode(self, type_, **kw): + return self.visit_VARCHAR(type_, **kw) - def visit_text(self, type_): - return self.visit_TEXT(type_) + def visit_text(self, type_, **kw): + return self.visit_TEXT(type_, **kw) - def visit_unicode_text(self, type_): - return self.visit_TEXT(type_) + def visit_unicode_text(self, type_, **kw): + return self.visit_TEXT(type_, **kw) - def visit_enum(self, type_): - return self.visit_VARCHAR(type_) + def visit_enum(self, type_, **kw): + return self.visit_VARCHAR(type_, **kw) - def visit_null(self, type_): + def visit_null(self, type_, **kw): raise exc.CompileError("Can't generate DDL for %r; " "did you forget to specify a " "type on this Column?" % type_) - def visit_type_decorator(self, type_): - return self.process(type_.type_engine(self.dialect)) + def visit_type_decorator(self, type_, **kw): + return self.process(type_.type_engine(self.dialect), **kw) - def visit_user_defined(self, type_): - return type_.get_col_spec() + def visit_user_defined(self, type_, **kw): + return type_.get_col_spec(**kw) class IdentifierPreparer(object): diff --git a/lib/python3.4/site-packages/sqlalchemy/sql/crud.py b/lib/python3.4/site-packages/sqlalchemy/sql/crud.py new file mode 100644 index 0000000..273cc7e --- /dev/null +++ b/lib/python3.4/site-packages/sqlalchemy/sql/crud.py @@ -0,0 +1,571 @@ +# sql/crud.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""Functions used by compiler.py to determine the parameters rendered +within INSERT and UPDATE statements. + +""" +from .. import util +from .. 
import exc +from . import elements +import operator + +REQUIRED = util.symbol('REQUIRED', """ +Placeholder for the value within a :class:`.BindParameter` +which is required to be present when the statement is passed +to :meth:`.Connection.execute`. + +This symbol is typically used when a :func:`.expression.insert` +or :func:`.expression.update` statement is compiled without parameter +values present. + +""") + + +def _get_crud_params(compiler, stmt, **kw): + """create a set of tuples representing column/string pairs for use + in an INSERT or UPDATE statement. + + Also generates the Compiled object's postfetch, prefetch, and + returning column collections, used for default handling and ultimately + populating the ResultProxy's prefetch_cols() and postfetch_cols() + collections. + + """ + + compiler.postfetch = [] + compiler.prefetch = [] + compiler.returning = [] + + # no parameters in the statement, no parameters in the + # compiled params - return binds for all columns + if compiler.column_keys is None and stmt.parameters is None: + return [ + (c, _create_bind_param( + compiler, c, None, required=True)) + for c in stmt.table.columns + ] + + if stmt._has_multi_parameters: + stmt_parameters = stmt.parameters[0] + else: + stmt_parameters = stmt.parameters + + # getters - these are normally just column.key, + # but in the case of mysql multi-table update, the rules for + # .key must conditionally take tablename into account + _column_as_key, _getattr_col_key, _col_bind_name = \ + _key_getters_for_crud_column(compiler) + + # if we have statement parameters - set defaults in the + # compiled params + if compiler.column_keys is None: + parameters = {} + else: + parameters = dict((_column_as_key(key), REQUIRED) + for key in compiler.column_keys + if not stmt_parameters or + key not in stmt_parameters) + + # create a list of column assignment clauses as tuples + values = [] + + if stmt_parameters is not None: + _get_stmt_parameters_params( + compiler, + parameters, stmt_parameters, _column_as_key, values, kw) + + check_columns = {} + + # special logic that only occurs for multi-table UPDATE + # statements + if compiler.isupdate and stmt._extra_froms and stmt_parameters: + _get_multitable_params( + compiler, stmt, stmt_parameters, check_columns, + _col_bind_name, _getattr_col_key, values, kw) + + if compiler.isinsert and stmt.select_names: + _scan_insert_from_select_cols( + compiler, stmt, parameters, + _getattr_col_key, _column_as_key, + _col_bind_name, check_columns, values, kw) + else: + _scan_cols( + compiler, stmt, parameters, + _getattr_col_key, _column_as_key, + _col_bind_name, check_columns, values, kw) + + if parameters and stmt_parameters: + check = set(parameters).intersection( + _column_as_key(k) for k in stmt.parameters + ).difference(check_columns) + if check: + raise exc.CompileError( + "Unconsumed column names: %s" % + (", ".join("%s" % c for c in check)) + ) + + if stmt._has_multi_parameters: + values = _extend_values_for_multiparams(compiler, stmt, values, kw) + + return values + + +def _create_bind_param( + compiler, col, value, process=True, + required=False, name=None, **kw): + if name is None: + name = col.key + bindparam = elements.BindParameter( + name, value, type_=col.type, required=required) + bindparam._is_crud = True + if process: + bindparam = bindparam._compiler_dispatch(compiler, **kw) + return bindparam + + +def _key_getters_for_crud_column(compiler): + if compiler.isupdate and compiler.statement._extra_froms: + # when extra tables are present, refer to the columns 
+ # in those extra tables as table-qualified, including in + # dictionaries and when rendering bind param names. + # the "main" table of the statement remains unqualified, + # allowing the most compatibility with a non-multi-table + # statement. + _et = set(compiler.statement._extra_froms) + + def _column_as_key(key): + str_key = elements._column_as_key(key) + if hasattr(key, 'table') and key.table in _et: + return (key.table.name, str_key) + else: + return str_key + + def _getattr_col_key(col): + if col.table in _et: + return (col.table.name, col.key) + else: + return col.key + + def _col_bind_name(col): + if col.table in _et: + return "%s_%s" % (col.table.name, col.key) + else: + return col.key + + else: + _column_as_key = elements._column_as_key + _getattr_col_key = _col_bind_name = operator.attrgetter("key") + + return _column_as_key, _getattr_col_key, _col_bind_name + + +def _scan_insert_from_select_cols( + compiler, stmt, parameters, _getattr_col_key, + _column_as_key, _col_bind_name, check_columns, values, kw): + + need_pks, implicit_returning, \ + implicit_return_defaults, postfetch_lastrowid = \ + _get_returning_modifiers(compiler, stmt) + + cols = [stmt.table.c[_column_as_key(name)] + for name in stmt.select_names] + + compiler._insert_from_select = stmt.select + + add_select_cols = [] + if stmt.include_insert_from_select_defaults: + col_set = set(cols) + for col in stmt.table.columns: + if col not in col_set and col.default: + cols.append(col) + + for c in cols: + col_key = _getattr_col_key(c) + if col_key in parameters and col_key not in check_columns: + parameters.pop(col_key) + values.append((c, None)) + else: + _append_param_insert_select_hasdefault( + compiler, stmt, c, add_select_cols, kw) + + if add_select_cols: + values.extend(add_select_cols) + compiler._insert_from_select = compiler._insert_from_select._generate() + compiler._insert_from_select._raw_columns = \ + tuple(compiler._insert_from_select._raw_columns) + tuple( + expr for col, expr in add_select_cols) + + +def _scan_cols( + compiler, stmt, parameters, _getattr_col_key, + _column_as_key, _col_bind_name, check_columns, values, kw): + + need_pks, implicit_returning, \ + implicit_return_defaults, postfetch_lastrowid = \ + _get_returning_modifiers(compiler, stmt) + + if stmt._parameter_ordering: + parameter_ordering = [ + _column_as_key(key) for key in stmt._parameter_ordering + ] + ordered_keys = set(parameter_ordering) + cols = [ + stmt.table.c[key] for key in parameter_ordering + ] + [ + c for c in stmt.table.c if c.key not in ordered_keys + ] + else: + cols = stmt.table.columns + + for c in cols: + col_key = _getattr_col_key(c) + if col_key in parameters and col_key not in check_columns: + + _append_param_parameter( + compiler, stmt, c, col_key, parameters, _col_bind_name, + implicit_returning, implicit_return_defaults, values, kw) + + elif compiler.isinsert: + if c.primary_key and \ + need_pks and \ + ( + implicit_returning or + not postfetch_lastrowid or + c is not stmt.table._autoincrement_column + ): + + if implicit_returning: + _append_param_insert_pk_returning( + compiler, stmt, c, values, kw) + else: + _append_param_insert_pk(compiler, stmt, c, values, kw) + + elif c.default is not None: + + _append_param_insert_hasdefault( + compiler, stmt, c, implicit_return_defaults, + values, kw) + + elif c.server_default is not None: + if implicit_return_defaults and \ + c in implicit_return_defaults: + compiler.returning.append(c) + elif not c.primary_key: + compiler.postfetch.append(c) + elif 
implicit_return_defaults and \ + c in implicit_return_defaults: + compiler.returning.append(c) + + elif compiler.isupdate: + _append_param_update( + compiler, stmt, c, implicit_return_defaults, values, kw) + + +def _append_param_parameter( + compiler, stmt, c, col_key, parameters, _col_bind_name, + implicit_returning, implicit_return_defaults, values, kw): + value = parameters.pop(col_key) + if elements._is_literal(value): + value = _create_bind_param( + compiler, c, value, required=value is REQUIRED, + name=_col_bind_name(c) + if not stmt._has_multi_parameters + else "%s_0" % _col_bind_name(c), + **kw + ) + else: + if isinstance(value, elements.BindParameter) and \ + value.type._isnull: + value = value._clone() + value.type = c.type + + if c.primary_key and implicit_returning: + compiler.returning.append(c) + value = compiler.process(value.self_group(), **kw) + elif implicit_return_defaults and \ + c in implicit_return_defaults: + compiler.returning.append(c) + value = compiler.process(value.self_group(), **kw) + else: + compiler.postfetch.append(c) + value = compiler.process(value.self_group(), **kw) + values.append((c, value)) + + +def _append_param_insert_pk_returning(compiler, stmt, c, values, kw): + if c.default is not None: + if c.default.is_sequence: + if compiler.dialect.supports_sequences and \ + (not c.default.optional or + not compiler.dialect.sequences_optional): + proc = compiler.process(c.default, **kw) + values.append((c, proc)) + compiler.returning.append(c) + elif c.default.is_clause_element: + values.append( + (c, compiler.process( + c.default.arg.self_group(), **kw)) + ) + compiler.returning.append(c) + else: + values.append( + (c, _create_prefetch_bind_param(compiler, c)) + ) + + else: + compiler.returning.append(c) + + +def _create_prefetch_bind_param(compiler, c, process=True, name=None): + param = _create_bind_param(compiler, c, None, process=process, name=name) + compiler.prefetch.append(c) + return param + + +class _multiparam_column(elements.ColumnElement): + def __init__(self, original, index): + self.key = "%s_%d" % (original.key, index + 1) + self.original = original + self.default = original.default + self.type = original.type + + def __eq__(self, other): + return isinstance(other, _multiparam_column) and \ + other.key == self.key and \ + other.original == self.original + + +def _process_multiparam_default_bind(compiler, c, index, kw): + + if not c.default: + raise exc.CompileError( + "INSERT value for column %s is explicitly rendered as a bound " + "parameter in the VALUES clause; " + "a Python-side value or SQL expression is required" % c) + elif c.default.is_clause_element: + return compiler.process(c.default.arg.self_group(), **kw) + else: + col = _multiparam_column(c, index) + return _create_prefetch_bind_param(compiler, col) + + +def _append_param_insert_pk(compiler, stmt, c, values, kw): + if ( + (c.default is not None and + (not c.default.is_sequence or + compiler.dialect.supports_sequences)) or + c is stmt.table._autoincrement_column and + (compiler.dialect.supports_sequences or + compiler.dialect.
+ preexecute_autoincrement_sequences) + ): + values.append( + (c, _create_prefetch_bind_param(compiler, c)) + ) + + +def _append_param_insert_hasdefault( + compiler, stmt, c, implicit_return_defaults, values, kw): + + if c.default.is_sequence: + if compiler.dialect.supports_sequences and \ + (not c.default.optional or + not compiler.dialect.sequences_optional): + proc = compiler.process(c.default, **kw) + values.append((c, proc)) + if implicit_return_defaults and \ + c in implicit_return_defaults: + compiler.returning.append(c) + elif not c.primary_key: + compiler.postfetch.append(c) + elif c.default.is_clause_element: + proc = compiler.process(c.default.arg.self_group(), **kw) + values.append((c, proc)) + + if implicit_return_defaults and \ + c in implicit_return_defaults: + compiler.returning.append(c) + elif not c.primary_key: + # don't add primary key column to postfetch + compiler.postfetch.append(c) + else: + values.append( + (c, _create_prefetch_bind_param(compiler, c)) + ) + + +def _append_param_insert_select_hasdefault( + compiler, stmt, c, values, kw): + + if c.default.is_sequence: + if compiler.dialect.supports_sequences and \ + (not c.default.optional or + not compiler.dialect.sequences_optional): + proc = c.default + values.append((c, proc)) + elif c.default.is_clause_element: + proc = c.default.arg.self_group() + values.append((c, proc)) + else: + values.append( + (c, _create_prefetch_bind_param(compiler, c, process=False)) + ) + + +def _append_param_update( + compiler, stmt, c, implicit_return_defaults, values, kw): + + if c.onupdate is not None and not c.onupdate.is_sequence: + if c.onupdate.is_clause_element: + values.append( + (c, compiler.process( + c.onupdate.arg.self_group(), **kw)) + ) + if implicit_return_defaults and \ + c in implicit_return_defaults: + compiler.returning.append(c) + else: + compiler.postfetch.append(c) + else: + values.append( + (c, _create_prefetch_bind_param(compiler, c)) + ) + elif c.server_onupdate is not None: + if implicit_return_defaults and \ + c in implicit_return_defaults: + compiler.returning.append(c) + else: + compiler.postfetch.append(c) + elif implicit_return_defaults and \ + stmt._return_defaults is not True and \ + c in implicit_return_defaults: + compiler.returning.append(c) + + +def _get_multitable_params( + compiler, stmt, stmt_parameters, check_columns, + _col_bind_name, _getattr_col_key, values, kw): + + normalized_params = dict( + (elements._clause_element_as_expr(c), param) + for c, param in stmt_parameters.items() + ) + affected_tables = set() + for t in stmt._extra_froms: + for c in t.c: + if c in normalized_params: + affected_tables.add(t) + check_columns[_getattr_col_key(c)] = c + value = normalized_params[c] + if elements._is_literal(value): + value = _create_bind_param( + compiler, c, value, required=value is REQUIRED, + name=_col_bind_name(c)) + else: + compiler.postfetch.append(c) + value = compiler.process(value.self_group(), **kw) + values.append((c, value)) + # determine tables which are actually to be updated - process onupdate + # and server_onupdate for these + for t in affected_tables: + for c in t.c: + if c in normalized_params: + continue + elif (c.onupdate is not None and not + c.onupdate.is_sequence): + if c.onupdate.is_clause_element: + values.append( + (c, compiler.process( + c.onupdate.arg.self_group(), + **kw) + ) + ) + compiler.postfetch.append(c) + else: + values.append( + (c, _create_prefetch_bind_param( + compiler, c, name=_col_bind_name(c))) + ) + elif c.server_onupdate is not None: + 
compiler.postfetch.append(c) + + +def _extend_values_for_multiparams(compiler, stmt, values, kw): + values_0 = values + values = [values] + + values.extend( + [ + ( + c, + (_create_bind_param( + compiler, c, row[c.key], + name="%s_%d" % (c.key, i + 1) + ) if elements._is_literal(row[c.key]) + else compiler.process( + row[c.key].self_group(), **kw)) + if c.key in row else + _process_multiparam_default_bind(compiler, c, i, kw) + ) + for (c, param) in values_0 + ] + for i, row in enumerate(stmt.parameters[1:]) + ) + return values + + +def _get_stmt_parameters_params( + compiler, parameters, stmt_parameters, _column_as_key, values, kw): + for k, v in stmt_parameters.items(): + colkey = _column_as_key(k) + if colkey is not None: + parameters.setdefault(colkey, v) + else: + # a non-Column expression on the left side; + # add it to values() in an "as-is" state, + # coercing right side to bound param + if elements._is_literal(v): + v = compiler.process( + elements.BindParameter(None, v, type_=k.type), + **kw) + else: + v = compiler.process(v.self_group(), **kw) + + values.append((k, v)) + + +def _get_returning_modifiers(compiler, stmt): + need_pks = compiler.isinsert and \ + not compiler.inline and \ + not stmt._returning and \ + not stmt._has_multi_parameters + + implicit_returning = need_pks and \ + compiler.dialect.implicit_returning and \ + stmt.table.implicit_returning + + if compiler.isinsert: + implicit_return_defaults = (implicit_returning and + stmt._return_defaults) + elif compiler.isupdate: + implicit_return_defaults = (compiler.dialect.implicit_returning and + stmt.table.implicit_returning and + stmt._return_defaults) + else: + implicit_return_defaults = False + + if implicit_return_defaults: + if stmt._return_defaults is True: + implicit_return_defaults = set(stmt.table.c) + else: + implicit_return_defaults = set(stmt._return_defaults) + + postfetch_lastrowid = need_pks and compiler.dialect.postfetch_lastrowid + + return need_pks, implicit_returning, \ + implicit_return_defaults, postfetch_lastrowid diff --git a/lib/python3.4/site-packages/sqlalchemy/sql/ddl.py b/lib/python3.4/site-packages/sqlalchemy/sql/ddl.py index 1f2c448..1cb9eeb 100644 --- a/lib/python3.4/site-packages/sqlalchemy/sql/ddl.py +++ b/lib/python3.4/site-packages/sqlalchemy/sql/ddl.py @@ -1,5 +1,5 @@ # sql/ddl.py -# Copyright (C) 2009-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2009-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -12,7 +12,6 @@ to invoke them for a create/drop call. from .. import util from .elements import ClauseElement -from .visitors import traverse from .base import Executable, _generative, SchemaVisitor, _bind_or_error from ..util import topological from .. import event @@ -370,7 +369,7 @@ class DDL(DDLElement): :class:`.DDLEvents` - :mod:`sqlalchemy.event` + :ref:`event_toplevel` """ @@ -464,19 +463,28 @@ class CreateTable(_CreateDropBase): __visit_name__ = "create_table" - def __init__(self, element, on=None, bind=None): + def __init__( + self, element, on=None, bind=None, + include_foreign_key_constraints=None): """Create a :class:`.CreateTable` construct. :param element: a :class:`.Table` that's the subject of the CREATE :param on: See the description for 'on' in :class:`.DDL`. :param bind: See the description for 'bind' in :class:`.DDL`. 
+ :param include_foreign_key_constraints: optional sequence of + :class:`.ForeignKeyConstraint` objects that will be included + inline within the CREATE construct; if omitted, all foreign key + constraints that do not specify use_alter=True are included. + + .. versionadded:: 1.0.0 """ super(CreateTable, self).__init__(element, on=on, bind=bind) self.columns = [CreateColumn(column) for column in element.columns ] + self.include_foreign_key_constraints = include_foreign_key_constraints class _DropView(_CreateDropBase): @@ -696,48 +704,80 @@ class SchemaGenerator(DDLBase): tables = self.tables else: tables = list(metadata.tables.values()) - collection = [t for t in sort_tables(tables) - if self._can_create_table(t)] + + collection = sort_tables_and_constraints( + [t for t in tables if self._can_create_table(t)]) + seq_coll = [s for s in metadata._sequences.values() if s.column is None and self._can_create_sequence(s)] + event_collection = [ + t for (t, fks) in collection if t is not None + ] metadata.dispatch.before_create(metadata, self.connection, - tables=collection, + tables=event_collection, checkfirst=self.checkfirst, _ddl_runner=self) for seq in seq_coll: self.traverse_single(seq, create_ok=True) - for table in collection: - self.traverse_single(table, create_ok=True) + for table, fkcs in collection: + if table is not None: + self.traverse_single( + table, create_ok=True, + include_foreign_key_constraints=fkcs, + _is_metadata_operation=True) + else: + for fkc in fkcs: + self.traverse_single(fkc) metadata.dispatch.after_create(metadata, self.connection, - tables=collection, + tables=event_collection, checkfirst=self.checkfirst, _ddl_runner=self) - def visit_table(self, table, create_ok=False): + def visit_table( + self, table, create_ok=False, + include_foreign_key_constraints=None, + _is_metadata_operation=False): if not create_ok and not self._can_create_table(table): return - table.dispatch.before_create(table, self.connection, - checkfirst=self.checkfirst, - _ddl_runner=self) + table.dispatch.before_create( + table, self.connection, + checkfirst=self.checkfirst, + _ddl_runner=self, + _is_metadata_operation=_is_metadata_operation) for column in table.columns: if column.default is not None: self.traverse_single(column.default) - self.connection.execute(CreateTable(table)) + if not self.dialect.supports_alter: + # e.g., don't omit any foreign key constraints + include_foreign_key_constraints = None + + self.connection.execute( + CreateTable( + table, + include_foreign_key_constraints=include_foreign_key_constraints + )) if hasattr(table, 'indexes'): for index in table.indexes: self.traverse_single(index) - table.dispatch.after_create(table, self.connection, - checkfirst=self.checkfirst, - _ddl_runner=self) + table.dispatch.after_create( + table, self.connection, + checkfirst=self.checkfirst, + _ddl_runner=self, + _is_metadata_operation=_is_metadata_operation) + + def visit_foreign_key_constraint(self, constraint): + if not self.dialect.supports_alter: + return + self.connection.execute(AddConstraint(constraint)) def visit_sequence(self, sequence, create_ok=False): if not create_ok and not self._can_create_sequence(sequence): @@ -765,11 +805,51 @@ class SchemaDropper(DDLBase): else: tables = list(metadata.tables.values()) - collection = [ - t - for t in reversed(sort_tables(tables)) - if self._can_drop_table(t) - ] + try: + unsorted_tables = [t for t in tables if self._can_drop_table(t)] + collection = list(reversed( + sort_tables_and_constraints( + unsorted_tables, + filter_fn=lambda 
constraint: False + if not self.dialect.supports_alter + or constraint.name is None + else None + ) + )) + except exc.CircularDependencyError as err2: + if not self.dialect.supports_alter: + util.warn( + "Can't sort tables for DROP; an " + "unresolvable foreign key " + "dependency exists between tables: %s, and backend does " + "not support ALTER. To restore at least a partial sort, " + "apply use_alter=True to ForeignKey and " + "ForeignKeyConstraint " + "objects involved in the cycle to mark these as known " + "cycles that will be ignored." + % ( + ", ".join(sorted([t.fullname for t in err2.cycles])) + ) + ) + collection = [(t, ()) for t in unsorted_tables] + else: + util.raise_from_cause( + exc.CircularDependencyError( + err2.args[0], + err2.cycles, err2.edges, + msg="Can't sort tables for DROP; an " + "unresolvable foreign key " + "dependency exists between tables: %s. Please ensure " + "that the ForeignKey and ForeignKeyConstraint objects " + "involved in the cycle have " + "names so that they can be dropped using " + "DROP CONSTRAINT." + % ( + ", ".join(sorted([t.fullname for t in err2.cycles])) + ) + + ) + ) seq_coll = [ s @@ -777,18 +857,27 @@ class SchemaDropper(DDLBase): if s.column is None and self._can_drop_sequence(s) ] + event_collection = [ + t for (t, fks) in collection if t is not None + ] + metadata.dispatch.before_drop( - metadata, self.connection, tables=collection, + metadata, self.connection, tables=event_collection, checkfirst=self.checkfirst, _ddl_runner=self) - for table in collection: - self.traverse_single(table, drop_ok=True) + for table, fkcs in collection: + if table is not None: + self.traverse_single( + table, drop_ok=True, _is_metadata_operation=True) + else: + for fkc in fkcs: + self.traverse_single(fkc) for seq in seq_coll: self.traverse_single(seq, drop_ok=True) metadata.dispatch.after_drop( - metadata, self.connection, tables=collection, + metadata, self.connection, tables=event_collection, checkfirst=self.checkfirst, _ddl_runner=self) def _can_drop_table(self, table): @@ -812,13 +901,15 @@ class SchemaDropper(DDLBase): def visit_index(self, index): self.connection.execute(DropIndex(index)) - def visit_table(self, table, drop_ok=False): + def visit_table(self, table, drop_ok=False, _is_metadata_operation=False): if not drop_ok and not self._can_drop_table(table): return - table.dispatch.before_drop(table, self.connection, - checkfirst=self.checkfirst, - _ddl_runner=self) + table.dispatch.before_drop( + table, self.connection, + checkfirst=self.checkfirst, + _ddl_runner=self, + _is_metadata_operation=_is_metadata_operation) for column in table.columns: if column.default is not None: @@ -826,9 +917,16 @@ class SchemaDropper(DDLBase): self.connection.execute(DropTable(table)) - table.dispatch.after_drop(table, self.connection, - checkfirst=self.checkfirst, - _ddl_runner=self) + table.dispatch.after_drop( + table, self.connection, + checkfirst=self.checkfirst, + _ddl_runner=self, + _is_metadata_operation=_is_metadata_operation) + + def visit_foreign_key_constraint(self, constraint): + if not self.dialect.supports_alter: + return + self.connection.execute(DropConstraint(constraint)) def visit_sequence(self, sequence, drop_ok=False): if not drop_ok and not self._can_drop_sequence(sequence): @@ -837,32 +935,161 @@ class SchemaDropper(DDLBase): def sort_tables(tables, skip_fn=None, extra_dependencies=None): - """sort a collection of Table objects in order of - their foreign-key dependency.""" + """sort a collection of :class:`.Table` objects based on 
dependency. + + This is a dependency-ordered sort which will emit :class:`.Table` + objects such that they will follow the :class:`.Table` objects upon + which they depend. + Tables are dependent on one another based on the presence of + :class:`.ForeignKeyConstraint` objects as well as explicit dependencies + added by :meth:`.Table.add_is_dependent_on`. + + .. warning:: + + The :func:`.sort_tables` function cannot by itself accommodate + automatic resolution of dependency cycles between tables, which + are usually caused by mutually dependent foreign key constraints. + To resolve these cycles, either the + :paramref:`.ForeignKeyConstraint.use_alter` parameter may be applied + to those constraints, or use the + :func:`.sql.sort_tables_and_constraints` function which will break + out foreign key constraints involved in cycles separately. + + :param tables: a sequence of :class:`.Table` objects. + + :param skip_fn: optional callable which will be passed a + :class:`.ForeignKey` object; if it returns True, this + constraint will not be considered as a dependency. Note this is + **different** from the same parameter in + :func:`.sort_tables_and_constraints`, which is + instead passed the owning :class:`.ForeignKeyConstraint` object. + + :param extra_dependencies: a sequence of 2-tuples of tables which will + also be considered as dependent on each other. + + .. seealso:: + + :func:`.sort_tables_and_constraints` + + :meth:`.MetaData.sorted_tables` - uses this function to sort + + + """ + + if skip_fn is not None: + def _skip_fn(fkc): + for fk in fkc.elements: + if skip_fn(fk): + return True + else: + return None + else: + _skip_fn = None + + return [ + t for (t, fkcs) in + sort_tables_and_constraints( + tables, filter_fn=_skip_fn, extra_dependencies=extra_dependencies) + if t is not None + ] + + +def sort_tables_and_constraints( + tables, filter_fn=None, extra_dependencies=None): + """sort a collection of :class:`.Table` / :class:`.ForeignKeyConstraint` + objects. + + This is a dependency-ordered sort which will emit tuples of + ``(Table, [ForeignKeyConstraint, ...])`` such that each + :class:`.Table` follows the :class:`.Table` objects upon which it + depends. + Remaining :class:`.ForeignKeyConstraint` objects that are separate due to + dependency rules not satisfied by the sort are emitted afterwards + as ``(None, [ForeignKeyConstraint ...])``. + + Tables are dependent on one another based on the presence of + :class:`.ForeignKeyConstraint` objects, explicit dependencies + added by :meth:`.Table.add_is_dependent_on`, as well as dependencies + stated here using the :paramref:`~.sort_tables_and_constraints.filter_fn` + and/or :paramref:`~.sort_tables_and_constraints.extra_dependencies` + parameters. + + :param tables: a sequence of :class:`.Table` objects. + + :param filter_fn: optional callable which will be passed a + :class:`.ForeignKeyConstraint` object, and returns a value based on + whether this constraint should definitely be included or excluded as + an inline constraint, or neither. If it returns False, the constraint + will definitely be included as a dependency that cannot be subject + to ALTER; if True, it will **only** be included as an ALTER result at + the end. Returning None means the constraint is included in the + table-based result unless it is detected as part of a dependency cycle. + + :param extra_dependencies: a sequence of 2-tuples of tables which will + also be considered as dependent on each other. + + .. versionadded:: 1.0.0 + + ..
seealso:: + + :func:`.sort_tables` + + + """ + + fixed_dependencies = set() + mutable_dependencies = set() - tables = list(tables) - tuples = [] if extra_dependencies is not None: - tuples.extend(extra_dependencies) - - def visit_foreign_key(fkey): - if fkey.use_alter: - return - elif skip_fn and skip_fn(fkey): - return - parent_table = fkey.column.table - if parent_table in tables: - child_table = fkey.parent.table - if parent_table is not child_table: - tuples.append((parent_table, child_table)) + fixed_dependencies.update(extra_dependencies) + remaining_fkcs = set() for table in tables: - traverse(table, - {'schema_visitor': True}, - {'foreign_key': visit_foreign_key}) + for fkc in table.foreign_key_constraints: + if fkc.use_alter is True: + remaining_fkcs.add(fkc) + continue - tuples.extend( - [parent, table] for parent in table._extra_dependencies + if filter_fn: + filtered = filter_fn(fkc) + + if filtered is True: + remaining_fkcs.add(fkc) + continue + + dependent_on = fkc.referred_table + if dependent_on is not table: + mutable_dependencies.add((dependent_on, table)) + + fixed_dependencies.update( + (parent, table) for parent in table._extra_dependencies ) - return list(topological.sort(tuples, tables)) + try: + candidate_sort = list( + topological.sort( + fixed_dependencies.union(mutable_dependencies), tables, + deterministic_order=True + ) + ) + except exc.CircularDependencyError as err: + for edge in err.edges: + if edge in mutable_dependencies: + table = edge[1] + can_remove = [ + fkc for fkc in table.foreign_key_constraints + if filter_fn is None or filter_fn(fkc) is not False] + remaining_fkcs.update(can_remove) + for fkc in can_remove: + dependent_on = fkc.referred_table + if dependent_on is not table: + mutable_dependencies.discard((dependent_on, table)) + candidate_sort = list( + topological.sort( + fixed_dependencies.union(mutable_dependencies), tables, + deterministic_order=True + ) + ) + + return [ + (table, table.foreign_key_constraints.difference(remaining_fkcs)) + for table in candidate_sort + ] + [(None, list(remaining_fkcs))] diff --git a/lib/python3.4/site-packages/sqlalchemy/sql/default_comparator.py b/lib/python3.4/site-packages/sqlalchemy/sql/default_comparator.py index 4f53e29..d180dbc 100644 --- a/lib/python3.4/site-packages/sqlalchemy/sql/default_comparator.py +++ b/lib/python3.4/site-packages/sqlalchemy/sql/default_comparator.py @@ -1,5 +1,5 @@ # sql/default_comparator.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -9,8 +9,8 @@ """ from .. import exc, util -from . import operators from . import type_api +from . import operators from .elements import BindParameter, True_, False_, BinaryExpression, \ Null, _const_expr, _clause_element_as_expr, \ ClauseList, ColumnElement, TextClause, UnaryExpression, \ @@ -18,294 +18,271 @@ from .elements import BindParameter, True_, False_, BinaryExpression, \ from .selectable import SelectBase, Alias, Selectable, ScalarSelect -class _DefaultColumnComparator(operators.ColumnOperators): - """Defines comparison and math operations. +def _boolean_compare(expr, op, obj, negate=None, reverse=False, + _python_is_types=(util.NoneType, bool), + result_type = None, + **kwargs): - See :class:`.ColumnOperators` and :class:`.Operators` for descriptions - of all operations. 
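+    # result_type permits callers to override the BOOLEANTYPE default
+    # applied just below; e.g. _match_impl further down passes
+    # type_api.MATCHTYPE so that MATCH expressions carry a
+    # non-boolean result type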
+ if result_type is None: + result_type = type_api.BOOLEANTYPE - """ + if isinstance(obj, _python_is_types + (Null, True_, False_)): - @util.memoized_property - def type(self): - return self.expr.type - - def operate(self, op, *other, **kwargs): - o = self.operators[op.__name__] - return o[0](self, self.expr, op, *(other + o[1:]), **kwargs) - - def reverse_operate(self, op, other, **kwargs): - o = self.operators[op.__name__] - return o[0](self, self.expr, op, other, - reverse=True, *o[1:], **kwargs) - - def _adapt_expression(self, op, other_comparator): - """evaluate the return type of <self> <op> <othertype>, - and apply any adaptations to the given operator. - - This method determines the type of a resulting binary expression - given two source types and an operator. For example, two - :class:`.Column` objects, both of the type :class:`.Integer`, will - produce a :class:`.BinaryExpression` that also has the type - :class:`.Integer` when compared via the addition (``+``) operator. - However, using the addition operator with an :class:`.Integer` - and a :class:`.Date` object will produce a :class:`.Date`, assuming - "days delta" behavior by the database (in reality, most databases - other than Postgresql don't accept this particular operation). - - The method returns a tuple of the form <operator>, <type>. - The resulting operator and type will be those applied to the - resulting :class:`.BinaryExpression` as the final operator and the - right-hand side of the expression. - - Note that only a subset of operators make usage of - :meth:`._adapt_expression`, - including math operators and user-defined operators, but not - boolean comparison or special SQL keywords like MATCH or BETWEEN. - - """ - return op, other_comparator.type - - def _boolean_compare(self, expr, op, obj, negate=None, reverse=False, - _python_is_types=(util.NoneType, bool), - **kwargs): - - if isinstance(obj, _python_is_types + (Null, True_, False_)): - - # allow x ==/!= True/False to be treated as a literal. - # this comes out to "== / != true/false" or "1/0" if those - # constants aren't supported and works on all platforms - if op in (operators.eq, operators.ne) and \ - isinstance(obj, (bool, True_, False_)): - return BinaryExpression(expr, - _literal_as_text(obj), - op, - type_=type_api.BOOLEANTYPE, - negate=negate, modifiers=kwargs) - else: - # all other None/True/False uses IS, IS NOT - if op in (operators.eq, operators.is_): - return BinaryExpression(expr, _const_expr(obj), - operators.is_, - negate=operators.isnot) - elif op in (operators.ne, operators.isnot): - return BinaryExpression(expr, _const_expr(obj), - operators.isnot, - negate=operators.is_) - else: - raise exc.ArgumentError( - "Only '=', '!=', 'is_()', 'isnot()' operators can " - "be used with None/True/False") - else: - obj = self._check_literal(expr, op, obj) - - if reverse: - return BinaryExpression(obj, - expr, - op, - type_=type_api.BOOLEANTYPE, - negate=negate, modifiers=kwargs) - else: + # allow x ==/!= True/False to be treated as a literal.
+ # this comes out to "== / != true/false" or "1/0" if those + # constants aren't supported and works on all platforms + if op in (operators.eq, operators.ne) and \ + isinstance(obj, (bool, True_, False_)): return BinaryExpression(expr, - obj, + _literal_as_text(obj), op, - type_=type_api.BOOLEANTYPE, + type_=result_type, negate=negate, modifiers=kwargs) - - def _binary_operate(self, expr, op, obj, reverse=False, result_type=None, - **kw): - obj = self._check_literal(expr, op, obj) - - if reverse: - left, right = obj, expr else: - left, right = expr, obj - - if result_type is None: - op, result_type = left.comparator._adapt_expression( - op, right.comparator) - - return BinaryExpression(left, right, op, type_=result_type) - - def _conjunction_operate(self, expr, op, other, **kw): - if op is operators.and_: - return and_(expr, other) - elif op is operators.or_: - return or_(expr, other) - else: - raise NotImplementedError() - - def _scalar(self, expr, op, fn, **kw): - return fn(expr) - - def _in_impl(self, expr, op, seq_or_selectable, negate_op, **kw): - seq_or_selectable = _clause_element_as_expr(seq_or_selectable) - - if isinstance(seq_or_selectable, ScalarSelect): - return self._boolean_compare(expr, op, seq_or_selectable, - negate=negate_op) - elif isinstance(seq_or_selectable, SelectBase): - - # TODO: if we ever want to support (x, y, z) IN (select x, - # y, z from table), we would need a multi-column version of - # as_scalar() to produce a multi- column selectable that - # does not export itself as a FROM clause - - return self._boolean_compare( - expr, op, seq_or_selectable.as_scalar(), - negate=negate_op, **kw) - elif isinstance(seq_or_selectable, (Selectable, TextClause)): - return self._boolean_compare(expr, op, seq_or_selectable, - negate=negate_op, **kw) - elif isinstance(seq_or_selectable, ClauseElement): - raise exc.InvalidRequestError( - 'in_() accepts' - ' either a list of expressions ' - 'or a selectable: %r' % seq_or_selectable) - - # Handle non selectable arguments as sequences - args = [] - for o in seq_or_selectable: - if not _is_literal(o): - if not isinstance(o, operators.ColumnOperators): - raise exc.InvalidRequestError( - 'in_() accepts' - ' either a list of expressions ' - 'or a selectable: %r' % o) - elif o is None: - o = Null() + # all other None/True/False uses IS, IS NOT + if op in (operators.eq, operators.is_): + return BinaryExpression(expr, _const_expr(obj), + operators.is_, + negate=operators.isnot) + elif op in (operators.ne, operators.isnot): + return BinaryExpression(expr, _const_expr(obj), + operators.isnot, + negate=operators.is_) else: - o = expr._bind_param(op, o) - args.append(o) - if len(args) == 0: + raise exc.ArgumentError( + "Only '=', '!=', 'is_()', 'isnot()' operators can " + "be used with None/True/False") + else: + obj = _check_literal(expr, op, obj) - # Special case handling for empty IN's, behave like - # comparison against zero row selectable. We use != to - # build the contradiction as it handles NULL values - # appropriately, i.e. "not (x IN ())" should not return NULL - # values for x. + if reverse: + return BinaryExpression(obj, + expr, + op, + type_=result_type, + negate=negate, modifiers=kwargs) + else: + return BinaryExpression(expr, + obj, + op, + type_=result_type, + negate=negate, modifiers=kwargs) - util.warn('The IN-predicate on "%s" was invoked with an ' - 'empty sequence. This results in a ' - 'contradiction, which nonetheless can be ' - 'expensive to evaluate. Consider alternative ' - 'strategies for improved performance.' 
% expr) - if op is operators.in_op: - return expr != expr - else: - return expr == expr - return self._boolean_compare(expr, op, - ClauseList(*args).self_group(against=op), - negate=negate_op) +def _binary_operate(expr, op, obj, reverse=False, result_type=None, + **kw): + obj = _check_literal(expr, op, obj) - def _unsupported_impl(self, expr, op, *arg, **kw): - raise NotImplementedError("Operator '%s' is not supported on " - "this expression" % op.__name__) + if reverse: + left, right = obj, expr + else: + left, right = expr, obj - def _inv_impl(self, expr, op, **kw): - """See :meth:`.ColumnOperators.__inv__`.""" - if hasattr(expr, 'negation_clause'): - return expr.negation_clause + if result_type is None: + op, result_type = left.comparator._adapt_expression( + op, right.comparator) + + return BinaryExpression( + left, right, op, type_=result_type, modifiers=kw) + + +def _conjunction_operate(expr, op, other, **kw): + if op is operators.and_: + return and_(expr, other) + elif op is operators.or_: + return or_(expr, other) + else: + raise NotImplementedError() + + +def _scalar(expr, op, fn, **kw): + return fn(expr) + + +def _in_impl(expr, op, seq_or_selectable, negate_op, **kw): + seq_or_selectable = _clause_element_as_expr(seq_or_selectable) + + if isinstance(seq_or_selectable, ScalarSelect): + return _boolean_compare(expr, op, seq_or_selectable, + negate=negate_op) + elif isinstance(seq_or_selectable, SelectBase): + + # TODO: if we ever want to support (x, y, z) IN (select x, + # y, z from table), we would need a multi-column version of + # as_scalar() to produce a multi- column selectable that + # does not export itself as a FROM clause + + return _boolean_compare( + expr, op, seq_or_selectable.as_scalar(), + negate=negate_op, **kw) + elif isinstance(seq_or_selectable, (Selectable, TextClause)): + return _boolean_compare(expr, op, seq_or_selectable, + negate=negate_op, **kw) + elif isinstance(seq_or_selectable, ClauseElement): + raise exc.InvalidRequestError( + 'in_() accepts' + ' either a list of expressions ' + 'or a selectable: %r' % seq_or_selectable) + + # Handle non selectable arguments as sequences + args = [] + for o in seq_or_selectable: + if not _is_literal(o): + if not isinstance(o, operators.ColumnOperators): + raise exc.InvalidRequestError( + 'in_() accepts' + ' either a list of expressions ' + 'or a selectable: %r' % o) + elif o is None: + o = Null() else: - return expr._negate() + o = expr._bind_param(op, o) + args.append(o) + if len(args) == 0: - def _neg_impl(self, expr, op, **kw): - """See :meth:`.ColumnOperators.__neg__`.""" - return UnaryExpression(expr, operator=operators.neg) + # Special case handling for empty IN's, behave like + # comparison against zero row selectable. We use != to + # build the contradiction as it handles NULL values + # appropriately, i.e. "not (x IN ())" should not return NULL + # values for x. 
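+            # sketch of the net effect, using a hypothetical column c:
+            # c.in_([]) renders as "c != c"; c.notin_([]) as "c = c"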
- def _match_impl(self, expr, op, other, **kw): - """See :meth:`.ColumnOperators.match`.""" - return self._boolean_compare( - expr, operators.match_op, - self._check_literal( - expr, operators.match_op, other), - **kw) - - def _distinct_impl(self, expr, op, **kw): - """See :meth:`.ColumnOperators.distinct`.""" - return UnaryExpression(expr, operator=operators.distinct_op, - type_=expr.type) - - def _between_impl(self, expr, op, cleft, cright, **kw): - """See :meth:`.ColumnOperators.between`.""" - return BinaryExpression( - expr, - ClauseList( - self._check_literal(expr, operators.and_, cleft), - self._check_literal(expr, operators.and_, cright), - operator=operators.and_, - group=False, group_contents=False), - op, - negate=operators.notbetween_op - if op is operators.between_op - else operators.between_op, - modifiers=kw) - - def _collate_impl(self, expr, op, other, **kw): - return collate(expr, other) - - # a mapping of operators with the method they use, along with - # their negated operator for comparison operators - operators = { - "and_": (_conjunction_operate,), - "or_": (_conjunction_operate,), - "inv": (_inv_impl,), - "add": (_binary_operate,), - "mul": (_binary_operate,), - "sub": (_binary_operate,), - "div": (_binary_operate,), - "mod": (_binary_operate,), - "truediv": (_binary_operate,), - "custom_op": (_binary_operate,), - "concat_op": (_binary_operate,), - "lt": (_boolean_compare, operators.ge), - "le": (_boolean_compare, operators.gt), - "ne": (_boolean_compare, operators.eq), - "gt": (_boolean_compare, operators.le), - "ge": (_boolean_compare, operators.lt), - "eq": (_boolean_compare, operators.ne), - "like_op": (_boolean_compare, operators.notlike_op), - "ilike_op": (_boolean_compare, operators.notilike_op), - "notlike_op": (_boolean_compare, operators.like_op), - "notilike_op": (_boolean_compare, operators.ilike_op), - "contains_op": (_boolean_compare, operators.notcontains_op), - "startswith_op": (_boolean_compare, operators.notstartswith_op), - "endswith_op": (_boolean_compare, operators.notendswith_op), - "desc_op": (_scalar, UnaryExpression._create_desc), - "asc_op": (_scalar, UnaryExpression._create_asc), - "nullsfirst_op": (_scalar, UnaryExpression._create_nullsfirst), - "nullslast_op": (_scalar, UnaryExpression._create_nullslast), - "in_op": (_in_impl, operators.notin_op), - "notin_op": (_in_impl, operators.in_op), - "is_": (_boolean_compare, operators.is_), - "isnot": (_boolean_compare, operators.isnot), - "collate": (_collate_impl,), - "match_op": (_match_impl,), - "distinct_op": (_distinct_impl,), - "between_op": (_between_impl, ), - "notbetween_op": (_between_impl, ), - "neg": (_neg_impl,), - "getitem": (_unsupported_impl,), - "lshift": (_unsupported_impl,), - "rshift": (_unsupported_impl,), - } - - def _check_literal(self, expr, operator, other): - if isinstance(other, (ColumnElement, TextClause)): - if isinstance(other, BindParameter) and \ - other.type._isnull: - other = other._clone() - other.type = expr.type - return other - elif hasattr(other, '__clause_element__'): - other = other.__clause_element__() - elif isinstance(other, type_api.TypeEngine.Comparator): - other = other.expr - - if isinstance(other, (SelectBase, Alias)): - return other.as_scalar() - elif not isinstance(other, (ColumnElement, TextClause)): - return expr._bind_param(operator, other) + util.warn('The IN-predicate on "%s" was invoked with an ' + 'empty sequence. This results in a ' + 'contradiction, which nonetheless can be ' + 'expensive to evaluate. 
Consider alternative ' + 'strategies for improved performance.' % expr) + if op is operators.in_op: + return expr != expr else: - return other + return expr == expr + + return _boolean_compare(expr, op, + ClauseList(*args).self_group(against=op), + negate=negate_op) + + +def _unsupported_impl(expr, op, *arg, **kw): + raise NotImplementedError("Operator '%s' is not supported on " + "this expression" % op.__name__) + + +def _inv_impl(expr, op, **kw): + """See :meth:`.ColumnOperators.__inv__`.""" + if hasattr(expr, 'negation_clause'): + return expr.negation_clause + else: + return expr._negate() + + +def _neg_impl(expr, op, **kw): + """See :meth:`.ColumnOperators.__neg__`.""" + return UnaryExpression(expr, operator=operators.neg) + + +def _match_impl(expr, op, other, **kw): + """See :meth:`.ColumnOperators.match`.""" + + return _boolean_compare( + expr, operators.match_op, + _check_literal( + expr, operators.match_op, other), + result_type=type_api.MATCHTYPE, + negate=operators.notmatch_op + if op is operators.match_op else operators.match_op, + **kw + ) + + +def _distinct_impl(expr, op, **kw): + """See :meth:`.ColumnOperators.distinct`.""" + return UnaryExpression(expr, operator=operators.distinct_op, + type_=expr.type) + + +def _between_impl(expr, op, cleft, cright, **kw): + """See :meth:`.ColumnOperators.between`.""" + return BinaryExpression( + expr, + ClauseList( + _check_literal(expr, operators.and_, cleft), + _check_literal(expr, operators.and_, cright), + operator=operators.and_, + group=False, group_contents=False), + op, + negate=operators.notbetween_op + if op is operators.between_op + else operators.between_op, + modifiers=kw) + + +def _collate_impl(expr, op, other, **kw): + return collate(expr, other) + +# a mapping of operators with the method they use, along with +# their negated operator for comparison operators +operator_lookup = { + "and_": (_conjunction_operate,), + "or_": (_conjunction_operate,), + "inv": (_inv_impl,), + "add": (_binary_operate,), + "mul": (_binary_operate,), + "sub": (_binary_operate,), + "div": (_binary_operate,), + "mod": (_binary_operate,), + "truediv": (_binary_operate,), + "custom_op": (_binary_operate,), + "concat_op": (_binary_operate,), + "lt": (_boolean_compare, operators.ge), + "le": (_boolean_compare, operators.gt), + "ne": (_boolean_compare, operators.eq), + "gt": (_boolean_compare, operators.le), + "ge": (_boolean_compare, operators.lt), + "eq": (_boolean_compare, operators.ne), + "like_op": (_boolean_compare, operators.notlike_op), + "ilike_op": (_boolean_compare, operators.notilike_op), + "notlike_op": (_boolean_compare, operators.like_op), + "notilike_op": (_boolean_compare, operators.ilike_op), + "contains_op": (_boolean_compare, operators.notcontains_op), + "startswith_op": (_boolean_compare, operators.notstartswith_op), + "endswith_op": (_boolean_compare, operators.notendswith_op), + "desc_op": (_scalar, UnaryExpression._create_desc), + "asc_op": (_scalar, UnaryExpression._create_asc), + "nullsfirst_op": (_scalar, UnaryExpression._create_nullsfirst), + "nullslast_op": (_scalar, UnaryExpression._create_nullslast), + "in_op": (_in_impl, operators.notin_op), + "notin_op": (_in_impl, operators.in_op), + "is_": (_boolean_compare, operators.is_), + "isnot": (_boolean_compare, operators.isnot), + "collate": (_collate_impl,), + "match_op": (_match_impl,), + "notmatch_op": (_match_impl,), + "distinct_op": (_distinct_impl,), + "between_op": (_between_impl, ), + "notbetween_op": (_between_impl, ), + "neg": (_neg_impl,), + "getitem": 
(_unsupported_impl,), + "lshift": (_unsupported_impl,), + "rshift": (_unsupported_impl,), + "contains": (_unsupported_impl,), +} + + +def _check_literal(expr, operator, other): + if isinstance(other, (ColumnElement, TextClause)): + if isinstance(other, BindParameter) and \ + other.type._isnull: + other = other._clone() + other.type = expr.type + return other + elif hasattr(other, '__clause_element__'): + other = other.__clause_element__() + elif isinstance(other, type_api.TypeEngine.Comparator): + other = other.expr + + if isinstance(other, (SelectBase, Alias)): + return other.as_scalar() + elif not isinstance(other, (ColumnElement, TextClause)): + return expr._bind_param(operator, other) + else: + return other + diff --git a/lib/python3.4/site-packages/sqlalchemy/sql/dml.py b/lib/python3.4/site-packages/sqlalchemy/sql/dml.py index f7e033d..7b506f9 100644 --- a/lib/python3.4/site-packages/sqlalchemy/sql/dml.py +++ b/lib/python3.4/site-packages/sqlalchemy/sql/dml.py @@ -1,5 +1,5 @@ # sql/dml.py -# Copyright (C) 2009-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2009-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -10,7 +10,8 @@ Provide :class:`.Insert`, :class:`.Update` and :class:`.Delete`. """ from .base import Executable, _generative, _from_objects, DialectKWArgs -from .elements import ClauseElement, _literal_as_text, Null, and_, _clone +from .elements import ClauseElement, _literal_as_text, Null, and_, _clone, \ + _column_as_key from .selectable import _interpret_as_from, _interpret_as_select, HasPrefixes from .. import util from .. import exc @@ -26,6 +27,7 @@ class UpdateBase(DialectKWArgs, HasPrefixes, Executable, ClauseElement): _execution_options = \ Executable._execution_options.union({'autocommit': True}) _hints = util.immutabledict() + _parameter_ordering = None _prefixes = () def _process_colparams(self, parameters): @@ -38,6 +40,16 @@ class UpdateBase(DialectKWArgs, HasPrefixes, Executable, ClauseElement): else: return p + if self._preserve_parameter_order and parameters is not None: + if not isinstance(parameters, list) or \ + (parameters and not isinstance(parameters[0], tuple)): + raise ValueError( + "When preserve_parameter_order is True, " + "values() only accepts a list of 2-tuples") + self._parameter_ordering = [key for key, value in parameters] + + return dict(parameters), False + if (isinstance(parameters, (list, tuple)) and parameters and isinstance(parameters[0], (list, tuple, dict))): @@ -177,6 +189,7 @@ class ValuesBase(UpdateBase): _supports_multi_parameters = False _has_multi_parameters = False + _preserve_parameter_order = False select = None def __init__(self, table, values, prefixes): @@ -213,23 +226,32 @@ class ValuesBase(UpdateBase): users.update().where(users.c.id==5).values(name="some name") - :param \*args: Alternatively, a dictionary, tuple or list - of dictionaries or tuples can be passed as a single positional - argument in order to form the VALUES or - SET clause of the statement. The single dictionary form - works the same as the kwargs form:: + :param \*args: As an alternative to passing key/value parameters, + a dictionary, tuple, or list of dictionaries or tuples can be passed + as a single positional argument in order to form the VALUES or + SET clause of the statement. The forms that are accepted vary + based on whether this is an :class:`.Insert` or an :class:`.Update` + construct. 
+ + For either an :class:`.Insert` or :class:`.Update` construct, a + single dictionary can be passed, which works the same as that of + the kwargs form:: users.insert().values({"name": "some name"}) - If a tuple is passed, the tuple should contain the same number - of columns as the target :class:`.Table`:: + users.update().values({"name": "some new name"}) + + Also for either form but more typically for the :class:`.Insert` + construct, a tuple that contains an entry for every column in the + table is also accepted:: users.insert().values((5, "some name")) - The :class:`.Insert` construct also supports multiply-rendered VALUES - construct, for those backends which support this SQL syntax - (SQLite, Postgresql, MySQL). This mode is indicated by passing a - list of one or more dictionaries/tuples:: + The :class:`.Insert` construct also supports being passed a list + of dictionaries or full-table-tuples, which on the server will + render the less common SQL syntax of "multiple values" - this + syntax is supported on backends such as SQLite, Postgresql, MySQL, + but not necessarily others:: users.insert().values([ {"name": "some name"}, @@ -237,38 +259,61 @@ class ValuesBase(UpdateBase): {"name": "yet another name"}, ]) - In the case of an :class:`.Update` - construct, only the single dictionary/tuple form is accepted, - else an exception is raised. It is also an exception case to - attempt to mix the single-/multiple- value styles together, - either through multiple :meth:`.ValuesBase.values` calls - or by sending a list + kwargs at the same time. + The above form would render a multiple VALUES statement similar to:: - .. note:: + INSERT INTO users (name) VALUES + (:name_1), + (:name_2), + (:name_3) - Passing a multiple values list is *not* the same - as passing a multiple values list to the - :meth:`.Connection.execute` method. Passing a list of parameter - sets to :meth:`.ValuesBase.values` produces a construct of this - form:: + It is essential to note that **passing multiple values is + NOT the same as using traditional executemany() form**. The above + syntax is a **special** syntax not typically used. To emit an + INSERT statement against multiple rows, the normal method is + to pass a multiple values list to the :meth:`.Connection.execute` + method, which is supported by all database backends and is generally + more efficient for a very large number of parameters. - INSERT INTO table (col1, col2, col3) VALUES - (col1_0, col2_0, col3_0), - (col1_1, col2_1, col3_1), - ... + .. seealso:: - whereas a multiple list passed to :meth:`.Connection.execute` - has the effect of using the DBAPI - `executemany() `_ - method, which provides a high-performance system of invoking - a single-row INSERT statement many times against a series - of parameter sets. The "executemany" style is supported by - all database backends, as it does not depend on a special SQL - syntax. + :ref:`execute_multiple` - an introduction to + the traditional Core method of multiple parameter set + invocation for INSERTs and other statements. - .. versionadded:: 0.8 - Support for multiple-VALUES INSERT statements. + .. versionchanged:: 1.0.0 an INSERT that uses a multiple-VALUES + clause, even a list of length one, + implies that the :paramref:`.Insert.inline` flag is set to + True, indicating that the statement will not attempt to fetch + the "last inserted primary key" or other defaults. The + statement deals with an arbitrary number of rows, so the + :attr:`.ResultProxy.inserted_primary_key` accessor does not + apply. 
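+
+          For reference, the traditional executemany invocation mentioned
+          above is sketched below, assuming ``users`` is the same
+          :class:`.Table` and ``conn`` is an existing
+          :class:`.Connection`; the parameter list is passed to
+          :meth:`.Connection.execute` rather than to
+          :meth:`.ValuesBase.values`::
+
+              conn.execute(
+                  users.insert(),
+                  [
+                      {"name": "some name"},
+                      {"name": "some other name"},
+                      {"name": "yet another name"},
+                  ]
+              )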
+ .. versionchanged:: 1.0.0 A multiple-VALUES INSERT now supports + columns with Python side default values and callables in the + same way as that of an "executemany" style of invocation; the + callable is invoked for each row. See :ref:`bug_3288` + for other details. + + The :class:`.Update` construct supports a special form which is a + list of 2-tuples, which when provided must be passed in conjunction + with the + :paramref:`~sqlalchemy.sql.expression.update.preserve_parameter_order` + parameter. + This form causes the UPDATE statement to render the SET clauses + using the order of parameters given to :meth:`.Update.values`, rather + than the ordering of columns given in the :class:`.Table`. + + .. versionadded:: 1.0.10 - added support for parameter-ordered + UPDATE statements via the + :paramref:`~sqlalchemy.sql.expression.update.preserve_parameter_order` + flag. + + .. seealso:: + + :ref:`updates_order_parameters` - full example of the + :paramref:`~sqlalchemy.sql.expression.update.preserve_parameter_order` + flag .. seealso:: @@ -358,7 +403,7 @@ class ValuesBase(UpdateBase): SELECT, multi-valued VALUES clause), :meth:`.ValuesBase.return_defaults` is intended only for an "ORM-style" single-row INSERT/UPDATE statement. The row returned - by the statement is also consumed implcitly when + by the statement is also consumed implicitly when :meth:`.ValuesBase.return_defaults` is used. By contrast, :meth:`.UpdateBase.returning` leaves the RETURNING result-set intact with a collection of any number of rows. @@ -380,7 +425,7 @@ class ValuesBase(UpdateBase): :func:`.mapper`. :param cols: optional list of column key names or :class:`.Column` - objects. If omitted, all column expressions evaulated on the server + objects. If omitted, all column expressions evaluated on the server are added to the returning list. .. versionadded:: 0.9.0 @@ -434,8 +479,13 @@ class Insert(ValuesBase): dynamically render the VALUES clause at execution time based on the parameters passed to :meth:`.Connection.execute`. - :param inline: if True, SQL defaults will be compiled 'inline' into - the statement and not pre-executed. + :param inline: if True, no attempt will be made to retrieve the + SQL-generated default values to be provided within the statement; + in particular, + this allows SQL expressions to be rendered 'inline' within the + statement without the need to pre-execute them beforehand; for + backends that support "returning", this turns off the "implicit + returning" feature for the statement. If both `values` and compile-time bind parameters are present, the compile-time bind parameters override the information specified @@ -463,6 +513,7 @@ class Insert(ValuesBase): ValuesBase.__init__(self, table, values, prefixes) self._bind = bind self.select = self.select_names = None + self.include_insert_from_select_defaults = False self.inline = inline self._returning = returning self._validate_dialect_kwargs(dialect_kw) @@ -475,7 +526,7 @@ class Insert(ValuesBase): return () @_generative - def from_select(self, names, select): + def from_select(self, names, select, include_defaults=True): """Return a new :class:`.Insert` construct which represents an ``INSERT...FROM SELECT`` statement. @@ -494,25 +545,28 @@ class Insert(ValuesBase): is not checked before passing along to the database, the database would normally raise an exception if these column lists don't correspond. 
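+
+        E.g., a minimal sketch, assuming ``table1`` and ``table2`` are
+        existing :class:`.Table` objects with compatible columns::
+
+            sel = select([table1.c.a, table1.c.b]).where(table1.c.c > 5)
+            ins = table2.insert().from_select(['a', 'b'], sel)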
+ :param include_defaults: if True, non-server default values and + SQL expressions as specified on :class:`.Column` objects + (as documented in :ref:`metadata_defaults_toplevel`) not + otherwise specified in the list of names will be rendered + into the INSERT and SELECT statements, so that these values are also + included in the data to be inserted. - .. note:: + .. note:: A Python-side default that uses a Python callable function + will only be invoked **once** for the whole statement, and **not + per row**. - Depending on backend, it may be necessary for the :class:`.Insert` - statement to be constructed using the ``inline=True`` flag; this - flag will prevent the implicit usage of ``RETURNING`` when the - ``INSERT`` statement is rendered, which isn't supported on a - backend such as Oracle in conjunction with an ``INSERT..SELECT`` - combination:: + .. versionadded:: 1.0.0 - :meth:`.Insert.from_select` now renders + Python-side and SQL expression column defaults into the + SELECT statement for columns otherwise not included in the + list of column names. - sel = select([table1.c.a, table1.c.b]).where(table1.c.c > 5) - ins = table2.insert(inline=True).from_select(['a', 'b'], sel) - - .. note:: - - A SELECT..INSERT construct in SQL has no VALUES clause. Therefore - :class:`.Column` objects which utilize Python-side defaults - (e.g. as described at :ref:`metadata_defaults_toplevel`) - will **not** take effect when using :meth:`.Insert.from_select`. + .. versionchanged:: 1.0.0 an INSERT that uses FROM SELECT + implies that the :paramref:`.insert.inline` flag is set to + True, indicating that the statement will not attempt to fetch + the "last inserted primary key" or other defaults. The statement + deals with an arbitrary number of rows, so the + :attr:`.ResultProxy.inserted_primary_key` accessor does not apply. .. versionadded:: 0.8.3 @@ -522,9 +576,12 @@ class Insert(ValuesBase): "This construct already inserts value expressions") self.parameters, self._has_multi_parameters = \ - self._process_colparams(dict((n, Null()) for n in names)) + self._process_colparams( + dict((_column_as_key(n), Null()) for n in names)) self.select_names = names + self.inline = True + self.include_insert_from_select_defaults = include_defaults self.select = _interpret_as_select(select) def _copy_internals(self, clone=_clone, **kw): @@ -552,6 +609,7 @@ class Update(ValuesBase): prefixes=None, returning=None, return_defaults=False, + preserve_parameter_order=False, **dialect_kw): """Construct an :class:`.Update` object. @@ -614,6 +672,19 @@ class Update(ValuesBase): be available in the dictionary returned from :meth:`.ResultProxy.last_updated_params`. + :param preserve_parameter_order: if True, the update statement is + expected to receive parameters **only** via the :meth:`.Update.values` + method, and they must be passed as a Python ``list`` of 2-tuples. + The rendered UPDATE statement will emit the SET clause for each + referenced column maintaining this order. + + .. versionadded:: 1.0.10 + + .. seealso:: + + :ref:`updates_order_parameters` - full example of the + :paramref:`~sqlalchemy.sql.expression.update.preserve_parameter_order` flag + If both ``values`` and compile-time bind parameters are present, the compile-time bind parameters override the information specified within ``values`` on a per-key basis. 
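+
+          A minimal sketch, assuming ``table`` is an existing
+          :class:`.Table` with columns ``y`` and ``x``; the SET clause
+          will render ``y`` first and ``x`` second, regardless of the
+          ordering of the columns within ``table`` itself::
+
+              stmt = table.update(preserve_parameter_order=True).values(
+                  [(table.c.y, 20), (table.c.x, 10)])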
@@ -655,6 +726,7 @@ class Update(ValuesBase): """ + self._preserve_parameter_order = preserve_parameter_order ValuesBase.__init__(self, table, values, prefixes) self._bind = bind self._returning = returning @@ -728,10 +800,10 @@ class Delete(UpdateBase): :meth:`~.TableClause.delete` method on :class:`~.schema.Table`. - :param table: The table to be updated. + :param table: The table to delete rows from. :param whereclause: A :class:`.ClauseElement` describing the ``WHERE`` - condition of the ``UPDATE`` statement. Note that the + condition of the ``DELETE`` statement. Note that the :meth:`~Delete.where()` generative method may be used instead. .. seealso:: diff --git a/lib/python3.4/site-packages/sqlalchemy/sql/elements.py b/lib/python3.4/site-packages/sqlalchemy/sql/elements.py index 6114460..67957a1 100644 --- a/lib/python3.4/site-packages/sqlalchemy/sql/elements.py +++ b/lib/python3.4/site-packages/sqlalchemy/sql/elements.py @@ -1,5 +1,5 @@ # sql/elements.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # # # This module is part of SQLAlchemy and is released under @@ -19,7 +19,8 @@ from .visitors import Visitable, cloned_traverse, traverse from .annotation import Annotated import itertools from .base import Executable, PARSE_AUTOCOMMIT, Immutable, NO_ARG -from .base import _generative, Generative +from .base import _generative +import numbers import re import operator @@ -227,6 +228,7 @@ class ClauseElement(Visitable): is_selectable = False is_clause_element = True + description = None _order_by_label_element = None _is_from_container = False @@ -539,7 +541,7 @@ class ClauseElement(Visitable): __nonzero__ = __bool__ def __repr__(self): - friendly = getattr(self, 'description', None) + friendly = self.description if friendly is None: return object.__repr__(self) else: @@ -624,8 +626,73 @@ class ColumnElement(operators.ColumnOperators, ClauseElement): __visit_name__ = 'column' primary_key = False foreign_keys = [] + _label = None - _key_label = key = None + """The named label that can be used to target + this column in a result set. + + This label is almost always the label used when + rendering AS