update sqlalchemy

commit a4267212e4
parent 6c6c3e68c6

192 changed files with 17429 additions and 9601 deletions
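For reference, every row in the RECORD diffs below follows the wheel RECORD convention: path, then sha256=<unpadded urlsafe-base64 digest>, then file size in bytes; the RECORD file itself and the __pycache__ .pyc entries carry empty hash and size fields. A minimal sketch of how one such row could be checked against the vendored tree (the verify_record_line helper and the site root argument are illustrative, not part of this commit):

import base64
import csv
import hashlib
from pathlib import Path

def verify_record_line(site: Path, line: str) -> bool:
    # One CSV row: path, "sha256=<digest>", size.
    path, hash_spec, size = next(csv.reader([line]))
    if not hash_spec:
        # RECORD lists itself and the compiled .pyc files with empty fields.
        return True
    algo, _, expected = hash_spec.partition("=")
    data = (site / path).read_bytes()
    digest = base64.urlsafe_b64encode(hashlib.new(algo, data).digest())
    return digest.rstrip(b"=").decode() == expected and int(size) == len(data)

# Example against an entry from the new RECORD:
# verify_record_line(Path("lib/python3.5/site-packages"),
#     "sqlalchemy/log.py,sha256=opX7UORq5N6_jWxN9aHX9OpiirwAcRA0qq-u5m4SMkQ,6712")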
367 lib/python3.5/site-packages/SQLAlchemy-0.9.7.dist-info/RECORD Deleted file
@@ -1,367 +0,0 @@
-SQLAlchemy-0.9.7.dist-info/DESCRIPTION.rst,sha256=ZN8fj2owI_rw0Emr3_RXqoNfTFkThjiZy7xcCzg1W_g,5013
-SQLAlchemy-0.9.7.dist-info/METADATA,sha256=BJMEdxvRA6_2F3TeCnFjCS87MJ-9mQDfNhMasw7zlxw,5785
-SQLAlchemy-0.9.7.dist-info/RECORD,,
-SQLAlchemy-0.9.7.dist-info/WHEEL,sha256=Er7DBTU_C2g_rTGCxcwhCKegQSKoYLj1ncusWiwlKwM,111
-SQLAlchemy-0.9.7.dist-info/metadata.json,sha256=L8ZvHjkvIuuc2wYjqxQXcMfbjzJukQrYyJWZgDtacI8,948
-SQLAlchemy-0.9.7.dist-info/top_level.txt,sha256=rp-ZgB7D8G11ivXON5VGPjupT1voYmWqkciDt5Uaw_Q,11
-sqlalchemy/__init__.py,sha256=1e-MTh9yzDNEKrfMPLTPbxzQxq2--QE4H-tc03eT5uE,2072
-sqlalchemy/cprocessors.cpython-35m-darwin.so,sha256=FEiXsJ5pdsq_PuqNiR7jrnTn99nThaja3fdifVbizV4,17012
-sqlalchemy/cresultproxy.cpython-35m-darwin.so,sha256=s_Y7BzjDh2KutKXsl9PyCZMdpMs1nmy1WlKCnAipmbI,18844
-sqlalchemy/cutils.cpython-35m-darwin.so,sha256=bGZ5-TxVNVjfkNxMTkbugVNeRQHI3mKXadQ2TXXvFu0,9756
-sqlalchemy/events.py,sha256=n0Z8zkW0Fdwdv82HLssEKhNNkCJP1nj3jZrztZinkYc,39724
-sqlalchemy/exc.py,sha256=aIZHSzr2SgBoyfNcjm4cWc5e81B953kSq2BskM6fBQY,11392
-sqlalchemy/inspection.py,sha256=zCZPzSx4EwImFRXU8vySI8xglZP_Nx4UEyKtjGMynhs,3093
-sqlalchemy/interfaces.py,sha256=SzmUZ1pL-7b4vEH361UFoCDW6GdM64UFXn5m-VIwgIA,10967
-sqlalchemy/log.py,sha256=4EG734XnC0sZJ-jFZJKJ1ONCJq1rAZCyu4SyAT8q3yQ,6712
-sqlalchemy/pool.py,sha256=LOxfK-5Mpuz6RO462sGSFCXieXauqcoMdMZt28XO0to,43830
-sqlalchemy/processors.py,sha256=h-deajMZbjXpfiOlOVNfBAxuIQf3fq9FemazYk1BGho,5220
-sqlalchemy/schema.py,sha256=Kr_6g5anin76KPkgWA_uagf-EzPLTWvsbFuKorJNHf4,1106
-sqlalchemy/types.py,sha256=E-EC4GrZeg_aEgGdXoXIw7iSgLtYJMNccYoDEXtF81s,1635
-sqlalchemy/connectors/__init__.py,sha256=-3OdiI200TYZzctdcC8z5tgV__t9sdhukPWJBHjlszA,278
-sqlalchemy/connectors/mxodbc.py,sha256=JZj-z_sY-BYiAM-T8PK9m-WN3vhfObmzAP6eb8Abpag,5348
-sqlalchemy/connectors/mysqldb.py,sha256=ZSi4E_f5Or7bqsI6Zv6cjLGEMAN0bedv7sHnCkrwwbM,4748
-sqlalchemy/connectors/pyodbc.py,sha256=vAvQwk3wDt1wRkiBwuwe0hC-7Fla4ekqBJLO8WPS_xs,5890
-sqlalchemy/connectors/zxJDBC.py,sha256=cVhbJ3PqmVD0KJ7m6FRtR9c5KhHIG-hhm-NX-4rgd5E,1868
-sqlalchemy/databases/__init__.py,sha256=CKXfBXKaWADu571n8lJR8cndndTHDUAAyK-a_0JLGjg,879
-sqlalchemy/dialects/__init__.py,sha256=XtI5s53JyccSQo2GIGNa89bXn8mtePccSukfR_-qipc,1027
-sqlalchemy/dialects/postgres.py,sha256=_FjxoU0BVULlj6PZBMB-2-c4WM6zviIvzxCgtXhxhsY,614
-sqlalchemy/dialects/drizzle/__init__.py,sha256=AOyB8JGeTbwWfpM5wVLGhbyzX2MRKPFIAoU00IMtrzw,573
-sqlalchemy/dialects/drizzle/base.py,sha256=dLQxRslE6oyugIeSCdlrVP4DYuCgWiH0kgbpuD59-KM,14995
-sqlalchemy/dialects/drizzle/mysqldb.py,sha256=myT7EXJg9ToVBrbUkkfmGgCb5Xu2PAr3xnFeA-pvS3s,1270
-sqlalchemy/dialects/firebird/__init__.py,sha256=bmFjix7gx64rr2luqs9O2mYm-NnpcgguH951XuW6eyc,664
-sqlalchemy/dialects/firebird/base.py,sha256=nnUrSBfI_chqZmA32KOeMzx8cgpEE5Tr9RNkoZA_Qpk,28061
-sqlalchemy/dialects/firebird/fdb.py,sha256=mr4KaJgHzpFk6W7g8qBPDLy6WAQkdnR5gMipWmM_6EE,4325
-sqlalchemy/dialects/firebird/kinterbasdb.py,sha256=WaIPAEIqQJ3QRpYk44IwwC-3t4X4jxv_LWK9CI0xBf4,6299
-sqlalchemy/dialects/mssql/__init__.py,sha256=dzp85H5bMoja-EsD08ctKaope5e-bjr9r6QoxL9TJXo,1081
-sqlalchemy/dialects/mssql/adodbapi.py,sha256=j1K_qpA_v8Uc6zX1PGSGsqnHECUQIx_mnxMr7h9pd3A,2493
-sqlalchemy/dialects/mssql/base.py,sha256=TY7YAXMg_ZKGtaWKrSOV1VTVpZ2oGh68ga_MbQYZvX4,58663
-sqlalchemy/dialects/mssql/information_schema.py,sha256=-E4WAgB0yYoncty676FvmZL9DEqXCEj0bGwvFc2aZD4,6418
-sqlalchemy/dialects/mssql/mxodbc.py,sha256=pIXO0sxf_5z2R68jIeILnC1aH5s8HyOadIzr7H13dxw,3856
-sqlalchemy/dialects/mssql/pymssql.py,sha256=DXoU2r3dcuxMh_QLB713iPAANfcsRqcMhhmBFKg_k9o,2978
-sqlalchemy/dialects/mssql/pyodbc.py,sha256=gHTjyRc9VOEJDZqs1daBmCPTDZGPQzhoOet2kxy7r2Q,9437
-sqlalchemy/dialects/mssql/zxjdbc.py,sha256=x33xz8OTS9ffrtRV9lWxbReLS4qW3pq2lp1aljDHGA8,2144
-sqlalchemy/dialects/mysql/__init__.py,sha256=Ii4p3TOckR9l50WvkstGq1piE_eoySn5ZXPw8R7D_rI,1171
-sqlalchemy/dialects/mysql/base.py,sha256=Oqh2Z2mcFDVlUL0DwPy3Q7bSzJQiC1JxCdMrAMwMyEo,109960
-sqlalchemy/dialects/mysql/cymysql.py,sha256=MWtGXcS4f5JpP1zBLzejWz3y7w5-saWlwQfH7_i1aao,2349
-sqlalchemy/dialects/mysql/gaerdbms.py,sha256=oe9BfWjFJglITk6s_wQQYe5vs5h9zYcMUuUx2FmE7J8,2724
-sqlalchemy/dialects/mysql/mysqlconnector.py,sha256=FX7HEsbiqE98IbpI3paFzfP2geLYsTadXox_lYmL9eE,3911
-sqlalchemy/dialects/mysql/mysqldb.py,sha256=Gf9QLAY2o4Eu9IeagLw4e-v5uQGHb7sJgP_COnwn8MM,3259
-sqlalchemy/dialects/mysql/oursql.py,sha256=DGwyO-b7etGb76XdDrcQ-hwtVu_yIzU-8IF_iYw5LqU,8756
-sqlalchemy/dialects/mysql/pymysql.py,sha256=TLsJeMHMZGvOTlbO2JhQqnjcALKmAgTedQJCBCnME0Q,1232
-sqlalchemy/dialects/mysql/pyodbc.py,sha256=agCzgILoQdSVOKup3OUMvt9RkDAtwyg8yQgIibo14XE,2640
-sqlalchemy/dialects/mysql/zxjdbc.py,sha256=MyCM6TGRxzjrhvsow6MhwLScB4qQTZn1DmyhhhPGcNg,3803
-sqlalchemy/dialects/oracle/__init__.py,sha256=hO304rf8aiIq9--QQSmuvi8MBZBcZhNzjI48cs1JTZo,797
-sqlalchemy/dialects/oracle/base.py,sha256=cG2Ov7EJ8GKg18mXLCiKLC0WO0ssdONykAPydaTCpJQ,49391
-sqlalchemy/dialects/oracle/cx_oracle.py,sha256=rneYv6pQOXVM2ztFBCO6bBcVAfR7vdtV8voEbvBYiIY,37737
-sqlalchemy/dialects/oracle/zxjdbc.py,sha256=c_nHf8X1GM0OkqXAKgQiTEved3ZDDzDop6dG_SpE55w,8034
-sqlalchemy/dialects/postgresql/__init__.py,sha256=JdIQ3kAuikIwYNEc4W39-BKnOepAb3jpdN7RXtwru9E,1251
-sqlalchemy/dialects/postgresql/base.py,sha256=EV642aX-WlOLDNirz33f50GXOGasLG8TBw8qaggo0GI,88366
-sqlalchemy/dialects/postgresql/constraints.py,sha256=OAIZmNYW3PEuWVagjj9p-CpCcdh6tM_PTObwjqD_vVs,2543
-sqlalchemy/dialects/postgresql/hstore.py,sha256=_HhwrAGGEk8KlKQZrqzJ_PSLHlH9Cd4wemN0ascv-Uk,11402
-sqlalchemy/dialects/postgresql/json.py,sha256=ezCEhBZKnobL2epNbT3re9AWQxvtYHpsQxK7fmB3XcI,11066
-sqlalchemy/dialects/postgresql/pg8000.py,sha256=Uiu6RhLLn42UpCu27Z957dhet59zZh4z3jmbjTfgLJg,5428
-sqlalchemy/dialects/postgresql/psycopg2.py,sha256=cjIOs6k-EKVGhbsec4sx8MNq7Nk-c1z4hnW5ZJYOn4U,20761
-sqlalchemy/dialects/postgresql/pypostgresql.py,sha256=ZxaL0d8xA0yhlhOdK09lLtJdWH90vdq57EITBrrdRms,2173
-sqlalchemy/dialects/postgresql/ranges.py,sha256=q3pc7jeUOc83lkgE3WVN8PlqKzeYJjuTHuXeRvY-s2s,4814
-sqlalchemy/dialects/postgresql/zxjdbc.py,sha256=c9_JUHsjiaTbmxqoe3v2YS0oIgkk5xOL0e6ZSaUM_EI,1397
-sqlalchemy/dialects/sqlite/__init__.py,sha256=evr3TsIXnZIKD7QY-CHC-MVvkt28SyV0sCJrIjpnQJM,723
-sqlalchemy/dialects/sqlite/base.py,sha256=jQ5kDBsYuP4yoENO6lBJ792YDY9cHJRfPXq4etec0mI,39398
-sqlalchemy/dialects/sqlite/pysqlite.py,sha256=dHrZk8Ut8sgNpVJ2-Byx_xJoxn55zLS_whmGSjABNCk,13249
-sqlalchemy/dialects/sybase/__init__.py,sha256=4G1LG5YqVaE2QDIJANqomedZXRQrVIwMW3y2lKWurVU,894
-sqlalchemy/dialects/sybase/base.py,sha256=ArOCZBItXEupylulxBzjr9Z81st06ZgGfqtfLJEurWE,28629
-sqlalchemy/dialects/sybase/mxodbc.py,sha256=1Qmk1XbcjhJwu0TH6U1QjHRhnncclR9jqdMCWsasbYM,901
-sqlalchemy/dialects/sybase/pyodbc.py,sha256=FsrKZP9k9UBMp_sTBIhe50QWwJmcpDw6FAzXmEhaq2s,2102
-sqlalchemy/dialects/sybase/pysybase.py,sha256=c9LCZ0IM4wW5fwMvvpgaflDlZJKcan4Pq7QwFE1LAUw,3208
-sqlalchemy/engine/__init__.py,sha256=o2daUscphvcFRkjOPMl0BJLGO6PXev_L8eDcx-7Zafg,15923
-sqlalchemy/engine/base.py,sha256=3_F3iPisYSVePe8G2vIsZqqEz29qfSvmsijwdgsyNYI,70311
-sqlalchemy/engine/default.py,sha256=W2TuN72wafDfLQU-ud74Fpl6pL8TjD5YdftrPrclyAY,34371
-sqlalchemy/engine/interfaces.py,sha256=pzFtwvtRIFze67WKMFKzVivZbSsXGQeEngoY-2DnL8g,30327
-sqlalchemy/engine/reflection.py,sha256=GocL2XvTxrmSdqn4ybWiRTlfsfiLSZcUISSJ_M1Bur8,21545
-sqlalchemy/engine/result.py,sha256=Is8N8TcQISjwBPwRxdsiOgyKqIDV8cJ15wBcwQWnsEw,34979
-sqlalchemy/engine/strategies.py,sha256=oVroyOyomN2q_OJfpHMhBa_0q-n_vWzI2H1cZIZb5G4,8715
-sqlalchemy/engine/threadlocal.py,sha256=UBtauPUQjOPHuPIcBIxpiKUmOm_jpcUrkftbJ32Gt4E,4103
-sqlalchemy/engine/url.py,sha256=9kL6hfESlqwlBcc5E_2a8YsuGF1EseMBm_DQyUAJ4XA,7521
-sqlalchemy/engine/util.py,sha256=wh2y0Uwt9O1ulnT6YhxAShcDXZTtigQZbiBnMxpvHeo,2338
-sqlalchemy/event/__init__.py,sha256=sk4pgB4dEPftPZMtgFctlqsPnWYjtcvqcpTVSg6mQ9M,419
-sqlalchemy/event/api.py,sha256=zLbcAKsKsYX5I3zmiwNIewwto1puGSWCm49crnAbHbk,3854
-sqlalchemy/event/attr.py,sha256=-ENxlontP0HnKFE9nyHMC4jS5pvMlXmkiK2ehAcdzLU,12566
-sqlalchemy/event/base.py,sha256=DU3EYBWflaGrwpEz_jocGQTsR3_nlKKWMFVThh4D6f4,7248
-sqlalchemy/event/legacy.py,sha256=vA9km6n_ZN1YSI5C-A9Jw4ptKfwZueTuvJbmtVYY1os,5818
-sqlalchemy/event/registry.py,sha256=bzPbXp2NTcYKQC7wsYUk-5rNMJMz5OwPGGsnzyI2ylQ,7470
-sqlalchemy/ext/__init__.py,sha256=wSCbYQ2KptpL8sNFiCbEzTjI2doWmcEAiRAqx58qLcY,235
-sqlalchemy/ext/associationproxy.py,sha256=XDr4UarpHG7ulrhiEsD8PYnYp4MUEUhpvjPfPdbayGw,32975
-sqlalchemy/ext/automap.py,sha256=r7F2VM0T--wecKH6uNFIWsYTElwwXCW9cOzF0llKz10,39713
-sqlalchemy/ext/compiler.py,sha256=m12MOPF6YbhY2OWJccWVRxfe2cigX0VcKzAhSr5FTI0,15770
-sqlalchemy/ext/horizontal_shard.py,sha256=T31IsHSpyJC27YLHsSxQ7R4uGIn4guICsVNNDWE994k,4814
-sqlalchemy/ext/hybrid.py,sha256=ZnPkE4ORZwA1md6rBYeyK-ySAcVsPZdLzRLXS3ZxmYo,27985
-sqlalchemy/ext/instrumentation.py,sha256=5MjuuikGz1x_itSm22PZMWr-los8v-5PK6HE5TKCSSM,14646
-sqlalchemy/ext/mutable.py,sha256=GCsgkaFyypUa1c3x7q9oNF2uoGM8d3LTv3DgDB9x5J8,23069
-sqlalchemy/ext/orderinglist.py,sha256=5vRTTK4Pdm2_IrbRIxWV8qHgDxktPVljrUBqwyLk-TE,13695
-sqlalchemy/ext/serializer.py,sha256=JiHdBiStZDlEvh5yzsd7lJAkmwfHJbsgDtcPwCRFOi0,5586
-sqlalchemy/ext/declarative/__init__.py,sha256=kGdbexw3SfbgS8Uq7og9iTLDXV-Hk8CJnXn_4nulql0,47618
-sqlalchemy/ext/declarative/api.py,sha256=idvoFBxhqzQvhTEPY764L1RpdrrSCJS3yU_J9xTbqJY,17780
-sqlalchemy/ext/declarative/base.py,sha256=oXplGZM3G81DPKcaI_PsNVGjmpj9g7eqmDOXod3DzvU,20036
-sqlalchemy/ext/declarative/clsregistry.py,sha256=niIyzC-WeITMJeFQqkdZdeLxuKni-vHrs7-LJ3WZi_g,10314
-sqlalchemy/orm/__init__.py,sha256=-RsTaAlLe9a2GvHw74fN5jyB2brV-i0XXViPzPGp2gc,7976
-sqlalchemy/orm/attributes.py,sha256=aoHu08zOuMziAlwgVyO2cr_86m26PFnTx6YnZ0QgcGQ,55522
-sqlalchemy/orm/base.py,sha256=lijsuavy2C4UJd7RrKKx8U6IdxsXwG8xV_XdqQ6B31c,13181
-sqlalchemy/orm/collections.py,sha256=RmOIb6b-XrRD8sSJ-9qMDM3aRyps1aOZcbQlWt4Ojss,52951
-sqlalchemy/orm/dependency.py,sha256=zEruE4dj9m7qHC9nS0xIAVx4L0WB58c6eZHPsZF25QM,46072
-sqlalchemy/orm/deprecated_interfaces.py,sha256=CXg9nh2XUyrRYDVS20XVc3G3niHx84J8ko7PBXOXcAI,21941
-sqlalchemy/orm/descriptor_props.py,sha256=b1GyOu45jjvxgBMwhVXBSyBxhYM7IVlOGG5F_qOl5co,24455
-sqlalchemy/orm/dynamic.py,sha256=XQTf7ozzdkloQMUPPkwM02eE_sr0sYt-uQgpkdFt2SU,13338
-sqlalchemy/orm/evaluator.py,sha256=ZrExCzWmvVZS2ovM8t09cPzfKOqbF7Zm3b88nPviPQM,5032
-sqlalchemy/orm/events.py,sha256=6AMKAa-V_72GsGj3dP158FdiqXbF3JM9IeBpAQFPsjo,70274
-sqlalchemy/orm/exc.py,sha256=Otfiun4oExJpUp8Tjk2JcSt9w3BkL1JRi95wXo-tv60,5439
-sqlalchemy/orm/identity.py,sha256=e_xaDoNI06hLaiDNomYuvuAUs-TAh901dwqeQhZs320,7091
-sqlalchemy/orm/instrumentation.py,sha256=rl72nSRqgk4PXlY3NsZJ_jtkrdszi_AwlBFRvXzRaug,16787
-sqlalchemy/orm/interfaces.py,sha256=vNj2Jl_U_S2rlITAlqg90mz9RTdRmtZvNJHCUnekWig,18983
-sqlalchemy/orm/loading.py,sha256=mvvth2KOU2CTNNnQuhAtzQRjuFoEf63jTjQ3Seop56M,21257
-sqlalchemy/orm/mapper.py,sha256=SZ1V59o6e10sUyAT9XytHjCvzyGALMF99QajZcWAAd8,108109
-sqlalchemy/orm/path_registry.py,sha256=VzaWNV7iA8Z5XkXcHLP379H6gQm1qSfunZ4dRaxyLDY,7672
-sqlalchemy/orm/persistence.py,sha256=Dz3prpO_nHfPO67dDwO4-6buSOziqXQktXfcyIGseYI,40862
-sqlalchemy/orm/properties.py,sha256=AB5fBY8EEchTU7QYgWQMn93i7KJoFdKI15O9ofETo8k,9557
-sqlalchemy/orm/query.py,sha256=GCMI29Hj3XXYFMo7cyGwpThv2rRqNuFa8T4lpt4bDcg,129823
-sqlalchemy/orm/relationships.py,sha256=IjCW_ng5YXoUODGj7EbkS3q9r1YnE4Y788O34ZeFeBI,111050
-sqlalchemy/orm/scoping.py,sha256=OoRgem4nICXPZAeK4-Sbe2kDzWDtyBHd6fZtK3DZT4c,6101
-sqlalchemy/orm/session.py,sha256=DHchZuztFAGH256oCA9dDKJCznyvvN9EeoFEXEpKW9E,95874
-sqlalchemy/orm/state.py,sha256=_hEslw-lhYnJTcSMmVfmsAzerR1yxwwtLDvCDrw3PK0,21014
-sqlalchemy/orm/strategies.py,sha256=0RsDUGe0i1Di_eFXkWAW0ZcV19V-m0K9YxmT8lM9Q-k,54293
-sqlalchemy/orm/strategy_options.py,sha256=q1_-h6Vh-59mRgM9dGNApxjF18TfrnwXMRk7wpHvTrE,32170
-sqlalchemy/orm/sync.py,sha256=NDLMpllJjk_zF7rgCSy4WlaeEfP1TVSj-UeVU_QZbVs,4736
-sqlalchemy/orm/unitofwork.py,sha256=F70Dfg7kBtcqLUp2qBYlcQZIDx3zI4bpNsIJFRxcf90,23234
-sqlalchemy/orm/util.py,sha256=-wzG6p6NGztBvxHiosvTvShwSQpZdd3KKT2mw26l86o,35695
-sqlalchemy/sql/__init__.py,sha256=158RHlIfn8A-OyihHBGd1jNcd4Ed2-laRkgeGJIvs4w,1721
-sqlalchemy/sql/annotation.py,sha256=JXguy7w1i3jPtr7AMiJCZ5B-TaECZquA8GmM2laEcC4,6111
-sqlalchemy/sql/base.py,sha256=z_dXcqIZsqKaCsSgl7dz5t97Ndb3v0l8gBe53jYRwTE,21416
-sqlalchemy/sql/compiler.py,sha256=7_GOyLILz5GBozTCXmQAnOResx0IivYH4DCf7ZV9acs,109419
-sqlalchemy/sql/ddl.py,sha256=WjCxmUAHmlbFXe5zofpj_lYU6-TEqvCJR9cxThHTIlc,28693
-sqlalchemy/sql/default_comparator.py,sha256=4DYyP32ubGMPei66c-IMAMsNKE-xoXpptyZDEju5jL4,13132
-sqlalchemy/sql/dml.py,sha256=ruyX-MuW_FcOW7DyjqGlyXwQUQlpAHtx73LHxfVXwqI,29526
-sqlalchemy/sql/elements.py,sha256=tyrdO9Bzm6em1sNEnwJO04LJCz-b1AmomRTp9OuerKQ,121164
-sqlalchemy/sql/expression.py,sha256=ERaOilpJJEWVXXCanhEY5x3Jeu5Q3pLuDSIQZCzq0bU,5668
-sqlalchemy/sql/functions.py,sha256=zI_Q6gqketUqehiNnDB6ezgDQC01iT7Up2jhPhWMTOg,16567
-sqlalchemy/sql/naming.py,sha256=FIdNBDvZwf1e-mW-Opjd_aKyeo986nWmYW0Dr9MOgCc,4588
-sqlalchemy/sql/operators.py,sha256=wsWdHm4sN6b6jfB8CTaFgBqww2jC-YjgpJJnmqzicVc,22510
-sqlalchemy/sql/schema.py,sha256=ts0hj8oYU9bx-amhfYpaDHdx4GFNwj1_avwHwm3eY5Q,132789
-sqlalchemy/sql/selectable.py,sha256=-CaVRHBIbgWpKe7kI5BSWY0x8AHTwUSJtq5d3XCQLwQ,109544
-sqlalchemy/sql/sqltypes.py,sha256=Zb7AMQlkMaVKTWQgR2o1JS4hwDjr6gjWl_POBaVGZ_0,54635
-sqlalchemy/sql/type_api.py,sha256=WsB-QZiEz1ez4g9OeXBKBqSnyluawX0ttG5stLmKJCI,37692
-sqlalchemy/sql/util.py,sha256=k2cBNkvVBl-B3AF5nD16Hq0NyWOE78iBbIf0b6PHA9U,19501
-sqlalchemy/sql/visitors.py,sha256=cohfnIfn4fD6O-qLhzhrwqrMaGhLlrRKvW0nTa72dHk,9943
-sqlalchemy/testing/__init__.py,sha256=RzQCY3RZ88UFBUhCGxnA82w1BsJ8M-0L-76-ex-Wt5o,1035
-sqlalchemy/testing/assertions.py,sha256=MoK89J6upTMKcPU23gTMIDb9Wk8qPvOkJd88y6lWNt0,15666
-sqlalchemy/testing/assertsql.py,sha256=fgA3QTe2vNQZzlGoBXmAhc0RA2JZKsbxyRAmr0FczZ8,11248
-sqlalchemy/testing/config.py,sha256=l34Qkqpz2Yu6BZxWm2zy7e5AqLyx_0adHLLpTq-bGew,2136
-sqlalchemy/testing/distutils_run.py,sha256=fzij-nmjhKKdS9j9THw8i4zWkC3syEzN5V4L9gx45YA,230
-sqlalchemy/testing/engines.py,sha256=uf4lllczl1qqmaOUrPNrW57cYgLLpJK8k3k8oduCTcg,13030
-sqlalchemy/testing/entities.py,sha256=1JpVCXMLwzSPpzetTqERD_3Zk3tiQBIDpBO9OVoVa9k,2992
-sqlalchemy/testing/exclusions.py,sha256=gjgLNk2PRBtsYbi_CsWGf6MxRLB8y4b9QkFbAWRfPuA,10053
-sqlalchemy/testing/fixtures.py,sha256=ys7TZ1uP9wF_OQXgTUNcXiUTOQZhQYgzc9fzjD420mQ,10587
-sqlalchemy/testing/mock.py,sha256=F0ticsEqquR82laaAqhJaTNDk-wJBjY8UqQmrFrvTLM,620
-sqlalchemy/testing/pickleable.py,sha256=9I1ADF_Tw-E6UgTHOuKGZP7_ZM72XhmEXNp_1qmh2l4,2641
-sqlalchemy/testing/profiling.py,sha256=FeAWcKQrVh_-ow06yUIF3P5gQU8YCeIkXKsV142U5eU,10280
-sqlalchemy/testing/requirements.py,sha256=xTZPvrj3wr-4xDsatmX1mp7mPMxgS_4XH5WLaZhi8Yg,17718
-sqlalchemy/testing/runner.py,sha256=q2HZNYXYgcJytV8IHw4btb7sD6muov4WtJzJFF6zuFc,1625
-sqlalchemy/testing/schema.py,sha256=nn4K5mjQbRQuvnuux43h9feYrXjZvZKpKEJLJioljGM,3433
-sqlalchemy/testing/util.py,sha256=nDHZOwsKgX1-ecRgZOzXzMrKp11HXfMCd5LsQB94ES4,5304
-sqlalchemy/testing/warnings.py,sha256=VskMM5G9imDqSfOQvWsriJ21b_CrfcgD5VOCWJtmwps,1682
-sqlalchemy/testing/plugin/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-sqlalchemy/testing/plugin/noseplugin.py,sha256=Ql26MG8e5ZWzCDisrvYR3thsA0wS__iEHFq-0yGSgsg,2771
-sqlalchemy/testing/plugin/plugin_base.py,sha256=VMClps-DhTrrmx4rRCX5eXktKwZ5R-z-RlcRSmnt0lo,15384
-sqlalchemy/testing/plugin/pytestplugin.py,sha256=B2Wsh3ANSi3mw9zzuheRJ_HSpXfzilnCgl0WEh1nDmE,4368
-sqlalchemy/testing/suite/__init__.py,sha256=lbOCv0BGIpGJmk9L4I4zUL_W6_n-LGXJRTPvlvM4kDQ,419
-sqlalchemy/testing/suite/test_ddl.py,sha256=Baw0ou9nKdADmrRuXgWzF1FZx0rvkkw3JHc6yw5BN0M,1838
-sqlalchemy/testing/suite/test_insert.py,sha256=QQVLHnw58kUZWwGCVH7E1LL3I3R2b9mxl7MWhTro0hA,6746
-sqlalchemy/testing/suite/test_reflection.py,sha256=5rjLsnHZvQx0GQ9s6rkQaI_JcBVdQl-KFw8KolgXTk0,19895
-sqlalchemy/testing/suite/test_results.py,sha256=oAcO1tD0I7c9ErMeSvSZBZfz1IBDMJHJTf64Y1pBodk,6685
-sqlalchemy/testing/suite/test_select.py,sha256=qmSQE2EaVSf1Zwi_4kiBWstLZD2NvC3l8NbCfcnAhr8,2506
-sqlalchemy/testing/suite/test_sequence.py,sha256=i7tWJnVqfZDTopHs8i4NEDZnhsxjDoOQW0khixKIAnU,3806
-sqlalchemy/testing/suite/test_types.py,sha256=UKa-ZPdpz16mVKvT-9ISRAfqdrqiKaE7IA-_phQQuxo,17088
-sqlalchemy/testing/suite/test_update_delete.py,sha256=r5p467r-EUsjEcWGfUE0VPIfN4LLXZpLRnnyBLyyjl4,1582
-sqlalchemy/util/__init__.py,sha256=goz0YsuGbgmDfmXJ5bmx9H0JjKDAKflB2glTMAfbtK0,2350
-sqlalchemy/util/_collections.py,sha256=G3xVqiiFI-stTqFwJ93epgHws5_el6W-0KsS0JXxeH4,26052
-sqlalchemy/util/compat.py,sha256=HhYut_7bky-P3acKHpXs5CsX9Bri8FDLRgfAnG9bEyg,5926
-sqlalchemy/util/deprecations.py,sha256=CYQ712rSop1CXF-0Kr0eAo-bEa4g8YJvHoOiBxbfIhw,4403
-sqlalchemy/util/langhelpers.py,sha256=Yc9l67mVY-O8NZLJqI4ZOvbsC9eibVDm_V8Rhjhzbj4,37539
-sqlalchemy/util/queue.py,sha256=XAJ2hKAwepp3mCw5M1-Ksn1t7XS-mHoGhIhwzusklWw,6548
-sqlalchemy/util/topological.py,sha256=wmuAjgNqxrGWFrI3KHzUAD8ppaD6gRsxLtoG1D3nKDI,2594
-sqlalchemy/sql/__pycache__/functions.cpython-35.pyc,,
-sqlalchemy/dialects/sybase/__pycache__/base.cpython-35.pyc,,
-sqlalchemy/orm/__pycache__/deprecated_interfaces.cpython-35.pyc,,
-sqlalchemy/ext/declarative/__pycache__/base.cpython-35.pyc,,
-sqlalchemy/dialects/mssql/__pycache__/adodbapi.cpython-35.pyc,,
-sqlalchemy/orm/__pycache__/dependency.cpython-35.pyc,,
-sqlalchemy/orm/__pycache__/relationships.cpython-35.pyc,,
-sqlalchemy/util/__pycache__/deprecations.cpython-35.pyc,,
-sqlalchemy/__pycache__/log.cpython-35.pyc,,
-sqlalchemy/orm/__pycache__/interfaces.cpython-35.pyc,,
-sqlalchemy/connectors/__pycache__/pyodbc.cpython-35.pyc,,
-sqlalchemy/ext/__pycache__/compiler.cpython-35.pyc,,
-sqlalchemy/orm/__pycache__/dynamic.cpython-35.pyc,,
-sqlalchemy/dialects/drizzle/__pycache__/base.cpython-35.pyc,,
-sqlalchemy/dialects/oracle/__pycache__/base.cpython-35.pyc,,
-sqlalchemy/dialects/mssql/__pycache__/zxjdbc.cpython-35.pyc,,
-sqlalchemy/ext/__pycache__/mutable.cpython-35.pyc,,
-sqlalchemy/__pycache__/pool.cpython-35.pyc,,
-sqlalchemy/engine/__pycache__/__init__.cpython-35.pyc,,
-sqlalchemy/dialects/postgresql/__pycache__/pg8000.cpython-35.pyc,,
-sqlalchemy/dialects/mysql/__pycache__/mysqlconnector.cpython-35.pyc,,
-sqlalchemy/sql/__pycache__/elements.cpython-35.pyc,,
-sqlalchemy/sql/__pycache__/ddl.cpython-35.pyc,,
-sqlalchemy/testing/suite/__pycache__/test_results.cpython-35.pyc,,
-sqlalchemy/dialects/mysql/__pycache__/pyodbc.cpython-35.pyc,,
-sqlalchemy/dialects/mssql/__pycache__/information_schema.cpython-35.pyc,,
-sqlalchemy/sql/__pycache__/dml.cpython-35.pyc,,
-sqlalchemy/ext/__pycache__/hybrid.cpython-35.pyc,,
-sqlalchemy/testing/__pycache__/exclusions.cpython-35.pyc,,
-sqlalchemy/event/__pycache__/attr.cpython-35.pyc,,
-sqlalchemy/util/__pycache__/queue.cpython-35.pyc,,
-sqlalchemy/dialects/mssql/__pycache__/__init__.cpython-35.pyc,,
-sqlalchemy/event/__pycache__/legacy.cpython-35.pyc,,
-sqlalchemy/__pycache__/__init__.cpython-35.pyc,,
-sqlalchemy/__pycache__/exc.cpython-35.pyc,,
-sqlalchemy/__pycache__/schema.cpython-35.pyc,,
-sqlalchemy/sql/__pycache__/annotation.cpython-35.pyc,,
-sqlalchemy/sql/__pycache__/compiler.cpython-35.pyc,,
-sqlalchemy/util/__pycache__/__init__.cpython-35.pyc,,
-sqlalchemy/dialects/oracle/__pycache__/__init__.cpython-35.pyc,,
-sqlalchemy/sql/__pycache__/__init__.cpython-35.pyc,,
-sqlalchemy/__pycache__/types.cpython-35.pyc,,
-sqlalchemy/dialects/postgresql/__pycache__/hstore.cpython-35.pyc,,
-sqlalchemy/connectors/__pycache__/mxodbc.cpython-35.pyc,,
-sqlalchemy/sql/__pycache__/sqltypes.cpython-35.pyc,,
-sqlalchemy/dialects/firebird/__pycache__/kinterbasdb.cpython-35.pyc,,
-sqlalchemy/util/__pycache__/langhelpers.cpython-35.pyc,,
-sqlalchemy/orm/__pycache__/events.cpython-35.pyc,,
-sqlalchemy/testing/suite/__pycache__/test_reflection.cpython-35.pyc,,
-sqlalchemy/orm/__pycache__/strategies.cpython-35.pyc,,
-sqlalchemy/dialects/oracle/__pycache__/cx_oracle.cpython-35.pyc,,
-sqlalchemy/ext/__pycache__/__init__.cpython-35.pyc,,
-sqlalchemy/dialects/mysql/__pycache__/gaerdbms.cpython-35.pyc,,
-sqlalchemy/sql/__pycache__/base.cpython-35.pyc,,
-sqlalchemy/orm/__pycache__/mapper.cpython-35.pyc,,
-sqlalchemy/testing/suite/__pycache__/test_types.cpython-35.pyc,,
-sqlalchemy/engine/__pycache__/default.cpython-35.pyc,,
-sqlalchemy/orm/__pycache__/session.cpython-35.pyc,,
-sqlalchemy/testing/__pycache__/profiling.cpython-35.pyc,,
-sqlalchemy/orm/__pycache__/state.cpython-35.pyc,,
-sqlalchemy/event/__pycache__/registry.cpython-35.pyc,,
-sqlalchemy/util/__pycache__/compat.cpython-35.pyc,,
-sqlalchemy/orm/__pycache__/base.cpython-35.pyc,,
-sqlalchemy/orm/__pycache__/loading.cpython-35.pyc,,
-sqlalchemy/orm/__pycache__/persistence.cpython-35.pyc,,
-sqlalchemy/orm/__pycache__/attributes.cpython-35.pyc,,
-sqlalchemy/dialects/postgresql/__pycache__/zxjdbc.cpython-35.pyc,,
-sqlalchemy/orm/__pycache__/descriptor_props.cpython-35.pyc,,
-sqlalchemy/dialects/postgresql/__pycache__/base.cpython-35.pyc,,
-sqlalchemy/testing/suite/__pycache__/__init__.cpython-35.pyc,,
-sqlalchemy/dialects/sybase/__pycache__/mxodbc.cpython-35.pyc,,
-sqlalchemy/sql/__pycache__/default_comparator.cpython-35.pyc,,
-sqlalchemy/sql/__pycache__/schema.cpython-35.pyc,,
-sqlalchemy/testing/plugin/__pycache__/__init__.cpython-35.pyc,,
-sqlalchemy/orm/__pycache__/scoping.cpython-35.pyc,,
-sqlalchemy/ext/__pycache__/horizontal_shard.cpython-35.pyc,,
-sqlalchemy/sql/__pycache__/operators.cpython-35.pyc,,
-sqlalchemy/testing/__pycache__/distutils_run.cpython-35.pyc,,
-sqlalchemy/event/__pycache__/api.cpython-35.pyc,,
-sqlalchemy/testing/__pycache__/schema.cpython-35.pyc,,
-sqlalchemy/ext/__pycache__/orderinglist.cpython-35.pyc,,
-sqlalchemy/testing/__pycache__/assertsql.cpython-35.pyc,,
-sqlalchemy/testing/__pycache__/engines.cpython-35.pyc,,
-sqlalchemy/connectors/__pycache__/__init__.cpython-35.pyc,,
-sqlalchemy/engine/__pycache__/reflection.cpython-35.pyc,,
-sqlalchemy/dialects/firebird/__pycache__/__init__.cpython-35.pyc,,
-sqlalchemy/dialects/postgresql/__pycache__/psycopg2.cpython-35.pyc,,
-sqlalchemy/sql/__pycache__/type_api.cpython-35.pyc,,
-sqlalchemy/testing/__pycache__/config.cpython-35.pyc,,
-sqlalchemy/testing/__pycache__/util.cpython-35.pyc,,
-sqlalchemy/testing/suite/__pycache__/test_ddl.cpython-35.pyc,,
-sqlalchemy/orm/__pycache__/util.cpython-35.pyc,,
-sqlalchemy/orm/__pycache__/unitofwork.cpython-35.pyc,,
-sqlalchemy/testing/__pycache__/pickleable.cpython-35.pyc,,
-sqlalchemy/event/__pycache__/base.cpython-35.pyc,,
-sqlalchemy/ext/__pycache__/associationproxy.cpython-35.pyc,,
-sqlalchemy/sql/__pycache__/visitors.cpython-35.pyc,,
-sqlalchemy/dialects/mysql/__pycache__/__init__.cpython-35.pyc,,
-sqlalchemy/__pycache__/inspection.cpython-35.pyc,,
-sqlalchemy/ext/__pycache__/serializer.cpython-35.pyc,,
-sqlalchemy/dialects/sybase/__pycache__/__init__.cpython-35.pyc,,
-sqlalchemy/dialects/mssql/__pycache__/base.cpython-35.pyc,,
-sqlalchemy/databases/__pycache__/__init__.cpython-35.pyc,,
-sqlalchemy/sql/__pycache__/selectable.cpython-35.pyc,,
-sqlalchemy/testing/__pycache__/fixtures.cpython-35.pyc,,
-sqlalchemy/engine/__pycache__/util.cpython-35.pyc,,
-sqlalchemy/event/__pycache__/__init__.cpython-35.pyc,,
-sqlalchemy/connectors/__pycache__/zxJDBC.cpython-35.pyc,,
-sqlalchemy/testing/__pycache__/runner.cpython-35.pyc,,
-sqlalchemy/dialects/postgresql/__pycache__/pypostgresql.cpython-35.pyc,,
-sqlalchemy/ext/__pycache__/automap.cpython-35.pyc,,
-sqlalchemy/__pycache__/interfaces.cpython-35.pyc,,
-sqlalchemy/ext/__pycache__/instrumentation.cpython-35.pyc,,
-sqlalchemy/dialects/__pycache__/postgres.cpython-35.pyc,,
-sqlalchemy/testing/__pycache__/mock.cpython-35.pyc,,
-sqlalchemy/orm/__pycache__/exc.cpython-35.pyc,,
-sqlalchemy/dialects/postgresql/__pycache__/ranges.cpython-35.pyc,,
-sqlalchemy/dialects/postgresql/__pycache__/json.cpython-35.pyc,,
-sqlalchemy/util/__pycache__/_collections.cpython-35.pyc,,
-sqlalchemy/engine/__pycache__/threadlocal.cpython-35.pyc,,
-sqlalchemy/dialects/mysql/__pycache__/zxjdbc.cpython-35.pyc,,
-sqlalchemy/dialects/mssql/__pycache__/pyodbc.cpython-35.pyc,,
-sqlalchemy/dialects/__pycache__/__init__.cpython-35.pyc,,
-sqlalchemy/dialects/postgresql/__pycache__/constraints.cpython-35.pyc,,
-sqlalchemy/dialects/mysql/__pycache__/mysqldb.cpython-35.pyc,,
-sqlalchemy/engine/__pycache__/strategies.cpython-35.pyc,,
-sqlalchemy/orm/__pycache__/strategy_options.cpython-35.pyc,,
-sqlalchemy/dialects/drizzle/__pycache__/__init__.cpython-35.pyc,,
-sqlalchemy/engine/__pycache__/result.cpython-35.pyc,,
-sqlalchemy/testing/__pycache__/assertions.cpython-35.pyc,,
-sqlalchemy/dialects/drizzle/__pycache__/mysqldb.cpython-35.pyc,,
-sqlalchemy/dialects/sqlite/__pycache__/__init__.cpython-35.pyc,,
-sqlalchemy/dialects/mssql/__pycache__/mxodbc.cpython-35.pyc,,
-sqlalchemy/testing/suite/__pycache__/test_insert.cpython-35.pyc,,
-sqlalchemy/testing/suite/__pycache__/test_update_delete.cpython-35.pyc,,
-sqlalchemy/util/__pycache__/topological.cpython-35.pyc,,
-sqlalchemy/testing/suite/__pycache__/test_select.cpython-35.pyc,,
-sqlalchemy/dialects/postgresql/__pycache__/__init__.cpython-35.pyc,,
-sqlalchemy/orm/__pycache__/evaluator.cpython-35.pyc,,
-sqlalchemy/ext/declarative/__pycache__/clsregistry.cpython-35.pyc,,
-sqlalchemy/dialects/mysql/__pycache__/pymysql.cpython-35.pyc,,
-sqlalchemy/__pycache__/events.cpython-35.pyc,,
-sqlalchemy/testing/__pycache__/warnings.cpython-35.pyc,,
-sqlalchemy/sql/__pycache__/naming.cpython-35.pyc,,
-sqlalchemy/engine/__pycache__/base.cpython-35.pyc,,
-sqlalchemy/dialects/sqlite/__pycache__/pysqlite.cpython-35.pyc,,
-sqlalchemy/orm/__pycache__/collections.cpython-35.pyc,,
-sqlalchemy/dialects/sqlite/__pycache__/base.cpython-35.pyc,,
-sqlalchemy/orm/__pycache__/instrumentation.cpython-35.pyc,,
-sqlalchemy/orm/__pycache__/properties.cpython-35.pyc,,
-sqlalchemy/sql/__pycache__/expression.cpython-35.pyc,,
-sqlalchemy/orm/__pycache__/identity.cpython-35.pyc,,
-sqlalchemy/dialects/sybase/__pycache__/pyodbc.cpython-35.pyc,,
-sqlalchemy/orm/__pycache__/query.cpython-35.pyc,,
-sqlalchemy/dialects/firebird/__pycache__/base.cpython-35.pyc,,
-sqlalchemy/engine/__pycache__/interfaces.cpython-35.pyc,,
-sqlalchemy/testing/__pycache__/requirements.cpython-35.pyc,,
-sqlalchemy/testing/__pycache__/entities.cpython-35.pyc,,
-sqlalchemy/orm/__pycache__/sync.cpython-35.pyc,,
-sqlalchemy/engine/__pycache__/url.cpython-35.pyc,,
-sqlalchemy/dialects/oracle/__pycache__/zxjdbc.cpython-35.pyc,,
-sqlalchemy/dialects/mysql/__pycache__/base.cpython-35.pyc,,
-sqlalchemy/dialects/mssql/__pycache__/pymssql.cpython-35.pyc,,
-sqlalchemy/orm/__pycache__/path_registry.cpython-35.pyc,,
-sqlalchemy/sql/__pycache__/util.cpython-35.pyc,,
-sqlalchemy/dialects/firebird/__pycache__/fdb.cpython-35.pyc,,
-sqlalchemy/testing/plugin/__pycache__/plugin_base.cpython-35.pyc,,
-sqlalchemy/testing/plugin/__pycache__/noseplugin.cpython-35.pyc,,
-sqlalchemy/dialects/mysql/__pycache__/cymysql.cpython-35.pyc,,
-sqlalchemy/ext/declarative/__pycache__/api.cpython-35.pyc,,
-sqlalchemy/dialects/mysql/__pycache__/oursql.cpython-35.pyc,,
-sqlalchemy/testing/plugin/__pycache__/pytestplugin.cpython-35.pyc,,
-sqlalchemy/ext/declarative/__pycache__/__init__.cpython-35.pyc,,
-sqlalchemy/orm/__pycache__/__init__.cpython-35.pyc,,
-sqlalchemy/dialects/sybase/__pycache__/pysybase.cpython-35.pyc,,
-sqlalchemy/testing/__pycache__/__init__.cpython-35.pyc,,
-sqlalchemy/testing/suite/__pycache__/test_sequence.cpython-35.pyc,,
-sqlalchemy/connectors/__pycache__/mysqldb.cpython-35.pyc,,
-sqlalchemy/__pycache__/processors.cpython-35.pyc,,
@@ -1,6 +1,6 @@
 Metadata-Version: 2.0
 Name: SQLAlchemy
-Version: 0.9.7
+Version: 1.0.12
 Summary: Database Abstraction Library
 Home-page: http://www.sqlalchemy.org
 Author: Mike Bayer
375 lib/python3.5/site-packages/SQLAlchemy-1.0.12.dist-info/RECORD Normal file
@@ -0,0 +1,375 @@
+SQLAlchemy-1.0.12.dist-info/DESCRIPTION.rst,sha256=ZN8fj2owI_rw0Emr3_RXqoNfTFkThjiZy7xcCzg1W_g,5013
+SQLAlchemy-1.0.12.dist-info/METADATA,sha256=fntYBelbmQAxIrj5_YGLpIGPzwQBxiA_6kJwVdrwMF4,5786
+SQLAlchemy-1.0.12.dist-info/RECORD,,
+SQLAlchemy-1.0.12.dist-info/WHEEL,sha256=Er7DBTU_C2g_rTGCxcwhCKegQSKoYLj1ncusWiwlKwM,111
+SQLAlchemy-1.0.12.dist-info/metadata.json,sha256=QojGP97nxChuh8Zdlr9y3tpCLlGzDO622gq1_HEvFmo,965
+SQLAlchemy-1.0.12.dist-info/top_level.txt,sha256=rp-ZgB7D8G11ivXON5VGPjupT1voYmWqkciDt5Uaw_Q,11
+sqlalchemy/__init__.py,sha256=fTurvwmGkoRt_zdwxoZNWTHg6VdzvBpeHyPmUnexOK4,2112
+sqlalchemy/cprocessors.cpython-35m-darwin.so,sha256=U2_r5ou5V_I2IehYbMEPnqhrB5x0__8Zd7dpLTkWZO8,16924
+sqlalchemy/cresultproxy.cpython-35m-darwin.so,sha256=QSna4HE3XSia-2igDXCc_5VR9PyKwu89Tti5FttpmeM,18756
+sqlalchemy/cutils.cpython-35m-darwin.so,sha256=rUL24S3pi7e8KdIeS5oyseDDKpi2LwzIDQvzKNXCQ5Q,9668
+sqlalchemy/events.py,sha256=j8yref-XfuJxkPKbvnZmB4jeUAIujPcbLAzD2cKV4f4,43944
+sqlalchemy/exc.py,sha256=NhA5R5nDdducWkp0MXtlQ0-Q6iF_rhqkHWblIfuSYGk,11706
+sqlalchemy/inspection.py,sha256=zMa-2nt-OQ0Op1dqq0Z2XCnpdAFSTkqif5Kdi8Wz8AU,3093
+sqlalchemy/interfaces.py,sha256=XSx5y-HittAzc79lU4C7rPbTtSW_Hc2c89NqCy50tsQ,10967
+sqlalchemy/log.py,sha256=opX7UORq5N6_jWxN9aHX9OpiirwAcRA0qq-u5m4SMkQ,6712
+sqlalchemy/pool.py,sha256=-F51TIJYl0XGTV2_sdpV8C1m0jTTQaq0nAezdmSgr84,47220
+sqlalchemy/processors.py,sha256=Li1kdC-I0v03JxeOz4V7u4HAevK6LledyCPvaL06mYc,5220
+sqlalchemy/schema.py,sha256=rZzZJJ8dT9trLSYknFpHm0N1kRERYwhqHH3QD31SJjc,1182
+sqlalchemy/types.py,sha256=qcoy5xKaurDV4kaXr489GL2sz8FKkWX21Us3ZCqeasg,1650
+sqlalchemy/connectors/__init__.py,sha256=97YbriYu5mcljh7opc1JOScRlf3Tk8ldbn5urBVm4WY,278
+sqlalchemy/connectors/mxodbc.py,sha256=-0iqw2k8e-o3OkAKzoCWuAaEPxlEjslvfRM9hnVXENM,5348
+sqlalchemy/connectors/pyodbc.py,sha256=pG2yf3cEDtTr-w_m4to6jF5l8hZk6MJv69K3cg84NfY,6264
+sqlalchemy/connectors/zxJDBC.py,sha256=2KK_sVSgMsdW0ufZqAwgXjd1FsMb4hqbiUQRAkM0RYg,1868
+sqlalchemy/databases/__init__.py,sha256=BaQyAuMjXNpZYV47hCseHrDtPzTfSw-iqUQYxMWJddw,817
+sqlalchemy/dialects/__init__.py,sha256=7SMul8PL3gkbJRUwAwovHLae5qBBApRF-VcRwU-VtdU,1012
+sqlalchemy/dialects/postgres.py,sha256=heNVHys6E91DIBepXT3ls_4_6N8HTTahrZ49W5IR3M0,614
+sqlalchemy/dialects/firebird/__init__.py,sha256=QYmQ0SaGfq3YjDraCV9ALwqVW5A3KDUF0F6air_qp3Q,664
+sqlalchemy/dialects/firebird/base.py,sha256=IT0prWkh1TFSTke-BqGdVMGdof53zmWWk6zbJZ_TuuI,28170
+sqlalchemy/dialects/firebird/fdb.py,sha256=l4s6_8Z0HvqxgqGz0LNcKWP1qUmEc3M2XM718_drN34,4325
+sqlalchemy/dialects/firebird/kinterbasdb.py,sha256=kCsn2ed4u9fyjcyfEI3rXQdKvL05z9wtf5YjW9-NrvI,6299
+sqlalchemy/dialects/mssql/__init__.py,sha256=G12xmirGZgMzfUKZCA8BFfaCmqUDuYca9Fu2VP_eaks,1081
+sqlalchemy/dialects/mssql/adodbapi.py,sha256=dHZgS3pEDX39ixhlDfTtDcjCq6rdjF85VS7rIZ1TfYo,2493
+sqlalchemy/dialects/mssql/base.py,sha256=xqRmK_npoyH5gl626EjazVnu9TEArmrBIFme_avYFUg,66855
+sqlalchemy/dialects/mssql/information_schema.py,sha256=pwuTsgOCY5eSBW9w-g-pyJDRfyuZ_rOEXXNYRuAroCE,6418
+sqlalchemy/dialects/mssql/mxodbc.py,sha256=G9LypIeEizgxeShtDu2M7Vwm8NopnzaTmnZMD49mYeg,3856
+sqlalchemy/dialects/mssql/pymssql.py,sha256=w92w4YQzXdHb53AjCrBcIRHsf6jmie1iN9H7gJNGX4k,3079
+sqlalchemy/dialects/mssql/pyodbc.py,sha256=KRke1Hizrg3r5iYqxdBI0axXVQ_pZR_UPxLaAdF0mKk,9473
+sqlalchemy/dialects/mssql/zxjdbc.py,sha256=u4uBgwk0LbI7_I5CIvM3C4bBb0pmrw2_DqRh_ehJTkI,2282
+sqlalchemy/dialects/mysql/__init__.py,sha256=3cQ2juPT8LsZTicPa2J-0rCQjQIQaPgyBzxjV3O_7xs,1171
+sqlalchemy/dialects/mysql/base.py,sha256=rwC8fnhGZaAnsPB1Jhg4sTcrWE2hjxrZJ5deCS0rAOc,122869
+sqlalchemy/dialects/mysql/cymysql.py,sha256=nqsdQA8LBLIc6eilgX6qwkjm7szsUoqMTVYwK9kkfsE,2349
+sqlalchemy/dialects/mysql/gaerdbms.py,sha256=2MxtTsIqlpq_J32HHqDzz-5vu-mC51Lb7PvyGkJa73M,3387
+sqlalchemy/dialects/mysql/mysqlconnector.py,sha256=DMDm684Shk-ijVo7w-yidopYw7EC6EiOmJY56EPawok,5323
+sqlalchemy/dialects/mysql/mysqldb.py,sha256=McqROngxAknbLOXoUAG9o9mP9FQBLs-ouD-JqqI2Ses,6564
+sqlalchemy/dialects/mysql/oursql.py,sha256=rmdr-r66iJ2amqFeGvCohvE8WCl_i6R9KcgVG0uXOQs,8124
+sqlalchemy/dialects/mysql/pymysql.py,sha256=e-qehI-sASmAjEa0ajHqjZjlyJYWsb3RPQY4iBR5pz0,1504
+sqlalchemy/dialects/mysql/pyodbc.py,sha256=Ze9IOKw6ANVQj25IlmSGR8aaJhM0pMuRtbzKF7UsZCY,2665
+sqlalchemy/dialects/mysql/zxjdbc.py,sha256=LIhe2mHSRVgi8I7qmiTMVBRSpuWJVnuDtpHTUivIx0M,3942
+sqlalchemy/dialects/oracle/__init__.py,sha256=UhF2ZyPfT3EFAnP8ZjGng6GnWSzmAkjMax0Lucpn0Bg,797
+sqlalchemy/dialects/oracle/base.py,sha256=2KJO-sU2CVKK1rij6bAQ5ZFJv203_NmzT8dE5qor9wc,55961
+sqlalchemy/dialects/oracle/cx_oracle.py,sha256=-d5tHbNcCyjbgVtAvWfHgSY2yA8C9bvCzxhwkdWFNe0,38635
+sqlalchemy/dialects/oracle/zxjdbc.py,sha256=nC7XOCY3NdTLrEyIacNTnLDCaeVjWn59q8UYssJL8Wo,8112
+sqlalchemy/dialects/postgresql/__init__.py,sha256=SjCtM5b3EaGyRaTyg_i82sh_qjkLEIVUXW91XDihiCM,1299
+sqlalchemy/dialects/postgresql/base.py,sha256=xhdLeHuWioTv9LYW41pcIPsEjD2fyeh7JflkLKmZMB8,104230
+sqlalchemy/dialects/postgresql/constraints.py,sha256=8UDx_2TNQgqIUSRETZPhgninJigQ6rMfdRNI6vIt3Is,3119
+sqlalchemy/dialects/postgresql/hstore.py,sha256=n8Wsd7Uldk3bbg66tTa0NKjVqjhJUbF1mVeUsM7keXA,11402
+sqlalchemy/dialects/postgresql/json.py,sha256=MTlIGinMDa8iaVbZMOzYnremo0xL4tn2wyGTPwnvX6U,12215
+sqlalchemy/dialects/postgresql/pg8000.py,sha256=x6o3P8Ad0wKsuF9qeyip39BKc5ORJZ4nWxv-8qOdj0E,8375
+sqlalchemy/dialects/postgresql/psycopg2.py,sha256=4ac0upErNRJz6YWJYNbATCU3ncWFvat5kal_Cuq-Jhw,26953
+sqlalchemy/dialects/postgresql/psycopg2cffi.py,sha256=8R3POkJH8z8a2DxwKNmfmQOsxFqsg4tU_OnjGj3OfDA,1651
+sqlalchemy/dialects/postgresql/pypostgresql.py,sha256=raQRfZb8T9-c-jmq1w86Wci5QyiXgf_9_71OInT_sAw,2655
+sqlalchemy/dialects/postgresql/ranges.py,sha256=MihdGXMdmCM6ToIlrj7OJx9Qh_8BX8bv5PSaAepHmII,4814
+sqlalchemy/dialects/postgresql/zxjdbc.py,sha256=AhEGRiAy8q-GM0BStFcsLBgSwjxHkkwy2-BSroIoADo,1397
+sqlalchemy/dialects/sqlite/__init__.py,sha256=0wW0VOhE_RtFDpRcbwvvo3XtD6Y2-SDgG4K7468eh_w,736
+sqlalchemy/dialects/sqlite/base.py,sha256=_L9-854ITf8Fl2BgUymF9fKjDFvXSo7Pb2yuz1CMkDo,55007
+sqlalchemy/dialects/sqlite/pysqlcipher.py,sha256=sgXCqn8ZtNIeTDwyo253Kj5mn4TPlIW3AZCNNmURi2A,4129
+sqlalchemy/dialects/sqlite/pysqlite.py,sha256=G-Cg-iI-ErYsVjOH4UlQTEY9pLnLOLV89ik8q0-reuY,14980
+sqlalchemy/dialects/sybase/__init__.py,sha256=gwCgFR_C_hoj0Re7PiaW3zmKSWaLpsd96UVXdM7EnTM,894
+sqlalchemy/dialects/sybase/base.py,sha256=Xpl3vEd5VDyvoIRMg0DZa48Or--yBSrhaZ2CbTSCt0w,28853
+sqlalchemy/dialects/sybase/mxodbc.py,sha256=E_ask6yFSjyhNPvv7gQsvA41WmyxbBvRGWjCyPVr9Gs,901
+sqlalchemy/dialects/sybase/pyodbc.py,sha256=0a_gKwrIweJGcz3ZRYuQZb5BIvwjGmFEYBo9wGk66kI,2102
+sqlalchemy/dialects/sybase/pysybase.py,sha256=tu2V_EbtgxWYOvt-ybo5_lLiBQzsIFaAtF8e7S1_-rk,3208
+sqlalchemy/engine/__init__.py,sha256=fyIFw2R5wfLQzSbfE9Jz-28ZDP5RyB-5elNH92uTZYM,18803
+sqlalchemy/engine/base.py,sha256=cRqbbG0QuUG-NGs3GOPVQsU0WLsw5bLT0Y07Yf8OOfU,79399
+sqlalchemy/engine/default.py,sha256=U_yaliCazUHp6cfk_NVzhB4F_zOJSyy959rHyk40J4M,36548
+sqlalchemy/engine/interfaces.py,sha256=CmPYM_oDp1zAPH13sKmufO4Tuha6KA-fXRQq-K_3YTE,35908
+sqlalchemy/engine/reflection.py,sha256=jly5YN-cyjoBDxHs9qO6Mlgm1OZSb2NBNFALwZMEGxE,28590
+sqlalchemy/engine/result.py,sha256=ot5RQxa6kjoScXRUR-DTl0iJJISBhmyNTj1JZkZiNsk,44027
+sqlalchemy/engine/strategies.py,sha256=mwy-CTrnXzyaIA1TRQBQ_Z2O8wN0lnTNZwDefEWCR9A,8929
+sqlalchemy/engine/threadlocal.py,sha256=y4wOLjtbeY-dvp2GcJDtos6F2jzfP11JVAaSFwZ0zRM,4191
+sqlalchemy/engine/url.py,sha256=ZhS_Iqiu6V1kfIM2pcv3ud9fOPXkFOHBv8wiLOqbJhc,8228
+sqlalchemy/engine/util.py,sha256=Tvb9sIkyd6qOwIA-RsBmo5j877UXa5x-jQmhqnhHWRA,2338
+sqlalchemy/event/__init__.py,sha256=KnUVp-NVX6k276ntGffxgkjVmIWR22FSlzrbAKqQ6S4,419
+sqlalchemy/event/api.py,sha256=O2udbj5D7HdXcvsGBQk6-dK9CAFfePTypWOrUdqmhYY,5990
+sqlalchemy/event/attr.py,sha256=VfRJJl4RD24mQaIoDwArWL2hsGOX6ISSU6vKusVMNO0,12053
+sqlalchemy/event/base.py,sha256=DWDKZV19fFsLavu2cXOxXV8NhO3XuCbKcKamBKyXuME,9540
+sqlalchemy/event/legacy.py,sha256=ACnVeBUt8uwVfh1GNRu22cWCADC3CWZdrsBKzAd6UQQ,5814
+sqlalchemy/event/registry.py,sha256=13wx1qdEmcQeCoAmgf_WQEMuR43h3v7iyd2Re54QdOE,7786
+sqlalchemy/ext/__init__.py,sha256=smCZIGgjJprT4ddhuYSLZ8PrTn4NdXPP3j03a038SdE,322
+sqlalchemy/ext/associationproxy.py,sha256=y61Y4UIZNBit5lqk2WzdHTCXIWRrBg3hHbRVsqXjnqE,33422
+sqlalchemy/ext/automap.py,sha256=Aet-3zk2vbsJVLqigwZJYau0hB1D6Y21K65QVWeB5pc,41567
+sqlalchemy/ext/baked.py,sha256=BnVaB4pkQxHk-Fyz4nUw225vCxO_zrDuVC6t5cSF9x8,16967
+sqlalchemy/ext/compiler.py,sha256=aSSlySoTsqN-JkACWFIhv3pq2CuZwxKm6pSDfQoc10Q,16257
+sqlalchemy/ext/horizontal_shard.py,sha256=XEBYIfs0YrTt_2vRuaBY6C33ZOZMUHQb2E4X2s3Szns,4814
+sqlalchemy/ext/hybrid.py,sha256=wNXvuYEEmKy-Nc6z7fu1c2gNWCMOiQA0N14Y3FCq5lo,27989
+sqlalchemy/ext/instrumentation.py,sha256=HRgNiuYJ90_uSKC1iDwsEl8_KXscMQkEb9KeElk-yLE,14856
+sqlalchemy/ext/mutable.py,sha256=lx7b_ewFVe7O6I4gTXdi9M6C6TqxWCFiViqCM2VwUac,25444
+sqlalchemy/ext/orderinglist.py,sha256=UCkuZxTWAQ0num-b5oNm8zNJAmVuIFcbFXt5e7JPx-U,13816
+sqlalchemy/ext/serializer.py,sha256=fK3N1miYF16PSIZDjLFS2zI7y-scZ9qtmopXIfzPqrA,5586
+sqlalchemy/ext/declarative/__init__.py,sha256=Jpwf2EukqwNe4RzDfCmX1p-hQ6pPhJEIL_xunaER3tw,756
+sqlalchemy/ext/declarative/api.py,sha256=PdoO_jh50TWaMvXqnjNh-vX42VqB75ZyliluilphvsU,23317
+sqlalchemy/ext/declarative/base.py,sha256=96SJBOfxpTMsU2jAHrvuXbsjUUJ7TvbLm11R8Hy2Irc,25231
+sqlalchemy/ext/declarative/clsregistry.py,sha256=jaLLSr-66XvLnA1Z9kxjKatH_XHxWchqEXMKwvjKAXk,10817
+sqlalchemy/orm/__init__.py,sha256=UzDockQEVMaWvr-FE4y1rptrMb5uX5k8v_UNQs82qFY,8033
+sqlalchemy/orm/attributes.py,sha256=OmXkppJEZxRGc0acZZZkSbUhdfDl8ry3Skmvzl3OtLQ,56510
+sqlalchemy/orm/base.py,sha256=F0aRZGK2_1F8phwBHnVYaChkAb-nnTRoFE1VKSvmAwA,14634
+sqlalchemy/orm/collections.py,sha256=TFutWIn_c07DI48FDOKMsFMnAoQB3BG2FnEMGzEF3iI,53549
+sqlalchemy/orm/dependency.py,sha256=phB8nS1788FSd4dWa2j9d4uj6QFlRL7nzcXvh3Bb7Zo,46192
+sqlalchemy/orm/deprecated_interfaces.py,sha256=A63t6ivbZB3Wq8vWgL8I05uTRR6whcWnIPkquuTIPXU,18254
+sqlalchemy/orm/descriptor_props.py,sha256=uk5r77w1VUWVgn0bkgOItkAlMh9FRgeT6OCgOHz3_bM,25141
+sqlalchemy/orm/dynamic.py,sha256=I_YP7X-H9HLjeFHmYgsOas6JPdqg0Aqe0kaltt4HVzA,13283
+sqlalchemy/orm/evaluator.py,sha256=Hozggsd_Fi0YyqHrr9-tldtOA9NLX0MVBF4e2vSM6GY,4731
+sqlalchemy/orm/events.py,sha256=yRaoXlBL78b3l11itTrAy42UhLu42-7cgXKCFUGNXSg,69410
+sqlalchemy/orm/exc.py,sha256=P5lxi5RMFokiHL136VBK0AP3UmAlJcSDHtzgo-M6Kgs,5439
+sqlalchemy/orm/identity.py,sha256=zsb8xOZaPYKvs4sGhyxW21mILQDrtdSuzD4sTyeKdJs,9021
+sqlalchemy/orm/instrumentation.py,sha256=xtq9soM3mpMws7xqNJIFYXqKw65p2nnxCTfmMpuvpeI,17510
+sqlalchemy/orm/interfaces.py,sha256=AqitvZ_BBkB6L503uhdH55nxHplleJ2kQMwM7xKq9Sc,21552
+sqlalchemy/orm/loading.py,sha256=cjC8DQ5g8_rMxroYrYHfW5s35Z5OFSNBUu0-LpxW7hI,22878
+sqlalchemy/orm/mapper.py,sha256=sfooeslzwWAKN7WNIQoZ2Y3u_mCyIxd0tebp4yEUu8k,115074
+sqlalchemy/orm/path_registry.py,sha256=8Pah0P8yPVUyRjoET7DvIMGtM5PC8HZJC4GtxAyqVAs,8370
+sqlalchemy/orm/persistence.py,sha256=WzUUNm1UGm5mGxbv94hLTQowEDNoXfU1VoyGnoKeN_g,51028
+sqlalchemy/orm/properties.py,sha256=HR3eoY3Ze3FUPPNCXM_FruWz4pEMWrGlqtCGiK2G1qE,10426
+sqlalchemy/orm/query.py,sha256=2q2XprzbZhIlAbs0vihIr9dgqfJtcbrjNewgE9q26gE,147616
+sqlalchemy/orm/relationships.py,sha256=79LRGGz8MxsKsAlv0vuZ6MYZXzDXXtfiOCZg-IQ9hiU,116992
+sqlalchemy/orm/scoping.py,sha256=Ao-K4iqg4pBp7Si5JOAlro5zUL_r500TC3lVLcFMLDs,6421
+sqlalchemy/orm/session.py,sha256=yctpvCsLUcFv9Sy8keT1SElZ2VH5DNScYtO7Z77ptYI,111314
+sqlalchemy/orm/state.py,sha256=4LwwftOtPQldH12SKZV2UFgzqPOCj40QfQ08knZs0_E,22984
+sqlalchemy/orm/strategies.py,sha256=rdLEs2pPrF8nqcQqezyG-fGdmE11r22fUva4ES3KGOE,58529
+sqlalchemy/orm/strategy_options.py,sha256=_z7ZblWCnXh8bZpGSOXDoUwtdUqnXdCaWfKXYDgCuH0,34973
+sqlalchemy/orm/sync.py,sha256=B-d-H1Gzw1TkflpvgJeQghwTzqObzhZCQdvEdSPyDeE,5451
+sqlalchemy/orm/unitofwork.py,sha256=EQvZ7RZ-u5wJT51BWTeMJJi-tt22YRnmqywGUCn0Qrc,23343
+sqlalchemy/orm/util.py,sha256=Mj3NXDd8Mwp4O5Vr5zvRGFUZRlB65WpExdDBFJp04wQ,38092
+sqlalchemy/sql/__init__.py,sha256=IFCJYIilmmAQRnSDhv9Y6LQUSpx6pUU5zp9VT7sOx0c,1737
+sqlalchemy/sql/annotation.py,sha256=8ncgAVUo5QCoinApKjREi8esWNMFklcBqie8Q42KsaQ,6136
+sqlalchemy/sql/base.py,sha256=TuXOp7z0Q30qKAjhgcsts6WGvRbvg6F7OBojMQAxjX0,20990
+sqlalchemy/sql/compiler.py,sha256=G0Ft_Dmq1AousO66eagPhI0g9Vkqui_c_LjqY0AbImU,100710
+sqlalchemy/sql/crud.py,sha256=X86dyvzEnbj0-oeJO5ufi6zXxbSKBtDeu5JHlNg-BJU,19837
+sqlalchemy/sql/ddl.py,sha256=nkjd_B4lKwC2GeyPjE0ZtRB9RKXccQL1g1XoZ4p69sM,37540
+sqlalchemy/sql/default_comparator.py,sha256=QaowWtW4apULq_aohDvmj97j0sDtHQQjMRdNxXm83vk,10447
+sqlalchemy/sql/dml.py,sha256=7846H52IMJfMYi5Jd-Cv6Hy9hZM4dkonXbjfBjl5ED4,33330
+sqlalchemy/sql/elements.py,sha256=MLeecC5dMqeekZmFbPn0J-ODKJj5DBDE5v6kuSkq66I,132898
+sqlalchemy/sql/expression.py,sha256=vFZ9MmBlC9Fg8IYzLMAwXgcsnXZhkZbUstY6dO8BFGY,5833
+sqlalchemy/sql/functions.py,sha256=ZYKyvPnVKZMtHyyjyNwK0M5UWPrZmFz3vtTqHN-8658,18533
+sqlalchemy/sql/naming.py,sha256=foE2lAzngLCFXCeHrpv0S4zT23GCnZLCiata2MPo0kE,4662
+sqlalchemy/sql/operators.py,sha256=UeZgb7eRhWd4H7OfJZkx0ZWOjvo5chIUXQsBAIeeTDY,23013
+sqlalchemy/sql/schema.py,sha256=awhLY5YjUBah8ZYxW9FBfe6lH0v4fW0UJLTNApnx7E0,145511
+sqlalchemy/sql/selectable.py,sha256=o1Hom00WGHjI21Mdb5fkX-f0k2nksQNb_txT0KWK1zQ,118995
+sqlalchemy/sql/sqltypes.py,sha256=JGxizqIjO1WFuZpppWj1Yi5cvCyBczb1JqUQeuhQn8s,54879
+sqlalchemy/sql/type_api.py,sha256=Xe6yH4slgdLA8HRjT19GBOou51SS9o4oUhyK0xfn04c,42846
+sqlalchemy/sql/util.py,sha256=7AsOsyhIq2eSLMWtwvqfTLc2MdCotGzEKQKFE3wk5sk,20382
+sqlalchemy/sql/visitors.py,sha256=4ipGvAkqFaSAWgyNuKjx5x_ms8GIy9aq-wC5pj4-Z3g,10271
+sqlalchemy/testing/__init__.py,sha256=MwKimX0atzs_SmG2j74GXLiyI8O56e3DLq96tcoL0TM,1095
+sqlalchemy/testing/assertions.py,sha256=r1I2nHC599VZcY-5g0JYRQl8bl9kjkf6WFOooOmJ2eE,16112
+sqlalchemy/testing/assertsql.py,sha256=-fP9Iuhdu52BJoT1lEj_KED8jy5ay_XiJu7i4Ry9eWA,12335
+sqlalchemy/testing/config.py,sha256=nqvVm55Vk0BVNjk1Wj3aYR65j_EEEepfB-W9QSFLU-k,2469
+sqlalchemy/testing/distutils_run.py,sha256=tkURrZRwgFiSwseKm1iJRkSjKf2Rtsb3pOXRWtACTHI,247
+sqlalchemy/testing/engines.py,sha256=u6GlDMXt0FKqVTQe_QJ5JXAnkA6W-xdw6Fe_5gMAQhg,9359
+sqlalchemy/testing/entities.py,sha256=IXqTgAihV-1TZyxL0MWdZzu4rFtxdbWKWFetIJWNGM4,2992
+sqlalchemy/testing/exclusions.py,sha256=WuH_tVK5fZJWe8Hu2LzNB4HNQMa_iAUaGC-_6mHUdIM,12570
+sqlalchemy/testing/fixtures.py,sha256=q4nK-81z2EWs17TjeJtPmnaJUCtDdoUiIU7jgLq3l_w,10721
+sqlalchemy/testing/mock.py,sha256=vj5q-GzJrLW6mMVDLqsppxBu_p7K49VvjfiVt5tn0o8,630
+sqlalchemy/testing/pickleable.py,sha256=8I8M4H1XN29pZPMxZdYkmpKWfwzPsUn6WK5FX4UP9L4,2641
+sqlalchemy/testing/profiling.py,sha256=Q_wOTS5JtcGBcs2eCYIvoRoDS_FW_HcfEW3hXWB87Zg,8392
+sqlalchemy/testing/provision.py,sha256=mU9g6JZEHIshqUkE6PWu-t61FVPs_cUJtEtVFRavj9g,9377
+sqlalchemy/testing/replay_fixture.py,sha256=iAxg7XsFkKSCcJnrNPQNJfjMxOgeBAa-ShOkywWPJ4w,5429
+sqlalchemy/testing/requirements.py,sha256=aIdvbfugMzrlVdldEbpcwretX-zjiukPhPUSZgulrzU,19949
+sqlalchemy/testing/runner.py,sha256=hpNH6MNTif4TnBRySxpm92KgFwDK0mOa8eF7wZXumTI,1607
+sqlalchemy/testing/schema.py,sha256=agOzrIMvmuUCeVZY5mYjJ1eJmOP69-wa0gZALtNtJBk,3446
+sqlalchemy/testing/util.py,sha256=IJ688AWzichtXVwWgYf_A4BUbcXPGsK6BQP5fvY3h-U,7544
+sqlalchemy/testing/warnings.py,sha256=-KskRAh1RkJ_69UIY_WR7i15u21U3gDLQ6nKlnJT7_w,987
+sqlalchemy/testing/plugin/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+sqlalchemy/testing/plugin/bootstrap.py,sha256=Iw8R-d1gqoz_NKFtPyGfdX56QPcQHny_9Lvwov65aVY,1634
+sqlalchemy/testing/plugin/noseplugin.py,sha256=In79x6zs9DOngfoYpaHojihWlSd4PeS7Nwzh3M_KNM4,2847
+sqlalchemy/testing/plugin/plugin_base.py,sha256=h4RI4nPNdNq9kYABp6IP89Eknm29q8usgO-nWb8Eobc,17120
+sqlalchemy/testing/plugin/pytestplugin.py,sha256=Pbc62y7Km0PHXd4M9dm5ThBwrlXkM4WtIX-W1pOaM84,5812
+sqlalchemy/testing/suite/__init__.py,sha256=wqCTrb28i5FwhQZOyXVlnz3mA94iQOUBio7lszkFq-g,471
+sqlalchemy/testing/suite/test_ddl.py,sha256=Baw0ou9nKdADmrRuXgWzF1FZx0rvkkw3JHc6yw5BN0M,1838
+sqlalchemy/testing/suite/test_dialect.py,sha256=ORQPXUt53XtO-5ENlWgs8BpsSdPBDjyMRl4W2UjXLI4,1165
+sqlalchemy/testing/suite/test_insert.py,sha256=nP0mgVpsVs72MHMADmihB1oXLbFBpsYsLGO3BlQ7RLU,8132
+sqlalchemy/testing/suite/test_reflection.py,sha256=HtJRsJ_vuNMrOhnPTvuIvRg66OakSaSpeCU36zhaSPg,24616
+sqlalchemy/testing/suite/test_results.py,sha256=oAcO1tD0I7c9ErMeSvSZBZfz1IBDMJHJTf64Y1pBodk,6685
+sqlalchemy/testing/suite/test_select.py,sha256=u0wAz1g-GrAFdZpG4zwSrVckVtjULvjlbd0Z1U1jHAA,5729
+sqlalchemy/testing/suite/test_sequence.py,sha256=fmBR4Pc5tOLSkXFxfcqwGx1z3xaxeJeUyqDnTakKTBU,3831
+sqlalchemy/testing/suite/test_types.py,sha256=UKa-ZPdpz16mVKvT-9ISRAfqdrqiKaE7IA-_phQQuxo,17088
+sqlalchemy/testing/suite/test_update_delete.py,sha256=r5p467r-EUsjEcWGfUE0VPIfN4LLXZpLRnnyBLyyjl4,1582
+sqlalchemy/util/__init__.py,sha256=G06a5vBxg27RtWzY6dPZHt1FO8qtOiy_2C9PHTTMblI,2520
+sqlalchemy/util/_collections.py,sha256=JZkeYK4GcIE1A5s6MAvHhmUp_X4wp6r7vMGT-iMftZ8,27842
+sqlalchemy/util/compat.py,sha256=80OXp3D-F_R-pLf7s-zITPlfCqG1s_5o6KTlY1g2p0Q,6821
+sqlalchemy/util/deprecations.py,sha256=D_LTsfb9jHokJtPEWNDRMJOc372xRGNjputAiTIysRU,4403
+sqlalchemy/util/langhelpers.py,sha256=Nhe3Y9ieK6JaFYejjYosVOjOSSIBT2V385Hu6HGcyZk,41607
+sqlalchemy/util/queue.py,sha256=rs3W0LDhKt7M_dlQEjYpI9KS-bzQmmwN38LE_-RRVvU,6548
+sqlalchemy/util/topological.py,sha256=xKsYjjAat4p8cdqRHKwibLzr6WONbPTC0X8Mqg7jYno,2794
+sqlalchemy/sql/__pycache__/__init__.cpython-35.pyc,,
+sqlalchemy/dialects/mysql/__pycache__/cymysql.cpython-35.pyc,,
+sqlalchemy/orm/__pycache__/util.cpython-35.pyc,,
+sqlalchemy/testing/suite/__pycache__/test_reflection.cpython-35.pyc,,
+sqlalchemy/orm/__pycache__/path_registry.cpython-35.pyc,,
+sqlalchemy/testing/suite/__pycache__/test_update_delete.cpython-35.pyc,,
+sqlalchemy/ext/__pycache__/baked.cpython-35.pyc,,
+sqlalchemy/dialects/sybase/__pycache__/pysybase.cpython-35.pyc,,
+sqlalchemy/testing/__pycache__/entities.cpython-35.pyc,,
+sqlalchemy/dialects/firebird/__pycache__/kinterbasdb.cpython-35.pyc,,
+sqlalchemy/event/__pycache__/registry.cpython-35.pyc,,
+sqlalchemy/connectors/__pycache__/zxJDBC.cpython-35.pyc,,
+sqlalchemy/engine/__pycache__/result.cpython-35.pyc,,
+sqlalchemy/dialects/mssql/__pycache__/pyodbc.cpython-35.pyc,,
+sqlalchemy/ext/__pycache__/hybrid.cpython-35.pyc,,
+sqlalchemy/orm/__pycache__/interfaces.cpython-35.pyc,,
+sqlalchemy/sql/__pycache__/elements.cpython-35.pyc,,
+sqlalchemy/sql/__pycache__/naming.cpython-35.pyc,,
+sqlalchemy/dialects/postgresql/__pycache__/psycopg2.cpython-35.pyc,,
+sqlalchemy/testing/__pycache__/fixtures.cpython-35.pyc,,
+sqlalchemy/sql/__pycache__/base.cpython-35.pyc,,
+sqlalchemy/sql/__pycache__/compiler.cpython-35.pyc,,
+sqlalchemy/orm/__pycache__/state.cpython-35.pyc,,
+sqlalchemy/util/__pycache__/topological.cpython-35.pyc,,
+sqlalchemy/util/__pycache__/queue.cpython-35.pyc,,
+sqlalchemy/testing/suite/__pycache__/test_select.cpython-35.pyc,,
+sqlalchemy/sql/__pycache__/dml.cpython-35.pyc,,
+sqlalchemy/dialects/mysql/__pycache__/pyodbc.cpython-35.pyc,,
+sqlalchemy/orm/__pycache__/mapper.cpython-35.pyc,,
+sqlalchemy/orm/__pycache__/evaluator.cpython-35.pyc,,
+sqlalchemy/ext/__pycache__/__init__.cpython-35.pyc,,
+sqlalchemy/sql/__pycache__/crud.cpython-35.pyc,,
+sqlalchemy/ext/__pycache__/automap.cpython-35.pyc,,
+sqlalchemy/util/__pycache__/langhelpers.cpython-35.pyc,,
+sqlalchemy/__pycache__/exc.cpython-35.pyc,,
+sqlalchemy/dialects/__pycache__/__init__.cpython-35.pyc,,
+sqlalchemy/testing/__pycache__/__init__.cpython-35.pyc,,
+sqlalchemy/ext/__pycache__/horizontal_shard.cpython-35.pyc,,
+sqlalchemy/__pycache__/interfaces.cpython-35.pyc,,
+sqlalchemy/orm/__pycache__/properties.cpython-35.pyc,,
+sqlalchemy/orm/__pycache__/dynamic.cpython-35.pyc,,
+sqlalchemy/dialects/firebird/__pycache__/__init__.cpython-35.pyc,,
+sqlalchemy/orm/__pycache__/base.cpython-35.pyc,,
+sqlalchemy/testing/suite/__pycache__/test_sequence.cpython-35.pyc,,
+sqlalchemy/engine/__pycache__/reflection.cpython-35.pyc,,
+sqlalchemy/testing/plugin/__pycache__/bootstrap.cpython-35.pyc,,
+sqlalchemy/dialects/postgresql/__pycache__/json.cpython-35.pyc,,
+sqlalchemy/orm/__pycache__/persistence.cpython-35.pyc,,
+sqlalchemy/testing/__pycache__/assertsql.cpython-35.pyc,,
+sqlalchemy/ext/__pycache__/compiler.cpython-35.pyc,,
+sqlalchemy/sql/__pycache__/annotation.cpython-35.pyc,,
+sqlalchemy/util/__pycache__/_collections.cpython-35.pyc,,
+sqlalchemy/dialects/mssql/__pycache__/zxjdbc.cpython-35.pyc,,
+sqlalchemy/dialects/postgresql/__pycache__/pypostgresql.cpython-35.pyc,,
+sqlalchemy/dialects/sybase/__pycache__/mxodbc.cpython-35.pyc,,
+sqlalchemy/testing/plugin/__pycache__/plugin_base.cpython-35.pyc,,
+sqlalchemy/orm/__pycache__/session.cpython-35.pyc,,
+sqlalchemy/testing/__pycache__/distutils_run.cpython-35.pyc,,
+sqlalchemy/__pycache__/processors.cpython-35.pyc,,
+sqlalchemy/dialects/mssql/__pycache__/__init__.cpython-35.pyc,,
+sqlalchemy/sql/__pycache__/functions.cpython-35.pyc,,
+sqlalchemy/dialects/postgresql/__pycache__/ranges.cpython-35.pyc,,
+sqlalchemy/orm/__pycache__/loading.cpython-35.pyc,,
+sqlalchemy/ext/declarative/__pycache__/api.cpython-35.pyc,,
+sqlalchemy/dialects/mssql/__pycache__/base.cpython-35.pyc,,
+sqlalchemy/dialects/sqlite/__pycache__/base.cpython-35.pyc,,
+sqlalchemy/orm/__pycache__/exc.cpython-35.pyc,,
+sqlalchemy/dialects/oracle/__pycache__/__init__.cpython-35.pyc,,
+sqlalchemy/testing/suite/__pycache__/test_insert.cpython-35.pyc,,
+sqlalchemy/__pycache__/__init__.cpython-35.pyc,,
+sqlalchemy/sql/__pycache__/selectable.cpython-35.pyc,,
+sqlalchemy/testing/suite/__pycache__/test_types.cpython-35.pyc,,
+sqlalchemy/orm/__pycache__/strategy_options.cpython-35.pyc,,
+sqlalchemy/orm/__pycache__/collections.cpython-35.pyc,,
+sqlalchemy/orm/__pycache__/dependency.cpython-35.pyc,,
+sqlalchemy/ext/__pycache__/orderinglist.cpython-35.pyc,,
+sqlalchemy/testing/suite/__pycache__/test_ddl.cpython-35.pyc,,
+sqlalchemy/event/__pycache__/attr.cpython-35.pyc,,
+sqlalchemy/sql/__pycache__/sqltypes.cpython-35.pyc,,
+sqlalchemy/sql/__pycache__/type_api.cpython-35.pyc,,
+sqlalchemy/sql/__pycache__/schema.cpython-35.pyc,,
+sqlalchemy/orm/__pycache__/scoping.cpython-35.pyc,,
+sqlalchemy/dialects/oracle/__pycache__/cx_oracle.cpython-35.pyc,,
+sqlalchemy/testing/__pycache__/provision.cpython-35.pyc,,
+sqlalchemy/orm/__pycache__/relationships.cpython-35.pyc,,
+sqlalchemy/dialects/mssql/__pycache__/pymssql.cpython-35.pyc,,
+sqlalchemy/dialects/mysql/__pycache__/zxjdbc.cpython-35.pyc,,
+sqlalchemy/event/__pycache__/api.cpython-35.pyc,,
+sqlalchemy/sql/__pycache__/ddl.cpython-35.pyc,,
+sqlalchemy/dialects/postgresql/__pycache__/hstore.cpython-35.pyc,,
+sqlalchemy/orm/__pycache__/unitofwork.cpython-35.pyc,,
+sqlalchemy/engine/__pycache__/default.cpython-35.pyc,,
+sqlalchemy/testing/plugin/__pycache__/__init__.cpython-35.pyc,,
+sqlalchemy/databases/__pycache__/__init__.cpython-35.pyc,,
+sqlalchemy/orm/__pycache__/identity.cpython-35.pyc,,
+sqlalchemy/engine/__pycache__/util.cpython-35.pyc,,
+sqlalchemy/testing/__pycache__/util.cpython-35.pyc,,
+sqlalchemy/testing/__pycache__/config.cpython-35.pyc,,
+sqlalchemy/orm/__pycache__/descriptor_props.cpython-35.pyc,,
+sqlalchemy/engine/__pycache__/url.cpython-35.pyc,,
+sqlalchemy/orm/__pycache__/sync.cpython-35.pyc,,
+sqlalchemy/testing/__pycache__/warnings.cpython-35.pyc,,
+sqlalchemy/util/__pycache__/__init__.cpython-35.pyc,,
+sqlalchemy/engine/__pycache__/__init__.cpython-35.pyc,,
+sqlalchemy/orm/__pycache__/strategies.cpython-35.pyc,,
+sqlalchemy/testing/suite/__pycache__/test_results.cpython-35.pyc,,
+sqlalchemy/testing/__pycache__/schema.cpython-35.pyc,,
+sqlalchemy/dialects/postgresql/__pycache__/base.cpython-35.pyc,,
+sqlalchemy/util/__pycache__/compat.cpython-35.pyc,,
+sqlalchemy/__pycache__/events.cpython-35.pyc,,
+sqlalchemy/dialects/postgresql/__pycache__/zxjdbc.cpython-35.pyc,,
+sqlalchemy/connectors/__pycache__/__init__.cpython-35.pyc,,
+sqlalchemy/dialects/sqlite/__pycache__/__init__.cpython-35.pyc,,
+sqlalchemy/testing/plugin/__pycache__/pytestplugin.cpython-35.pyc,,
+sqlalchemy/ext/declarative/__pycache__/clsregistry.cpython-35.pyc,,
+sqlalchemy/dialects/oracle/__pycache__/zxjdbc.cpython-35.pyc,,
+sqlalchemy/dialects/mysql/__pycache__/base.cpython-35.pyc,,
+sqlalchemy/event/__pycache__/legacy.cpython-35.pyc,,
+sqlalchemy/dialects/postgresql/__pycache__/constraints.cpython-35.pyc,,
+sqlalchemy/dialects/sybase/__pycache__/pyodbc.cpython-35.pyc,,
+sqlalchemy/orm/__pycache__/instrumentation.cpython-35.pyc,,
+sqlalchemy/testing/__pycache__/profiling.cpython-35.pyc,,
+sqlalchemy/dialects/postgresql/__pycache__/pg8000.cpython-35.pyc,,
+sqlalchemy/__pycache__/types.cpython-35.pyc,,
+sqlalchemy/ext/__pycache__/instrumentation.cpython-35.pyc,,
+sqlalchemy/dialects/__pycache__/postgres.cpython-35.pyc,,
+sqlalchemy/dialects/sybase/__pycache__/__init__.cpython-35.pyc,,
+sqlalchemy/dialects/firebird/__pycache__/base.cpython-35.pyc,,
+sqlalchemy/dialects/oracle/__pycache__/base.cpython-35.pyc,,
+sqlalchemy/dialects/mysql/__pycache__/pymysql.cpython-35.pyc,,
+sqlalchemy/testing/__pycache__/engines.cpython-35.pyc,,
+sqlalchemy/dialects/mysql/__pycache__/mysqlconnector.cpython-35.pyc,,
+sqlalchemy/dialects/sqlite/__pycache__/pysqlcipher.cpython-35.pyc,,
+sqlalchemy/dialects/postgresql/__pycache__/psycopg2cffi.cpython-35.pyc,,
+sqlalchemy/dialects/mysql/__pycache__/mysqldb.cpython-35.pyc,,
+sqlalchemy/dialects/postgresql/__pycache__/__init__.cpython-35.pyc,,
+sqlalchemy/event/__pycache__/__init__.cpython-35.pyc,,
+sqlalchemy/testing/__pycache__/requirements.cpython-35.pyc,,
+sqlalchemy/dialects/mssql/__pycache__/adodbapi.cpython-35.pyc,,
+sqlalchemy/orm/__pycache__/query.cpython-35.pyc,,
+sqlalchemy/ext/__pycache__/mutable.cpython-35.pyc,,
+sqlalchemy/event/__pycache__/base.cpython-35.pyc,,
+sqlalchemy/engine/__pycache__/base.cpython-35.pyc,,
+sqlalchemy/sql/__pycache__/expression.cpython-35.pyc,,
+sqlalchemy/__pycache__/pool.cpython-35.pyc,,
+sqlalchemy/connectors/__pycache__/pyodbc.cpython-35.pyc,,
+sqlalchemy/dialects/mysql/__pycache__/gaerdbms.cpython-35.pyc,,
+sqlalchemy/testing/__pycache__/assertions.cpython-35.pyc,,
+sqlalchemy/__pycache__/schema.cpython-35.pyc,,
+sqlalchemy/dialects/mysql/__pycache__/oursql.cpython-35.pyc,,
+sqlalchemy/dialects/firebird/__pycache__/fdb.cpython-35.pyc,,
+sqlalchemy/__pycache__/log.cpython-35.pyc,,
+sqlalchemy/engine/__pycache__/strategies.cpython-35.pyc,,
+sqlalchemy/dialects/sybase/__pycache__/base.cpython-35.pyc,,
+sqlalchemy/sql/__pycache__/visitors.cpython-35.pyc,,
+sqlalchemy/connectors/__pycache__/mxodbc.cpython-35.pyc,,
+sqlalchemy/engine/__pycache__/threadlocal.cpython-35.pyc,,
+sqlalchemy/ext/declarative/__pycache__/base.cpython-35.pyc,,
+sqlalchemy/testing/__pycache__/runner.cpython-35.pyc,,
+sqlalchemy/util/__pycache__/deprecations.cpython-35.pyc,,
+sqlalchemy/testing/suite/__pycache__/test_dialect.cpython-35.pyc,,
+sqlalchemy/engine/__pycache__/interfaces.cpython-35.pyc,,
+sqlalchemy/testing/suite/__pycache__/__init__.cpython-35.pyc,,
+sqlalchemy/ext/declarative/__pycache__/__init__.cpython-35.pyc,,
+sqlalchemy/testing/plugin/__pycache__/noseplugin.cpython-35.pyc,,
+sqlalchemy/ext/__pycache__/associationproxy.cpython-35.pyc,,
+sqlalchemy/testing/__pycache__/pickleable.cpython-35.pyc,,
+sqlalchemy/orm/__pycache__/attributes.cpython-35.pyc,,
+sqlalchemy/sql/__pycache__/default_comparator.cpython-35.pyc,,
+sqlalchemy/dialects/mysql/__pycache__/__init__.cpython-35.pyc,,
+sqlalchemy/dialects/mssql/__pycache__/mxodbc.cpython-35.pyc,,
+sqlalchemy/orm/__pycache__/__init__.cpython-35.pyc,,
+sqlalchemy/testing/__pycache__/exclusions.cpython-35.pyc,,
+sqlalchemy/__pycache__/inspection.cpython-35.pyc,,
+sqlalchemy/dialects/mssql/__pycache__/information_schema.cpython-35.pyc,,
+sqlalchemy/testing/__pycache__/mock.cpython-35.pyc,,
+sqlalchemy/orm/__pycache__/deprecated_interfaces.cpython-35.pyc,,
+sqlalchemy/sql/__pycache__/operators.cpython-35.pyc,,
+sqlalchemy/testing/__pycache__/replay_fixture.cpython-35.pyc,,
+sqlalchemy/orm/__pycache__/events.cpython-35.pyc,,
+sqlalchemy/dialects/sqlite/__pycache__/pysqlite.cpython-35.pyc,,
+sqlalchemy/ext/__pycache__/serializer.cpython-35.pyc,,
+sqlalchemy/sql/__pycache__/util.cpython-35.pyc,,
|
|
@@ -1 +1 @@
-{"generator": "bdist_wheel (0.26.0)", "summary": "Database Abstraction Library", "classifiers": ["Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Programming Language :: Python", "Programming Language :: Python :: 3", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: Jython", "Programming Language :: Python :: Implementation :: PyPy", "Topic :: Database :: Front-Ends", "Operating System :: OS Independent"], "extensions": {"python.details": {"project_urls": {"Home": "http://www.sqlalchemy.org"}, "contacts": [{"email": "mike_mp@zzzcomputing.com", "name": "Mike Bayer", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}}}, "license": "MIT License", "metadata_version": "2.0", "name": "SQLAlchemy", "version": "0.9.7", "test_requires": [{"requires": ["mock", "pytest (>=2.5.2)"]}]}
+{"generator": "bdist_wheel (0.26.0)", "summary": "Database Abstraction Library", "classifiers": ["Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Programming Language :: Python", "Programming Language :: Python :: 3", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: Jython", "Programming Language :: Python :: Implementation :: PyPy", "Topic :: Database :: Front-Ends", "Operating System :: OS Independent"], "extensions": {"python.details": {"project_urls": {"Home": "http://www.sqlalchemy.org"}, "contacts": [{"email": "mike_mp@zzzcomputing.com", "name": "Mike Bayer", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}}}, "license": "MIT License", "metadata_version": "2.0", "name": "SQLAlchemy", "version": "1.0.12", "test_requires": [{"requires": ["mock", "pytest (>=2.5.2)", "pytest-xdist"]}]}
@@ -1,5 +1,5 @@
 # sqlalchemy/__init__.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under
@@ -15,6 +15,7 @@ from .sql import (
     case,
     cast,
     collate,
+    column,
     delete,
     desc,
     distinct,
@@ -24,6 +25,7 @@ from .sql import (
     extract,
     false,
     func,
+    funcfilter,
     insert,
     intersect,
     intersect_all,
@@ -39,6 +41,7 @@ from .sql import (
     over,
     select,
     subquery,
+    table,
     text,
     true,
     tuple_,
@@ -117,7 +120,7 @@ from .schema import (
 from .inspection import inspect
 from .engine import create_engine, engine_from_config
 
-__version__ = '0.9.7'
+__version__ = '1.0.12'
 
 
 def __go(lcls):
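As a quick, illustrative check (assuming the upgraded SQLAlchemy 1.0.12 is
installed), the names added to the import block above — ``column``, ``table``
and ``funcfilter`` — become importable straight from the top-level package::

    import sqlalchemy
    print(sqlalchemy.__version__)      # '1.0.12'

    # lightweight column/table constructs, newly re-exported at top level
    from sqlalchemy import column, table, funcfilter
    print(column('x') == 5)            # renders "x = :x_1"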
@@ -1,5 +1,5 @@
 # connectors/__init__.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under
@@ -1,5 +1,5 @@
 # connectors/mxodbc.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under
@@ -1,145 +0,0 @@
-# connectors/mysqldb.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
-# <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""Define behaviors common to MySQLdb dialects.
-
-Currently includes MySQL and Drizzle.
-
-"""
-
-from . import Connector
-from ..engine import base as engine_base, default
-from ..sql import operators as sql_operators
-from .. import exc, log, schema, sql, types as sqltypes, util, processors
-import re
-
-
-# the subclassing of Connector by all classes
-# here is not strictly necessary
-
-
-class MySQLDBExecutionContext(Connector):
-
-    @property
-    def rowcount(self):
-        if hasattr(self, '_rowcount'):
-            return self._rowcount
-        else:
-            return self.cursor.rowcount
-
-
-class MySQLDBCompiler(Connector):
-    def visit_mod_binary(self, binary, operator, **kw):
-        return self.process(binary.left, **kw) + " %% " + \
-            self.process(binary.right, **kw)
-
-    def post_process_text(self, text):
-        return text.replace('%', '%%')
-
-
-class MySQLDBIdentifierPreparer(Connector):
-
-    def _escape_identifier(self, value):
-        value = value.replace(self.escape_quote, self.escape_to_quote)
-        return value.replace("%", "%%")
-
-
-class MySQLDBConnector(Connector):
-    driver = 'mysqldb'
-    supports_unicode_statements = False
-    supports_sane_rowcount = True
-    supports_sane_multi_rowcount = True
-
-    supports_native_decimal = True
-
-    default_paramstyle = 'format'
-
-    @classmethod
-    def dbapi(cls):
-        # is overridden when pymysql is used
-        return __import__('MySQLdb')
-
-    def do_executemany(self, cursor, statement, parameters, context=None):
-        rowcount = cursor.executemany(statement, parameters)
-        if context is not None:
-            context._rowcount = rowcount
-
-    def create_connect_args(self, url):
-        opts = url.translate_connect_args(database='db', username='user',
-                                          password='passwd')
-        opts.update(url.query)
-
-        util.coerce_kw_type(opts, 'compress', bool)
-        util.coerce_kw_type(opts, 'connect_timeout', int)
-        util.coerce_kw_type(opts, 'read_timeout', int)
-        util.coerce_kw_type(opts, 'client_flag', int)
-        util.coerce_kw_type(opts, 'local_infile', int)
-        # Note: using either of the below will cause all strings to be returned
-        # as Unicode, both in raw SQL operations and with column types like
-        # String and MSString.
-        util.coerce_kw_type(opts, 'use_unicode', bool)
-        util.coerce_kw_type(opts, 'charset', str)
-
-        # Rich values 'cursorclass' and 'conv' are not supported via
-        # query string.
-
-        ssl = {}
-        keys = ['ssl_ca', 'ssl_key', 'ssl_cert', 'ssl_capath', 'ssl_cipher']
-        for key in keys:
-            if key in opts:
-                ssl[key[4:]] = opts[key]
-                util.coerce_kw_type(ssl, key[4:], str)
-                del opts[key]
-        if ssl:
-            opts['ssl'] = ssl
-
-        # FOUND_ROWS must be set in CLIENT_FLAGS to enable
-        # supports_sane_rowcount.
-        client_flag = opts.get('client_flag', 0)
-        if self.dbapi is not None:
-            try:
-                CLIENT_FLAGS = __import__(
-                    self.dbapi.__name__ + '.constants.CLIENT'
-                ).constants.CLIENT
-                client_flag |= CLIENT_FLAGS.FOUND_ROWS
-            except (AttributeError, ImportError):
-                self.supports_sane_rowcount = False
-        opts['client_flag'] = client_flag
-        return [[], opts]
-
-    def _get_server_version_info(self, connection):
-        dbapi_con = connection.connection
-        version = []
-        r = re.compile('[.\-]')
-        for n in r.split(dbapi_con.get_server_info()):
-            try:
-                version.append(int(n))
-            except ValueError:
-                version.append(n)
-        return tuple(version)
-
-    def _extract_error_code(self, exception):
-        return exception.args[0]
-
-    def _detect_charset(self, connection):
-        """Sniff out the character set in use for connection results."""
-
-        try:
-            # note: the SQL here would be
-            # "SHOW VARIABLES LIKE 'character_set%%'"
-            cset_name = connection.connection.character_set_name
-        except AttributeError:
-            util.warn(
-                "No 'character_set_name' can be detected with "
-                "this MySQL-Python version; "
-                "please upgrade to a recent version of MySQL-Python. "
-                "Assuming latin1.")
-            return 'latin1'
-        else:
-            return cset_name()
@@ -1,5 +1,5 @@
 # connectors/pyodbc.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under
@@ -26,7 +26,7 @@ class PyODBCConnector(Connector):
     supports_native_decimal = True
     default_paramstyle = 'named'
 
-    # for non-DSN connections, this should
+    # for non-DSN connections, this *may* be used to
     # hold the desired driver name
     pyodbc_driver_name = None
 
@@ -75,10 +75,21 @@ class PyODBCConnector(Connector):
         if 'port' in keys and 'port' not in query:
             port = ',%d' % int(keys.pop('port'))
 
-        connectors = ["DRIVER={%s}" %
-                      keys.pop('driver', self.pyodbc_driver_name),
+        connectors = []
+        driver = keys.pop('driver', self.pyodbc_driver_name)
+        if driver is None:
+            util.warn(
+                "No driver name specified; "
+                "this is expected by PyODBC when using "
+                "DSN-less connections")
+        else:
+            connectors.append("DRIVER={%s}" % driver)
+
+        connectors.extend(
+            [
             'Server=%s%s' % (keys.pop('host', ''), port),
-            'Database=%s' % keys.pop('database', '')]
+            'Database=%s' % keys.pop('database', '')
+            ])
 
         user = keys.pop("user", None)
         if user:
@@ -1,5 +1,5 @@
 # connectors/zxJDBC.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -1,5 +1,5 @@
 # databases/__init__.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under
@@ -13,7 +13,6 @@ from ..dialects.sqlite import base as sqlite
 from ..dialects.postgresql import base as postgresql
 postgres = postgresql
 from ..dialects.mysql import base as mysql
-from ..dialects.drizzle import base as drizzle
 from ..dialects.oracle import base as oracle
 from ..dialects.firebird import base as firebird
 from ..dialects.mssql import base as mssql
@@ -21,7 +20,6 @@ from ..dialects.sybase import base as sybase
 
 
 __all__ = (
-    'drizzle',
     'firebird',
     'mssql',
     'mysql',
@@ -1,12 +1,11 @@
 # dialects/__init__.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under
 # the MIT License: http://www.opensource.org/licenses/mit-license.php
 
 __all__ = (
-    'drizzle',
     'firebird',
     'mssql',
     'mysql',
@@ -1,22 +0,0 @@
-from sqlalchemy.dialects.drizzle import base, mysqldb
-
-base.dialect = mysqldb.dialect
-
-from sqlalchemy.dialects.drizzle.base import \
-    BIGINT, BINARY, BLOB, \
-    BOOLEAN, CHAR, DATE, \
-    DATETIME, DECIMAL, DOUBLE, \
-    ENUM, FLOAT, INTEGER, \
-    NUMERIC, REAL, TEXT, \
-    TIME, TIMESTAMP, VARBINARY, \
-    VARCHAR, dialect
-
-__all__ = (
-    'BIGINT', 'BINARY', 'BLOB',
-    'BOOLEAN', 'CHAR', 'DATE',
-    'DATETIME', 'DECIMAL', 'DOUBLE',
-    'ENUM', 'FLOAT', 'INTEGER',
-    'NUMERIC', 'REAL', 'TEXT',
-    'TIME', 'TIMESTAMP', 'VARBINARY',
-    'VARCHAR', 'dialect'
-)
@@ -1,499 +0,0 @@
-# drizzle/base.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
-# <see AUTHORS file>
-# Copyright (C) 2010-2011 Monty Taylor <mordred@inaugust.com>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-
-"""
-
-.. dialect:: drizzle
-    :name: Drizzle
-
-Drizzle is a variant of MySQL. Unlike MySQL, Drizzle's default storage engine
-is InnoDB (transactions, foreign-keys) rather than MyISAM. For more
-`Notable Differences <http://docs.drizzle.org/mysql_differences.html>`_, visit
-the `Drizzle Documentation <http://docs.drizzle.org/index.html>`_.
-
-The SQLAlchemy Drizzle dialect leans heavily on the MySQL dialect, so much of
-the :doc:`SQLAlchemy MySQL <mysql>` documentation is also relevant.
-
-
-"""
-
-from sqlalchemy import exc
-from sqlalchemy import log
-from sqlalchemy import types as sqltypes
-from sqlalchemy.engine import reflection
-from sqlalchemy.dialects.mysql import base as mysql_dialect
-from sqlalchemy.types import DATE, DATETIME, BOOLEAN, TIME, \
-    BLOB, BINARY, VARBINARY
-
-
-class _NumericType(object):
-    """Base for Drizzle numeric types."""
-
-    def __init__(self, **kw):
-        super(_NumericType, self).__init__(**kw)
-
-
-class _FloatType(_NumericType, sqltypes.Float):
-    def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
-        if isinstance(self, (REAL, DOUBLE)) and \
-            (
-                (precision is None and scale is not None) or
-                (precision is not None and scale is None)
-            ):
-            raise exc.ArgumentError(
-                "You must specify both precision and scale or omit "
-                "both altogether.")
-
-        super(_FloatType, self).__init__(precision=precision,
-                                         asdecimal=asdecimal, **kw)
-        self.scale = scale
-
-
-class _StringType(mysql_dialect._StringType):
-    """Base for Drizzle string types."""
-
-    def __init__(self, collation=None, binary=False, **kw):
-        kw['national'] = False
-        super(_StringType, self).__init__(collation=collation, binary=binary,
-                                          **kw)
-
-
-class NUMERIC(_NumericType, sqltypes.NUMERIC):
-    """Drizzle NUMERIC type."""
-
-    __visit_name__ = 'NUMERIC'
-
-    def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
-        """Construct a NUMERIC.
-
-        :param precision: Total digits in this number. If scale and precision
-          are both None, values are stored to limits allowed by the server.
-
-        :param scale: The number of digits after the decimal point.
-
-        """
-
-        super(NUMERIC, self).__init__(precision=precision, scale=scale,
-                                      asdecimal=asdecimal, **kw)
-
-
-class DECIMAL(_NumericType, sqltypes.DECIMAL):
-    """Drizzle DECIMAL type."""
-
-    __visit_name__ = 'DECIMAL'
-
-    def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
-        """Construct a DECIMAL.
-
-        :param precision: Total digits in this number. If scale and precision
-          are both None, values are stored to limits allowed by the server.
-
-        :param scale: The number of digits after the decimal point.
-
-        """
-        super(DECIMAL, self).__init__(precision=precision, scale=scale,
-                                      asdecimal=asdecimal, **kw)
-
-
-class DOUBLE(_FloatType):
-    """Drizzle DOUBLE type."""
-
-    __visit_name__ = 'DOUBLE'
-
-    def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
-        """Construct a DOUBLE.
-
-        :param precision: Total digits in this number. If scale and precision
-          are both None, values are stored to limits allowed by the server.
-
-        :param scale: The number of digits after the decimal point.
-
-        """
-
-        super(DOUBLE, self).__init__(precision=precision, scale=scale,
-                                     asdecimal=asdecimal, **kw)
-
-
-class REAL(_FloatType, sqltypes.REAL):
-    """Drizzle REAL type."""
-
-    __visit_name__ = 'REAL'
-
-    def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
-        """Construct a REAL.
-
-        :param precision: Total digits in this number. If scale and precision
-          are both None, values are stored to limits allowed by the server.
-
-        :param scale: The number of digits after the decimal point.
-
-        """
-
-        super(REAL, self).__init__(precision=precision, scale=scale,
-                                   asdecimal=asdecimal, **kw)
-
-
-class FLOAT(_FloatType, sqltypes.FLOAT):
-    """Drizzle FLOAT type."""
-
-    __visit_name__ = 'FLOAT'
-
-    def __init__(self, precision=None, scale=None, asdecimal=False, **kw):
-        """Construct a FLOAT.
-
-        :param precision: Total digits in this number. If scale and precision
-          are both None, values are stored to limits allowed by the server.
-
-        :param scale: The number of digits after the decimal point.
-
-        """
-
-        super(FLOAT, self).__init__(precision=precision, scale=scale,
-                                    asdecimal=asdecimal, **kw)
-
-    def bind_processor(self, dialect):
-        return None
-
-
-class INTEGER(sqltypes.INTEGER):
-    """Drizzle INTEGER type."""
-
-    __visit_name__ = 'INTEGER'
-
-    def __init__(self, **kw):
-        """Construct an INTEGER."""
-
-        super(INTEGER, self).__init__(**kw)
-
-
-class BIGINT(sqltypes.BIGINT):
-    """Drizzle BIGINTEGER type."""
-
-    __visit_name__ = 'BIGINT'
-
-    def __init__(self, **kw):
-        """Construct a BIGINTEGER."""
-
-        super(BIGINT, self).__init__(**kw)
-
-
-class TIME(mysql_dialect.TIME):
-    """Drizzle TIME type."""
-
-
-class TIMESTAMP(sqltypes.TIMESTAMP):
-    """Drizzle TIMESTAMP type."""
-
-    __visit_name__ = 'TIMESTAMP'
-
-
-class TEXT(_StringType, sqltypes.TEXT):
-    """Drizzle TEXT type, for text up to 2^16 characters."""
-
-    __visit_name__ = 'TEXT'
-
-    def __init__(self, length=None, **kw):
-        """Construct a TEXT.
-
-        :param length: Optional, if provided the server may optimize storage
-          by substituting the smallest TEXT type sufficient to store
-          ``length`` characters.
-
-        :param collation: Optional, a column-level collation for this string
-          value. Takes precedence to 'binary' short-hand.
-
-        :param binary: Defaults to False: short-hand, pick the binary
-          collation type that matches the column's character set. Generates
-          BINARY in schema. This does not affect the type of data stored,
-          only the collation of character data.
-
-        """
-
-        super(TEXT, self).__init__(length=length, **kw)
-
-
-class VARCHAR(_StringType, sqltypes.VARCHAR):
-    """Drizzle VARCHAR type, for variable-length character data."""
-
-    __visit_name__ = 'VARCHAR'
-
-    def __init__(self, length=None, **kwargs):
-        """Construct a VARCHAR.
-
-        :param collation: Optional, a column-level collation for this string
-          value. Takes precedence to 'binary' short-hand.
-
-        :param binary: Defaults to False: short-hand, pick the binary
-          collation type that matches the column's character set. Generates
-          BINARY in schema. This does not affect the type of data stored,
-          only the collation of character data.
-
-        """
-
-        super(VARCHAR, self).__init__(length=length, **kwargs)
-
-
-class CHAR(_StringType, sqltypes.CHAR):
-    """Drizzle CHAR type, for fixed-length character data."""
-
-    __visit_name__ = 'CHAR'
-
-    def __init__(self, length=None, **kwargs):
-        """Construct a CHAR.
-
-        :param length: Maximum data length, in characters.
-
-        :param binary: Optional, use the default binary collation for the
-          national character set. This does not affect the type of data
-          stored, use a BINARY type for binary data.
-
-        :param collation: Optional, request a particular collation. Must be
-          compatible with the national character set.
-
-        """
-
-        super(CHAR, self).__init__(length=length, **kwargs)
-
-
-class ENUM(mysql_dialect.ENUM):
-    """Drizzle ENUM type."""
-
-    def __init__(self, *enums, **kw):
-        """Construct an ENUM.
-
-        Example:
-
-          Column('myenum', ENUM("foo", "bar", "baz"))
-
-        :param enums: The range of valid values for this ENUM. Values will be
-          quoted when generating the schema according to the quoting flag (see
-          below).
-
-        :param strict: Defaults to False: ensure that a given value is in this
-          ENUM's range of permissible values when inserting or updating rows.
-          Note that Drizzle will not raise a fatal error if you attempt to
-          store an out of range value- an alternate value will be stored
-          instead.
-          (See Drizzle ENUM documentation.)
-
-        :param collation: Optional, a column-level collation for this string
-          value. Takes precedence to 'binary' short-hand.
-
-        :param binary: Defaults to False: short-hand, pick the binary
-          collation type that matches the column's character set. Generates
-          BINARY in schema. This does not affect the type of data stored,
-          only the collation of character data.
-
-        :param quoting: Defaults to 'auto': automatically determine enum value
-          quoting. If all enum values are surrounded by the same quoting
-          character, then use 'quoted' mode. Otherwise, use 'unquoted' mode.
-
-          'quoted': values in enums are already quoted, they will be used
-          directly when generating the schema - this usage is deprecated.
-
-          'unquoted': values in enums are not quoted, they will be escaped and
-          surrounded by single quotes when generating the schema.
-
-          Previous versions of this type always required manually quoted
-          values to be supplied; future versions will always quote the string
-          literals for you. This is a transitional option.
-
-        """
-
-        super(ENUM, self).__init__(*enums, **kw)
-
-
-class _DrizzleBoolean(sqltypes.Boolean):
-    def get_dbapi_type(self, dbapi):
-        return dbapi.NUMERIC
-
-
-colspecs = {
-    sqltypes.Numeric: NUMERIC,
-    sqltypes.Float: FLOAT,
-    sqltypes.Time: TIME,
-    sqltypes.Enum: ENUM,
-    sqltypes.Boolean: _DrizzleBoolean,
-}
-
-
-# All the types we have in Drizzle
-ischema_names = {
-    'BIGINT': BIGINT,
-    'BINARY': BINARY,
-    'BLOB': BLOB,
-    'BOOLEAN': BOOLEAN,
-    'CHAR': CHAR,
-    'DATE': DATE,
-    'DATETIME': DATETIME,
-    'DECIMAL': DECIMAL,
-    'DOUBLE': DOUBLE,
-    'ENUM': ENUM,
-    'FLOAT': FLOAT,
-    'INT': INTEGER,
-    'INTEGER': INTEGER,
-    'NUMERIC': NUMERIC,
-    'TEXT': TEXT,
-    'TIME': TIME,
-    'TIMESTAMP': TIMESTAMP,
-    'VARBINARY': VARBINARY,
-    'VARCHAR': VARCHAR,
-}
-
-
-class DrizzleCompiler(mysql_dialect.MySQLCompiler):
-
-    def visit_typeclause(self, typeclause):
-        type_ = typeclause.type.dialect_impl(self.dialect)
-        if isinstance(type_, sqltypes.Integer):
-            return 'INTEGER'
-        else:
-            return super(DrizzleCompiler, self).visit_typeclause(typeclause)
-
-    def visit_cast(self, cast, **kwargs):
-        type_ = self.process(cast.typeclause)
-        if type_ is None:
-            return self.process(cast.clause)
-
-        return 'CAST(%s AS %s)' % (self.process(cast.clause), type_)
-
-
-class DrizzleDDLCompiler(mysql_dialect.MySQLDDLCompiler):
-    pass
-
-
-class DrizzleTypeCompiler(mysql_dialect.MySQLTypeCompiler):
-    def _extend_numeric(self, type_, spec):
-        return spec
-
-    def _extend_string(self, type_, defaults, spec):
-        """Extend a string-type declaration with standard SQL
-        COLLATE annotations and Drizzle specific extensions.
-
-        """
-
-        def attr(name):
-            return getattr(type_, name, defaults.get(name))
-
-        if attr('collation'):
-            collation = 'COLLATE %s' % type_.collation
-        elif attr('binary'):
-            collation = 'BINARY'
-        else:
-            collation = None
-
-        return ' '.join([c for c in (spec, collation)
-                         if c is not None])
-
-    def visit_NCHAR(self, type):
-        raise NotImplementedError("Drizzle does not support NCHAR")
-
-    def visit_NVARCHAR(self, type):
-        raise NotImplementedError("Drizzle does not support NVARCHAR")
-
-    def visit_FLOAT(self, type_):
-        if type_.scale is not None and type_.precision is not None:
-            return "FLOAT(%s, %s)" % (type_.precision, type_.scale)
-        else:
-            return "FLOAT"
-
-    def visit_BOOLEAN(self, type_):
-        return "BOOLEAN"
-
-    def visit_BLOB(self, type_):
-        return "BLOB"
-
-
-class DrizzleExecutionContext(mysql_dialect.MySQLExecutionContext):
-    pass
-
-
-class DrizzleIdentifierPreparer(mysql_dialect.MySQLIdentifierPreparer):
-    pass
-
-
-@log.class_logger
-class DrizzleDialect(mysql_dialect.MySQLDialect):
-    """Details of the Drizzle dialect.
-
-    Not used directly in application code.
-    """
-
-    name = 'drizzle'
-
-    _supports_cast = True
-    supports_sequences = False
-    supports_native_boolean = True
-    supports_views = False
-
-    default_paramstyle = 'format'
-    colspecs = colspecs
-
-    statement_compiler = DrizzleCompiler
-    ddl_compiler = DrizzleDDLCompiler
-    type_compiler = DrizzleTypeCompiler
-    ischema_names = ischema_names
-    preparer = DrizzleIdentifierPreparer
-
-    def on_connect(self):
-        """Force autocommit - Drizzle Bug#707842 doesn't set this properly"""
-
-        def connect(conn):
-            conn.autocommit(False)
-        return connect
-
-    @reflection.cache
-    def get_table_names(self, connection, schema=None, **kw):
-        """Return a Unicode SHOW TABLES from a given schema."""
-
-        if schema is not None:
-            current_schema = schema
-        else:
-            current_schema = self.default_schema_name
-
-        charset = 'utf8'
-        rp = connection.execute("SHOW TABLES FROM %s" %
-            self.identifier_preparer.quote_identifier(current_schema))
-        return [row[0] for row in self._compat_fetchall(rp, charset=charset)]
-
-    @reflection.cache
-    def get_view_names(self, connection, schema=None, **kw):
-        raise NotImplementedError
-
-    def _detect_casing(self, connection):
-        """Sniff out identifier case sensitivity.
-
-        Cached per-connection. This value can not change without a server
-        restart.
-        """
-
-        return 0
-
-    def _detect_collations(self, connection):
-        """Pull the active COLLATIONS list from the server.
-
-        Cached per-connection.
-        """
-
-        collations = {}
-        charset = self._connection_charset
-        rs = connection.execute(
-            'SELECT CHARACTER_SET_NAME, COLLATION_NAME FROM'
-            ' data_dictionary.COLLATIONS')
-        for row in self._compat_fetchall(rs, charset):
-            collations[row[0]] = row[1]
-        return collations
-
-    def _detect_ansiquotes(self, connection):
-        """Detect and adjust for the ANSI_QUOTES sql mode."""
-
-        self._server_ansiquotes = False
-        self._backslash_escapes = False
@@ -1,48 +0,0 @@
-"""
-.. dialect:: drizzle+mysqldb
-    :name: MySQL-Python
-    :dbapi: mysqldb
-    :connectstring: drizzle+mysqldb://<user>:<password>@<host>[:<port>]/<dbname>
-    :url: http://sourceforge.net/projects/mysql-python
-
-
-"""
-
-from sqlalchemy.dialects.drizzle.base import (
-    DrizzleDialect,
-    DrizzleExecutionContext,
-    DrizzleCompiler,
-    DrizzleIdentifierPreparer)
-from sqlalchemy.connectors.mysqldb import (
-    MySQLDBExecutionContext,
-    MySQLDBCompiler,
-    MySQLDBIdentifierPreparer,
-    MySQLDBConnector)
-
-
-class DrizzleExecutionContext_mysqldb(MySQLDBExecutionContext,
-                                      DrizzleExecutionContext):
-    pass
-
-
-class DrizzleCompiler_mysqldb(MySQLDBCompiler, DrizzleCompiler):
-    pass
-
-
-class DrizzleIdentifierPreparer_mysqldb(MySQLDBIdentifierPreparer,
-                                        DrizzleIdentifierPreparer):
-    pass
-
-
-class DrizzleDialect_mysqldb(MySQLDBConnector, DrizzleDialect):
-    execution_ctx_cls = DrizzleExecutionContext_mysqldb
-    statement_compiler = DrizzleCompiler_mysqldb
-    preparer = DrizzleIdentifierPreparer_mysqldb
-
-    def _detect_charset(self, connection):
-        """Sniff out the character set in use for connection results."""
-
-        return 'utf8'
-
-
-dialect = DrizzleDialect_mysqldb
@@ -1,5 +1,5 @@
 # firebird/__init__.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under
@@ -1,5 +1,5 @@
 # firebird/base.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under
@@ -78,7 +78,6 @@ from sqlalchemy.sql import expression
 from sqlalchemy.engine import base, default, reflection
 from sqlalchemy.sql import compiler
 
-
 from sqlalchemy.types import (BIGINT, BLOB, DATE, FLOAT, INTEGER, NUMERIC,
                               SMALLINT, TEXT, TIME, TIMESTAMP, Integer)
 
@@ -181,16 +180,16 @@ ischema_names = {
 # _FBDate, etc. as bind/result functionality is required)
 
 class FBTypeCompiler(compiler.GenericTypeCompiler):
-    def visit_boolean(self, type_):
-        return self.visit_SMALLINT(type_)
+    def visit_boolean(self, type_, **kw):
+        return self.visit_SMALLINT(type_, **kw)
 
-    def visit_datetime(self, type_):
-        return self.visit_TIMESTAMP(type_)
+    def visit_datetime(self, type_, **kw):
+        return self.visit_TIMESTAMP(type_, **kw)
 
-    def visit_TEXT(self, type_):
+    def visit_TEXT(self, type_, **kw):
         return "BLOB SUB_TYPE 1"
 
-    def visit_BLOB(self, type_):
+    def visit_BLOB(self, type_, **kw):
         return "BLOB SUB_TYPE 0"
 
     def _extend_string(self, type_, basic):
@@ -200,16 +199,16 @@ class FBTypeCompiler(compiler.GenericTypeCompiler):
         else:
             return '%s CHARACTER SET %s' % (basic, charset)
 
-    def visit_CHAR(self, type_):
-        basic = super(FBTypeCompiler, self).visit_CHAR(type_)
+    def visit_CHAR(self, type_, **kw):
+        basic = super(FBTypeCompiler, self).visit_CHAR(type_, **kw)
         return self._extend_string(type_, basic)
 
-    def visit_VARCHAR(self, type_):
+    def visit_VARCHAR(self, type_, **kw):
         if not type_.length:
             raise exc.CompileError(
                 "VARCHAR requires a length on dialect %s" %
                 self.dialect.name)
-        basic = super(FBTypeCompiler, self).visit_VARCHAR(type_)
+        basic = super(FBTypeCompiler, self).visit_VARCHAR(type_, **kw)
         return self._extend_string(type_, basic)
 
 
@@ -294,22 +293,22 @@ class FBCompiler(sql.compiler.SQLCompiler):
     def visit_sequence(self, seq):
         return "gen_id(%s, 1)" % self.preparer.format_sequence(seq)
 
-    def get_select_precolumns(self, select):
+    def get_select_precolumns(self, select, **kw):
         """Called when building a ``SELECT`` statement, position is just
         before column list Firebird puts the limit and offset right
         after the ``SELECT``...
         """
 
         result = ""
-        if select._limit:
-            result += "FIRST %s " % self.process(sql.literal(select._limit))
-        if select._offset:
-            result += "SKIP %s " % self.process(sql.literal(select._offset))
+        if select._limit_clause is not None:
+            result += "FIRST %s " % self.process(select._limit_clause, **kw)
+        if select._offset_clause is not None:
+            result += "SKIP %s " % self.process(select._offset_clause, **kw)
         if select._distinct:
             result += "DISTINCT "
         return result
 
-    def limit_clause(self, select):
+    def limit_clause(self, select, **kw):
         """Already taken care of in the `get_select_precolumns` method."""
 
         return ""
 
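A hedged sketch (table and column names are made up) of what the reworked
``get_select_precolumns()`` above produces — Firebird renders its limit and
offset as FIRST/SKIP immediately after the SELECT keyword::

    from sqlalchemy import Column, Integer, MetaData, Table, select
    from sqlalchemy.dialects.firebird.base import FBDialect

    t = Table('t', MetaData(), Column('id', Integer))
    stmt = select([t]).limit(5).offset(10)

    # roughly: SELECT FIRST :param_1 SKIP :param_2 t.id FROM t
    print(stmt.compile(dialect=FBDialect()))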
@@ -1,5 +1,5 @@
 # firebird/fdb.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under
@@ -1,5 +1,5 @@
 # firebird/kinterbasdb.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under
@@ -1,5 +1,5 @@
 # mssql/__init__.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under
@@ -1,5 +1,5 @@
 # mssql/adodbapi.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under
@@ -1,5 +1,5 @@
 # mssql/base.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under
@@ -166,6 +166,55 @@ how SQLAlchemy handles this:
 This
 is an auxilliary use case suitable for testing and bulk insert scenarios.
 
+.. _legacy_schema_rendering:
+
+Rendering of SQL statements that include schema qualifiers
+---------------------------------------------------------
+
+When using :class:`.Table` metadata that includes a "schema" qualifier,
+such as::
+
+    account_table = Table(
+        'account', metadata,
+        Column('id', Integer, primary_key=True),
+        Column('info', String(100)),
+        schema="customer_schema"
+    )
+
+The SQL Server dialect has a long-standing behavior that it will attempt
+to turn a schema-qualified table name into an alias, such as::
+
+    >>> eng = create_engine("mssql+pymssql://mydsn")
+    >>> print(account_table.select().compile(eng))
+    SELECT account_1.id, account_1.info
+    FROM customer_schema.account AS account_1
+
+This behavior is legacy, does not function correctly for many forms
+of SQL statements, and will be disabled by default in the 1.1 series
+of SQLAlchemy. As of 1.0.5, the above statement will produce the following
+warning::
+
+    SAWarning: legacy_schema_aliasing flag is defaulted to True;
+    some schema-qualified queries may not function correctly.
+    Consider setting this flag to False for modern SQL Server versions;
+    this flag will default to False in version 1.1
+
+This warning encourages the :class:`.Engine` to be created as follows::
+
+    >>> eng = create_engine("mssql+pymssql://mydsn", legacy_schema_aliasing=False)
+
+Where the above SELECT statement will produce::
+
+    >>> print(account_table.select().compile(eng))
+    SELECT customer_schema.account.id, customer_schema.account.info
+    FROM customer_schema.account
+
+The warning will not emit if the ``legacy_schema_aliasing`` flag is set
+to either True or False.
+
+.. versionadded:: 1.0.5 - Added the ``legacy_schema_aliasing`` flag to disable
+   the SQL Server dialect's legacy behavior with schema-qualified table
+   names. This flag will default to False in version 1.1.
+
 Collation Support
 -----------------
 
@@ -187,7 +236,7 @@ CREATE TABLE statement for this column will yield::
 LIMIT/OFFSET Support
 --------------------
 
-MSSQL has no support for the LIMIT or OFFSET keysowrds. LIMIT is
+MSSQL has no support for the LIMIT or OFFSET keywords. LIMIT is
 supported directly through the ``TOP`` Transact SQL keyword::
 
     select.limit
 
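A minimal sketch (placeholder table; the dialect is constructed directly so
no DBAPI is needed) of the TOP rendering described above::

    from sqlalchemy import Column, Integer, MetaData, Table, select
    from sqlalchemy.dialects.mssql.base import MSDialect

    t = Table('t', MetaData(), Column('id', Integer))

    # a simple integer limit is rendered inline as TOP:
    # SELECT TOP 10 t.id FROM t
    print(select([t]).limit(10).compile(dialect=MSDialect()))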
@@ -226,6 +275,53 @@ The DATE and TIME types are not available for MSSQL 2005 and
 previous - if a server version below 2008 is detected, DDL
 for these types will be issued as DATETIME.
 
+.. _mssql_large_type_deprecation:
+
+Large Text/Binary Type Deprecation
+----------------------------------
+
+Per `SQL Server 2012/2014 Documentation <http://technet.microsoft.com/en-us/library/ms187993.aspx>`_,
+the ``NTEXT``, ``TEXT`` and ``IMAGE`` datatypes are to be removed from SQL Server
+in a future release. SQLAlchemy normally relates these types to the
+:class:`.UnicodeText`, :class:`.Text` and :class:`.LargeBinary` datatypes.
+
+In order to accommodate this change, a new flag ``deprecate_large_types``
+is added to the dialect, which will be automatically set based on detection
+of the server version in use, if not otherwise set by the user. The
+behavior of this flag is as follows:
+
+* When this flag is ``True``, the :class:`.UnicodeText`, :class:`.Text` and
+  :class:`.LargeBinary` datatypes, when used to render DDL, will render the
+  types ``NVARCHAR(max)``, ``VARCHAR(max)``, and ``VARBINARY(max)``,
+  respectively. This is a new behavior as of the addition of this flag.
+
+* When this flag is ``False``, the :class:`.UnicodeText`, :class:`.Text` and
+  :class:`.LargeBinary` datatypes, when used to render DDL, will render the
+  types ``NTEXT``, ``TEXT``, and ``IMAGE``,
+  respectively. This is the long-standing behavior of these types.
+
+* The flag begins with the value ``None``, before a database connection is
+  established. If the dialect is used to render DDL without the flag being
+  set, it is interpreted the same as ``False``.
+
+* On first connection, the dialect detects if SQL Server version 2012 or greater
+  is in use; if the flag is still at ``None``, it sets it to ``True`` or
+  ``False`` based on whether 2012 or greater is detected.
+
+* The flag can be set to either ``True`` or ``False`` when the dialect
+  is created, typically via :func:`.create_engine`::
+
+        eng = create_engine("mssql+pymssql://user:pass@host/db",
+                        deprecate_large_types=True)
+
+* Complete control over whether the "old" or "new" types are rendered is
+  available in all SQLAlchemy versions by using the UPPERCASE type objects
+  instead: :class:`.NVARCHAR`, :class:`.VARCHAR`, :class:`.types.VARBINARY`,
+  :class:`.TEXT`, :class:`.mssql.NTEXT`, :class:`.mssql.IMAGE` will always remain
+  fixed and always output exactly that type.
+
+.. versionadded:: 1.0.0
+
 .. _mssql_indexes:
 
 Clustered Index Support
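The following sketch (table name is illustrative; the dialect is constructed
directly so no DBAPI is needed) shows the flag's effect on DDL, as described
in the bullets above::

    from sqlalchemy import Column, Integer, MetaData, Table, Text
    from sqlalchemy.dialects.mssql.base import MSDialect
    from sqlalchemy.schema import CreateTable

    docs = Table('docs', MetaData(),
                 Column('id', Integer, primary_key=True),
                 Column('body', Text))

    # flag on: Text renders as VARCHAR(max)
    print(CreateTable(docs).compile(
        dialect=MSDialect(deprecate_large_types=True)))

    # flag off (or unset): Text renders as the legacy TEXT type
    print(CreateTable(docs).compile(
        dialect=MSDialect(deprecate_large_types=False)))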
@@ -367,19 +463,20 @@ import operator
 import re
 
 from ... import sql, schema as sa_schema, exc, util
-from ...sql import compiler, expression, \
-    util as sql_util, cast
+from ...sql import compiler, expression, util as sql_util
 from ... import engine
 from ...engine import reflection, default
 from ... import types as sqltypes
 from ...types import INTEGER, BIGINT, SMALLINT, DECIMAL, NUMERIC, \
     FLOAT, TIMESTAMP, DATETIME, DATE, BINARY,\
-    VARBINARY, TEXT, VARCHAR, NVARCHAR, CHAR, NCHAR
+    TEXT, VARCHAR, NVARCHAR, CHAR, NCHAR
 
 
 from ...util import update_wrapper
 from . import information_schema as ischema
 
 # http://sqlserverbuilds.blogspot.com/
+MS_2012_VERSION = (11,)
 MS_2008_VERSION = (10,)
 MS_2005_VERSION = (9,)
 MS_2000_VERSION = (8,)
@@ -451,9 +548,13 @@ class _MSDate(sqltypes.Date):
             if isinstance(value, datetime.datetime):
                 return value.date()
             elif isinstance(value, util.string_types):
+                m = self._reg.match(value)
+                if not m:
+                    raise ValueError(
+                        "could not parse %r as a date value" % (value, ))
                 return datetime.date(*[
                     int(x or 0)
-                    for x in self._reg.match(value).groups()
+                    for x in m.groups()
                 ])
             else:
                 return value
@@ -485,9 +586,13 @@ class TIME(sqltypes.TIME):
             if isinstance(value, datetime.datetime):
                 return value.time()
             elif isinstance(value, util.string_types):
+                m = self._reg.match(value)
+                if not m:
+                    raise ValueError(
+                        "could not parse %r as a time value" % (value, ))
                 return datetime.time(*[
                     int(x or 0)
-                    for x in self._reg.match(value).groups()])
+                    for x in m.groups()])
             else:
                 return value
         return process
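A standalone illustration (simplified regex, not the dialect's actual
pattern) of the stricter string handling introduced in the two hunks above:
an unparseable string now raises a clear ``ValueError`` instead of failing
with an ``AttributeError`` on the ``None`` returned by a failed match::

    import datetime
    import re

    _reg = re.compile(r"(\d+)-(\d+)-(\d+)")  # stand-in for _MSDate._reg

    def process(value):
        if isinstance(value, datetime.datetime):
            return value.date()
        elif isinstance(value, str):
            m = _reg.match(value)
            if not m:
                raise ValueError(
                    "could not parse %r as a date value" % (value, ))
            return datetime.date(*[int(x or 0) for x in m.groups()])
        else:
            return value

    print(process("2016-03-15"))   # datetime.date(2016, 3, 15)
    process("not-a-date")          # raises ValueError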
@@ -545,6 +650,26 @@ class NTEXT(sqltypes.UnicodeText):
     __visit_name__ = 'NTEXT'
 
 
+class VARBINARY(sqltypes.VARBINARY, sqltypes.LargeBinary):
+    """The MSSQL VARBINARY type.
+
+    This type extends both :class:`.types.VARBINARY` and
+    :class:`.types.LargeBinary`. In "deprecate_large_types" mode,
+    the :class:`.types.LargeBinary` type will produce ``VARBINARY(max)``
+    on SQL Server.
+
+    .. versionadded:: 1.0.0
+
+    .. seealso::
+
+        :ref:`mssql_large_type_deprecation`
+
+
+
+    """
+    __visit_name__ = 'VARBINARY'
+
+
 class IMAGE(sqltypes.LargeBinary):
     __visit_name__ = 'IMAGE'
 
@@ -626,7 +751,6 @@ ischema_names = {
 
 
 class MSTypeCompiler(compiler.GenericTypeCompiler):
-
     def _extend(self, spec, type_, length=None):
         """Extend a string-type declaration with standard SQL
         COLLATE annotations.
@@ -647,103 +771,115 @@ class MSTypeCompiler(compiler.GenericTypeCompiler):
         return ' '.join([c for c in (spec, collation)
                          if c is not None])
 
-    def visit_FLOAT(self, type_):
+    def visit_FLOAT(self, type_, **kw):
         precision = getattr(type_, 'precision', None)
         if precision is None:
             return "FLOAT"
         else:
             return "FLOAT(%(precision)s)" % {'precision': precision}
 
-    def visit_TINYINT(self, type_):
+    def visit_TINYINT(self, type_, **kw):
         return "TINYINT"
 
-    def visit_DATETIMEOFFSET(self, type_):
-        if type_.precision:
+    def visit_DATETIMEOFFSET(self, type_, **kw):
+        if type_.precision is not None:
             return "DATETIMEOFFSET(%s)" % type_.precision
         else:
             return "DATETIMEOFFSET"
 
-    def visit_TIME(self, type_):
+    def visit_TIME(self, type_, **kw):
         precision = getattr(type_, 'precision', None)
-        if precision:
+        if precision is not None:
             return "TIME(%s)" % precision
         else:
             return "TIME"
 
-    def visit_DATETIME2(self, type_):
+    def visit_DATETIME2(self, type_, **kw):
         precision = getattr(type_, 'precision', None)
-        if precision:
+        if precision is not None:
             return "DATETIME2(%s)" % precision
         else:
             return "DATETIME2"
 
-    def visit_SMALLDATETIME(self, type_):
+    def visit_SMALLDATETIME(self, type_, **kw):
         return "SMALLDATETIME"
 
-    def visit_unicode(self, type_):
-        return self.visit_NVARCHAR(type_)
+    def visit_unicode(self, type_, **kw):
+        return self.visit_NVARCHAR(type_, **kw)
+
+    def visit_text(self, type_, **kw):
+        if self.dialect.deprecate_large_types:
+            return self.visit_VARCHAR(type_, **kw)
+        else:
+            return self.visit_TEXT(type_, **kw)
 
-    def visit_unicode_text(self, type_):
-        return self.visit_NTEXT(type_)
+    def visit_unicode_text(self, type_, **kw):
+        if self.dialect.deprecate_large_types:
+            return self.visit_NVARCHAR(type_, **kw)
+        else:
+            return self.visit_NTEXT(type_, **kw)
 
-    def visit_NTEXT(self, type_):
+    def visit_NTEXT(self, type_, **kw):
         return self._extend("NTEXT", type_)
 
-    def visit_TEXT(self, type_):
+    def visit_TEXT(self, type_, **kw):
         return self._extend("TEXT", type_)
 
-    def visit_VARCHAR(self, type_):
+    def visit_VARCHAR(self, type_, **kw):
         return self._extend("VARCHAR", type_, length=type_.length or 'max')
 
-    def visit_CHAR(self, type_):
+    def visit_CHAR(self, type_, **kw):
         return self._extend("CHAR", type_)
 
-    def visit_NCHAR(self, type_):
+    def visit_NCHAR(self, type_, **kw):
         return self._extend("NCHAR", type_)
 
-    def visit_NVARCHAR(self, type_):
+    def visit_NVARCHAR(self, type_, **kw):
         return self._extend("NVARCHAR", type_, length=type_.length or 'max')
 
-    def visit_date(self, type_):
+    def visit_date(self, type_, **kw):
         if self.dialect.server_version_info < MS_2008_VERSION:
-            return self.visit_DATETIME(type_)
+            return self.visit_DATETIME(type_, **kw)
         else:
-            return self.visit_DATE(type_)
+            return self.visit_DATE(type_, **kw)
 
-    def visit_time(self, type_):
+    def visit_time(self, type_, **kw):
         if self.dialect.server_version_info < MS_2008_VERSION:
-            return self.visit_DATETIME(type_)
+            return self.visit_DATETIME(type_, **kw)
         else:
-            return self.visit_TIME(type_)
+            return self.visit_TIME(type_, **kw)
 
-    def visit_large_binary(self, type_):
-        return self.visit_IMAGE(type_)
+    def visit_large_binary(self, type_, **kw):
+        if self.dialect.deprecate_large_types:
+            return self.visit_VARBINARY(type_, **kw)
+        else:
+            return self.visit_IMAGE(type_, **kw)
 
-    def visit_IMAGE(self, type_):
+    def visit_IMAGE(self, type_, **kw):
         return "IMAGE"
 
-    def visit_VARBINARY(self, type_):
+    def visit_VARBINARY(self, type_, **kw):
         return self._extend(
             "VARBINARY",
             type_,
             length=type_.length or 'max')
 
-    def visit_boolean(self, type_):
+    def visit_boolean(self, type_, **kw):
         return self.visit_BIT(type_)
 
-    def visit_BIT(self, type_):
+    def visit_BIT(self, type_, **kw):
         return "BIT"
 
-    def visit_MONEY(self, type_):
+    def visit_MONEY(self, type_, **kw):
         return "MONEY"
 
-    def visit_SMALLMONEY(self, type_):
+    def visit_SMALLMONEY(self, type_, **kw):
         return 'SMALLMONEY'
 
-    def visit_UNIQUEIDENTIFIER(self, type_):
+    def visit_UNIQUEIDENTIFIER(self, type_, **kw):
         return "UNIQUEIDENTIFIER"
 
-    def visit_SQL_VARIANT(self, type_):
+    def visit_SQL_VARIANT(self, type_, **kw):
         return 'SQL_VARIANT'
 
@@ -846,7 +982,7 @@ class MSExecutionContext(default.DefaultExecutionContext):
                     "SET IDENTITY_INSERT %s OFF" %
                     self.dialect.identifier_preparer. format_table(
                         self.compiled.statement.table)))
-            except:
+            except Exception:
                 pass
 
     def get_result_proxy(self):
@@ -872,6 +1008,15 @@ class MSSQLCompiler(compiler.SQLCompiler):
         self.tablealiases = {}
         super(MSSQLCompiler, self).__init__(*args, **kwargs)
 
+    def _with_legacy_schema_aliasing(fn):
+        def decorate(self, *arg, **kw):
+            if self.dialect.legacy_schema_aliasing:
+                return fn(self, *arg, **kw)
+            else:
+                super_ = getattr(super(MSSQLCompiler, self), fn.__name__)
+                return super_(*arg, **kw)
+        return decorate
+
     def visit_now_func(self, fn, **kw):
         return "CURRENT_TIMESTAMP"
 
@@ -900,19 +1045,24 @@ class MSSQLCompiler(compiler.SQLCompiler):
             self.process(binary.left, **kw),
             self.process(binary.right, **kw))
 
-    def get_select_precolumns(self, select):
-        """ MS-SQL puts TOP, its version of LIMIT, here """
-        if select._distinct or select._limit is not None:
-            s = select._distinct and "DISTINCT " or ""
+    def get_select_precolumns(self, select, **kw):
+        """ MS-SQL puts TOP, it's version of LIMIT here """
+
+        s = ""
+        if select._distinct:
+            s += "DISTINCT "
+
+        if select._simple_int_limit and not select._offset:
+            # ODBC drivers and possibly others
+            # don't support bind params in the SELECT clause on SQL Server.
+            # so have to use literal here.
+            s += "TOP %d " % select._limit
 
-            if select._limit is not None:
-                if not select._offset:
-                    s += "TOP %d " % select._limit
-
-            if s:
-                return s
-        return compiler.SQLCompiler.get_select_precolumns(self, select)
+        if s:
+            return s
+        else:
+            return compiler.SQLCompiler.get_select_precolumns(
+                self, select, **kw)
 
     def get_from_hint_text(self, table, text):
         return text
@@ -920,7 +1070,7 @@ class MSSQLCompiler(compiler.SQLCompiler):
     def get_crud_hint_text(self, table, text):
         return text
 
-    def limit_clause(self, select):
+    def limit_clause(self, select, **kw):
         # Limit in mssql is after the select keyword
         return ""
 
@@ -929,39 +1079,48 @@
         so tries to wrap it in a subquery with ``row_number()`` criterion.
 
         """
-        if select._offset and not getattr(select, '_mssql_visit', None):
+        if (
+            (
+                not select._simple_int_limit and
+                select._limit_clause is not None
+            ) or (
+                select._offset_clause is not None and
+                not select._simple_int_offset or select._offset
+            )
+        ) and not getattr(select, '_mssql_visit', None):
+
             # to use ROW_NUMBER(), an ORDER BY is required.
             if not select._order_by_clause.clauses:
                 raise exc.CompileError('MSSQL requires an order_by when '
-                                       'using an offset.')
-            _offset = select._offset
-            _limit = select._limit
+                                       'using an OFFSET or a non-simple '
+                                       'LIMIT clause')
+
             _order_by_clauses = select._order_by_clause.clauses
+            limit_clause = select._limit_clause
+            offset_clause = select._offset_clause
+            kwargs['select_wraps_for'] = select
             select = select._generate()
             select._mssql_visit = True
             select = select.column(
                 sql.func.ROW_NUMBER().over(order_by=_order_by_clauses)
-                .label("mssql_rn")
-            ).order_by(None).alias()
+                .label("mssql_rn")).order_by(None).alias()
 
             mssql_rn = sql.column('mssql_rn')
             limitselect = sql.select([c for c in select.c if
                                       c.key != 'mssql_rn'])
-            limitselect.append_whereclause(mssql_rn > _offset)
-            if _limit is not None:
-                limitselect.append_whereclause(mssql_rn <= (_limit + _offset))
-            return self.process(limitselect, iswrapper=True, **kwargs)
+            if offset_clause is not None:
+                limitselect.append_whereclause(mssql_rn > offset_clause)
+                if limit_clause is not None:
+                    limitselect.append_whereclause(
+                        mssql_rn <= (limit_clause + offset_clause))
+            else:
+                limitselect.append_whereclause(
+                    mssql_rn <= (limit_clause))
+            return self.process(limitselect, **kwargs)
         else:
             return compiler.SQLCompiler.visit_select(self, select, **kwargs)
 
-    def _schema_aliased_table(self, table):
-        if getattr(table, 'schema', None) is not None:
-            if table not in self.tablealiases:
-                self.tablealiases[table] = table.alias()
-            return self.tablealiases[table]
-        else:
-            return None
-
+    @_with_legacy_schema_aliasing
     def visit_table(self, table, mssql_aliased=False, iscrud=False, **kwargs):
         if mssql_aliased is table or iscrud:
             return super(MSSQLCompiler, self).visit_table(table, **kwargs)
@ -973,25 +1132,14 @@ class MSSQLCompiler(compiler.SQLCompiler):
else:
return super(MSSQLCompiler, self).visit_table(table, **kwargs)

def visit_alias(self, alias, **kwargs):
@_with_legacy_schema_aliasing
def visit_alias(self, alias, **kw):
# translate for schema-qualified table aliases
kwargs['mssql_aliased'] = alias.original
return super(MSSQLCompiler, self).visit_alias(alias, **kwargs)
kw['mssql_aliased'] = alias.original
return super(MSSQLCompiler, self).visit_alias(alias, **kw)

def visit_extract(self, extract, **kw):
field = self.extract_map.get(extract.field, extract.field)
return 'DATEPART("%s", %s)' % \
(field, self.process(extract.expr, **kw))

def visit_savepoint(self, savepoint_stmt):
return "SAVE TRANSACTION %s" % \
self.preparer.format_savepoint(savepoint_stmt)

def visit_rollback_to_savepoint(self, savepoint_stmt):
return ("ROLLBACK TRANSACTION %s"
% self.preparer.format_savepoint(savepoint_stmt))

def visit_column(self, column, add_to_result_map=None, **kwargs):
@_with_legacy_schema_aliasing
def visit_column(self, column, add_to_result_map=None, **kw):
if column.table is not None and \
(not self.isupdate and not self.isdelete) or \
self.is_subquery():

@@ -1009,10 +1157,40 @@ class MSSQLCompiler(compiler.SQLCompiler):
)

return super(MSSQLCompiler, self).\
visit_column(converted, **kwargs)
visit_column(converted, **kw)

return super(MSSQLCompiler, self).visit_column(
column, add_to_result_map=add_to_result_map, **kwargs)
column, add_to_result_map=add_to_result_map, **kw)

def _schema_aliased_table(self, table):
if getattr(table, 'schema', None) is not None:
if self.dialect._warn_schema_aliasing and \
table.schema.lower() != 'information_schema':
util.warn(
"legacy_schema_aliasing flag is defaulted to True; "
"some schema-qualified queries may not function "
"correctly. Consider setting this flag to False for "
"modern SQL Server versions; this flag will default to "
"False in version 1.1")

if table not in self.tablealiases:
self.tablealiases[table] = table.alias()
return self.tablealiases[table]
else:
return None

def visit_extract(self, extract, **kw):
field = self.extract_map.get(extract.field, extract.field)
return 'DATEPART(%s, %s)' % \
(field, self.process(extract.expr, **kw))

def visit_savepoint(self, savepoint_stmt):
return "SAVE TRANSACTION %s" % \
self.preparer.format_savepoint(savepoint_stmt)

def visit_rollback_to_savepoint(self, savepoint_stmt):
return ("ROLLBACK TRANSACTION %s"
% self.preparer.format_savepoint(savepoint_stmt))

def visit_binary(self, binary, **kwargs):
"""Move bind parameters to the right-hand side of an operator, where

@@ -1141,8 +1319,11 @@ class MSSQLStrictCompiler(MSSQLCompiler):
class MSDDLCompiler(compiler.DDLCompiler):

def get_column_specification(self, column, **kwargs):
colspec = (self.preparer.format_column(column) + " "
+ self.dialect.type_compiler.process(column.type))
colspec = (
self.preparer.format_column(column) + " "
+ self.dialect.type_compiler.process(
column.type, type_expression=column)
)

if column.nullable is not None:
if not column.nullable or column.primary_key or \

@@ -1321,6 +1502,10 @@ class MSDialect(default.DefaultDialect):
sqltypes.Time: TIME,
}

engine_config_types = default.DefaultDialect.engine_config_types.union([
('legacy_schema_aliasing', util.asbool),
])

ischema_names = ischema_names

supports_native_boolean = False

@@ -1351,13 +1536,24 @@ class MSDialect(default.DefaultDialect):
query_timeout=None,
use_scope_identity=True,
max_identifier_length=None,
schema_name="dbo", **opts):
schema_name="dbo",
deprecate_large_types=None,
legacy_schema_aliasing=None, **opts):
self.query_timeout = int(query_timeout or 0)
self.schema_name = schema_name

self.use_scope_identity = use_scope_identity
self.max_identifier_length = int(max_identifier_length or 0) or \
self.max_identifier_length
self.deprecate_large_types = deprecate_large_types

if legacy_schema_aliasing is None:
self.legacy_schema_aliasing = True
self._warn_schema_aliasing = True
else:
self.legacy_schema_aliasing = legacy_schema_aliasing
self._warn_schema_aliasing = False

super(MSDialect, self).__init__(**opts)

def do_savepoint(self, connection, name):
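The new ``legacy_schema_aliasing`` flag accepted above can be set at engine
level; a minimal sketch, assuming a hypothetical pyodbc DSN named
``mydsn``::

    from sqlalchemy import create_engine

    # opting out of the legacy aliasing behavior also silences the
    # new warning emitted by _schema_aliased_table()
    engine = create_engine(
        "mssql+pyodbc://scott:tiger@mydsn",
        legacy_schema_aliasing=False)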
@@ -1371,21 +1567,31 @@ class MSDialect(default.DefaultDialect):

def initialize(self, connection):
super(MSDialect, self).initialize(connection)
self._setup_version_attributes()

def _setup_version_attributes(self):
if self.server_version_info[0] not in list(range(8, 17)):
# FreeTDS with version 4.2 seems to report here
# a number like "95.10.255". Don't know what
# that is. So emit warning.
# Use TDS Version 7.0 through 7.3, per the MS information here:
# https://msdn.microsoft.com/en-us/library/dd339982.aspx
# and FreeTDS information here (7.3 highest supported version):
# http://www.freetds.org/userguide/choosingtdsprotocol.htm
util.warn(
"Unrecognized server version info '%s'. Version specific "
"behaviors may not function properly. If using ODBC "
"with FreeTDS, ensure server version 7.0 or 8.0, not 4.2, "
"is configured in the FreeTDS configuration." %
"with FreeTDS, ensure TDS_VERSION 7.0 through 7.3, not "
"4.2, is configured in the FreeTDS configuration." %
".".join(str(x) for x in self.server_version_info))
if self.server_version_info >= MS_2005_VERSION and \
'implicit_returning' not in self.__dict__:
self.implicit_returning = True
if self.server_version_info >= MS_2008_VERSION:
self.supports_multivalues_insert = True
if self.deprecate_large_types is None:
self.deprecate_large_types = \
self.server_version_info >= MS_2012_VERSION

def _get_default_schema_name(self, connection):
if self.server_version_info < MS_2005_VERSION:

@@ -1573,12 +1779,11 @@ class MSDialect(default.DefaultDialect):
if coltype in (MSString, MSChar, MSNVarchar, MSNChar, MSText,
MSNText, MSBinary, MSVarBinary,
sqltypes.LargeBinary):
if charlen == -1:
charlen = 'max'
kwargs['length'] = charlen
if collation:
kwargs['collation'] = collation
if coltype == MSText or \
(coltype in (MSString, MSNVarchar) and charlen == -1):
kwargs.pop('length')

if coltype is None:
util.warn(
@@ -1,5 +1,5 @@
# mssql/information_schema.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under

@@ -1,5 +1,5 @@
# mssql/mxodbc.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under

@@ -1,5 +1,5 @@
# mssql/pymssql.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under

@@ -46,11 +46,12 @@ class MSDialect_pymssql(MSDialect):
@classmethod
def dbapi(cls):
module = __import__('pymssql')
# pymssql doesn't have a Binary method. we use string
# pymssql < 2.1.1 doesn't have a Binary method. we use string
client_ver = tuple(int(x) for x in module.__version__.split("."))
if client_ver < (2, 1, 1):
# TODO: monkeypatching here is less than ideal
module.Binary = lambda x: x if hasattr(x, 'decode') else str(x)

client_ver = tuple(int(x) for x in module.__version__.split("."))
if client_ver < (1, ):
util.warn("The pymssql dialect expects at least "
"the 1.0 series of the pymssql DBAPI.")

@@ -63,7 +64,7 @@ class MSDialect_pymssql(MSDialect):
def _get_server_version_info(self, connection):
vers = connection.scalar("select @@version")
m = re.match(
r"Microsoft SQL Server.*? - (\d+).(\d+).(\d+).(\d+)", vers)
r"Microsoft .*? - (\d+).(\d+).(\d+).(\d+)", vers)
if m:
return tuple(int(x) for x in m.group(1, 2, 3, 4))
else:

@@ -84,7 +85,8 @@ class MSDialect_pymssql(MSDialect):
"message 20003", # connection timeout
"Error 10054",
"Not connected to any MS SQL server",
"Connection is closed"
"Connection is closed",
"message 20006", # Write to the server failed
):
if msg in str(e):
return True
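As a quick check on the loosened version regex above, it no longer requires
the banner to begin with "Microsoft SQL Server"; a minimal sketch against a
hypothetical Azure-style banner string::

    import re

    vers = "Microsoft SQL Azure (RTM) - 12.0.2000.8"
    m = re.match(r"Microsoft .*? - (\d+).(\d+).(\d+).(\d+)", vers)
    # yields the same four-part tuple the dialect expects
    assert tuple(int(x) for x in m.group(1, 2, 3, 4)) == (12, 0, 2000, 8)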
@@ -1,5 +1,5 @@
# mssql/pyodbc.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under

@@ -12,74 +12,57 @@
:connectstring: mssql+pyodbc://<username>:<password>@<dsnname>
:url: http://pypi.python.org/pypi/pyodbc/

Additional Connection Examples
-------------------------------
Connecting to PyODBC
--------------------

Examples of pyodbc connection string URLs:
The URL here is to be translated to PyODBC connection strings, as
detailed in `ConnectionStrings <https://code.google.com/p/pyodbc/wiki/ConnectionStrings>`_.

* ``mssql+pyodbc://mydsn`` - connects using the specified DSN named ``mydsn``.
The connection string that is created will appear like::
DSN Connections
^^^^^^^^^^^^^^^

dsn=mydsn;Trusted_Connection=Yes
A DSN-based connection is **preferred** overall when using ODBC. A
basic DSN-based connection looks like::

* ``mssql+pyodbc://user:pass@mydsn`` - connects using the DSN named
``mydsn`` passing in the ``UID`` and ``PWD`` information. The
connection string that is created will appear like::
engine = create_engine("mssql+pyodbc://scott:tiger@some_dsn")

Which above, will pass the following connection string to PyODBC::

dsn=mydsn;UID=user;PWD=pass

* ``mssql+pyodbc://user:pass@mydsn/?LANGUAGE=us_english`` - connects
using the DSN named ``mydsn`` passing in the ``UID`` and ``PWD``
information, plus the additional connection configuration option
``LANGUAGE``. The connection string that is created will appear
like::
If the username and password are omitted, the DSN form will also add
the ``Trusted_Connection=yes`` directive to the ODBC string.

dsn=mydsn;UID=user;PWD=pass;LANGUAGE=us_english
Hostname Connections
^^^^^^^^^^^^^^^^^^^^

* ``mssql+pyodbc://user:pass@host/db`` - connects using a connection
that would appear like::
Hostname-based connections are **not preferred**, however are supported.
The ODBC driver name must be explicitly specified::

DRIVER={SQL Server};Server=host;Database=db;UID=user;PWD=pass
engine = create_engine("mssql+pyodbc://scott:tiger@myhost:port/databasename?driver=SQL+Server+Native+Client+10.0")

* ``mssql+pyodbc://user:pass@host:123/db`` - connects using a connection
string which includes the port
information using the comma syntax. This will create the following
connection string::
.. versionchanged:: 1.0.0 Hostname-based PyODBC connections now require the
SQL Server driver name specified explicitly. SQLAlchemy cannot
choose an optimal default here as it varies based on platform
and installed drivers.

DRIVER={SQL Server};Server=host,123;Database=db;UID=user;PWD=pass
Other keywords interpreted by the Pyodbc dialect to be passed to
``pyodbc.connect()`` in both the DSN and hostname cases include:
``odbc_autotranslate``, ``ansi``, ``unicode_results``, ``autocommit``.

* ``mssql+pyodbc://user:pass@host/db?port=123`` - connects using a connection
string that includes the port
information as a separate ``port`` keyword. This will create the
following connection string::
Pass through exact Pyodbc string
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

DRIVER={SQL Server};Server=host;Database=db;UID=user;PWD=pass;port=123
A PyODBC connection string can also be sent exactly as specified in
`ConnectionStrings <https://code.google.com/p/pyodbc/wiki/ConnectionStrings>`_
into the driver using the parameter ``odbc_connect``. The delimiters must be URL escaped, however,
as illustrated below using ``urllib.quote_plus``::

* ``mssql+pyodbc://user:pass@host/db?driver=MyDriver`` - connects using a
connection string that includes a custom ODBC driver name. This will create
the following connection string::
import urllib
params = urllib.quote_plus("DRIVER={SQL Server Native Client 10.0};SERVER=dagger;DATABASE=test;UID=user;PWD=password")

DRIVER={MyDriver};Server=host;Database=db;UID=user;PWD=pass
engine = create_engine("mssql+pyodbc:///?odbc_connect=%s" % params)

If you require a connection string that is outside the options
presented above, use the ``odbc_connect`` keyword to pass in a
urlencoded connection string. What gets passed in will be urldecoded
and passed directly.

For example::

mssql+pyodbc:///?odbc_connect=dsn%3Dmydsn%3BDatabase%3Ddb

would create the following connection string::

dsn=mydsn;Database=db

Encoding your connection string can be easily accomplished through
the python shell. For example::

>>> import urllib
>>> urllib.quote_plus('dsn=mydsn;Database=db')
'dsn%3Dmydsn%3BDatabase%3Ddb'
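The ``urllib.quote_plus`` calls in the new documentation are the Python 2
spelling; on Python 3 the same escaping lives in ``urllib.parse``. A
minimal sketch, assuming the same hypothetical connection string::

    import urllib.parse
    from sqlalchemy import create_engine

    params = urllib.parse.quote_plus(
        "DRIVER={SQL Server Native Client 10.0};"
        "SERVER=dagger;DATABASE=test;UID=user;PWD=password")
    engine = create_engine("mssql+pyodbc:///?odbc_connect=%s" % params)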
Unicode Binds
-------------
@@ -112,7 +95,7 @@ for unix + PyODBC.

"""

from .base import MSExecutionContext, MSDialect
from .base import MSExecutionContext, MSDialect, VARBINARY
from ...connectors.pyodbc import PyODBCConnector
from ... import types as sqltypes, util
import decimal

@@ -191,6 +174,22 @@ class _MSFloat_pyodbc(_ms_numeric_pyodbc, sqltypes.Float):
pass
class _VARBINARY_pyodbc(VARBINARY):
def bind_processor(self, dialect):
if dialect.dbapi is None:
return None

DBAPIBinary = dialect.dbapi.Binary

def process(value):
if value is not None:
return DBAPIBinary(value)
else:
# pyodbc-specific
return dialect.dbapi.BinaryNull
return process


class MSExecutionContext_pyodbc(MSExecutionContext):
_embedded_scope_identity = False

@@ -243,13 +242,13 @@ class MSDialect_pyodbc(PyODBCConnector, MSDialect):

execution_ctx_cls = MSExecutionContext_pyodbc

pyodbc_driver_name = 'SQL Server'

colspecs = util.update_copy(
MSDialect.colspecs,
{
sqltypes.Numeric: _MSNumeric_pyodbc,
sqltypes.Float: _MSFloat_pyodbc
sqltypes.Float: _MSFloat_pyodbc,
VARBINARY: _VARBINARY_pyodbc,
sqltypes.LargeBinary: _VARBINARY_pyodbc,
}
)
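The ``BinaryNull`` handling above takes effect when a binary column is set
to ``None``; a minimal sketch, assuming an ``engine`` already bound to the
``mssql+pyodbc`` dialect and a hypothetical table::

    from sqlalchemy import Column, Integer, LargeBinary, MetaData, Table

    t = Table("docs", MetaData(),
              Column("id", Integer, primary_key=True),
              Column("payload", LargeBinary))
    # None is now bound as pyodbc.BinaryNull rather than a plain None
    engine.execute(t.insert(), id=1, payload=None)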
@@ -1,5 +1,5 @@
# mssql/zxjdbc.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under

@@ -13,6 +13,8 @@
[?key=value&key=value...]
:driverurl: http://jtds.sourceforge.net/

.. note:: Jython is not supported by current versions of SQLAlchemy. The
zxjdbc dialect should be considered as experimental.

"""
from ...connectors.zxJDBC import ZxJDBCConnector
@@ -1,5 +1,5 @@
# mysql/__init__.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under

File diff suppressed because it is too large
@@ -1,5 +1,5 @@
# mysql/cymysql.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -1,5 +1,5 @@
# mysql/gaerdbms.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under

@@ -17,6 +17,13 @@ developers-guide

.. versionadded:: 0.7.8

.. deprecated:: 1.0 This dialect is **no longer necessary** for
Google Cloud SQL; the MySQLdb dialect can be used directly.
Cloud SQL now recommends creating connections via the
mysql dialect using the URL format

``mysql+mysqldb://root@/<dbname>?unix_socket=/cloudsql/<projectid>:<instancename>``


Pooling
-------

@@ -33,6 +40,7 @@ import os
from .mysqldb import MySQLDialect_mysqldb
from ...pool import NullPool
import re
from sqlalchemy.util import warn_deprecated


def _is_dev_environment():

@@ -43,6 +51,14 @@ class MySQLDialect_gaerdbms(MySQLDialect_mysqldb):

@classmethod
def dbapi(cls):

warn_deprecated(
"Google Cloud SQL now recommends creating connections via the "
"MySQLdb dialect directly, using the URL format "
"mysql+mysqldb://root@/<dbname>?unix_socket=/cloudsql/"
"<projectid>:<instancename>"
)

# from django:
# http://code.google.com/p/googleappengine/source/
# browse/trunk/python/google/storage/speckle/
@@ -1,5 +1,5 @@
# mysql/mysqlconnector.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under

@@ -14,6 +14,12 @@
:url: http://dev.mysql.com/downloads/connector/python/


Unicode
-------

Please see :ref:`mysql_unicode` for current recommendations on unicode
handling.

"""

from .base import (MySQLDialect, MySQLExecutionContext,

@@ -21,6 +27,7 @@ from .base import (MySQLDialect, MySQLExecutionContext,
BIT)

from ... import util
import re


class MySQLExecutionContext_mysqlconnector(MySQLExecutionContext):

@@ -31,18 +38,34 @@ class MySQLExecutionContext_mysqlconnector(MySQLExecutionContext):
class MySQLCompiler_mysqlconnector(MySQLCompiler):
def visit_mod_binary(self, binary, operator, **kw):
if self.dialect._mysqlconnector_double_percents:
return self.process(binary.left, **kw) + " %% " + \
self.process(binary.right, **kw)
else:
return self.process(binary.left, **kw) + " % " + \
self.process(binary.right, **kw)

def post_process_text(self, text):
if self.dialect._mysqlconnector_double_percents:
return text.replace('%', '%%')
else:
return text

def escape_literal_column(self, text):
if self.dialect._mysqlconnector_double_percents:
return text.replace('%', '%%')
else:
return text


class MySQLIdentifierPreparer_mysqlconnector(MySQLIdentifierPreparer):

def _escape_identifier(self, value):
value = value.replace(self.escape_quote, self.escape_to_quote)
if self.dialect._mysqlconnector_double_percents:
return value.replace("%", "%%")
else:
return value


class _myconnpyBIT(BIT):

@@ -55,8 +78,6 @@ class _myconnpyBIT(BIT):
class MySQLDialect_mysqlconnector(MySQLDialect):
driver = 'mysqlconnector'

if util.py2k:
supports_unicode_statements = False
supports_unicode_binds = True

supports_sane_rowcount = True

@@ -77,6 +98,10 @@ class MySQLDialect_mysqlconnector(MySQLDialect):
}
)

@util.memoized_property
def supports_unicode_statements(self):
return util.py3k or self._mysqlconnector_version_info > (2, 0)

@classmethod
def dbapi(cls):
from mysql import connector

@@ -89,8 +114,10 @@ class MySQLDialect_mysqlconnector(MySQLDialect):

util.coerce_kw_type(opts, 'buffered', bool)
util.coerce_kw_type(opts, 'raise_on_warnings', bool)

# unfortunately, MySQL/connector python refuses to release a
# cursor without reading fully, so non-buffered isn't an option
opts.setdefault('buffered', True)
opts.setdefault('raise_on_warnings', True)

# FOUND_ROWS must be set in ClientFlag to enable
# supports_sane_rowcount.

@@ -101,10 +128,25 @@ class MySQLDialect_mysqlconnector(MySQLDialect):
'client_flags', ClientFlag.get_default())
client_flags |= ClientFlag.FOUND_ROWS
opts['client_flags'] = client_flags
except:
except Exception:
pass
return [[], opts]

@util.memoized_property
def _mysqlconnector_version_info(self):
if self.dbapi and hasattr(self.dbapi, '__version__'):
m = re.match(r'(\d+)\.(\d+)(?:\.(\d+))?',
self.dbapi.__version__)
if m:
return tuple(
int(x)
for x in m.group(1, 2, 3)
if x is not None)

@util.memoized_property
def _mysqlconnector_double_percents(self):
return not util.py3k and self._mysqlconnector_version_info < (2, 0)

def _get_server_version_info(self, connection):
dbapi_con = connection.connection
version = dbapi_con.get_server_version()
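The memoized version parse above tolerates both two- and three-part
version strings; a minimal sketch of the same regex against a hypothetical
``__version__`` value::

    import re

    m = re.match(r'(\d+)\.(\d+)(?:\.(\d+))?', "2.0")
    # the optional third group is dropped when absent
    assert tuple(int(x) for x in m.group(1, 2, 3) if x is not None) == (2, 0)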
@@ -1,5 +1,5 @@
# mysql/mysqldb.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under

@@ -13,73 +13,96 @@
:connectstring: mysql+mysqldb://<user>:<password>@<host>[:<port>]/<dbname>
:url: http://sourceforge.net/projects/mysql-python

.. _mysqldb_unicode:

Unicode
-------

MySQLdb requires a "charset" parameter to be passed in order for it
to handle non-ASCII characters correctly. When this parameter is passed,
MySQLdb will also implicitly set the "use_unicode" flag to true, which means
that it will return Python unicode objects instead of bytestrings.
However, SQLAlchemy's decode process, when C extensions are enabled,
is orders of magnitude faster than that of MySQLdb as it does not call into
Python functions to do so. Therefore, the **recommended URL to use for
unicode** will include both charset and use_unicode=0::
Please see :ref:`mysql_unicode` for current recommendations on unicode
handling.

create_engine("mysql+mysqldb://user:pass@host/dbname?charset=utf8&use_unicode=0")
Py3K Support
------------

As of this writing, MySQLdb only runs on Python 2. It is not known how
MySQLdb behaves on Python 3 as far as unicode decoding.
Currently, MySQLdb only runs on Python 2 and development has been stopped.
`mysqlclient`_ is a fork of MySQLdb and provides Python 3 support as well
as some bugfixes.

.. _mysqlclient: https://github.com/PyMySQL/mysqlclient-python

Known Issues
-------------
Using MySQLdb with Google Cloud SQL
-----------------------------------

MySQL-python version 1.2.2 has a serious memory leak related
to unicode conversion, a feature which is disabled via ``use_unicode=0``.
It is strongly advised to use the latest version of MySQL-Python.
Google Cloud SQL now recommends use of the MySQLdb dialect. Connect
using a URL like the following::

mysql+mysqldb://root@/<dbname>?unix_socket=/cloudsql/<projectid>:<instancename>

"""

from .base import (MySQLDialect, MySQLExecutionContext,
MySQLCompiler, MySQLIdentifierPreparer)
from ...connectors.mysqldb import (
MySQLDBExecutionContext,
MySQLDBCompiler,
MySQLDBIdentifierPreparer,
MySQLDBConnector
)
from .base import TEXT
from ... import sql
from ... import util
import re
class MySQLExecutionContext_mysqldb(
MySQLDBExecutionContext,
MySQLExecutionContext):
pass
class MySQLExecutionContext_mysqldb(MySQLExecutionContext):

@property
def rowcount(self):
if hasattr(self, '_rowcount'):
return self._rowcount
else:
return self.cursor.rowcount


class MySQLCompiler_mysqldb(MySQLDBCompiler, MySQLCompiler):
pass
class MySQLCompiler_mysqldb(MySQLCompiler):
def visit_mod_binary(self, binary, operator, **kw):
return self.process(binary.left, **kw) + " %% " + \
self.process(binary.right, **kw)

def post_process_text(self, text):
return text.replace('%', '%%')


class MySQLIdentifierPreparer_mysqldb(
MySQLDBIdentifierPreparer,
MySQLIdentifierPreparer):
pass
class MySQLIdentifierPreparer_mysqldb(MySQLIdentifierPreparer):

def _escape_identifier(self, value):
value = value.replace(self.escape_quote, self.escape_to_quote)
return value.replace("%", "%%")


class MySQLDialect_mysqldb(MySQLDBConnector, MySQLDialect):
class MySQLDialect_mysqldb(MySQLDialect):
driver = 'mysqldb'
supports_unicode_statements = True
supports_sane_rowcount = True
supports_sane_multi_rowcount = True

supports_native_decimal = True

default_paramstyle = 'format'
execution_ctx_cls = MySQLExecutionContext_mysqldb
statement_compiler = MySQLCompiler_mysqldb
preparer = MySQLIdentifierPreparer_mysqldb

@classmethod
def dbapi(cls):
return __import__('MySQLdb')

def do_executemany(self, cursor, statement, parameters, context=None):
rowcount = cursor.executemany(statement, parameters)
if context is not None:
context._rowcount = rowcount

def _check_unicode_returns(self, connection):
# work around issue fixed in
# https://github.com/farcepest/MySQLdb1/commit/cd44524fef63bd3fcb71947392326e9742d520e8
# specific issue w/ the utf8_bin collation and unicode returns

has_utf8_bin = connection.scalar(
has_utf8_bin = self.server_version_info > (5, ) and \
connection.scalar(
"show collation where %s = 'utf8' and %s = 'utf8_bin'"
% (
self.identifier_preparer.quote("Charset"),

@@ -94,7 +117,82 @@ class MySQLDialect_mysqldb(MySQLDBConnector, MySQLDialect):
]
else:
additional_tests = []
return super(MySQLDBConnector, self)._check_unicode_returns(
return super(MySQLDialect_mysqldb, self)._check_unicode_returns(
connection, additional_tests)

def create_connect_args(self, url):
opts = url.translate_connect_args(database='db', username='user',
password='passwd')
opts.update(url.query)

util.coerce_kw_type(opts, 'compress', bool)
util.coerce_kw_type(opts, 'connect_timeout', int)
util.coerce_kw_type(opts, 'read_timeout', int)
util.coerce_kw_type(opts, 'client_flag', int)
util.coerce_kw_type(opts, 'local_infile', int)
# Note: using either of the below will cause all strings to be
# returned as Unicode, both in raw SQL operations and with column
# types like String and MSString.
util.coerce_kw_type(opts, 'use_unicode', bool)
util.coerce_kw_type(opts, 'charset', str)

# Rich values 'cursorclass' and 'conv' are not supported via
# query string.

ssl = {}
keys = ['ssl_ca', 'ssl_key', 'ssl_cert', 'ssl_capath', 'ssl_cipher']
for key in keys:
if key in opts:
ssl[key[4:]] = opts[key]
util.coerce_kw_type(ssl, key[4:], str)
del opts[key]
if ssl:
opts['ssl'] = ssl

# FOUND_ROWS must be set in CLIENT_FLAGS to enable
# supports_sane_rowcount.
client_flag = opts.get('client_flag', 0)
if self.dbapi is not None:
try:
CLIENT_FLAGS = __import__(
self.dbapi.__name__ + '.constants.CLIENT'
).constants.CLIENT
client_flag |= CLIENT_FLAGS.FOUND_ROWS
except (AttributeError, ImportError):
self.supports_sane_rowcount = False
opts['client_flag'] = client_flag
return [[], opts]

def _get_server_version_info(self, connection):
dbapi_con = connection.connection
version = []
r = re.compile('[.\-]')
for n in r.split(dbapi_con.get_server_info()):
try:
version.append(int(n))
except ValueError:
version.append(n)
return tuple(version)

def _extract_error_code(self, exception):
return exception.args[0]

def _detect_charset(self, connection):
"""Sniff out the character set in use for connection results."""

try:
# note: the SQL here would be
# "SHOW VARIABLES LIKE 'character_set%%'"
cset_name = connection.connection.character_set_name
except AttributeError:
util.warn(
"No 'character_set_name' can be detected with "
"this MySQL-Python version; "
"please upgrade to a recent version of MySQL-Python. "
"Assuming latin1.")
return 'latin1'
else:
return cset_name()


dialect = MySQLDialect_mysqldb
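The ``ssl_*`` query-string handling added above collects the individual
keys into the single ``ssl`` dict that MySQLdb expects; a minimal sketch,
with hypothetical certificate paths::

    from sqlalchemy import create_engine

    engine = create_engine(
        "mysql+mysqldb://scott:tiger@host/dbname"
        "?ssl_ca=/path/ca.pem&ssl_cert=/path/client-cert.pem"
        "&ssl_key=/path/client-key.pem")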
@@ -1,5 +1,5 @@
# mysql/oursql.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under

@@ -16,22 +16,10 @@
Unicode
-------

oursql defaults to using ``utf8`` as the connection charset, but other
encodings may be used instead. Like the MySQL-Python driver, unicode support
can be completely disabled::
Please see :ref:`mysql_unicode` for current recommendations on unicode
handling.

# oursql sets the connection charset to utf8 automatically; all strings come
# back as utf8 str
create_engine('mysql+oursql:///mydb?use_unicode=0')

To not automatically use ``utf8`` and instead use whatever the connection
defaults to, there is a separate parameter::

# use the default connection charset; all strings come back as unicode
create_engine('mysql+oursql:///mydb?default_charset=1')

# use latin1 as the connection charset; all strings come back as unicode
create_engine('mysql+oursql:///mydb?charset=latin1')
"""

import re
@@ -1,5 +1,5 @@
# mysql/pymysql.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under

@@ -12,7 +12,13 @@
:dbapi: pymysql
:connectstring: mysql+pymysql://<username>:<password>@<host>/<dbname>\
[?<options>]
:url: http://code.google.com/p/pymysql/
:url: http://www.pymysql.org/

Unicode
-------

Please see :ref:`mysql_unicode` for current recommendations on unicode
handling.

MySQL-Python Compatibility
--------------------------

@@ -31,8 +37,12 @@ class MySQLDialect_pymysql(MySQLDialect_mysqldb):
driver = 'pymysql'

description_encoding = None
if py3k:

# generally, these two values should be both True
# or both False. PyMySQL unicode tests pass all the way back
# to 0.4 either way. See [ticket:3337]
supports_unicode_statements = True
supports_unicode_binds = True

@classmethod
def dbapi(cls):
@@ -1,5 +1,5 @@
# mysql/pyodbc.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under

@@ -14,14 +14,11 @@
:connectstring: mysql+pyodbc://<username>:<password>@<dsnname>
:url: http://pypi.python.org/pypi/pyodbc/


Limitations
-----------

The mysql-pyodbc dialect is subject to unresolved character encoding issues
.. note:: The PyODBC for MySQL dialect is not well supported, and
is subject to unresolved character encoding issues
which exist within the current ODBC drivers available.
(see http://code.google.com/p/pyodbc/issues/detail?id=25). Consider usage
of OurSQL, MySQLdb, or MySQL-connector/Python.
(see http://code.google.com/p/pyodbc/issues/detail?id=25).
Other dialects for MySQL are recommended.

"""
@@ -1,5 +1,5 @@
# mysql/zxjdbc.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under

@@ -14,6 +14,9 @@
<database>
:driverurl: http://dev.mysql.com/downloads/connector/j/

.. note:: Jython is not supported by current versions of SQLAlchemy. The
zxjdbc dialect should be considered as experimental.

Character Sets
--------------
@@ -1,5 +1,5 @@
# oracle/__init__.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -1,5 +1,5 @@
# oracle/base.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under

@@ -213,15 +213,81 @@ is reflected and the type is reported as ``DATE``, the time-supporting
examining the type of column for use in special Python translations or
for migrating schemas to other database backends.

.. _oracle_table_options:

Oracle Table Options
-------------------------

The CREATE TABLE phrase supports the following options with Oracle
in conjunction with the :class:`.Table` construct:


* ``ON COMMIT``::

Table(
"some_table", metadata, ...,
prefixes=['GLOBAL TEMPORARY'], oracle_on_commit='PRESERVE ROWS')

.. versionadded:: 1.0.0

* ``COMPRESS``::

Table('mytable', metadata, Column('data', String(32)),
oracle_compress=True)

Table('mytable', metadata, Column('data', String(32)),
oracle_compress=6)

The ``oracle_compress`` parameter accepts either an integer compression
level, or ``True`` to use the default compression level.

.. versionadded:: 1.0.0

.. _oracle_index_options:

Oracle Specific Index Options
-----------------------------

Bitmap Indexes
~~~~~~~~~~~~~~

You can specify the ``oracle_bitmap`` parameter to create a bitmap index
instead of a B-tree index::

Index('my_index', my_table.c.data, oracle_bitmap=True)

Bitmap indexes cannot be unique and cannot be compressed. SQLAlchemy will not
check for such limitations, only the database will.

.. versionadded:: 1.0.0

Index compression
~~~~~~~~~~~~~~~~~

Oracle has a more efficient storage mode for indexes containing lots of
repeated values. Use the ``oracle_compress`` parameter to turn on key
compression::

Index('my_index', my_table.c.data, oracle_compress=True)

Index('my_index', my_table.c.data1, my_table.c.data2, unique=True,
oracle_compress=1)

The ``oracle_compress`` parameter accepts either an integer specifying the
number of prefix columns to compress, or ``True`` to use the default (all
columns for non-unique indexes, all but the last column for unique indexes).

.. versionadded:: 1.0.0

"""
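A fuller version of the ``ON COMMIT`` fragment above (whose column list is
elided with ``...`` in the docs), using a hypothetical ``data`` column,
would emit ``CREATE GLOBAL TEMPORARY TABLE ... ON COMMIT PRESERVE ROWS``::

    from sqlalchemy import Column, MetaData, String, Table

    metadata = MetaData()
    scratch = Table(
        "scratch", metadata,
        Column("data", String(32)),
        prefixes=["GLOBAL TEMPORARY"],
        oracle_on_commit="PRESERVE ROWS")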
import re

from sqlalchemy import util, sql
from sqlalchemy.engine import default, base, reflection
from sqlalchemy.engine import default, reflection
from sqlalchemy.sql import compiler, visitors, expression
from sqlalchemy.sql import (operators as sql_operators,
functions as sql_functions)
from sqlalchemy.sql import operators as sql_operators
from sqlalchemy.sql.elements import quoted_name
from sqlalchemy import types as sqltypes, schema as sa_schema
from sqlalchemy.types import VARCHAR, NVARCHAR, CHAR, \
BLOB, CLOB, TIMESTAMP, FLOAT
@@ -300,7 +366,6 @@ class LONG(sqltypes.Text):


class DATE(sqltypes.DateTime):

"""Provide the oracle DATE type.

This type has no special Python behavior, except that it subclasses

@@ -349,7 +414,6 @@ class INTERVAL(sqltypes.TypeEngine):


class ROWID(sqltypes.TypeEngine):

"""Oracle ROWID type.

When used in a cast() or similar, generates ROWID.

@@ -359,7 +423,6 @@ class ROWID(sqltypes.TypeEngine):


class _OracleBoolean(sqltypes.Boolean):

def get_dbapi_type(self, dbapi):
return dbapi.NUMBER
@@ -395,19 +458,19 @@ class OracleTypeCompiler(compiler.GenericTypeCompiler):
# Oracle does not allow milliseconds in DATE
# Oracle does not support TIME columns

def visit_datetime(self, type_):
return self.visit_DATE(type_)
def visit_datetime(self, type_, **kw):
return self.visit_DATE(type_, **kw)

def visit_float(self, type_):
return self.visit_FLOAT(type_)
def visit_float(self, type_, **kw):
return self.visit_FLOAT(type_, **kw)

def visit_unicode(self, type_):
def visit_unicode(self, type_, **kw):
if self.dialect._supports_nchar:
return self.visit_NVARCHAR2(type_)
return self.visit_NVARCHAR2(type_, **kw)
else:
return self.visit_VARCHAR2(type_)
return self.visit_VARCHAR2(type_, **kw)

def visit_INTERVAL(self, type_):
def visit_INTERVAL(self, type_, **kw):
return "INTERVAL DAY%s TO SECOND%s" % (
type_.day_precision is not None and
"(%d)" % type_.day_precision or

@@ -417,22 +480,22 @@ class OracleTypeCompiler(compiler.GenericTypeCompiler):
"",
)

def visit_LONG(self, type_):
def visit_LONG(self, type_, **kw):
return "LONG"

def visit_TIMESTAMP(self, type_):
def visit_TIMESTAMP(self, type_, **kw):
if type_.timezone:
return "TIMESTAMP WITH TIME ZONE"
else:
return "TIMESTAMP"

def visit_DOUBLE_PRECISION(self, type_):
return self._generate_numeric(type_, "DOUBLE PRECISION")
def visit_DOUBLE_PRECISION(self, type_, **kw):
return self._generate_numeric(type_, "DOUBLE PRECISION", **kw)

def visit_NUMBER(self, type_, **kw):
return self._generate_numeric(type_, "NUMBER", **kw)

def _generate_numeric(self, type_, name, precision=None, scale=None):
def _generate_numeric(self, type_, name, precision=None, scale=None, **kw):
if precision is None:
precision = type_.precision

@@ -448,17 +511,17 @@ class OracleTypeCompiler(compiler.GenericTypeCompiler):
n = "%(name)s(%(precision)s, %(scale)s)"
return n % {'name': name, 'precision': precision, 'scale': scale}

def visit_string(self, type_):
return self.visit_VARCHAR2(type_)
def visit_string(self, type_, **kw):
return self.visit_VARCHAR2(type_, **kw)

def visit_VARCHAR2(self, type_):
def visit_VARCHAR2(self, type_, **kw):
return self._visit_varchar(type_, '', '2')

def visit_NVARCHAR2(self, type_):
def visit_NVARCHAR2(self, type_, **kw):
return self._visit_varchar(type_, 'N', '2')
visit_NVARCHAR = visit_NVARCHAR2

def visit_VARCHAR(self, type_):
def visit_VARCHAR(self, type_, **kw):
return self._visit_varchar(type_, '', '')

def _visit_varchar(self, type_, n, num):

@@ -471,36 +534,35 @@ class OracleTypeCompiler(compiler.GenericTypeCompiler):
varchar = "%(n)sVARCHAR%(two)s(%(length)s)"
return varchar % {'length': type_.length, 'two': num, 'n': n}

def visit_text(self, type_):
return self.visit_CLOB(type_)
def visit_text(self, type_, **kw):
return self.visit_CLOB(type_, **kw)

def visit_unicode_text(self, type_):
def visit_unicode_text(self, type_, **kw):
if self.dialect._supports_nchar:
return self.visit_NCLOB(type_)
return self.visit_NCLOB(type_, **kw)
else:
return self.visit_CLOB(type_)
return self.visit_CLOB(type_, **kw)

def visit_large_binary(self, type_):
return self.visit_BLOB(type_)
def visit_large_binary(self, type_, **kw):
return self.visit_BLOB(type_, **kw)

def visit_big_integer(self, type_):
return self.visit_NUMBER(type_, precision=19)
def visit_big_integer(self, type_, **kw):
return self.visit_NUMBER(type_, precision=19, **kw)

def visit_boolean(self, type_):
return self.visit_SMALLINT(type_)
def visit_boolean(self, type_, **kw):
return self.visit_SMALLINT(type_, **kw)

def visit_RAW(self, type_):
def visit_RAW(self, type_, **kw):
if type_.length:
return "RAW(%(length)s)" % {'length': type_.length}
else:
return "RAW"

def visit_ROWID(self, type_):
def visit_ROWID(self, type_, **kw):
return "ROWID"


class OracleCompiler(compiler.SQLCompiler):

"""Oracle compiler modifies the lexical structure of Select
statements to work under non-ANSI configured Oracle databases, if
the use_ansi flag is False.
@@ -538,6 +600,9 @@ class OracleCompiler(compiler.SQLCompiler):
def visit_false(self, expr, **kw):
return '0'

def get_cte_preamble(self, recursive):
return "WITH"

def get_select_hint_text(self, byfroms):
return " ".join(
"/*+ %s */" % text for table, text in byfroms.items()

@@ -601,29 +666,17 @@ class OracleCompiler(compiler.SQLCompiler):
else:
return sql.and_(*clauses)

def visit_outer_join_column(self, vc):
return self.process(vc.column) + "(+)"
def visit_outer_join_column(self, vc, **kw):
return self.process(vc.column, **kw) + "(+)"

def visit_sequence(self, seq):
return (self.dialect.identifier_preparer.format_sequence(seq) +
".nextval")

def visit_alias(self, alias, asfrom=False, ashint=False, **kwargs):
"""Oracle doesn't like ``FROM table AS alias``. Is the AS standard
SQL??
"""
def get_render_as_alias_suffix(self, alias_name_text):
"""Oracle doesn't like ``FROM table AS alias``"""

if asfrom or ashint:
alias_name = isinstance(alias.name, expression._truncated_label) and \
self._truncated_identifier("alias", alias.name) or alias.name

if ashint:
return alias_name
elif asfrom:
return self.process(alias.original, asfrom=asfrom, **kwargs) + \
" " + self.preparer.format_alias(alias, alias_name)
else:
return self.process(alias.original, **kwargs)
return " " + alias_name_text

def returning_clause(self, stmt, returning_cols):
columns = []
@@ -640,8 +693,9 @@ class OracleCompiler(compiler.SQLCompiler):
self.bindparam_string(self._truncate_bindparam(outparam)))
columns.append(
self.process(col_expr, within_columns_clause=False))
self.result_map[outparam.key] = (
outparam.key,

self._add_to_result_map(
outparam.key, outparam.key,
(column, getattr(column, 'name', None),
getattr(column, 'key', None)),
column.type

@@ -669,7 +723,9 @@ class OracleCompiler(compiler.SQLCompiler):
select = select.where(whereclause)
select._oracle_visit = True

if select._limit is not None or select._offset is not None:
limit_clause = select._limit_clause
offset_clause = select._offset_clause
if limit_clause is not None or offset_clause is not None:
# See http://www.oracle.com/technology/oramag/oracle/06-sep/\
# o56asktom.html
#

@@ -682,13 +738,15 @@ class OracleCompiler(compiler.SQLCompiler):
# Outer select and "ROWNUM as ora_rn" can be dropped if
# limit=0

# TODO: use annotations instead of clone + attr set ?
kwargs['select_wraps_for'] = select
select = select._generate()
select._oracle_visit = True

# Wrap the middle select and add the hint
limitselect = sql.select([c for c in select.c])
if select._limit and self.dialect.optimize_limits:
if limit_clause is not None and \
self.dialect.optimize_limits and \
select._simple_int_limit:
limitselect = limitselect.prefix_with(
"/*+ FIRST_ROWS(%d) */" %
select._limit)

@@ -697,17 +755,24 @@ class OracleCompiler(compiler.SQLCompiler):
limitselect._is_wrapper = True

# If needed, add the limiting clause
if select._limit is not None:
max_row = select._limit
if select._offset is not None:
max_row += select._offset
if limit_clause is not None:
if not self.dialect.use_binds_for_limits:
# use simple int limits, will raise an exception
# if the limit isn't specified this way
max_row = select._limit

if offset_clause is not None:
max_row += select._offset
max_row = sql.literal_column("%d" % max_row)
else:
max_row = limit_clause
if offset_clause is not None:
max_row = max_row + offset_clause
limitselect.append_whereclause(
sql.literal_column("ROWNUM") <= max_row)

# If needed, add the ora_rn, and wrap again with offset.
if select._offset is None:
if offset_clause is None:
limitselect._for_update_arg = select._for_update_arg
select = limitselect
else:

@@ -721,22 +786,21 @@ class OracleCompiler(compiler.SQLCompiler):
offsetselect._oracle_visit = True
offsetselect._is_wrapper = True

offset_value = select._offset
if not self.dialect.use_binds_for_limits:
offset_value = sql.literal_column("%d" % offset_value)
offset_clause = sql.literal_column(
"%d" % select._offset)
offsetselect.append_whereclause(
sql.literal_column("ora_rn") > offset_value)
sql.literal_column("ora_rn") > offset_clause)

offsetselect._for_update_arg = select._for_update_arg
select = offsetselect

kwargs['iswrapper'] = getattr(select, '_is_wrapper', False)
return compiler.SQLCompiler.visit_select(self, select, **kwargs)

def limit_clause(self, select):
def limit_clause(self, select, **kw):
return ""

def for_update_clause(self, select):
def for_update_clause(self, select, **kw):
if self.is_subquery():
return ""
@@ -744,7 +808,7 @@ class OracleCompiler(compiler.SQLCompiler):

if select._for_update_arg.of:
tmp += ' OF ' + ', '.join(
self.process(elem) for elem in
self.process(elem, **kw) for elem in
select._for_update_arg.of
)
@@ -773,15 +837,57 @@ class OracleDDLCompiler(compiler.DDLCompiler):

return text

def visit_create_index(self, create, **kw):
return super(OracleDDLCompiler, self).\
visit_create_index(create, include_schema=True)
def visit_create_index(self, create):
index = create.element
self._verify_index_table(index)
preparer = self.preparer
text = "CREATE "
if index.unique:
text += "UNIQUE "
if index.dialect_options['oracle']['bitmap']:
text += "BITMAP "
text += "INDEX %s ON %s (%s)" % (
self._prepared_index_name(index, include_schema=True),
preparer.format_table(index.table, use_schema=True),
', '.join(
self.sql_compiler.process(
expr,
include_table=False, literal_binds=True)
for expr in index.expressions)
)
if index.dialect_options['oracle']['compress'] is not False:
if index.dialect_options['oracle']['compress'] is True:
text += " COMPRESS"
else:
text += " COMPRESS %d" % (
index.dialect_options['oracle']['compress']
)
return text

def post_create_table(self, table):
table_opts = []
opts = table.dialect_options['oracle']

if opts['on_commit']:
on_commit_options = opts['on_commit'].replace("_", " ").upper()
table_opts.append('\n ON COMMIT %s' % on_commit_options)

if opts['compress']:
if opts['compress'] is True:
table_opts.append("\n COMPRESS")
else:
table_opts.append("\n COMPRESS FOR %s" % (
opts['compress']
))

return ''.join(table_opts)
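Tying the new DDL branches above back to the docstring examples, an index
declared with ``oracle_bitmap=True`` compiles to ``CREATE BITMAP INDEX``;
a minimal sketch with hypothetical names::

    from sqlalchemy import Column, Index, Integer, MetaData, Table
    from sqlalchemy.schema import CreateIndex
    from sqlalchemy.dialects import oracle

    t = Table("t", MetaData(), Column("data", Integer))
    ix = Index("ix_data", t.c.data, oracle_bitmap=True)
    # renders: CREATE BITMAP INDEX ix_data ON t (data)
    print(CreateIndex(ix).compile(dialect=oracle.dialect()))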
class OracleIdentifierPreparer(compiler.IdentifierPreparer):

reserved_words = set([x.lower() for x in RESERVED_WORDS])
illegal_initial_characters = set(range(0, 10)).union(["_", "$"])
illegal_initial_characters = set(
(str(dig) for dig in range(0, 10))).union(["_", "$"])

def _bindparam_requires_quotes(self, value):
"""Return True if the given identifier requires quoting."""
@@ -798,7 +904,6 @@ class OracleIdentifierPreparer(compiler.IdentifierPreparer):


class OracleExecutionContext(default.DefaultExecutionContext):

def fire_sequence(self, seq, type_):
return self._execute_scalar(
"SELECT " +
@@ -815,6 +920,8 @@ class OracleDialect(default.DefaultDialect):
supports_sane_rowcount = True
supports_sane_multi_rowcount = False

supports_simple_order_by_label = False

supports_sequences = True
sequences_optional = False
postfetch_lastrowid = False

@@ -836,7 +943,15 @@ class OracleDialect(default.DefaultDialect):
reflection_options = ('oracle_resolve_synonyms', )

construct_arguments = [
(sa_schema.Table, {"resolve_synonyms": False})
(sa_schema.Table, {
"resolve_synonyms": False,
"on_commit": None,
"compress": False
}),
(sa_schema.Index, {
"bitmap": False,
"compress": False
})
]

def __init__(self,

@@ -866,6 +981,16 @@ class OracleDialect(default.DefaultDialect):
|
|||
return self.server_version_info and \
|
||||
self.server_version_info < (9, )
|
||||
|
||||
@property
|
||||
def _supports_table_compression(self):
|
||||
return self.server_version_info and \
|
||||
self.server_version_info >= (9, 2, )
|
||||
|
||||
@property
|
||||
def _supports_table_compress_for(self):
|
||||
return self.server_version_info and \
|
||||
self.server_version_info >= (11, )
|
||||
|
||||
@property
|
||||
def _supports_char_length(self):
|
||||
return not self._is_oracle_8
|
||||
|
@ -908,6 +1033,8 @@ class OracleDialect(default.DefaultDialect):
|
|||
if name.upper() == name and not \
|
||||
self.identifier_preparer._requires_quotes(name.lower()):
|
||||
return name.lower()
|
||||
elif name.lower() == name:
|
||||
return quoted_name(name, quote=True)
|
||||
else:
|
||||
return name
|
||||
|
||||
|
@ -1023,7 +1150,21 @@ class OracleDialect(default.DefaultDialect):
|
|||
"WHERE nvl(tablespace_name, 'no tablespace') NOT IN "
|
||||
"('SYSTEM', 'SYSAUX') "
|
||||
"AND OWNER = :owner "
|
||||
"AND IOT_NAME IS NULL")
|
||||
"AND IOT_NAME IS NULL "
|
||||
"AND DURATION IS NULL")
|
||||
cursor = connection.execute(s, owner=schema)
|
||||
return [self.normalize_name(row[0]) for row in cursor]
|
||||
|
||||
@reflection.cache
|
||||
def get_temp_table_names(self, connection, **kw):
|
||||
schema = self.denormalize_name(self.default_schema_name)
|
||||
s = sql.text(
|
||||
"SELECT table_name FROM all_tables "
|
||||
"WHERE nvl(tablespace_name, 'no tablespace') NOT IN "
|
||||
"('SYSTEM', 'SYSAUX') "
|
||||
"AND OWNER = :owner "
|
||||
"AND IOT_NAME IS NULL "
|
||||
"AND DURATION IS NOT NULL")
|
||||
cursor = connection.execute(s, owner=schema)
|
||||
return [self.normalize_name(row[0]) for row in cursor]
|
||||
|
||||
|
@@ -1034,6 +1175,50 @@ class OracleDialect(default.DefaultDialect):
cursor = connection.execute(s, owner=self.denormalize_name(schema))
return [self.normalize_name(row[0]) for row in cursor]

@reflection.cache
def get_table_options(self, connection, table_name, schema=None, **kw):
options = {}

resolve_synonyms = kw.get('oracle_resolve_synonyms', False)
dblink = kw.get('dblink', '')
info_cache = kw.get('info_cache')

(table_name, schema, dblink, synonym) = \
self._prepare_reflection_args(connection, table_name, schema,
resolve_synonyms, dblink,
info_cache=info_cache)

params = {"table_name": table_name}

columns = ["table_name"]
if self._supports_table_compression:
columns.append("compression")
if self._supports_table_compress_for:
columns.append("compress_for")

text = "SELECT %(columns)s "\
"FROM ALL_TABLES%(dblink)s "\
"WHERE table_name = :table_name"

if schema is not None:
params['owner'] = schema
text += " AND owner = :owner "
text = text % {'dblink': dblink, 'columns': ", ".join(columns)}

result = connection.execute(sql.text(text), **params)

enabled = dict(DISABLED=False, ENABLED=True)

row = result.first()
if row:
if "compression" in row and enabled.get(row.compression, False):
if "compress_for" in row:
options['oracle_compress'] = row.compress_for
else:
options['oracle_compress'] = True

return options
@reflection.cache
|
||||
def get_columns(self, connection, table_name, schema=None, **kw):
|
||||
"""
|
||||
|
@@ -1119,7 +1304,8 @@ class OracleDialect(default.DefaultDialect):

         params = {'table_name': table_name}
         text = \
-            "SELECT a.index_name, a.column_name, b.uniqueness "\
+            "SELECT a.index_name, a.column_name, "\
+            "\nb.index_type, b.uniqueness, b.compression, b.prefix_length "\
             "\nFROM ALL_IND_COLUMNS%(dblink)s a, "\
             "\nALL_INDEXES%(dblink)s b "\
             "\nWHERE "\
@@ -1145,6 +1331,7 @@ class OracleDialect(default.DefaultDialect):
             dblink=dblink, info_cache=kw.get('info_cache'))
         pkeys = pk_constraint['constrained_columns']
         uniqueness = dict(NONUNIQUE=False, UNIQUE=True)
+        enabled = dict(DISABLED=False, ENABLED=True)

         oracle_sys_col = re.compile(r'SYS_NC\d+\$', re.IGNORECASE)
@@ -1164,10 +1351,15 @@ class OracleDialect(default.DefaultDialect):
             if rset.index_name != last_index_name:
                 remove_if_primary_key(index)
                 index = dict(name=self.normalize_name(rset.index_name),
-                             column_names=[])
+                             column_names=[], dialect_options={})
                 indexes.append(index)
             index['unique'] = uniqueness.get(rset.uniqueness, False)

+            if rset.index_type in ('BITMAP', 'FUNCTION-BASED BITMAP'):
+                index['dialect_options']['oracle_bitmap'] = True
+            if enabled.get(rset.compression, False):
+                index['dialect_options']['oracle_compress'] = rset.prefix_length
+
             # filter out Oracle SYS_NC names. could also do an outer join
             # to the all_tab_columns table and check for real col names there.
             if not oracle_sys_col.match(rset.column_name):
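
The reflection changes above surface table compression and index options
through the standard inspection interface. A sketch under that assumption
(URL and table name hypothetical; exact result keys follow the code above)::

    from sqlalchemy import create_engine, inspect

    engine = create_engine("oracle+cx_oracle://scott:tiger@dsn")
    insp = inspect(engine)

    # returns e.g. {'oracle_compress': 'BASIC'} for a compressed table
    print(insp.get_table_options('accounts'))

    # reflected indexes may carry 'oracle_bitmap' / 'oracle_compress'
    for ix in insp.get_indexes('accounts'):
        print(ix['name'], ix.get('dialect_options', {}))
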

@@ -1,5 +1,5 @@
 # oracle/cx_oracle.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under
@@ -61,6 +61,14 @@ on the URL, or as keyword arguments to :func:`.create_engine()` are:
   Defaults to ``True``. Note that this is the opposite default of the
   cx_Oracle DBAPI itself.

+* ``service_name`` - An option to use connection string (DSN) with
+  ``SERVICE_NAME`` instead of ``SID``. It can't be passed when a ``database``
+  part is given.
+  E.g. ``oracle+cx_oracle://scott:tiger@host:1521/?service_name=hr``
+  is a valid url. This value is only available as a URL query string argument.
+
+  .. versionadded:: 1.0.0
+
 .. _cx_oracle_unicode:

 Unicode
@@ -285,6 +293,7 @@ from .base import OracleCompiler, OracleDialect, OracleExecutionContext
 from . import base as oracle
 from ...engine import result as _result
 from sqlalchemy import types as sqltypes, util, exc, processors
+from sqlalchemy import util
 import random
 import collections
 import decimal
@@ -711,8 +720,10 @@ class OracleDialect_cx_oracle(OracleDialect):
             # this occurs in tests with mock DBAPIs
             self._cx_oracle_string_types = set()
             self._cx_oracle_with_unicode = False
-        elif self.cx_oracle_ver >= (5,) and not \
-                hasattr(self.dbapi, 'UNICODE'):
+        elif util.py3k or (
+                self.cx_oracle_ver >= (5,) and not \
+                hasattr(self.dbapi, 'UNICODE')
+        ):
             # cx_Oracle WITH_UNICODE mode. *only* python
             # unicode objects accepted for anything
             self.supports_unicode_statements = True
@@ -862,14 +873,26 @@ class OracleDialect_cx_oracle(OracleDialect):
                 util.coerce_kw_type(dialect_opts, opt, bool)
                 setattr(self, opt, dialect_opts[opt])

-        if url.database:
+        database = url.database
+        service_name = dialect_opts.get('service_name', None)
+        if database or service_name:
             # if we have a database, then we have a remote host
             port = url.port
             if port:
                 port = int(port)
             else:
                 port = 1521
-            dsn = self.dbapi.makedsn(url.host, port, url.database)
+
+            if database and service_name:
+                raise exc.InvalidRequestError(
+                    '"service_name" option shouldn\'t '
+                    'be used with a "database" part of the url')
+            if database:
+                makedsn_kwargs = {'sid': database}
+            if service_name:
+                makedsn_kwargs = {'service_name': service_name}
+
+            dsn = self.dbapi.makedsn(url.host, port, **makedsn_kwargs)
         else:
             # we have a local tnsname
             dsn = url.host
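
The new ``service_name`` handling selects exactly one of ``sid`` or
``service_name`` to hand to ``makedsn()``. A standalone sketch of that
decision logic (the helper name is hypothetical, for illustration only)::

    def choose_makedsn_kwargs(database, service_name):
        # mirrors the branching in create_connect_args() above
        if database and service_name:
            raise ValueError(
                '"service_name" should not be combined with "database"')
        if database:
            return {'sid': database}
        if service_name:
            return {'service_name': service_name}
        return None

    assert choose_makedsn_kwargs('orcl', None) == {'sid': 'orcl'}
    assert choose_makedsn_kwargs(None, 'hr') == {'service_name': 'hr'}
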

@@ -1,5 +1,5 @@
 # oracle/zxjdbc.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under
@@ -10,8 +10,10 @@
     :name: zxJDBC for Jython
     :dbapi: zxjdbc
     :connectstring: oracle+zxjdbc://user:pass@host/dbname
-    :driverurl: http://www.oracle.com/technology/software/tech/java/\
-    sqlj_jdbc/index.html.
+    :driverurl: http://www.oracle.com/technetwork/database/features/jdbc/index-091264.html
+
+.. note:: Jython is not supported by current versions of SQLAlchemy. The
+    zxjdbc dialect should be considered as experimental.

 """
 import decimal
@@ -68,8 +70,7 @@ class OracleCompiler_zxjdbc(OracleCompiler):
             expression._select_iterables(returning_cols))

         # within_columns_clause=False so that labels (foo AS bar) don't render
-        columns = [self.process(c, within_columns_clause=False,
-                                result_map=self.result_map)
+        columns = [self.process(c, within_columns_clause=False)
                    for c in self.returning_cols]

         if not hasattr(self, 'returning_parameters'):

@@ -1,5 +1,5 @@
 # dialects/postgres.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under

@@ -1,11 +1,11 @@
 # postgresql/__init__.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under
 # the MIT License: http://www.opensource.org/licenses/mit-license.php

-from . import base, psycopg2, pg8000, pypostgresql, zxjdbc
+from . import base, psycopg2, pg8000, pypostgresql, zxjdbc, psycopg2cffi

 base.dialect = psycopg2.dialect
@@ -13,7 +13,7 @@ from .base import \
     INTEGER, BIGINT, SMALLINT, VARCHAR, CHAR, TEXT, NUMERIC, FLOAT, REAL, \
     INET, CIDR, UUID, BIT, MACADDR, OID, DOUBLE_PRECISION, TIMESTAMP, TIME, \
     DATE, BYTEA, BOOLEAN, INTERVAL, ARRAY, ENUM, dialect, array, Any, All, \
-    TSVECTOR
+    TSVECTOR, DropEnumType
 from .constraints import ExcludeConstraint
 from .hstore import HSTORE, hstore
 from .json import JSON, JSONElement, JSONB
@@ -26,5 +26,6 @@ __all__ = (
     'DOUBLE_PRECISION', 'TIMESTAMP', 'TIME', 'DATE', 'BYTEA', 'BOOLEAN',
     'INTERVAL', 'ARRAY', 'ENUM', 'dialect', 'Any', 'All', 'array', 'HSTORE',
     'hstore', 'INT4RANGE', 'INT8RANGE', 'NUMRANGE', 'DATERANGE',
-    'TSRANGE', 'TSTZRANGE', 'json', 'JSON', 'JSONB', 'JSONElement'
+    'TSRANGE', 'TSTZRANGE', 'json', 'JSON', 'JSONB', 'JSONElement',
+    'DropEnumType'
 )

(File diff suppressed because it is too large)

@@ -1,10 +1,11 @@
-# Copyright (C) 2013-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2013-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under
 # the MIT License: http://www.opensource.org/licenses/mit-license.php
-from sqlalchemy.schema import ColumnCollectionConstraint
-from sqlalchemy.sql import expression
+from ...sql.schema import ColumnCollectionConstraint
+from ...sql import expression
+from ... import util


 class ExcludeConstraint(ColumnCollectionConstraint):
@@ -48,20 +49,42 @@ static/sql-createtable.html#SQL-CREATETABLE-EXCLUDE
          for this constraint.

        """
+        columns = []
+        render_exprs = []
+        self.operators = {}
+
+        expressions, operators = zip(*elements)
+
+        for (expr, column, strname, add_element), operator in zip(
+                self._extract_col_expression_collection(expressions),
+                operators
+        ):
+            if add_element is not None:
+                columns.append(add_element)
+
+            name = column.name if column is not None else strname
+
+            if name is not None:
+                # backwards compat
+                self.operators[name] = operator
+
+            expr = expression._literal_as_text(expr)
+
+            render_exprs.append(
+                (expr, name, operator)
+            )
+
+        self._render_exprs = render_exprs
         ColumnCollectionConstraint.__init__(
             self,
-            *[col for col, op in elements],
+            *columns,
             name=kw.get('name'),
             deferrable=kw.get('deferrable'),
             initially=kw.get('initially')
         )
-        self.operators = {}
-        for col_or_string, op in elements:
-            name = getattr(col_or_string, 'name', col_or_string)
-            self.operators[name] = op
         self.using = kw.get('using', 'gist')
         where = kw.get('where')
-        if where:
+        if where is not None:
             self.where = expression._literal_as_text(where)

     def copy(self, **kw):
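
The rewritten constructor consumes the same ``(column_or_expression, operator)``
tuples as before, now routed through ``_extract_col_expression_collection()``.
Typical usage of the public API, per the Postgresql EXCLUDE constraint
documentation (table and column names are hypothetical)::

    from sqlalchemy import Table, Column, Integer, MetaData
    from sqlalchemy.dialects.postgresql import ExcludeConstraint, TSRANGE

    metadata = MetaData()

    room_booking = Table(
        'room_booking', metadata,
        Column('room', Integer, primary_key=True),
        Column('during', TSRANGE),
        # no two rows may share a room with overlapping "during" ranges
        ExcludeConstraint(('room', '='), ('during', '&&'), using='gist')
    )
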

@@ -1,5 +1,5 @@
 # postgresql/hstore.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under

@@ -1,5 +1,5 @@
 # postgresql/json.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under
@@ -12,7 +12,7 @@ from .base import ischema_names
 from ... import types as sqltypes
 from ...sql.operators import custom_op
 from ... import sql
-from ...sql import elements
+from ...sql import elements, default_comparator
 from ... import util

 __all__ = ('JSON', 'JSONElement', 'JSONB')
@@ -46,7 +46,8 @@ class JSONElement(elements.BinaryExpression):

         self._json_opstring = opstring
         operator = custom_op(opstring, precedence=5)
-        right = left._check_literal(left, operator, right)
+        right = default_comparator._check_literal(
+            left, operator, right)
         super(JSONElement, self).__init__(
             left, right, operator, type_=result_type)
@@ -77,7 +78,7 @@ class JSONElement(elements.BinaryExpression):

     def cast(self, type_):
         """Convert this :class:`.JSONElement` to apply both the 'astext' operator
-        as well as an explicit type cast when evaulated.
+        as well as an explicit type cast when evaluated.

         E.g.::

@@ -164,6 +165,23 @@ class JSON(sqltypes.TypeEngine):

     __visit_name__ = 'JSON'

+    def __init__(self, none_as_null=False):
+        """Construct a :class:`.JSON` type.
+
+        :param none_as_null: if True, persist the value ``None`` as a
+          SQL NULL value, not the JSON encoding of ``null``. Note that
+          when this flag is False, the :func:`.null` construct can still
+          be used to persist a NULL value::
+
+              from sqlalchemy import null
+              conn.execute(table.insert(), data=null())
+
+          .. versionchanged:: 0.9.8 - Added ``none_as_null``, and :func:`.null`
+             is now supported in order to persist a NULL value.
+
+        """
+        self.none_as_null = none_as_null
+
     class comparator_factory(sqltypes.Concatenable.Comparator):
         """Define comparison operations for :class:`.JSON`."""

@@ -185,9 +203,17 @@ class JSON(sqltypes.TypeEngine):
             encoding = dialect.encoding

             def process(value):
+                if isinstance(value, elements.Null) or (
+                    value is None and self.none_as_null
+                ):
+                    return None
                 return json_serializer(value).encode(encoding)
         else:
             def process(value):
+                if isinstance(value, elements.Null) or (
+                    value is None and self.none_as_null
+                ):
+                    return None
                 return json_serializer(value)
         return process
@@ -197,9 +223,13 @@ class JSON(sqltypes.TypeEngine):
             encoding = dialect.encoding

             def process(value):
+                if value is None:
+                    return None
                 return json_deserializer(value.decode(encoding))
         else:
             def process(value):
+                if value is None:
+                    return None
                 return json_deserializer(value)
         return process
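
Taken together, the ``none_as_null`` flag and the ``Null``-aware processors
above behave as in this sketch (table name is hypothetical)::

    from sqlalchemy import Table, Column, Integer, MetaData, null
    from sqlalchemy.dialects.postgresql import JSON

    metadata = MetaData()

    data_table = Table(
        'data_table', metadata,
        Column('id', Integer, primary_key=True),
        # Python None becomes SQL NULL instead of the JSON 'null' value
        Column('data', JSON(none_as_null=True))
    )

    # with the default none_as_null=False, null() still forces SQL NULL:
    # conn.execute(data_table.insert(), data=null())
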

@@ -1,5 +1,5 @@
 # postgresql/pg8000.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors <see AUTHORS
 # file>
 #
 # This module is part of SQLAlchemy and is released under
@@ -13,17 +13,30 @@
 postgresql+pg8000://user:password@host:port/dbname[?key=value&key=value...]
 :url: https://pythonhosted.org/pg8000/

+.. _pg8000_unicode:
+
 Unicode
 -------

-When communicating with the server, pg8000 **always uses the server-side
-character set**. SQLAlchemy has no ability to modify what character set
-pg8000 chooses to use, and additionally SQLAlchemy does no unicode conversion
-of any kind with the pg8000 backend. The origin of the client encoding setting
-is ultimately the CLIENT_ENCODING setting in postgresql.conf.
+pg8000 will encode / decode string values between it and the server using the
+PostgreSQL ``client_encoding`` parameter; by default this is the value in
+the ``postgresql.conf`` file, which often defaults to ``SQL_ASCII``.
+Typically, this can be changed to ``utf-8``, as a more useful default::

-It is not necessary, though is also harmless, to pass the "encoding" parameter
-to :func:`.create_engine` when using pg8000.
+    #client_encoding = sql_ascii # actually, defaults to database
+                                 # encoding
+    client_encoding = utf8
+
+The ``client_encoding`` can be overridden for a session by executing the SQL:
+
+SET CLIENT_ENCODING TO 'utf8';
+
+SQLAlchemy will execute this SQL on all new connections based on the value
+passed to :func:`.create_engine` using the ``client_encoding`` parameter::
+
+    engine = create_engine(
+        "postgresql+pg8000://user:pass@host/dbname", client_encoding='utf8')
+

 .. _pg8000_isolation_level:
@@ -58,6 +71,8 @@ from ... import types as sqltypes
 from .base import (
     PGDialect, PGCompiler, PGIdentifierPreparer, PGExecutionContext,
     _DECIMAL_TYPES, _FLOAT_TYPES, _INT_TYPES)
+import re
+from sqlalchemy.dialects.postgresql.json import JSON


 class _PGNumeric(sqltypes.Numeric):
@@ -88,6 +103,15 @@ class _PGNumericNoBind(_PGNumeric):
         return None


+class _PGJSON(JSON):
+
+    def result_processor(self, dialect, coltype):
+        if dialect._dbapi_version > (1, 10, 1):
+            return None  # Has native JSON
+        else:
+            return super(_PGJSON, self).result_processor(dialect, coltype)
+
+
 class PGExecutionContext_pg8000(PGExecutionContext):
     pass

@@ -119,7 +143,7 @@ class PGDialect_pg8000(PGDialect):
     supports_unicode_binds = True

     default_paramstyle = 'format'
-    supports_sane_multi_rowcount = False
+    supports_sane_multi_rowcount = True
     execution_ctx_cls = PGExecutionContext_pg8000
     statement_compiler = PGCompiler_pg8000
     preparer = PGIdentifierPreparer_pg8000
@@ -129,10 +153,29 @@ class PGDialect_pg8000(PGDialect):
         PGDialect.colspecs,
         {
             sqltypes.Numeric: _PGNumericNoBind,
-            sqltypes.Float: _PGNumeric
+            sqltypes.Float: _PGNumeric,
+            JSON: _PGJSON,
         }
     )

+    def __init__(self, client_encoding=None, **kwargs):
+        PGDialect.__init__(self, **kwargs)
+        self.client_encoding = client_encoding
+
+    def initialize(self, connection):
+        self.supports_sane_multi_rowcount = self._dbapi_version >= (1, 9, 14)
+        super(PGDialect_pg8000, self).initialize(connection)
+
+    @util.memoized_property
+    def _dbapi_version(self):
+        if self.dbapi and hasattr(self.dbapi, '__version__'):
+            return tuple(
+                [
+                    int(x) for x in re.findall(
+                        r'(\d+)(?:[-\.]?|$)', self.dbapi.__version__)])
+        else:
+            return (99, 99, 99)
+
     @classmethod
     def dbapi(cls):
         return __import__('pg8000')
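
The version-parsing regex in ``_dbapi_version`` above tolerates dotted and
dashed version strings; a quick standalone check of its behavior::

    import re

    def parse_dbapi_version(version_string):
        # same pattern as _dbapi_version above
        return tuple(
            int(x) for x in
            re.findall(r'(\d+)(?:[-\.]?|$)', version_string))

    assert parse_dbapi_version('1.10.2') == (1, 10, 2)
    assert parse_dbapi_version('1.9.14') >= (1, 9, 14)
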
@@ -171,4 +214,51 @@ class PGDialect_pg8000(PGDialect):
             (level, self.name, ", ".join(self._isolation_lookup))
         )

+    def set_client_encoding(self, connection, client_encoding):
+        # adjust for ConnectionFairy possibly being present
+        if hasattr(connection, 'connection'):
+            connection = connection.connection
+
+        cursor = connection.cursor()
+        cursor.execute("SET CLIENT_ENCODING TO '" + client_encoding + "'")
+        cursor.execute("COMMIT")
+        cursor.close()
+
+    def do_begin_twophase(self, connection, xid):
+        connection.connection.tpc_begin((0, xid, ''))
+
+    def do_prepare_twophase(self, connection, xid):
+        connection.connection.tpc_prepare()
+
+    def do_rollback_twophase(
+            self, connection, xid, is_prepared=True, recover=False):
+        connection.connection.tpc_rollback((0, xid, ''))
+
+    def do_commit_twophase(
+            self, connection, xid, is_prepared=True, recover=False):
+        connection.connection.tpc_commit((0, xid, ''))
+
+    def do_recover_twophase(self, connection):
+        return [row[1] for row in connection.connection.tpc_recover()]
+
+    def on_connect(self):
+        fns = []
+        if self.client_encoding is not None:
+            def on_connect(conn):
+                self.set_client_encoding(conn, self.client_encoding)
+            fns.append(on_connect)
+
+        if self.isolation_level is not None:
+            def on_connect(conn):
+                self.set_isolation_level(conn, self.isolation_level)
+            fns.append(on_connect)
+
+        if len(fns) > 0:
+            def on_connect(conn):
+                for fn in fns:
+                    fn(conn)
+            return on_connect
+        else:
+            return None
+
 dialect = PGDialect_pg8000

@@ -1,5 +1,5 @@
 # postgresql/psycopg2.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under
@@ -32,10 +32,25 @@ psycopg2-specific keyword arguments which are accepted by
 way of enabling this mode on a per-execution basis.
 * ``use_native_unicode``: Enable the usage of Psycopg2 "native unicode" mode
   per connection. True by default.
+
+  .. seealso::
+
+    :ref:`psycopg2_disable_native_unicode`
+
 * ``isolation_level``: This option, available for all PostgreSQL dialects,
   includes the ``AUTOCOMMIT`` isolation level when using the psycopg2
-  dialect. See :ref:`psycopg2_isolation_level`.
+  dialect.
+
+  .. seealso::
+
+    :ref:`psycopg2_isolation_level`
+
 * ``client_encoding``: sets the client encoding in a libpq-agnostic way,
   using psycopg2's ``set_client_encoding()`` method.
+
+  .. seealso::
+
+    :ref:`psycopg2_unicode`
+
 Unix Domain Connections
 ------------------------
@@ -51,12 +66,15 @@ in ``/tmp``, or whatever socket directory was specified when PostgreSQL
 was built. This value can be overridden by passing a pathname to psycopg2,
 using ``host`` as an additional keyword argument::

-    create_engine("postgresql+psycopg2://user:password@/dbname?host=/var/lib/postgresql")
+    create_engine("postgresql+psycopg2://user:password@/dbname?\
+host=/var/lib/postgresql")

 See also:

-`PQconnectdbParams <http://www.postgresql.org/docs/9.1/static\
-/libpq-connect.html#LIBPQ-PQCONNECTDBPARAMS>`_
+`PQconnectdbParams <http://www.postgresql.org/docs/9.1/static/\
+libpq-connect.html#LIBPQ-PQCONNECTDBPARAMS>`_
+
+.. _psycopg2_execution_options:

 Per-Statement/Connection Execution Options
 -------------------------------------------
@@ -65,18 +83,27 @@ The following DBAPI-specific options are respected when used with
 :meth:`.Connection.execution_options`, :meth:`.Executable.execution_options`,
 :meth:`.Query.execution_options`, in addition to those not specific to DBAPIs:

-* isolation_level - Set the transaction isolation level for the lifespan of a
+* ``isolation_level`` - Set the transaction isolation level for the lifespan of a
   :class:`.Connection` (can only be set on a connection, not a statement
   or query). See :ref:`psycopg2_isolation_level`.

-* stream_results - Enable or disable usage of psycopg2 server side cursors -
+* ``stream_results`` - Enable or disable usage of psycopg2 server side cursors -
   this feature makes use of "named" cursors in combination with special
   result handling methods so that result rows are not fully buffered.
   If ``None`` or not set, the ``server_side_cursors`` option of the
   :class:`.Engine` is used.

-Unicode
--------
+* ``max_row_buffer`` - when using ``stream_results``, an integer value that
+  specifies the maximum number of rows to buffer at a time. This is
+  interpreted by the :class:`.BufferedRowResultProxy`, and if omitted the
+  buffer will grow to ultimately store 1000 rows at a time.
+
+  .. versionadded:: 1.0.6
+
+.. _psycopg2_unicode:
+
+Unicode with Psycopg2
+----------------------

 By default, the psycopg2 driver uses the ``psycopg2.extensions.UNICODE``
 extension, such that the DBAPI receives and returns all strings as Python
@@ -84,27 +111,51 @@ Unicode objects directly - SQLAlchemy passes these values through without
 change. Psycopg2 here will encode/decode string values based on the
 current "client encoding" setting; by default this is the value in
 the ``postgresql.conf`` file, which often defaults to ``SQL_ASCII``.
-Typically, this can be changed to ``utf-8``, as a more useful default::
+Typically, this can be changed to ``utf8``, as a more useful default::

+    # postgresql.conf file
+
     # client_encoding = sql_ascii # actually, defaults to database
                                   # encoding
     client_encoding = utf8

 A second way to affect the client encoding is to set it within Psycopg2
-locally. SQLAlchemy will call psycopg2's ``set_client_encoding()``
-method (see:
-http://initd.org/psycopg/docs/connection.html#connection.set_client_encoding)
+locally. SQLAlchemy will call psycopg2's
+:meth:`psycopg2:connection.set_client_encoding` method
 on all new connections based on the value passed to
 :func:`.create_engine` using the ``client_encoding`` parameter::

+    # set_client_encoding() setting;
+    # works for *all* Postgresql versions
     engine = create_engine("postgresql://user:pass@host/dbname",
                            client_encoding='utf8')

 This overrides the encoding specified in the Postgresql client configuration.
+When using the parameter in this way, the psycopg2 driver emits
+``SET client_encoding TO 'utf8'`` on the connection explicitly, and works
+in all Postgresql versions.

-.. versionadded:: 0.7.3
-    The psycopg2-specific ``client_encoding`` parameter to
-    :func:`.create_engine`.
+Note that the ``client_encoding`` setting as passed to :func:`.create_engine`
+is **not the same** as the more recently added ``client_encoding`` parameter
+now supported by libpq directly. This is enabled when ``client_encoding``
+is passed directly to ``psycopg2.connect()``, and from SQLAlchemy is passed
+using the :paramref:`.create_engine.connect_args` parameter::
+
+    # libpq direct parameter setting;
+    # only works for Postgresql **9.1 and above**
+    engine = create_engine("postgresql://user:pass@host/dbname",
+                           connect_args={'client_encoding': 'utf8'})
+
+    # using the query string is equivalent
+    engine = create_engine("postgresql://user:pass@host/dbname?client_encoding=utf8")
+
+The above parameter was only added to libpq as of version 9.1 of Postgresql,
+so using the previous method is better for cross-version support.
+
+.. _psycopg2_disable_native_unicode:
+
+Disabling Native Unicode
+^^^^^^^^^^^^^^^^^^^^^^^^

 SQLAlchemy can also be instructed to skip the usage of the psycopg2
 ``UNICODE`` extension and to instead utilize its own unicode encode/decode
@@ -116,8 +167,56 @@ in and coerce from bytes on the way back,
 using the value of the :func:`.create_engine` ``encoding`` parameter, which
 defaults to ``utf-8``.
 SQLAlchemy's own unicode encode/decode functionality is steadily becoming
-obsolete as more DBAPIs support unicode fully along with the approach of
-Python 3; in modern usage psycopg2 should be relied upon to handle unicode.
+obsolete as most DBAPIs now support unicode fully.
+
+Bound Parameter Styles
+----------------------
+
+The default parameter style for the psycopg2 dialect is "pyformat", where
+SQL is rendered using ``%(paramname)s`` style. This format has the limitation
+that it does not accommodate the unusual case of parameter names that
+actually contain percent or parenthesis symbols; as SQLAlchemy in many cases
+generates bound parameter names based on the name of a column, the presence
+of these characters in a column name can lead to problems.
+
+There are two solutions to the issue of a :class:`.schema.Column` that contains
+one of these characters in its name. One is to specify the
+:paramref:`.schema.Column.key` for columns that have such names::
+
+    measurement = Table('measurement', metadata,
+        Column('Size (meters)', Integer, key='size_meters')
+    )
+
+Above, an INSERT statement such as ``measurement.insert()`` will use
+``size_meters`` as the parameter name, and a SQL expression such as
+``measurement.c.size_meters > 10`` will derive the bound parameter name
+from the ``size_meters`` key as well.
+
+.. versionchanged:: 1.0.0 - SQL expressions will use :attr:`.Column.key`
+   as the source of naming when anonymous bound parameters are created
+   in SQL expressions; previously, this behavior only applied to
+   :meth:`.Table.insert` and :meth:`.Table.update` parameter names.
+
+The other solution is to use a positional format; psycopg2 allows use of the
+"format" paramstyle, which can be passed to
+:paramref:`.create_engine.paramstyle`::
+
+    engine = create_engine(
+        'postgresql://scott:tiger@localhost:5432/test', paramstyle='format')
+
+With the above engine, instead of a statement like::
+
+    INSERT INTO measurement ("Size (meters)") VALUES (%(Size (meters))s)
+    {'Size (meters)': 1}
+
+we instead see::
+
+    INSERT INTO measurement ("Size (meters)") VALUES (%s)
+    (1, )
+
+Where above, the dictionary style is converted into a tuple with positional
+style.
+

 Transactions
 ------------
|
|||
|
||||
The ``psycopg2`` DBAPI includes an extension to natively handle marshalling of
|
||||
the HSTORE type. The SQLAlchemy psycopg2 dialect will enable this extension
|
||||
by default when it is detected that the target database has the HSTORE
|
||||
type set up for use. In other words, when the dialect makes the first
|
||||
by default when psycopg2 version 2.4 or greater is used, and
|
||||
it is detected that the target database has the HSTORE type set up for use.
|
||||
In other words, when the dialect makes the first
|
||||
connection, a sequence like the following is performed:
|
||||
|
||||
1. Request the available HSTORE oids using
|
||||
``psycopg2.extras.HstoreAdapter.get_oids()``.
|
||||
If this function returns a list of HSTORE identifiers, we then determine
|
||||
that the ``HSTORE`` extension is present.
|
||||
This function is **skipped** if the version of psycopg2 installed is
|
||||
less than version 2.4.
|
||||
|
||||
2. If the ``use_native_hstore`` flag is at its default of ``True``, and
|
||||
we've detected that ``HSTORE`` oids are available, the
|
||||
|
@ -219,9 +321,14 @@ from ... import types as sqltypes
|
|||
from .base import PGDialect, PGCompiler, \
|
||||
PGIdentifierPreparer, PGExecutionContext, \
|
||||
ENUM, ARRAY, _DECIMAL_TYPES, _FLOAT_TYPES,\
|
||||
_INT_TYPES
|
||||
_INT_TYPES, UUID
|
||||
from .hstore import HSTORE
|
||||
from .json import JSON
|
||||
from .json import JSON, JSONB
|
||||
|
||||
try:
|
||||
from uuid import UUID as _python_UUID
|
||||
except ImportError:
|
||||
_python_UUID = None
|
||||
|
||||
|
||||
logger = logging.getLogger('sqlalchemy.dialects.postgresql')
|
||||
|
@ -256,7 +363,7 @@ class _PGNumeric(sqltypes.Numeric):
|
|||
|
||||
class _PGEnum(ENUM):
|
||||
def result_processor(self, dialect, coltype):
|
||||
if util.py2k and self.convert_unicode is True:
|
||||
if self.native_enum and util.py2k and self.convert_unicode is True:
|
||||
# we can't easily use PG's extensions here because
|
||||
# the OID is on the fly, and we need to give it a python
|
||||
# function anyway - not really worth it.
|
||||
|
@@ -286,6 +393,35 @@ class _PGJSON(JSON):
         else:
             return super(_PGJSON, self).result_processor(dialect, coltype)


+class _PGJSONB(JSONB):
+
+    def result_processor(self, dialect, coltype):
+        if dialect._has_native_jsonb:
+            return None
+        else:
+            return super(_PGJSONB, self).result_processor(dialect, coltype)
+
+
+class _PGUUID(UUID):
+    def bind_processor(self, dialect):
+        if not self.as_uuid and dialect.use_native_uuid:
+            nonetype = type(None)
+
+            def process(value):
+                if value is not None:
+                    value = _python_UUID(value)
+                return value
+            return process
+
+    def result_processor(self, dialect, coltype):
+        if not self.as_uuid and dialect.use_native_uuid:
+            def process(value):
+                if value is not None:
+                    value = str(value)
+                return value
+            return process
+
 # When we're handed literal SQL, ensure it's a SELECT query. Since
 # 8.3, combining cursors and "FOR UPDATE" has been fine.
 SERVER_SIDE_CURSOR_RE = re.compile(
@@ -374,8 +510,21 @@ class PGDialect_psycopg2(PGDialect):
     preparer = PGIdentifierPreparer_psycopg2
     psycopg2_version = (0, 0)

+    FEATURE_VERSION_MAP = dict(
+        native_json=(2, 5),
+        native_jsonb=(2, 5, 4),
+        sane_multi_rowcount=(2, 0, 9),
+        array_oid=(2, 4, 3),
+        hstore_adapter=(2, 4)
+    )
+
+    _has_native_hstore = False
+    _has_native_json = False
+    _has_native_jsonb = False
+
     engine_config_types = PGDialect.engine_config_types.union([
         ('use_native_unicode', util.asbool),
     ])

     colspecs = util.update_copy(
         PGDialect.colspecs,
@@ -384,18 +533,21 @@ class PGDialect_psycopg2(PGDialect):
             ENUM: _PGEnum,  # needs force_unicode
             sqltypes.Enum: _PGEnum,  # needs force_unicode
             HSTORE: _PGHStore,
-            JSON: _PGJSON
+            JSON: _PGJSON,
+            JSONB: _PGJSONB,
+            UUID: _PGUUID
         }
     )

     def __init__(self, server_side_cursors=False, use_native_unicode=True,
                  client_encoding=None,
-                 use_native_hstore=True,
+                 use_native_hstore=True, use_native_uuid=True,
                  **kwargs):
         PGDialect.__init__(self, **kwargs)
         self.server_side_cursors = server_side_cursors
         self.use_native_unicode = use_native_unicode
         self.use_native_hstore = use_native_hstore
+        self.use_native_uuid = use_native_uuid
         self.supports_unicode_binds = use_native_unicode
         self.client_encoding = client_encoding
         if self.dbapi and hasattr(self.dbapi, '__version__'):
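
The new ``use_native_uuid`` flag, like ``use_native_hstore``, is a plain
engine-level switch passed through :func:`.create_engine`. A hedged sketch of
turning the native handling off (URL is hypothetical)::

    from sqlalchemy import create_engine

    # both flags default to True per the __init__ signature above
    engine = create_engine(
        "postgresql+psycopg2://scott:tiger@localhost/test",
        use_native_uuid=False,
        use_native_hstore=False
    )
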
@@ -412,19 +564,34 @@ class PGDialect_psycopg2(PGDialect):
             self._has_native_hstore = self.use_native_hstore and \
                 self._hstore_oids(connection.connection) \
                 is not None
-            self._has_native_json = self.psycopg2_version >= (2, 5)
+            self._has_native_json = \
+                self.psycopg2_version >= self.FEATURE_VERSION_MAP['native_json']
+            self._has_native_jsonb = \
+                self.psycopg2_version >= self.FEATURE_VERSION_MAP['native_jsonb']

         # http://initd.org/psycopg/docs/news.html#what-s-new-in-psycopg-2-0-9
-        self.supports_sane_multi_rowcount = self.psycopg2_version >= (2, 0, 9)
+        self.supports_sane_multi_rowcount = \
+            self.psycopg2_version >= \
+            self.FEATURE_VERSION_MAP['sane_multi_rowcount']

     @classmethod
     def dbapi(cls):
         import psycopg2
         return psycopg2

+    @classmethod
+    def _psycopg2_extensions(cls):
+        from psycopg2 import extensions
+        return extensions
+
+    @classmethod
+    def _psycopg2_extras(cls):
+        from psycopg2 import extras
+        return extras
+
     @util.memoized_property
     def _isolation_lookup(self):
-        from psycopg2 import extensions
+        extensions = self._psycopg2_extensions()
         return {
             'AUTOCOMMIT': extensions.ISOLATION_LEVEL_AUTOCOMMIT,
             'READ COMMITTED': extensions.ISOLATION_LEVEL_READ_COMMITTED,
@@ -446,7 +613,8 @@ class PGDialect_psycopg2(PGDialect):
         connection.set_isolation_level(level)

     def on_connect(self):
-        from psycopg2 import extras, extensions
+        extras = self._psycopg2_extras()
+        extensions = self._psycopg2_extensions()

         fns = []
         if self.client_encoding is not None:
@@ -459,6 +627,11 @@ class PGDialect_psycopg2(PGDialect):
                 self.set_isolation_level(conn, self.isolation_level)
             fns.append(on_connect)

+        if self.dbapi and self.use_native_uuid:
+            def on_connect(conn):
+                extras.register_uuid(None, conn)
+            fns.append(on_connect)
+
         if self.dbapi and self.use_native_unicode:
             def on_connect(conn):
                 extensions.register_type(extensions.UNICODE, conn)
@@ -470,19 +643,23 @@ class PGDialect_psycopg2(PGDialect):
                     hstore_oids = self._hstore_oids(conn)
                     if hstore_oids is not None:
                         oid, array_oid = hstore_oids
+                        kw = {'oid': oid}
                         if util.py2k:
-                            extras.register_hstore(conn, oid=oid,
-                                                   array_oid=array_oid,
-                                                   unicode=True)
-                        else:
-                            extras.register_hstore(conn, oid=oid,
-                                                   array_oid=array_oid)
+                            kw['unicode'] = True
+                        if self.psycopg2_version >= \
+                                self.FEATURE_VERSION_MAP['array_oid']:
+                            kw['array_oid'] = array_oid
+                        extras.register_hstore(conn, **kw)
             fns.append(on_connect)

         if self.dbapi and self._json_deserializer:
             def on_connect(conn):
+                if self._has_native_json:
+                    extras.register_default_json(
+                        conn, loads=self._json_deserializer)
+                if self._has_native_jsonb:
+                    extras.register_default_jsonb(
+                        conn, loads=self._json_deserializer)
             fns.append(on_connect)

         if fns:
@@ -495,8 +672,8 @@ class PGDialect_psycopg2(PGDialect):

     @util.memoized_instancemethod
     def _hstore_oids(self, conn):
-        if self.psycopg2_version >= (2, 4):
-            from psycopg2 import extras
+        if self.psycopg2_version >= self.FEATURE_VERSION_MAP['hstore_adapter']:
+            extras = self._psycopg2_extras()
             oids = extras.HstoreAdapter.get_oids(conn)
             if oids is not None and oids[0]:
                 return oids[0:2]
@@ -512,12 +689,14 @@ class PGDialect_psycopg2(PGDialect):
     def is_disconnect(self, e, connection, cursor):
         if isinstance(e, self.dbapi.Error):
             # check the "closed" flag. this might not be
-            # present on old psycopg2 versions
+            # present on old psycopg2 versions. Also,
+            # this flag doesn't actually help in a lot of disconnect
+            # situations, so don't rely on it.
             if getattr(connection, 'closed', False):
                 return True

-            # legacy checks based on strings. the "closed" check
-            # above most likely obviates the need for any of these.
+            # checks based on strings. in the case that .closed
+            # didn't cut it, fall back onto these.
             str_e = str(e).partition("\n")[0]
             for msg in [
                 # these error messages from libpq: interfaces/libpq/fe-misc.c
@@ -534,8 +713,10 @@ class PGDialect_psycopg2(PGDialect):
                 # not sure where this path is originally from, it may
                 # be obsolete. It really says "losed", not "closed".
                 'losed the connection unexpectedly',
-                # this can occur in newer SSL
-                'connection has been closed unexpectedly'
+                # these can occur in newer SSL
+                'connection has been closed unexpectedly',
+                'SSL SYSCALL error: Bad file descriptor',
+                'SSL SYSCALL error: EOF detected',
             ]:
                 idx = str_e.find(msg)
                 if idx >= 0 and '"' not in str_e[:idx]:

@@ -0,0 +1,61 @@
+# testing/engines.py
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+"""
+.. dialect:: postgresql+psycopg2cffi
+    :name: psycopg2cffi
+    :dbapi: psycopg2cffi
+    :connectstring: \
+postgresql+psycopg2cffi://user:password@host:port/dbname\
+[?key=value&key=value...]
+    :url: http://pypi.python.org/pypi/psycopg2cffi/
+
+``psycopg2cffi`` is an adaptation of ``psycopg2``, using CFFI for the C
+layer. This makes it suitable for use in e.g. PyPy. Documentation
+is as per ``psycopg2``.
+
+.. versionadded:: 1.0.0
+
+.. seealso::
+
+    :mod:`sqlalchemy.dialects.postgresql.psycopg2`
+
+"""
+from .psycopg2 import PGDialect_psycopg2
+
+
+class PGDialect_psycopg2cffi(PGDialect_psycopg2):
+    driver = 'psycopg2cffi'
+    supports_unicode_statements = True
+
+    # psycopg2cffi's first release is 2.5.0, but reports
+    # __version__ as 2.4.4. Subsequent releases seem to have
+    # fixed this.
+
+    FEATURE_VERSION_MAP = dict(
+        native_json=(2, 4, 4),
+        native_jsonb=(2, 7, 1),
+        sane_multi_rowcount=(2, 4, 4),
+        array_oid=(2, 4, 4),
+        hstore_adapter=(2, 4, 4)
+    )
+
+    @classmethod
+    def dbapi(cls):
+        return __import__('psycopg2cffi')
+
+    @classmethod
+    def _psycopg2_extensions(cls):
+        root = __import__('psycopg2cffi', fromlist=['extensions'])
+        return root.extensions
+
+    @classmethod
+    def _psycopg2_extras(cls):
+        root = __import__('psycopg2cffi', fromlist=['extras'])
+        return root.extras
+
+
+dialect = PGDialect_psycopg2cffi
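
Usage follows the connect string given in the dialect's docstring; a minimal
sketch (credentials and host are hypothetical)::

    from sqlalchemy import create_engine

    # connect string form per the dialect docstring above
    engine = create_engine(
        "postgresql+psycopg2cffi://user:password@localhost:5432/dbname")
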

@@ -1,5 +1,5 @@
 # postgresql/pypostgresql.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under
@@ -65,6 +65,23 @@ class PGDialect_pypostgresql(PGDialect):
         from postgresql.driver import dbapi20
         return dbapi20

+    _DBAPI_ERROR_NAMES = [
+        "Error",
+        "InterfaceError", "DatabaseError", "DataError",
+        "OperationalError", "IntegrityError", "InternalError",
+        "ProgrammingError", "NotSupportedError"
+    ]
+
+    @util.memoized_property
+    def dbapi_exception_translation_map(self):
+        if self.dbapi is None:
+            return {}
+
+        return dict(
+            (getattr(self.dbapi, name).__name__, name)
+            for name in self._DBAPI_ERROR_NAMES
+        )
+
     def create_connect_args(self, url):
         opts = url.translate_connect_args(username='user')
         if 'port' in opts:

@@ -1,4 +1,4 @@
-# Copyright (C) 2013-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2013-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under

@@ -1,5 +1,5 @@
 # postgresql/zxjdbc.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under

@@ -1,11 +1,11 @@
 # sqlite/__init__.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under
 # the MIT License: http://www.opensource.org/licenses/mit-license.php

-from sqlalchemy.dialects.sqlite import base, pysqlite
+from sqlalchemy.dialects.sqlite import base, pysqlite, pysqlcipher

 # default dialect
 base.dialect = pysqlite.dialect

(File diff suppressed because it is too large)

@@ -0,0 +1,116 @@
+# sqlite/pysqlcipher.py
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""
+.. dialect:: sqlite+pysqlcipher
+    :name: pysqlcipher
+    :dbapi: pysqlcipher
+    :connectstring: sqlite+pysqlcipher://:passphrase/file_path[?kdf_iter=<iter>]
+    :url: https://pypi.python.org/pypi/pysqlcipher
+
+``pysqlcipher`` is a fork of the standard ``pysqlite`` driver to make
+use of the `SQLCipher <https://www.zetetic.net/sqlcipher>`_ backend.
+
+.. versionadded:: 0.9.9
+
+Driver
+------
+
+The driver here is the `pysqlcipher <https://pypi.python.org/pypi/pysqlcipher>`_
+driver, which makes use of the SQLCipher engine. This system essentially
+introduces new PRAGMA commands to SQLite which allows the setting of a
+passphrase and other encryption parameters, allowing the database
+file to be encrypted.
+
+Connect Strings
+---------------
+
+The format of the connect string is in every way the same as that
+of the :mod:`~sqlalchemy.dialects.sqlite.pysqlite` driver, except that the
+"password" field is now accepted, which should contain a passphrase::
+
+    e = create_engine('sqlite+pysqlcipher://:testing@/foo.db')
+
+For an absolute file path, two leading slashes should be used for the
+database name::
+
+    e = create_engine('sqlite+pysqlcipher://:testing@//path/to/foo.db')
+
+A selection of additional encryption-related pragmas supported by SQLCipher
+as documented at https://www.zetetic.net/sqlcipher/sqlcipher-api/ can be passed
+in the query string, and will result in that PRAGMA being called for each
+new connection. Currently, ``cipher``, ``kdf_iter``
+``cipher_page_size`` and ``cipher_use_hmac`` are supported::
+
+    e = create_engine('sqlite+pysqlcipher://:testing@/foo.db?cipher=aes-256-cfb&kdf_iter=64000')
+
+
+Pooling Behavior
+----------------
+
+The driver makes a change to the default pool behavior of pysqlite
+as described in :ref:`pysqlite_threading_pooling`. The pysqlcipher driver
+has been observed to be significantly slower on connection than the
+pysqlite driver, most likely due to the encryption overhead, so the
+dialect here defaults to using the :class:`.SingletonThreadPool`
+implementation,
+instead of the :class:`.NullPool` pool used by pysqlite. As always, the pool
+implementation is entirely configurable using the
+:paramref:`.create_engine.poolclass` parameter; the :class:`.StaticPool` may
+be more feasible for single-threaded use, or :class:`.NullPool` may be used
+to prevent unencrypted connections from being held open for long periods of
+time, at the expense of slower startup time for new connections.
+
+
+"""
+from __future__ import absolute_import
+from .pysqlite import SQLiteDialect_pysqlite
+from ...engine import url as _url
+from ... import pool
+
+
+class SQLiteDialect_pysqlcipher(SQLiteDialect_pysqlite):
+    driver = 'pysqlcipher'
+
+    pragmas = ('kdf_iter', 'cipher', 'cipher_page_size', 'cipher_use_hmac')
+
+    @classmethod
+    def dbapi(cls):
+        from pysqlcipher import dbapi2 as sqlcipher
+        return sqlcipher
+
+    @classmethod
+    def get_pool_class(cls, url):
+        return pool.SingletonThreadPool
+
+    def connect(self, *cargs, **cparams):
+        passphrase = cparams.pop('passphrase', '')
+
+        pragmas = dict(
+            (key, cparams.pop(key, None)) for key in
+            self.pragmas
+        )
+
+        conn = super(SQLiteDialect_pysqlcipher, self).\
+            connect(*cargs, **cparams)
+        conn.execute('pragma key="%s"' % passphrase)
+        for prag, value in pragmas.items():
+            if value is not None:
+                conn.execute('pragma %s=%s' % (prag, value))
+
+        return conn
+
+    def create_connect_args(self, url):
+        super_url = _url.URL(
+            url.drivername, username=url.username,
+            host=url.host, database=url.database, query=url.query)
+        c_args, opts = super(SQLiteDialect_pysqlcipher, self).\
+            create_connect_args(super_url)
+        opts['passphrase'] = url.password
+        return c_args, opts
+
+dialect = SQLiteDialect_pysqlcipher
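
Putting the docstring's pieces together — passphrase, encryption pragmas, and
an explicit pool class override (file name and pragma values hypothetical)::

    from sqlalchemy import create_engine
    from sqlalchemy.pool import NullPool

    # passphrase goes in the password field; extra SQLCipher pragmas
    # in the query string, per the docstring above
    engine = create_engine(
        'sqlite+pysqlcipher://:testing@/foo.db'
        '?cipher=aes-256-cfb&kdf_iter=64000',
        # override the SingletonThreadPool default described above
        poolclass=NullPool
    )
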

@@ -1,5 +1,5 @@
 # sqlite/pysqlite.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under
@@ -200,30 +200,68 @@ is passed containing non-ASCII characters.

 .. _pysqlite_serializable:

-Serializable Transaction Isolation
-----------------------------------
+Serializable isolation / Savepoints / Transactional DDL
+-------------------------------------------------------

-The pysqlite DBAPI driver has a long-standing bug in which transactional
-state is not begun until the first DML statement, that is INSERT, UPDATE
-or DELETE, is emitted. A SELECT statement will not cause transactional
-state to begin. While this mode of usage is fine for typical situations
-and has the advantage that the SQLite database file is not prematurely
-locked, it breaks serializable transaction isolation, which requires
-that the database file be locked upon any SQL being emitted.
+In the section :ref:`sqlite_concurrency`, we refer to the pysqlite
+driver's assortment of issues that prevent several features of SQLite
+from working correctly. The pysqlite DBAPI driver has several
+long-standing bugs which impact the correctness of its transactional
+behavior. In its default mode of operation, SQLite features such as
+SERIALIZABLE isolation, transactional DDL, and SAVEPOINT support are
+non-functional, and in order to use these features, workarounds must
+be taken.

-To work around this issue, the ``BEGIN`` keyword can be emitted
-at the start of each transaction. The following recipe establishes
-a :meth:`.ConnectionEvents.begin` handler to achieve this::
+The issue is essentially that the driver attempts to second-guess the user's
+intent, failing to start transactions and sometimes ending them prematurely, in
+an effort to minimize the SQLite database's file locking behavior, even
+though SQLite itself uses "shared" locks for read-only activities.
+
+SQLAlchemy chooses to not alter this behavior by default, as it is the
+long-expected behavior of the pysqlite driver; if and when the pysqlite
+driver attempts to repair these issues, that will be more of a driver towards
+defaults for SQLAlchemy.
+
+The good news is that with a few events, we can implement transactional
+support fully, by disabling pysqlite's feature entirely and emitting BEGIN
+ourselves. This is achieved using two event listeners::

     from sqlalchemy import create_engine, event

-    engine = create_engine("sqlite:///myfile.db",
-                           isolation_level='SERIALIZABLE')
+    engine = create_engine("sqlite:///myfile.db")
+
+    @event.listens_for(engine, "connect")
+    def do_connect(dbapi_connection, connection_record):
+        # disable pysqlite's emitting of the BEGIN statement entirely.
+        # also stops it from emitting COMMIT before any DDL.
+        dbapi_connection.isolation_level = None

     @event.listens_for(engine, "begin")
     def do_begin(conn):
+        # emit our own BEGIN
         conn.execute("BEGIN")

+Above, we intercept a new pysqlite connection and disable any transactional
+integration. Then, at the point at which SQLAlchemy knows that transaction
+scope is to begin, we emit ``"BEGIN"`` ourselves.
+
+When we take control of ``"BEGIN"``, we can also control directly SQLite's
+locking modes, introduced at `BEGIN TRANSACTION <http://sqlite.org/lang_transaction.html>`_,
+by adding the desired locking mode to our ``"BEGIN"``::
+
+    @event.listens_for(engine, "begin")
+    def do_begin(conn):
+        conn.execute("BEGIN EXCLUSIVE")
+
+.. seealso::
+
+    `BEGIN TRANSACTION <http://sqlite.org/lang_transaction.html>`_ - on the SQLite site
+
+    `sqlite3 SELECT does not BEGIN a transaction <http://bugs.python.org/issue9924>`_ - on the Python bug tracker
+
+    `sqlite3 module breaks transactions and potentially corrupts data <http://bugs.python.org/issue10740>`_ - on the Python bug tracker
+

 """

 from sqlalchemy.dialects.sqlite.base import SQLiteDialect, DATETIME, DATE
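
With the two listeners from the recipe installed, SAVEPOINT becomes usable
through the ordinary ``Connection.begin_nested()`` API; a sketch under that
assumption (the table ``t`` is hypothetical)::

    from sqlalchemy import create_engine

    engine = create_engine("sqlite:///myfile.db")
    # the do_connect / do_begin listeners from the recipe above are
    # assumed to be registered on this engine

    with engine.connect() as conn:
        trans = conn.begin()              # emits our own BEGIN
        savepoint = conn.begin_nested()   # SAVEPOINT, now functional
        try:
            conn.execute("INSERT INTO t (x) VALUES (1)")
            savepoint.commit()            # RELEASE SAVEPOINT
        except Exception:
            savepoint.rollback()          # ROLLBACK TO SAVEPOINT
        trans.commit()
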

@@ -1,5 +1,5 @@
 # sybase/__init__.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under

@@ -1,5 +1,5 @@
 # sybase/base.py
-# Copyright (C) 2010-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2010-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 # get_select_precolumns(), limit_clause() implementation
 # copyright (C) 2007 Fisch Asset Management
@@ -98,7 +98,6 @@ RESERVED_WORDS = set([


 class _SybaseUnitypeMixin(object):
-
     """these types appear to return a buffer object."""

     def result_processor(self, dialect, coltype):
@@ -147,41 +146,40 @@ class IMAGE(sqltypes.LargeBinary):


 class SybaseTypeCompiler(compiler.GenericTypeCompiler):
-
-    def visit_large_binary(self, type_):
+    def visit_large_binary(self, type_, **kw):
         return self.visit_IMAGE(type_)

-    def visit_boolean(self, type_):
+    def visit_boolean(self, type_, **kw):
         return self.visit_BIT(type_)

-    def visit_unicode(self, type_):
+    def visit_unicode(self, type_, **kw):
         return self.visit_NVARCHAR(type_)

-    def visit_UNICHAR(self, type_):
+    def visit_UNICHAR(self, type_, **kw):
        return "UNICHAR(%d)" % type_.length

-    def visit_UNIVARCHAR(self, type_):
+    def visit_UNIVARCHAR(self, type_, **kw):
        return "UNIVARCHAR(%d)" % type_.length

-    def visit_UNITEXT(self, type_):
+    def visit_UNITEXT(self, type_, **kw):
        return "UNITEXT"

-    def visit_TINYINT(self, type_):
+    def visit_TINYINT(self, type_, **kw):
        return "TINYINT"

-    def visit_IMAGE(self, type_):
+    def visit_IMAGE(self, type_, **kw):
        return "IMAGE"

-    def visit_BIT(self, type_):
+    def visit_BIT(self, type_, **kw):
        return "BIT"

-    def visit_MONEY(self, type_):
+    def visit_MONEY(self, type_, **kw):
        return "MONEY"

-    def visit_SMALLMONEY(self, type_):
+    def visit_SMALLMONEY(self, type_, **kw):
        return "SMALLMONEY"

-    def visit_UNIQUEIDENTIFIER(self, type_):
+    def visit_UNIQUEIDENTIFIER(self, type_, **kw):
        return "UNIQUEIDENTIFIER"

 ischema_names = {
@@ -325,28 +323,30 @@ class SybaseSQLCompiler(compiler.SQLCompiler):
         'milliseconds': 'millisecond'
     })

-    def get_select_precolumns(self, select):
+    def get_select_precolumns(self, select, **kw):
         s = select._distinct and "DISTINCT " or ""
         # TODO: don't think Sybase supports
         #       bind params for FIRST / TOP
-        if select._limit:
+        limit = select._limit
+        if limit:
             # if select._limit == 1:
             # s += "FIRST "
             # else:
             # s += "TOP %s " % (select._limit,)
-            s += "TOP %s " % (select._limit,)
-        if select._offset:
-            if not select._limit:
+            s += "TOP %s " % (limit,)
+        offset = select._offset
+        if offset:
+            if not limit:
                 # FIXME: sybase doesn't allow an offset without a limit
                 # so use a huge value for TOP here
                 s += "TOP 1000000 "
-            s += "START AT %s " % (select._offset + 1,)
+            s += "START AT %s " % (offset + 1,)
         return s

     def get_from_hint_text(self, table, text):
         return text

-    def limit_clause(self, select):
+    def limit_clause(self, select, **kw):
         # Limit in sybase is after the select keyword
         return ""
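
The rewritten ``get_select_precolumns()`` renders limit/offset as Sybase's
``TOP`` / ``START AT`` prefix; a standalone sketch mirroring that string logic
(the helper name is hypothetical)::

    def sybase_precolumns(limit, offset, distinct=False):
        # mirrors get_select_precolumns() above
        s = "DISTINCT " if distinct else ""
        if limit:
            s += "TOP %s " % (limit,)
        if offset:
            if not limit:
                # Sybase disallows OFFSET without LIMIT; huge TOP instead
                s += "TOP 1000000 "
            s += "START AT %s " % (offset + 1,)
        return s

    assert sybase_precolumns(10, 20) == "TOP 10 START AT 21 "
    assert sybase_precolumns(None, 5) == "TOP 1000000 START AT 6 "
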
@@ -375,10 +375,10 @@ class SybaseSQLCompiler(compiler.SQLCompiler):


 class SybaseDDLCompiler(compiler.DDLCompiler):
-
     def get_column_specification(self, column, **kwargs):
         colspec = self.preparer.format_column(column) + " " + \
-            self.dialect.type_compiler.process(column.type)
+            self.dialect.type_compiler.process(
+                column.type, type_expression=column)

         if column.table is None:
             raise exc.CompileError(
@@ -608,8 +608,8 @@ class SybaseDialect(default.DefaultDialect):
        FROM sysreferences r JOIN sysobjects o on r.tableid = o.id
        WHERE r.tableid = :table_id
        """)
-        referential_constraints = connection.execute(REFCONSTRAINT_SQL,
-                                                     table_id=table_id)
+        referential_constraints = connection.execute(
+            REFCONSTRAINT_SQL, table_id=table_id).fetchall()

         REFTABLE_SQL = text("""
         SELECT o.name AS name, u.name AS 'schema'
@@ -740,10 +740,13 @@ class SybaseDialect(default.DefaultDialect):
         results.close()

         constrained_columns = []
-        for i in range(1, pks["count"] + 1):
-            constrained_columns.append(pks["pk_%i" % (i,)])
-        return {"constrained_columns": constrained_columns,
-                "name": pks["name"]}
+        if pks:
+            for i in range(1, pks["count"] + 1):
+                constrained_columns.append(pks["pk_%i" % (i,)])
+            return {"constrained_columns": constrained_columns,
+                    "name": pks["name"]}
+        else:
+            return {"constrained_columns": [], "name": None}

     @reflection.cache
     def get_schema_names(self, connection, **kw):
@@ -1,5 +1,5 @@
 # sybase/mxodbc.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under
@@ -1,5 +1,5 @@
 # sybase/pyodbc.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under
@@ -1,5 +1,5 @@
 # sybase/pysybase.py
-# Copyright (C) 2010-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2010-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under
@@ -1,5 +1,5 @@
 # engine/__init__.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under
@@ -72,6 +72,7 @@ from .base import (
 )

 from .result import (
+    BaseRowProxy,
     BufferedColumnResultProxy,
     BufferedColumnRow,
     BufferedRowResultProxy,
@@ -248,6 +249,34 @@ def create_engine(*args, **kwargs):
         Microsoft SQL Server.  Set this to ``False`` to disable
         the automatic usage of RETURNING.

+    :param isolation_level: this string parameter is interpreted by various
+        dialects in order to affect the transaction isolation level of the
+        database connection.  The parameter essentially accepts some subset of
+        these string arguments: ``"SERIALIZABLE"``, ``"REPEATABLE_READ"``,
+        ``"READ_COMMITTED"``, ``"READ_UNCOMMITTED"`` and ``"AUTOCOMMIT"``.
+        Behavior here varies per backend, and
+        individual dialects should be consulted directly.
+
+        Note that the isolation level can also be set on a per-:class:`.Connection`
+        basis as well, using the
+        :paramref:`.Connection.execution_options.isolation_level`
+        feature.
+
+        .. seealso::
+
+            :attr:`.Connection.default_isolation_level` - view default level
+
+            :paramref:`.Connection.execution_options.isolation_level`
+            - set per :class:`.Connection` isolation level
+
+            :ref:`SQLite Transaction Isolation <sqlite_isolation_level>`
+
+            :ref:`Postgresql Transaction Isolation <postgresql_isolation_level>`
+
+            :ref:`MySQL Transaction Isolation <mysql_isolation_level>`
+
+            :ref:`session_transaction_isolation` - for the ORM
+
     :param label_length=None: optional integer value which limits
         the size of dynamically generated column labels to that many
         characters. If less than 6, labels are generated as

@@ -276,6 +305,17 @@ def create_engine(*args, **kwargs):
         be used instead.  Can be used for testing of DBAPIs as well as to
         inject "mock" DBAPI implementations into the :class:`.Engine`.

+    :param paramstyle=None: The `paramstyle <http://legacy.python.org/dev/peps/pep-0249/#paramstyle>`_
+        to use when rendering bound parameters.  This style defaults to the
+        one recommended by the DBAPI itself, which is retrieved from the
+        ``.paramstyle`` attribute of the DBAPI.  However, most DBAPIs accept
+        more than one paramstyle, and in particular it may be desirable
+        to change a "named" paramstyle into a "positional" one, or vice versa.
+        When this attribute is passed, it should be one of the values
+        ``"qmark"``, ``"numeric"``, ``"named"``, ``"format"`` or
+        ``"pyformat"``, and should correspond to a parameter style known
+        to be supported by the DBAPI in use.
+
     :param pool=None: an already-constructed instance of
         :class:`~sqlalchemy.pool.Pool`, such as a
         :class:`~sqlalchemy.pool.QueuePool` instance. If non-None, this
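A minimal usage sketch of the two newly documented parameters; the DSN is a placeholder and assumes a PostgreSQL DBAPI that accepts both values:

    from sqlalchemy import create_engine

    engine = create_engine(
        "postgresql://scott:tiger@localhost/test",
        isolation_level="READ_COMMITTED",
        paramstyle="pyformat")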
@@ -349,14 +389,33 @@ def create_engine(*args, **kwargs):
 def engine_from_config(configuration, prefix='sqlalchemy.', **kwargs):
     """Create a new Engine instance using a configuration dictionary.

-    The dictionary is typically produced from a config file where keys
-    are prefixed, such as sqlalchemy.url, sqlalchemy.echo, etc.  The
-    'prefix' argument indicates the prefix to be searched for.
+    The dictionary is typically produced from a config file.
+
+    The keys of interest to ``engine_from_config()`` should be prefixed, e.g.
+    ``sqlalchemy.url``, ``sqlalchemy.echo``, etc.  The 'prefix' argument
+    indicates the prefix to be searched for.  Each matching key (after the
+    prefix is stripped) is treated as though it were the corresponding keyword
+    argument to a :func:`.create_engine` call.
+
+    The only required key is (assuming the default prefix) ``sqlalchemy.url``,
+    which provides the :ref:`database URL <database_urls>`.

     A select set of keyword arguments will be "coerced" to their
-    expected type based on string values.  In a future release, this
-    functionality will be expanded and include dialect-specific
-    arguments.
+    expected type based on string values.  The set of arguments
+    is extensible per-dialect using the ``engine_config_types`` accessor.
+
+    :param configuration: A dictionary (typically produced from a config file,
+        but this is not a requirement).  Items whose keys start with the value
+        of 'prefix' will have that prefix stripped, and will then be passed to
+        :ref:`create_engine`.
+
+    :param prefix:  Prefix to match and then strip from keys
+        in 'configuration'.
+
+    :param kwargs: Each keyword argument to ``engine_from_config()`` itself
+        overrides the corresponding item taken from the 'configuration'
+        dictionary.  Keyword arguments should *not* be prefixed.

     """

     options = dict((key[len(prefix):], configuration[key])
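A minimal sketch of the config-dictionary flow the revised docstring describes; the URL and option values are placeholders:

    from sqlalchemy import engine_from_config

    config = {
        'sqlalchemy.url': 'sqlite:///example.db',
        'sqlalchemy.echo': 'true',        # string coerced to a boolean
        'sqlalchemy.pool_recycle': '50',  # string coerced to an int
    }
    engine = engine_from_config(config, prefix='sqlalchemy.')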
@@ -1,5 +1,5 @@
 # engine/base.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under
@@ -45,7 +45,7 @@ class Connection(Connectable):
     """

     def __init__(self, engine, connection=None, close_with_result=False,
-                 _branch=False, _execution_options=None,
+                 _branch_from=None, _execution_options=None,
                  _dispatch=None,
                  _has_events=None):
         """Construct a new Connection.
@@ -57,16 +57,27 @@ class Connection(Connectable):
         """
         self.engine = engine
         self.dialect = engine.dialect
-        self.__connection = connection or engine.raw_connection()
-        self.__transaction = None
-        self.should_close_with_result = close_with_result
-        self.__savepoint_seq = 0
-        self.__branch = _branch
-        self.__invalid = False
-        self.__can_reconnect = True
-        if _dispatch:
-            self.dispatch = _dispatch
-        elif _has_events is None:
+        self.__branch_from = _branch_from
+        self.__branch = _branch_from is not None
+
+        if _branch_from:
+            self.__connection = connection
+            self._execution_options = _execution_options
+            self._echo = _branch_from._echo
+            self.should_close_with_result = False
+            self.dispatch = _dispatch
+            self._has_events = _branch_from._has_events
+        else:
+            self.__connection = connection \
+                if connection is not None else engine.raw_connection()
+            self.__transaction = None
+            self.__savepoint_seq = 0
+            self.should_close_with_result = close_with_result
+            self.__invalid = False
+            self.__can_reconnect = True
+            self._echo = self.engine._should_log_info()
+
+            if _has_events is None:
                 # if _has_events is sent explicitly as False,
                 # then don't join the dispatch of the engine; we don't
                 # want to handle any of the engine's events in that case.
@@ -74,32 +85,53 @@ class Connection(Connectable):
             self._has_events = _has_events or (
                 _has_events is None and engine._has_events)

-        self._echo = self.engine._should_log_info()
-        if _execution_options:
-            self._execution_options =\
-                engine._execution_options.union(_execution_options)
-        else:
+            assert not _execution_options
             self._execution_options = engine._execution_options

         if self._has_events or self.engine._has_events:
-            self.dispatch.engine_connect(self, _branch)
+            self.dispatch.engine_connect(self, self.__branch)

     def _branch(self):
         """Return a new Connection which references this Connection's
         engine and connection; but does not have close_with_result enabled,
         and also whose close() method does nothing.

-        This is used to execute "sub" statements within a single execution,
-        usually an INSERT statement.
-        """
+        The Core uses this very sparingly, only in the case of
+        custom SQL default functions that are to be INSERTed as the
+        primary key of a row where we need to get the value back, so we have
+        to invoke it distinctly - this is a very uncommon case.
+
+        Userland code accesses _branch() when the connect() or
+        contextual_connect() methods are called.  The branched connection
+        acts as much as possible like the parent, except that it stays
+        connected when a close() event occurs.
+
+        """
+        if self.__branch_from:
+            return self.__branch_from._branch()
+        else:
+            return self.engine._connection_cls(
                 self.engine,
                 self.__connection,
-            _branch=True,
+                _branch_from=self,
+                _execution_options=self._execution_options,
                 _has_events=self._has_events,
                 _dispatch=self.dispatch)

+    @property
+    def _root(self):
+        """return the 'root' connection.
+
+        Returns 'self' if this connection is not a branch, else
+        returns the root connection from which we ultimately branched.
+
+        """
+
+        if self.__branch_from:
+            return self.__branch_from
+        else:
+            return self
+
     def _clone(self):
         """Create a shallow copy of this Connection.
@@ -169,14 +201,19 @@ class Connection(Connectable):
           used by the ORM internally supersedes a cache dictionary
           specified here.

-        :param isolation_level: Available on: Connection.
+        :param isolation_level: Available on: :class:`.Connection`.
           Set the transaction isolation level for
-          the lifespan of this connection.   Valid values include
-          those string values accepted by the ``isolation_level``
-          parameter passed to :func:`.create_engine`, and are
-          database specific, including those for :ref:`sqlite_toplevel`,
-          :ref:`postgresql_toplevel` - see those dialect's documentation
-          for further info.
+          the lifespan of this :class:`.Connection` object (*not* the
+          underyling DBAPI connection, for which the level is reset
+          to its original setting upon termination of this
+          :class:`.Connection` object).
+
+          Valid values include
+          those string values accepted by the
+          :paramref:`.create_engine.isolation_level`
+          parameter passed to :func:`.create_engine`.  These levels are
+          semi-database specific; see individual dialect documentation for
+          valid levels.

           Note that this option necessarily affects the underlying
           DBAPI connection for the lifespan of the originating
@@ -185,6 +222,41 @@ class Connection(Connectable):
           is returned to the connection pool, i.e.
          the :meth:`.Connection.close` method is called.

+          .. warning::  The ``isolation_level`` execution option should
+             **not** be used when a transaction is already established, that
+             is, the :meth:`.Connection.begin` method or similar has been
+             called.  A database cannot change the isolation level on a
+             transaction in progress, and different DBAPIs and/or
+             SQLAlchemy dialects may implicitly roll back or commit
+             the transaction, or not affect the connection at all.
+
+          .. versionchanged:: 0.9.9 A warning is emitted when the
+             ``isolation_level`` execution option is used after a
+             transaction has been started with :meth:`.Connection.begin`
+             or similar.
+
+          .. note:: The ``isolation_level`` execution option is implicitly
+             reset if the :class:`.Connection` is invalidated, e.g. via
+             the :meth:`.Connection.invalidate` method, or if a
+             disconnection error occurs.  The new connection produced after
+             the invalidation will not have the isolation level re-applied
+             to it automatically.
+
+          .. seealso::
+
+                :paramref:`.create_engine.isolation_level`
+                - set per :class:`.Engine` isolation level
+
+                :meth:`.Connection.get_isolation_level` - view current level
+
+                :ref:`SQLite Transaction Isolation <sqlite_isolation_level>`
+
+                :ref:`Postgresql Transaction Isolation <postgresql_isolation_level>`
+
+                :ref:`MySQL Transaction Isolation <mysql_isolation_level>`
+
+                :ref:`session_transaction_isolation` - for the ORM
+
         :param no_parameters: When ``True``, if the final parameter
           list or dictionary is totally empty, will invoke the
           statement on the cursor as ``cursor.execute(statement)``,
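A minimal sketch of the per-connection option documented above (engine setup assumed; ``some_table`` is hypothetical):

    conn = engine.connect().execution_options(isolation_level="AUTOCOMMIT")
    try:
        conn.execute(some_table.select())
    finally:
        # on close, the underlying DBAPI connection reverts to its
        # original isolation setting, per the note above
        conn.close()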
@@ -224,24 +296,101 @@ class Connection(Connectable):
     def invalidated(self):
         """Return True if this connection was invalidated."""

-        return self.__invalid
+        return self._root.__invalid

     @property
     def connection(self):
-        "The underlying DB-API connection managed by this Connection."
+        """The underlying DB-API connection managed by this Connection.
+
+        .. seealso::
+
+            :ref:`dbapi_connections`
+
+        """

         try:
             return self.__connection
         except AttributeError:
+            try:
                 return self._revalidate_connection()
+            except Exception as e:
+                self._handle_dbapi_exception(e, None, None, None, None)
+
+    def get_isolation_level(self):
+        """Return the current isolation level assigned to this
+        :class:`.Connection`.
+
+        This will typically be the default isolation level as determined
+        by the dialect, unless if the
+        :paramref:`.Connection.execution_options.isolation_level`
+        feature has been used to alter the isolation level on a
+        per-:class:`.Connection` basis.
+
+        This attribute will typically perform a live SQL operation in order
+        to procure the current isolation level, so the value returned is the
+        actual level on the underlying DBAPI connection regardless of how
+        this state was set.  Compare to the
+        :attr:`.Connection.default_isolation_level` accessor
+        which returns the dialect-level setting without performing a SQL
+        query.
+
+        .. versionadded:: 0.9.9
+
+        .. seealso::
+
+            :attr:`.Connection.default_isolation_level` - view default level
+
+            :paramref:`.create_engine.isolation_level`
+            - set per :class:`.Engine` isolation level
+
+            :paramref:`.Connection.execution_options.isolation_level`
+            - set per :class:`.Connection` isolation level
+
+        """
+        try:
+            return self.dialect.get_isolation_level(self.connection)
+        except Exception as e:
+            self._handle_dbapi_exception(e, None, None, None, None)
+
+    @property
+    def default_isolation_level(self):
+        """The default isolation level assigned to this :class:`.Connection`.
+
+        This is the isolation level setting that the :class:`.Connection`
+        has when first procured via the :meth:`.Engine.connect` method.
+        This level stays in place until the
+        :paramref:`.Connection.execution_options.isolation_level` is used
+        to change the setting on a per-:class:`.Connection` basis.
+
+        Unlike :meth:`.Connection.get_isolation_level`, this attribute is set
+        ahead of time from the first connection procured by the dialect,
+        so SQL query is not invoked when this accessor is called.
+
+        .. versionadded:: 0.9.9
+
+        .. seealso::
+
+            :meth:`.Connection.get_isolation_level` - view current level
+
+            :paramref:`.create_engine.isolation_level`
+            - set per :class:`.Engine` isolation level
+
+            :paramref:`.Connection.execution_options.isolation_level`
+            - set per :class:`.Connection` isolation level
+
+        """
+        return self.dialect.default_isolation_level

     def _revalidate_connection(self):
+        if self.__branch_from:
+            return self.__branch_from._revalidate_connection()
         if self.__can_reconnect and self.__invalid:
             if self.__transaction is not None:
                 raise exc.InvalidRequestError(
                     "Can't reconnect until invalid "
                     "transaction is rolled back")
-            self.__connection = self.engine.raw_connection()
+            self.__connection = self.engine.raw_connection(_connection=self)
             self.__invalid = False
             return self.__connection
         raise exc.ResourceClosedError("This Connection is closed")
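A short sketch contrasting the two new accessors (engine setup assumed):

    conn = engine.connect()

    # dialect-level setting captured up front; no SQL is emitted
    print(conn.default_isolation_level)

    # queries the live DBAPI connection for the actual current level
    print(conn.get_isolation_level())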
@@ -343,16 +492,17 @@ class Connection(Connectable):
             :ref:`pool_connection_invalidation`

         """
+
         if self.invalidated:
             return

         if self.closed:
             raise exc.ResourceClosedError("This Connection is closed")

-        if self._connection_is_valid:
-            self.__connection.invalidate(exception)
-        del self.__connection
-        self.__invalid = True
+        if self._root._connection_is_valid:
+            self._root.__connection.invalidate(exception)
+        del self._root.__connection
+        self._root.__invalid = True

     def detach(self):
         """Detach the underlying DB-API connection from its connection pool.
@@ -415,6 +565,8 @@ class Connection(Connectable):
         :class:`.Engine`.

         """
+        if self.__branch_from:
+            return self.__branch_from.begin()

         if self.__transaction is None:
             self.__transaction = RootTransaction(self)

@@ -436,6 +588,9 @@ class Connection(Connectable):
         See also :meth:`.Connection.begin`,
         :meth:`.Connection.begin_twophase`.
         """
+        if self.__branch_from:
+            return self.__branch_from.begin_nested()
+
         if self.__transaction is None:
             self.__transaction = RootTransaction(self)
         else:

@@ -459,6 +614,9 @@ class Connection(Connectable):

         """

+        if self.__branch_from:
+            return self.__branch_from.begin_twophase(xid=xid)
+
         if self.__transaction is not None:
             raise exc.InvalidRequestError(
                 "Cannot start a two phase transaction when a transaction "
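For reference, the public begin() API that these branch checks now delegate to the root connection is used as follows (engine setup assumed; ``stmt`` is hypothetical):

    conn = engine.connect()
    trans = conn.begin()
    try:
        conn.execute(stmt)
        trans.commit()
    except:
        trans.rollback()
        raise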
@@ -479,10 +637,11 @@ class Connection(Connectable):

     def in_transaction(self):
         """Return True if a transaction is in progress."""

-        return self.__transaction is not None
+        return self._root.__transaction is not None

     def _begin_impl(self, transaction):
+        assert not self.__branch_from
+
         if self._echo:
             self.engine.logger.info("BEGIN (implicit)")

@@ -497,6 +656,8 @@ class Connection(Connectable):
             self._handle_dbapi_exception(e, None, None, None, None)

     def _rollback_impl(self):
+        assert not self.__branch_from
+
         if self._has_events or self.engine._has_events:
             self.dispatch.rollback(self)

@@ -516,6 +677,8 @@ class Connection(Connectable):
             self.__transaction = None

     def _commit_impl(self, autocommit=False):
+        assert not self.__branch_from
+
         if self._has_events or self.engine._has_events:
             self.dispatch.commit(self)

@@ -532,6 +695,8 @@ class Connection(Connectable):
             self.__transaction = None

     def _savepoint_impl(self, name=None):
+        assert not self.__branch_from
+
         if self._has_events or self.engine._has_events:
             self.dispatch.savepoint(self, name)

@@ -543,6 +708,8 @@ class Connection(Connectable):
         return name

     def _rollback_to_savepoint_impl(self, name, context):
+        assert not self.__branch_from
+
         if self._has_events or self.engine._has_events:
             self.dispatch.rollback_savepoint(self, name, context)

@@ -551,6 +718,8 @@ class Connection(Connectable):
         self.__transaction = context

     def _release_savepoint_impl(self, name, context):
+        assert not self.__branch_from
+
         if self._has_events or self.engine._has_events:
             self.dispatch.release_savepoint(self, name, context)

@@ -559,6 +728,8 @@ class Connection(Connectable):
         self.__transaction = context

     def _begin_twophase_impl(self, transaction):
+        assert not self.__branch_from
+
         if self._echo:
             self.engine.logger.info("BEGIN TWOPHASE (implicit)")
         if self._has_events or self.engine._has_events:

@@ -571,6 +742,8 @@ class Connection(Connectable):
             self.connection._reset_agent = transaction

     def _prepare_twophase_impl(self, xid):
+        assert not self.__branch_from
+
         if self._has_events or self.engine._has_events:
             self.dispatch.prepare_twophase(self, xid)

@@ -579,6 +752,8 @@ class Connection(Connectable):
         self.engine.dialect.do_prepare_twophase(self, xid)

     def _rollback_twophase_impl(self, xid, is_prepared):
+        assert not self.__branch_from
+
         if self._has_events or self.engine._has_events:
             self.dispatch.rollback_twophase(self, xid, is_prepared)

@@ -595,6 +770,8 @@ class Connection(Connectable):
         self.__transaction = None

     def _commit_twophase_impl(self, xid, is_prepared):
+        assert not self.__branch_from
+
         if self._has_events or self.engine._has_events:
             self.dispatch.commit_twophase(self, xid, is_prepared)

@@ -610,8 +787,8 @@ class Connection(Connectable):
         self.__transaction = None

     def _autorollback(self):
-        if not self.in_transaction():
-            self._rollback_impl()
+        if not self._root.in_transaction():
+            self._root._rollback_impl()

     def close(self):
         """Close this :class:`.Connection`.
@@ -632,12 +809,20 @@ class Connection(Connectable):
         and will allow no further operations.

         """
+        if self.__branch_from:
+            try:
+                del self.__connection
+            except AttributeError:
+                pass
+            finally:
+                self.__can_reconnect = False
+            return
         try:
             conn = self.__connection
         except AttributeError:
             pass
         else:
-            if not self.__branch:
-                conn.close()
+            conn.close()
             if conn._reset_agent is self.__transaction:
                 conn._reset_agent = None
@@ -670,7 +855,7 @@ class Connection(Connectable):
           a subclass of :class:`.Executable`, such as a
           :func:`~.expression.select` construct
         * a :class:`.FunctionElement`, such as that generated
-          by :attr:`.func`, will be automatically wrapped in
+          by :data:`.func`, will be automatically wrapped in
           a SELECT statement, which is then executed.
         * a :class:`.DDLElement` object
         * a :class:`.DefaultGenerator` object
@@ -798,17 +983,16 @@ class Connection(Connectable):
         distilled_params = _distill_params(multiparams, params)
         if distilled_params:
             # note this is usually dict but we support RowProxy
-            # as well; but dict.keys() as an iterator is OK
+            # as well; but dict.keys() as an iterable is OK
             keys = distilled_params[0].keys()
         else:
             keys = []

         dialect = self.dialect
         if 'compiled_cache' in self._execution_options:
-            key = dialect, elem, tuple(keys), len(distilled_params) > 1
-            if key in self._execution_options['compiled_cache']:
-                compiled_sql = self._execution_options['compiled_cache'][key]
-            else:
+            key = dialect, elem, tuple(sorted(keys)), len(distilled_params) > 1
+            compiled_sql = self._execution_options['compiled_cache'].get(key)
+            if compiled_sql is None:
                 compiled_sql = elem.compile(
                     dialect=dialect, column_keys=keys,
                     inline=len(distilled_params) > 1)
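The sorted-key cache lookup above is driven by the ``compiled_cache`` execution option; a minimal sketch (engine and ``table`` assumed):

    cache = {}
    conn = engine.connect().execution_options(compiled_cache=cache)

    # the second execution of the same construct reuses the compiled form
    conn.execute(table.select())
    conn.execute(table.select())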
@@ -888,7 +1072,8 @@ class Connection(Connectable):

             context = constructor(dialect, self, conn, *args)
         except Exception as e:
-            self._handle_dbapi_exception(e,
+            self._handle_dbapi_exception(
+                e,
                 util.text_type(statement), parameters,
                 None, None)
@@ -914,36 +1099,39 @@ class Connection(Connectable):
                     "%r",
                     sql_util._repr_params(parameters, batches=10)
                 )
+
+        evt_handled = False
         try:
             if context.executemany:
-                for fn in () if not self.dialect._has_events \
-                        else self.dialect.dispatch.do_executemany:
-                    if fn(cursor, statement, parameters, context):
-                        break
-                else:
+                if self.dialect._has_events:
+                    for fn in self.dialect.dispatch.do_executemany:
+                        if fn(cursor, statement, parameters, context):
+                            evt_handled = True
+                            break
+                if not evt_handled:
                     self.dialect.do_executemany(
                         cursor,
                         statement,
                         parameters,
                         context)

             elif not parameters and context.no_parameters:
-                for fn in () if not self.dialect._has_events \
-                        else self.dialect.dispatch.do_execute_no_params:
-                    if fn(cursor, statement, context):
-                        break
-                else:
+                if self.dialect._has_events:
+                    for fn in self.dialect.dispatch.do_execute_no_params:
+                        if fn(cursor, statement, context):
+                            evt_handled = True
+                            break
+                if not evt_handled:
                     self.dialect.do_execute_no_params(
                         cursor,
                         statement,
                         context)

             else:
-                for fn in () if not self.dialect._has_events \
-                        else self.dialect.dispatch.do_execute:
-                    if fn(cursor, statement, parameters, context):
-                        break
-                else:
+                if self.dialect._has_events:
+                    for fn in self.dialect.dispatch.do_execute:
+                        if fn(cursor, statement, parameters, context):
+                            evt_handled = True
+                            break
+                if not evt_handled:
                     self.dialect.do_execute(
                         cursor,
                         statement,
@@ -967,36 +1155,17 @@ class Connection(Connectable):
             if context.compiled:
                 context.post_exec()

-                if context.isinsert and not context.executemany:
-                    context.post_insert()
-
             # create a resultproxy, get rowcount/implicit RETURNING
             # rows, close cursor if no further results pending
+            if context.is_crud or context.is_text:
+                result = context._setup_crud_result_proxy()
+            else:
                 result = context.get_result_proxy()
-            if context.isinsert:
-                if context._is_implicit_returning:
-                    context._fetch_implicit_returning(result)
-                    result.close(_autoclose_connection=False)
-                    result._metadata = None
-                elif not context._is_explicit_returning:
-                    result.close(_autoclose_connection=False)
-                    result._metadata = None
-            elif context.isupdate and context._is_implicit_returning:
-                context._fetch_implicit_update_returning(result)
-                result.close(_autoclose_connection=False)
-                result._metadata = None
+                if result._metadata is None:
+                    result._soft_close(_autoclose_connection=False)

-            elif result._metadata is None:
-                # no results, get rowcount
-                # (which requires open cursor on some drivers
-                # such as kintersbasdb, mxodbc),
-                result.rowcount
-                result.close(_autoclose_connection=False)
+            if context.should_autocommit and self._root.__transaction is None:
+                self._root._commit_impl(autocommit=True)

-            if self.__transaction is None and context.should_autocommit:
-                self._commit_impl(autocommit=True)
-
-            if result.closed and self.should_close_with_result:
+            if result._soft_closed and self.should_close_with_result:
                 self.close()

             return result
@@ -1055,8 +1224,6 @@ class Connection(Connectable):
         """
         try:
             cursor.close()
-        except (SystemExit, KeyboardInterrupt):
-            raise
         except Exception:
             # log the error through the connection pool's logger.
             self.engine.pool.logger.error(
@@ -1071,7 +1238,6 @@ class Connection(Connectable):
             parameters,
             cursor,
             context):
-
         exc_info = sys.exc_info()

         if context and context.exception is None:
@@ -1081,16 +1247,22 @@ class Connection(Connectable):
             self._is_disconnect = \
                 isinstance(e, self.dialect.dbapi.Error) and \
                 not self.closed and \
-                self.dialect.is_disconnect(e, self.__connection, cursor)
+                self.dialect.is_disconnect(
+                    e,
+                    self.__connection if not self.invalidated else None,
+                    cursor)
             if context:
                 context.is_disconnect = self._is_disconnect

+        invalidate_pool_on_disconnect = True
+
         if self._reentrant_error:
             util.raise_from_cause(
                 exc.DBAPIError.instance(statement,
                                         parameters,
                                         e,
-                                        self.dialect.dbapi.Error),
+                                        self.dialect.dbapi.Error,
+                                        dialect=self.dialect),
                 exc_info
             )
         self._reentrant_error = True
@@ -1106,13 +1278,16 @@ class Connection(Connectable):
                 parameters,
                 e,
                 self.dialect.dbapi.Error,
-                connection_invalidated=self._is_disconnect)
+                connection_invalidated=self._is_disconnect,
+                dialect=self.dialect)
         else:
             sqlalchemy_exception = None

         newraise = None

-        if self._has_events or self.engine._has_events:
+        if (self._has_events or self.engine._has_events) and \
+                not self._execution_options.get(
+                    'skip_user_error_events', False):
             # legacy dbapi_error event
             if should_wrap and context:
                 self.dispatch.dbapi_error(self,
@@ -1124,7 +1299,8 @@ class Connection(Connectable):

             # new handle_error event
             ctx = ExceptionContextImpl(
-                e, sqlalchemy_exception, self, cursor, statement,
+                e, sqlalchemy_exception, self.engine,
+                self, cursor, statement,
                 parameters, context, self._is_disconnect)

             for fn in self.dispatch.handle_error:
@@ -1144,6 +1320,11 @@ class Connection(Connectable):
                     sqlalchemy_exception.connection_invalidated = \
                         self._is_disconnect = ctx.is_disconnect

+            # set up potentially user-defined value for
+            # invalidate pool.
+            invalidate_pool_on_disconnect = \
+                ctx.invalidate_pool_on_disconnect
+
         if should_wrap and context:
             context.handle_dbapi_exception(e)
@@ -1166,12 +1347,66 @@ class Connection(Connectable):
             del self._reentrant_error
             if self._is_disconnect:
                 del self._is_disconnect
-                dbapi_conn_wrapper = self.connection
-                self.engine.pool._invalidate(dbapi_conn_wrapper, e)
-                self.invalidate(e)
+                if not self.invalidated:
+                    dbapi_conn_wrapper = self.__connection
+                    if invalidate_pool_on_disconnect:
+                        self.engine.pool._invalidate(dbapi_conn_wrapper, e)
+                    self.invalidate(e)
             if self.should_close_with_result:
                 self.close()

+    @classmethod
+    def _handle_dbapi_exception_noconnection(cls, e, dialect, engine):
+
+        exc_info = sys.exc_info()
+
+        is_disconnect = dialect.is_disconnect(e, None, None)
+
+        should_wrap = isinstance(e, dialect.dbapi.Error)
+
+        if should_wrap:
+            sqlalchemy_exception = exc.DBAPIError.instance(
+                None,
+                None,
+                e,
+                dialect.dbapi.Error,
+                connection_invalidated=is_disconnect)
+        else:
+            sqlalchemy_exception = None
+
+        newraise = None
+
+        if engine._has_events:
+            ctx = ExceptionContextImpl(
+                e, sqlalchemy_exception, engine, None, None, None,
+                None, None, is_disconnect)
+            for fn in engine.dispatch.handle_error:
+                try:
+                    # handler returns an exception;
+                    # call next handler in a chain
+                    per_fn = fn(ctx)
+                    if per_fn is not None:
+                        ctx.chained_exception = newraise = per_fn
+                except Exception as _raised:
+                    # handler raises an exception - stop processing
+                    newraise = _raised
+                    break
+
+            if sqlalchemy_exception and \
+                    is_disconnect != ctx.is_disconnect:
+                sqlalchemy_exception.connection_invalidated = \
+                    is_disconnect = ctx.is_disconnect
+
+        if newraise:
+            util.raise_from_cause(newraise, exc_info)
+        elif should_wrap:
+            util.raise_from_cause(
+                sqlalchemy_exception,
+                exc_info
+            )
+        else:
+            util.reraise(*exc_info)
+
     def default_schema_name(self):
         return self.engine.dialect.get_default_schema_name(self)
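The new no-connection code path above feeds the same handle_error event that connection-level failures do; a minimal listener sketch (engine setup assumed):

    import logging

    from sqlalchemy import event

    logger = logging.getLogger(__name__)

    @event.listens_for(engine, "handle_error")
    def on_error(ctx):
        # ctx.engine is populated even when ctx.connection is None,
        # e.g. for failures raised while first connecting
        logger.warning("error on %s: %s", ctx.engine, ctx.original_exception)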
@@ -1250,8 +1485,9 @@ class ExceptionContextImpl(ExceptionContext):
     """Implement the :class:`.ExceptionContext` interface."""

     def __init__(self, exception, sqlalchemy_exception,
-                 connection, cursor, statement, parameters,
+                 engine, connection, cursor, statement, parameters,
                  context, is_disconnect):
+        self.engine = engine
         self.connection = connection
         self.sqlalchemy_exception = sqlalchemy_exception
         self.original_exception = exception
@@ -1295,9 +1531,13 @@ class Transaction(object):

     def __init__(self, connection, parent):
         self.connection = connection
-        self._parent = parent or self
+        self._actual_parent = parent
         self.is_active = True

+    @property
+    def _parent(self):
+        return self._actual_parent or self
+
     def close(self):
         """Close this :class:`.Transaction`.
@@ -1575,29 +1815,28 @@ class Engine(Connectable, log.Identified):
     def dispose(self):
         """Dispose of the connection pool used by this :class:`.Engine`.

+        This has the effect of fully closing all **currently checked in**
+        database connections.  Connections that are still checked out
+        will **not** be closed, however they will no longer be associated
+        with this :class:`.Engine`, so when they are closed individually,
+        eventually the :class:`.Pool` which they are associated with will
+        be garbage collected and they will be closed out fully, if
+        not already closed on checkin.
+
         A new connection pool is created immediately after the old one has
         been disposed.   This new pool, like all SQLAlchemy connection pools,
         does not make any actual connections to the database until one is
-        first requested.
+        first requested, so as long as the :class:`.Engine` isn't used again,
+        no new connections will be made.

-        This method has two general use cases:
+        .. seealso::

-         * When a dropped connection is detected, it is assumed that all
-           connections held by the pool are potentially dropped, and
-           the entire pool is replaced.
-
-         * An application may want to use :meth:`dispose` within a test
-           suite that is creating multiple engines.
-
-        It is critical to note that :meth:`dispose` does **not** guarantee
-        that the application will release all open database connections - only
-        those connections that are checked into the pool are closed.
-        Connections which remain checked out or have been detached from
-        the engine are not affected.
+            :ref:`engine_disposal`

         """
         self.pool.dispose()
         self.pool = self.pool.recreate()
+        self.dispatch.engine_disposed(self)

     def _execute_default(self, default):
         with self.contextual_connect() as conn:
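A short sketch of the disposal behavior the revised docstring describes (engine setup assumed):

    conn = engine.connect()   # a checked-out connection
    engine.dispose()          # fully closes only checked-in connections

    # conn still works, but belongs to the old, discarded pool; once
    # closed it is returned there and eventually garbage collected
    conn.close()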
@@ -1795,8 +2034,9 @@ class Engine(Connectable, log.Identified):

         """

-        return self._connection_cls(self,
-                                    self.pool.connect(),
-                                    close_with_result=close_with_result,
-                                    **kwargs)
+        return self._connection_cls(
+            self,
+            self._wrap_pool_connect(self.pool.connect, None),
+            close_with_result=close_with_result,
+            **kwargs)
@@ -1828,7 +2068,18 @@ class Engine(Connectable, log.Identified):
         """
         return self.run_callable(self.dialect.has_table, table_name, schema)

-    def raw_connection(self):
+    def _wrap_pool_connect(self, fn, connection):
+        dialect = self.dialect
+        try:
+            return fn()
+        except dialect.dbapi.Error as e:
+            if connection is None:
+                Connection._handle_dbapi_exception_noconnection(
+                    e, dialect, self)
+            else:
+                util.reraise(*sys.exc_info())
+
+    def raw_connection(self, _connection=None):
         """Return a "raw" DBAPI connection from the connection pool.

         The returned object is a proxied version of the DBAPI

@@ -1839,13 +2090,18 @@ class Engine(Connectable, log.Identified):
         for real.

         This method provides direct DBAPI connection access for
-        special situations.  In most situations, the :class:`.Connection`
-        object should be used, which is procured using the
-        :meth:`.Engine.connect` method.
+        special situations when the API provided by :class:`.Connection`
+        is not needed.   When a :class:`.Connection` object is already
+        present, the DBAPI connection is available using
+        the :attr:`.Connection.connection` accessor.
+
+        .. seealso::
+
+            :ref:`dbapi_connections`

         """
-
-        return self.pool.unique_connection()
+        return self._wrap_pool_connect(
+            self.pool.unique_connection, _connection)


 class OptionEngine(Engine):
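Usage sketch for the revised raw_connection() (engine setup assumed; the ``_connection`` argument is internal and not passed by callers):

    dbapi_conn = engine.raw_connection()
    try:
        cursor = dbapi_conn.cursor()
        cursor.execute("SELECT 1")
        cursor.close()
    finally:
        dbapi_conn.close()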
@@ -1,5 +1,5 @@
 # engine/default.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under
@@ -61,14 +61,13 @@ class DefaultDialect(interfaces.Dialect):

     engine_config_types = util.immutabledict([
         ('convert_unicode', util.bool_or_str('force')),
-        ('pool_timeout', int),
+        ('pool_timeout', util.asint),
         ('echo', util.bool_or_str('debug')),
         ('echo_pool', util.bool_or_str('debug')),
-        ('pool_recycle', int),
-        ('pool_size', int),
-        ('max_overflow', int),
-        ('pool_threadlocal', bool),
-        ('use_native_unicode', bool),
+        ('pool_recycle', util.asint),
+        ('pool_size', util.asint),
+        ('max_overflow', util.asint),
+        ('pool_threadlocal', util.asbool),
     ])

     # if the NUMERIC type
@@ -157,6 +156,15 @@ class DefaultDialect(interfaces.Dialect):

     reflection_options = ()

+    dbapi_exception_translation_map = util.immutabledict()
+    """mapping used in the extremely unusual case that a DBAPI's
+    published exceptions don't actually have the __name__ that they
+    are linked towards.
+
+    .. versionadded:: 1.0.5
+
+    """
+
     def __init__(self, convert_unicode=False,
                  encoding='utf-8', paramstyle=None, dbapi=None,
                  implicit_returning=None,
@@ -395,6 +403,12 @@ class DefaultDialect(interfaces.Dialect):
             self._set_connection_isolation(connection, opts['isolation_level'])

     def _set_connection_isolation(self, connection, level):
+        if connection.in_transaction():
+            util.warn(
+                "Connection is already established with a Transaction; "
+                "setting isolation_level may implicitly rollback or commit "
+                "the existing transaction, or have no effect until "
+                "next transaction")
         self.set_isolation_level(connection.connection, level)
         connection.connection._connection_record.\
             finalize_callback.append(self.reset_isolation_level)
@@ -452,14 +466,13 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
     isinsert = False
     isupdate = False
     isdelete = False
+    is_crud = False
+    is_text = False
     isddl = False
     executemany = False
-    result_map = None
     compiled = None
     statement = None
-    postfetch_cols = None
-    prefetch_cols = None
-    returning_cols = None
+    result_column_struct = None
     _is_implicit_returning = False
     _is_explicit_returning = False
@@ -472,10 +485,9 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
         """Initialize execution context for a DDLElement construct."""

         self = cls.__new__(cls)
-        self.dialect = dialect
         self.root_connection = connection
         self._dbapi_connection = dbapi_connection
-        self.engine = connection.engine
+        self.dialect = connection.dialect

         self.compiled = compiled = compiled_ddl
         self.isddl = True
@@ -507,25 +519,20 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
         """Initialize execution context for a Compiled construct."""

         self = cls.__new__(cls)
-        self.dialect = dialect
         self.root_connection = connection
         self._dbapi_connection = dbapi_connection
-        self.engine = connection.engine
+        self.dialect = connection.dialect

         self.compiled = compiled

         if not compiled.can_execute:
             raise exc.ArgumentError("Not an executable clause")

-        self.execution_options = compiled.statement._execution_options
-        if connection._execution_options:
-            self.execution_options = dict(self.execution_options)
-            self.execution_options.update(connection._execution_options)
+        self.execution_options = compiled.statement._execution_options.union(
+            connection._execution_options)

-        # compiled clauseelement.  process bind params, process table defaults,
-        # track collections used by ResultProxy to target and process results
-
-        self.result_map = compiled.result_map
+        self.result_column_struct = (
+            compiled._result_columns, compiled._ordered_columns)

         self.unicode_statement = util.text_type(compiled)
         if not dialect.supports_unicode_statements:
@@ -537,11 +544,7 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
         self.isinsert = compiled.isinsert
         self.isupdate = compiled.isupdate
         self.isdelete = compiled.isdelete
-
-        if self.isinsert or self.isupdate or self.isdelete:
-            self._is_explicit_returning = bool(compiled.statement._returning)
-            self._is_implicit_returning = bool(
-                compiled.returning and not compiled.statement._returning)
+        self.is_text = compiled.isplaintext

         if not parameters:
             self.compiled_parameters = [compiled.construct_params()]
@@ -553,11 +556,19 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
             self.executemany = len(parameters) > 1

         self.cursor = self.create_cursor()
-        if self.isinsert or self.isupdate:
-            self.postfetch_cols = self.compiled.postfetch
-            self.prefetch_cols = self.compiled.prefetch
-            self.returning_cols = self.compiled.returning
-            self.__process_defaults()
+
+        if self.isinsert or self.isupdate or self.isdelete:
+            self.is_crud = True
+            self._is_explicit_returning = bool(compiled.statement._returning)
+            self._is_implicit_returning = bool(
+                compiled.returning and not compiled.statement._returning)
+
+            if not self.isdelete:
+                if self.compiled.prefetch:
+                    if self.executemany:
+                        self._process_executemany_defaults()
+                    else:
+                        self._process_executesingle_defaults()

         processors = compiled._bind_processors
|
@ -577,21 +588,28 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
|
|||
else:
|
||||
encode = not dialect.supports_unicode_statements
|
||||
for compiled_params in self.compiled_parameters:
|
||||
param = {}
|
||||
|
||||
if encode:
|
||||
for key in compiled_params:
|
||||
if key in processors:
|
||||
param[dialect._encoder(key)[0]] = \
|
||||
param = dict(
|
||||
(
|
||||
dialect._encoder(key)[0],
|
||||
processors[key](compiled_params[key])
|
||||
if key in processors
|
||||
else compiled_params[key]
|
||||
)
|
||||
for key in compiled_params
|
||||
)
|
||||
else:
|
||||
param[dialect._encoder(key)[0]] = \
|
||||
compiled_params[key]
|
||||
else:
|
||||
for key in compiled_params:
|
||||
if key in processors:
|
||||
param[key] = processors[key](compiled_params[key])
|
||||
else:
|
||||
param[key] = compiled_params[key]
|
||||
param = dict(
|
||||
(
|
||||
key,
|
||||
processors[key](compiled_params[key])
|
||||
if key in processors
|
||||
else compiled_params[key]
|
||||
)
|
||||
for key in compiled_params
|
||||
)
|
||||
|
||||
parameters.append(param)
|
||||
self.parameters = dialect.execute_sequence_format(parameters)
|
||||
|
||||
|
@ -603,10 +621,10 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
|
|||
"""Initialize execution context for a string SQL statement."""
|
||||
|
||||
self = cls.__new__(cls)
|
||||
self.dialect = dialect
|
||||
self.root_connection = connection
|
||||
self._dbapi_connection = dbapi_connection
|
||||
self.engine = connection.engine
|
||||
self.dialect = connection.dialect
|
||||
self.is_text = True
|
||||
|
||||
# plain text statement
|
||||
self.execution_options = connection._execution_options
|
||||
|
@@ -647,21 +665,32 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
         """Initialize execution context for a ColumnDefault construct."""

         self = cls.__new__(cls)
-        self.dialect = dialect
         self.root_connection = connection
         self._dbapi_connection = dbapi_connection
-        self.engine = connection.engine
+        self.dialect = connection.dialect
         self.execution_options = connection._execution_options
         self.cursor = self.create_cursor()
         return self

     @util.memoized_property
-    def no_parameters(self):
-        return self.execution_options.get("no_parameters", False)
+    def engine(self):
+        return self.root_connection.engine

     @util.memoized_property
-    def is_crud(self):
-        return self.isinsert or self.isupdate or self.isdelete
+    def postfetch_cols(self):
+        return self.compiled.postfetch

     @util.memoized_property
+    def prefetch_cols(self):
+        return self.compiled.prefetch
+
+    @util.memoized_property
+    def returning_cols(self):
+        self.compiled.returning
+
+    @util.memoized_property
+    def no_parameters(self):
+        return self.execution_options.get("no_parameters", False)
+
+    @util.memoized_property
     def should_autocommit(self):
@@ -778,16 +807,51 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
     def supports_sane_multi_rowcount(self):
         return self.dialect.supports_sane_multi_rowcount

-    def post_insert(self):
+    def _setup_crud_result_proxy(self):
+        if self.isinsert and \
+                not self.executemany:
             if not self._is_implicit_returning and \
                 not self._is_explicit_returning and \
                 not self.compiled.inline and \
-                self.dialect.postfetch_lastrowid and \
-                (not self.inserted_primary_key or
-                 None in self.inserted_primary_key):
+                    self.dialect.postfetch_lastrowid:
+
+                self._setup_ins_pk_from_lastrowid()
+
+            elif not self._is_implicit_returning:
+                self._setup_ins_pk_from_empty()
+
+        result = self.get_result_proxy()
+
+        if self.isinsert:
+            if self._is_implicit_returning:
+                row = result.fetchone()
+                self.returned_defaults = row
+                self._setup_ins_pk_from_implicit_returning(row)
+                result._soft_close(_autoclose_connection=False)
+                result._metadata = None
+            elif not self._is_explicit_returning:
+                result._soft_close(_autoclose_connection=False)
+                result._metadata = None
+        elif self.isupdate and self._is_implicit_returning:
+            row = result.fetchone()
+            self.returned_defaults = row
+            result._soft_close(_autoclose_connection=False)
+            result._metadata = None
+
+        elif result._metadata is None:
+            # no results, get rowcount
+            # (which requires open cursor on some drivers
+            # such as kintersbasdb, mxodbc)
+            result.rowcount
+            result._soft_close(_autoclose_connection=False)
+        return result
+
+    def _setup_ins_pk_from_lastrowid(self):
+        key_getter = self.compiled._key_getters_for_crud_column[2]
+        table = self.compiled.statement.table
+        compiled_params = self.compiled_parameters[0]
+
+        lastrowid = self.get_lastrowid()
+        if lastrowid is not None:
+            autoinc_col = table._autoincrement_column
             if autoinc_col is not None:
                 # apply type post processors to the lastrowid
@@ -795,35 +859,44 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
                     self.dialect, None)
                 if proc is not None:
                     lastrowid = proc(lastrowid)

                 self.inserted_primary_key = [
-                    lastrowid if c is autoinc_col else v
-                    for c, v in zip(
-                        table.primary_key,
-                        self.inserted_primary_key)
+                    lastrowid if c is autoinc_col else
+                    compiled_params.get(key_getter(c), None)
+                    for c in table.primary_key
                 ]
+            else:
+                # don't have a usable lastrowid, so
+                # do the same as _setup_ins_pk_from_empty
+                self.inserted_primary_key = [
+                    compiled_params.get(key_getter(c), None)
+                    for c in table.primary_key
+                ]

-    def _fetch_implicit_returning(self, resultproxy):
+    def _setup_ins_pk_from_empty(self):
+        key_getter = self.compiled._key_getters_for_crud_column[2]
         table = self.compiled.statement.table
-        row = resultproxy.fetchone()
+        compiled_params = self.compiled_parameters[0]
+        self.inserted_primary_key = [
+            compiled_params.get(key_getter(c), None)
+            for c in table.primary_key
+        ]

-        ipk = []
-        for c, v in zip(table.primary_key, self.inserted_primary_key):
-            if v is not None:
-                ipk.append(v)
-            else:
-                ipk.append(row[c])
+    def _setup_ins_pk_from_implicit_returning(self, row):
+        key_getter = self.compiled._key_getters_for_crud_column[2]
+        table = self.compiled.statement.table
+        compiled_params = self.compiled_parameters[0]

-        self.inserted_primary_key = ipk
-        self.returned_defaults = row
-
-    def _fetch_implicit_update_returning(self, resultproxy):
-        row = resultproxy.fetchone()
-        self.returned_defaults = row
+        self.inserted_primary_key = [
+            row[col] if value is None else value
+            for col, value in [
+                (col, compiled_params.get(key_getter(col), None))
+                for col in table.primary_key
+            ]
+        ]

     def lastrow_has_defaults(self):
         return (self.isinsert or self.isupdate) and \
-            bool(self.postfetch_cols)
+            bool(self.compiled.postfetch)

     def set_input_sizes(self, translate=None, exclude_types=None):
         """Given a cursor and ClauseParameters, call the appropriate
@@ -901,21 +974,16 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
         else:
             return self._exec_default(column.onupdate, column.type)

-    def __process_defaults(self):
-        """Generate default values for compiled insert/update statements,
-        and generate inserted_primary_key collection.
-        """
-
+    def _process_executemany_defaults(self):
         key_getter = self.compiled._key_getters_for_crud_column[2]
+        prefetch = self.compiled.prefetch
+        scalar_defaults = {}

-        if self.executemany:
-            if len(self.compiled.prefetch):
-                scalar_defaults = {}
-
-                # pre-determine scalar Python-side defaults
-                # to avoid many calls of get_insert_default()/
-                # get_update_default()
-                for c in self.prefetch_cols:
+        # pre-determine scalar Python-side defaults
+        # to avoid many calls of get_insert_default()/
+        # get_update_default()
+        for c in prefetch:
             if self.isinsert and c.default and c.default.is_scalar:
                 scalar_defaults[c] = c.default.arg
             elif self.isupdate and c.onupdate and c.onupdate.is_scalar:

@@ -923,7 +991,7 @@ class DefaultExecutionContext(interfaces.ExecutionContext):

         for param in self.compiled_parameters:
             self.current_parameters = param
-            for c in self.prefetch_cols:
+            for c in prefetch:
                 if c in scalar_defaults:
                     val = scalar_defaults[c]
                 elif self.isinsert:

@@ -933,12 +1001,19 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
                 if val is not None:
                     param[key_getter(c)] = val
         del self.current_parameters
-        else:
+
+    def _process_executesingle_defaults(self):
+        key_getter = self.compiled._key_getters_for_crud_column[2]
+        prefetch = self.compiled.prefetch
         self.current_parameters = compiled_parameters = \
             self.compiled_parameters[0]

-        for c in self.compiled.prefetch:
+        for c in prefetch:
             if self.isinsert:
+                if c.default and \
+                        not c.default.is_sequence and c.default.is_scalar:
+                    val = c.default.arg
+                else:
                     val = self.get_insert_default(c)
             else:
                 val = self.get_update_default(c)

@@ -947,12 +1022,5 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
                 compiled_parameters[key_getter(c)] = val
         del self.current_parameters

-        if self.isinsert:
-            self.inserted_primary_key = [
-                self.compiled_parameters[0].get(key_getter(c), None)
-                for c in self.compiled.
-                statement.table.primary_key
-            ]
-

 DefaultDialect.execution_ctx_cls = DefaultExecutionContext
@@ -1,5 +1,5 @@
 # engine/interfaces.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under
@@ -150,6 +150,16 @@ class Dialect(object):
       This will prevent types.Boolean from generating a CHECK
       constraint when that type is used.

+    dbapi_exception_translation_map
+       A dictionary of names that will contain as values the names of
+       pep-249 exceptions ("IntegrityError", "OperationalError", etc)
+       keyed to alternate class names, to support the case where a
+       DBAPI has exception classes that aren't named as they are
+       referred to (e.g. IntegrityError = MyException).   In the vast
+       majority of cases this dictionary is empty.
+
+       .. versionadded:: 1.0.5
+
     """

     _has_events = False
@@ -242,7 +252,9 @@ class Dialect(object):

       sequence
         a dictionary of the form
-            {'name' : str, 'start' :int, 'increment': int}
+            {'name' : str, 'start' :int, 'increment': int, 'minvalue': int,
+             'maxvalue': int, 'nominvalue': bool, 'nomaxvalue': bool,
+             'cycle': bool}

       Additional column attributes may be present.
       """
@@ -308,7 +320,15 @@ class Dialect(object):
     def get_table_names(self, connection, schema=None, **kw):
         """Return a list of table names for `schema`."""

-        raise NotImplementedError
+        raise NotImplementedError()
+
+    def get_temp_table_names(self, connection, schema=None, **kw):
+        """Return a list of temporary table names on the given connection,
+        if supported by the underlying backend.
+
+        """
+
+        raise NotImplementedError()

     def get_view_names(self, connection, schema=None, **kw):
         """Return a list of all view names available in the database.

@@ -319,6 +339,14 @@ class Dialect(object):

         raise NotImplementedError()

+    def get_temp_view_names(self, connection, schema=None, **kw):
+        """Return a list of temporary view names on the given connection,
+        if supported by the underlying backend.
+
+        """
+
+        raise NotImplementedError()
+
     def get_view_definition(self, connection, view_name, schema=None, **kw):
         """Return view definition.
@ -638,20 +666,120 @@ class Dialect(object):
        return None

    def reset_isolation_level(self, dbapi_conn):
        """Given a DBAPI connection, revert its isolation to the default."""
        """Given a DBAPI connection, revert its isolation to the default.

        Note that this is a dialect-level method which is used as part
        of the implementation of the :class:`.Connection` and
        :class:`.Engine`
        isolation level facilities; these APIs should be preferred for
        most typical use cases.

        .. seealso::

            :meth:`.Connection.get_isolation_level` - view current level

            :attr:`.Connection.default_isolation_level` - view default level

            :paramref:`.Connection.execution_options.isolation_level` -
            set per :class:`.Connection` isolation level

            :paramref:`.create_engine.isolation_level` -
            set per :class:`.Engine` isolation level

        """

        raise NotImplementedError()

    def set_isolation_level(self, dbapi_conn, level):
        """Given a DBAPI connection, set its isolation level."""
        """Given a DBAPI connection, set its isolation level.

        Note that this is a dialect-level method which is used as part
        of the implementation of the :class:`.Connection` and
        :class:`.Engine`
        isolation level facilities; these APIs should be preferred for
        most typical use cases.

        .. seealso::

            :meth:`.Connection.get_isolation_level` - view current level

            :attr:`.Connection.default_isolation_level` - view default level

            :paramref:`.Connection.execution_options.isolation_level` -
            set per :class:`.Connection` isolation level

            :paramref:`.create_engine.isolation_level` -
            set per :class:`.Engine` isolation level

        """

        raise NotImplementedError()

    def get_isolation_level(self, dbapi_conn):
        """Given a DBAPI connection, return its isolation level."""
        """Given a DBAPI connection, return its isolation level.

        When working with a :class:`.Connection` object, the corresponding
        DBAPI connection may be procured using the
        :attr:`.Connection.connection` accessor.

        Note that this is a dialect-level method which is used as part
        of the implementation of the :class:`.Connection` and
        :class:`.Engine` isolation level facilities;
        these APIs should be preferred for most typical use cases.


        .. seealso::

            :meth:`.Connection.get_isolation_level` - view current level

            :attr:`.Connection.default_isolation_level` - view default level

            :paramref:`.Connection.execution_options.isolation_level` -
            set per :class:`.Connection` isolation level

            :paramref:`.create_engine.isolation_level` -
            set per :class:`.Engine` isolation level


        """

        raise NotImplementedError()

    @classmethod
    def get_dialect_cls(cls, url):
        """Given a URL, return the :class:`.Dialect` that will be used.

        This is a hook that allows an external plugin to provide functionality
        around an existing dialect, by allowing the plugin to be loaded
        from the url based on an entrypoint, and then the plugin returns
        the actual dialect to be used.

        By default this just returns the cls.

        .. versionadded:: 1.0.3

        """
        return cls

    @classmethod
    def engine_created(cls, engine):
        """A convenience hook called before returning the final :class:`.Engine`.

        If the dialect returned a different class from the
        :meth:`.get_dialect_cls`
        method, then the hook is called on both classes, first on
        the dialect class returned by the :meth:`.get_dialect_cls` method and
        then on the class on which the method was called.

        The hook should be used by dialects and/or wrappers to apply special
        events to the engine or its components. In particular, it allows
        a dialect-wrapping class to apply dialect-level events.

        .. versionadded:: 1.0.3

        """
        pass


class ExecutionContext(object):
    """A messenger object for a Dialect that corresponds to a single
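For orientation, a minimal usage sketch (not part of this diff) of the Connection- and Engine-level isolation facilities these docstrings point to; the URL is a placeholder and assumes a reachable database::

    from sqlalchemy import create_engine

    # engine-wide default, passed through to the dialect
    engine = create_engine(
        "postgresql://scott:tiger@localhost/test",
        isolation_level="READ COMMITTED")

    with engine.connect() as conn:
        # per-connection override; the dialect's reset_isolation_level()
        # restores the default when the connection is returned to the pool
        conn = conn.execution_options(isolation_level="SERIALIZABLE")
        print(conn.get_isolation_level())
        print(conn.default_isolation_level)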
@ -901,7 +1029,23 @@ class ExceptionContext(object):
    connection = None
    """The :class:`.Connection` in use during the exception.

    This member is always present.
    This member is present, except in the case of a failure when
    first connecting.

    .. seealso::

        :attr:`.ExceptionContext.engine`


    """

    engine = None
    """The :class:`.Engine` in use during the exception.

    This member should always be present, even in the case of a failure
    when first connecting.

    .. versionadded:: 1.0.0

    """

@ -988,3 +1132,21 @@ class ExceptionContext(object):
    changing this flag.

    """

    invalidate_pool_on_disconnect = True
    """Represent whether all connections in the pool should be invalidated
    when a "disconnect" condition is in effect.

    Setting this flag to False within the scope of the
    :meth:`.ConnectionEvents.handle_error` event will have the effect such
    that the full collection of connections in the pool will not be
    invalidated during a disconnect; only the current connection that is the
    subject of the error will actually be invalidated.

    The purpose of this flag is for custom disconnect-handling schemes where
    the invalidation of other connections in the pool is to be performed
    based on other conditions, or even on a per-connection basis.

    .. versionadded:: 1.0.3

    """
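A hedged sketch (editor's illustration, not from this commit) of the flag in use from a ``handle_error`` listener; the URL is a placeholder::

    from sqlalchemy import create_engine, event

    engine = create_engine("postgresql://scott:tiger@localhost/test")

    @event.listens_for(engine, "handle_error")
    def limit_invalidation(context):
        # on a disconnect, invalidate only the failed connection rather
        # than every pooled sibling connection
        if context.is_disconnect:
            context.invalidate_pool_on_disconnect = False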
@ -1,5 +1,5 @@
# engine/reflection.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under

@ -173,7 +173,14 @@ class Inspector(object):
        passed as ``None``. For special quoting, use :class:`.quoted_name`.

        :param order_by: Optional, may be the string "foreign_key" to sort
         the result on foreign key dependencies.
         the result on foreign key dependencies. Does not automatically
         resolve cycles, and will raise :class:`.CircularDependencyError`
         if cycles exist.

         .. deprecated:: 1.0.0 - see
            :meth:`.Inspector.get_sorted_table_and_fkc_names` for a version
            of this which resolves foreign key cycles between tables
            automatically.

        .. versionchanged:: 0.8 the "foreign_key" sorting sorts tables
           in order of dependee to dependent; that is, in creation

@ -183,6 +190,8 @@ class Inspector(object):

        .. seealso::

            :meth:`.Inspector.get_sorted_table_and_fkc_names`

            :attr:`.MetaData.sorted_tables`

        """
@ -201,6 +210,88 @@ class Inspector(object):
            tnames = list(topological.sort(tuples, tnames))
        return tnames

    def get_sorted_table_and_fkc_names(self, schema=None):
        """Return dependency-sorted table and foreign key constraint names
        referred to within a particular schema.

        This will yield 2-tuples of
        ``(tablename, [(tname, fkname), (tname, fkname), ...])``
        consisting of table names in CREATE order grouped with the foreign key
        constraint names that are not detected as belonging to a cycle.
        The final element
        will be ``(None, [(tname, fkname), (tname, fkname), ..])``
        which will consist of remaining
        foreign key constraint names that would require a separate CREATE
        step after-the-fact, based on dependencies between tables.

        .. versionadded:: 1.0.0

        .. seealso::

            :meth:`.Inspector.get_table_names`

            :func:`.sort_tables_and_constraints` - similar method which works
            with an already-given :class:`.MetaData`.

        """
        if hasattr(self.dialect, 'get_table_names'):
            tnames = self.dialect.get_table_names(
                self.bind, schema, info_cache=self.info_cache)
        else:
            tnames = self.engine.table_names(schema)

        tuples = set()
        remaining_fkcs = set()

        fknames_for_table = {}
        for tname in tnames:
            fkeys = self.get_foreign_keys(tname, schema)
            fknames_for_table[tname] = set(
                [fk['name'] for fk in fkeys]
            )
            for fkey in fkeys:
                if tname != fkey['referred_table']:
                    tuples.add((fkey['referred_table'], tname))
        try:
            candidate_sort = list(topological.sort(tuples, tnames))
        except exc.CircularDependencyError as err:
            for edge in err.edges:
                tuples.remove(edge)
                remaining_fkcs.update(
                    (edge[1], fkc)
                    for fkc in fknames_for_table[edge[1]]
                )

            candidate_sort = list(topological.sort(tuples, tnames))
        return [
            (tname, fknames_for_table[tname].difference(remaining_fkcs))
            for tname in candidate_sort
        ] + [(None, list(remaining_fkcs))]

    def get_temp_table_names(self):
        """return a list of temporary table names for the current bind.

        This method is unsupported by most dialects; currently
        only SQLite implements it.

        .. versionadded:: 1.0.0

        """
        return self.dialect.get_temp_table_names(
            self.bind, info_cache=self.info_cache)

    def get_temp_view_names(self):
        """return a list of temporary view names for the current bind.

        This method is unsupported by most dialects; currently
        only SQLite implements it.

        .. versionadded:: 1.0.0

        """
        return self.dialect.get_temp_view_names(
            self.bind, info_cache=self.info_cache)

    def get_table_options(self, table_name, schema=None, **kw):
        """Return a dictionary of options specified when the table of the
        given name was created.
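A brief usage sketch (not part of this diff) for the new sorting method above, assuming an engine bound to an existing database; the URL is a placeholder::

    from sqlalchemy import create_engine, inspect

    engine = create_engine("sqlite:///example.db")
    insp = inspect(engine)

    for tname, fkcs in insp.get_sorted_table_and_fkc_names():
        if tname is not None:
            print("CREATE order:", tname, "with inline FK constraints", fkcs)
        else:
            # constraints left over from dependency cycles; these would
            # need a separate step after all tables exist
            print("deferred FK constraints:", fkcs)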
@ -370,6 +461,12 @@ class Inspector(object):
        unique
          boolean

        dialect_options
          dict of dialect-specific index options. May not be present
          for all dialects.

          .. versionadded:: 1.0.0

        :param table_name: string name of the table. For special quoting,
         use :class:`.quoted_name`.

@ -465,15 +562,43 @@ class Inspector(object):
        for col_d in self.get_columns(
                table_name, schema, **table.dialect_kwargs):
            found_table = True

            self._reflect_column(
                table, col_d, include_columns,
                exclude_columns, cols_by_orig_name)

        if not found_table:
            raise exc.NoSuchTableError(table.name)

        self._reflect_pk(
            table_name, schema, table, cols_by_orig_name, exclude_columns)

        self._reflect_fk(
            table_name, schema, table, cols_by_orig_name,
            exclude_columns, reflection_options)

        self._reflect_indexes(
            table_name, schema, table, cols_by_orig_name,
            include_columns, exclude_columns, reflection_options)

        self._reflect_unique_constraints(
            table_name, schema, table, cols_by_orig_name,
            include_columns, exclude_columns, reflection_options)

    def _reflect_column(
            self, table, col_d, include_columns,
            exclude_columns, cols_by_orig_name):

        orig_name = col_d['name']

        table.dispatch.column_reflect(self, table, col_d)

        # fetch name again as column_reflect is allowed to
        # change it
        name = col_d['name']
        if include_columns and name not in include_columns:
            continue
        if exclude_columns and name in exclude_columns:
            continue
        if (include_columns and name not in include_columns) \
                or (exclude_columns and name in exclude_columns):
            return

        coltype = col_d['type']

@ -494,6 +619,17 @@ class Inspector(object):
            )
        )

        if 'sequence' in col_d:
            self._reflect_col_sequence(col_d, colargs)

        cols_by_orig_name[orig_name] = col = \
            sa_schema.Column(name, coltype, *colargs, **col_kw)

        if col.key in table.primary_key:
            col.primary_key = True
        table.append_column(col)

    def _reflect_col_sequence(self, col_d, colargs):
        if 'sequence' in col_d:
            # TODO: mssql and sybase are using this.
            seq = col_d['sequence']

@ -504,16 +640,9 @@ class Inspector(object):
                sequence.increment = seq['increment']
            colargs.append(sequence)

        cols_by_orig_name[orig_name] = col = \
            sa_schema.Column(name, coltype, *colargs, **col_kw)

        if col.key in table.primary_key:
            col.primary_key = True
        table.append_column(col)

        if not found_table:
            raise exc.NoSuchTableError(table.name)

    def _reflect_pk(
            self, table_name, schema, table,
            cols_by_orig_name, exclude_columns):
        pk_cons = self.get_pk_constraint(
            table_name, schema, **table.dialect_kwargs)
        if pk_cons:

@ -530,6 +659,9 @@ class Inspector(object):
            # its column collection
            table.primary_key._reload(pk_cols)

    def _reflect_fk(
            self, table_name, schema, table, cols_by_orig_name,
            exclude_columns, reflection_options):
        fkeys = self.get_foreign_keys(
            table_name, schema, **table.dialect_kwargs)
        for fkey_d in fkeys:
@ -572,24 +704,85 @@ class Inspector(object):
                sa_schema.ForeignKeyConstraint(constrained_columns, refspec,
                                               conname, link_to_name=True,
                                               **options))

    def _reflect_indexes(
            self, table_name, schema, table, cols_by_orig_name,
            include_columns, exclude_columns, reflection_options):
        # Indexes
        indexes = self.get_indexes(table_name, schema)
        for index_d in indexes:
            name = index_d['name']
            columns = index_d['column_names']
            unique = index_d['unique']
            flavor = index_d.get('type', 'unknown type')
            flavor = index_d.get('type', 'index')
            dialect_options = index_d.get('dialect_options', {})

            duplicates = index_d.get('duplicates_constraint')
            if include_columns and \
                    not set(columns).issubset(include_columns):
                util.warn(
                    "Omitting %s KEY for (%s), key covers omitted columns." %
                    "Omitting %s key for (%s), key covers omitted columns." %
                    (flavor, ', '.join(columns)))
                continue
            if duplicates:
                continue
            # look for columns by orig name in cols_by_orig_name,
            # but support columns that are in-Python only as fallback
            sa_schema.Index(name, *[
                cols_by_orig_name[c] if c in cols_by_orig_name
                else table.c[c]
                for c in columns
            ],
                **dict(unique=unique))
            idx_cols = []
            for c in columns:
                try:
                    idx_col = cols_by_orig_name[c] \
                        if c in cols_by_orig_name else table.c[c]
                except KeyError:
                    util.warn(
                        "%s key '%s' was not located in "
                        "columns for table '%s'" % (
                            flavor, c, table_name
                        ))
                else:
                    idx_cols.append(idx_col)

            sa_schema.Index(
                name, *idx_cols,
                **dict(list(dialect_options.items()) + [('unique', unique)])
            )

    def _reflect_unique_constraints(
            self, table_name, schema, table, cols_by_orig_name,
            include_columns, exclude_columns, reflection_options):

        # Unique Constraints
        try:
            constraints = self.get_unique_constraints(table_name, schema)
        except NotImplementedError:
            # optional dialect feature
            return

        for const_d in constraints:
            conname = const_d['name']
            columns = const_d['column_names']
            duplicates = const_d.get('duplicates_index')
            if include_columns and \
                    not set(columns).issubset(include_columns):
                util.warn(
                    "Omitting unique constraint key for (%s), "
                    "key covers omitted columns." %
                    ', '.join(columns))
                continue
            if duplicates:
                continue
            # look for columns by orig name in cols_by_orig_name,
            # but support columns that are in-Python only as fallback
            constrained_cols = []
            for c in columns:
                try:
                    constrained_col = cols_by_orig_name[c] \
                        if c in cols_by_orig_name else table.c[c]
                except KeyError:
                    util.warn(
                        "unique constraint key '%s' was not located in "
                        "columns for table '%s'" % (c, table_name))
                else:
                    constrained_cols.append(constrained_col)
            table.append_constraint(
                sa_schema.UniqueConstraint(*constrained_cols, name=conname))
@ -1,5 +1,5 @@
# engine/result.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under

@ -110,7 +110,7 @@ class RowProxy(BaseRowProxy):
    __slots__ = ()

    def __contains__(self, key):
        return self._parent._has_key(self._row, key)
        return self._parent._has_key(key)

    def __getstate__(self):
        return {

@ -155,7 +155,7 @@ class RowProxy(BaseRowProxy):
    def has_key(self, key):
        """Return True if this RowProxy contains the given key."""

        return self._parent._has_key(self._row, key)
        return self._parent._has_key(key)

    def items(self):
        """Return a list of tuples, each tuple containing a key/value pair."""
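For context, a small sketch (not part of this diff) of the ``RowProxy`` key API being reworked here, using an in-memory SQLite engine::

    from sqlalchemy import create_engine

    engine = create_engine("sqlite://")
    engine.execute("CREATE TABLE users (id INTEGER, name VARCHAR)")
    engine.execute("INSERT INTO users VALUES (1, 'ed')")

    row = engine.execute("SELECT id, name FROM users").fetchone()
    print("name" in row)        # __contains__, backed by _parent._has_key(key)
    print(row.has_key("name"))  # same check, dict-style spelling
    print(row["name"], row.name, row[1])  # key, attribute and index access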
@ -187,23 +187,62 @@ class ResultMetaData(object):
    context."""

    def __init__(self, parent, metadata):
        self._processors = processors = []

        # We do not strictly need to store the processor in the key mapping,
        # though it is faster in the Python version (probably because of the
        # saved attribute lookup self._processors)
        self._keymap = keymap = {}
        self.keys = []
        context = parent.context
        dialect = context.dialect
        typemap = dialect.dbapi_type_map
        translate_colname = context._translate_colname
        self.case_sensitive = dialect.case_sensitive
        self.case_sensitive = case_sensitive = dialect.case_sensitive

        # high precedence key values.
        primary_keymap = {}
        if context.result_column_struct:
            result_columns, cols_are_ordered = context.result_column_struct
            num_ctx_cols = len(result_columns)
        else:
            num_ctx_cols = None

        for i, rec in enumerate(metadata):
        if num_ctx_cols and \
                cols_are_ordered and \
                num_ctx_cols == len(metadata):
            # case 1 - SQL expression statement, number of columns
            # in result matches number of cols in compiled. This is the
            # vast majority case for SQL expression constructs. In this
            # case we don't bother trying to parse or match up to
            # the colnames in the result description.
            raw = [
                (
                    idx,
                    key,
                    name.lower() if not case_sensitive else name,
                    context.get_result_processor(
                        type_, key, metadata[idx][1]
                    ),
                    obj,
                    None
                ) for idx, (key, name, obj, type_)
                in enumerate(result_columns)
            ]
            self.keys = [
                elem[0] for elem in result_columns
            ]
        else:
            # case 2 - raw string, or number of columns in result does
            # not match number of cols in compiled. The raw string case
            # is very common. The latter can happen
            # when text() is used with only a partial typemap, or
            # in the extremely unlikely cases where the compiled construct
            # has a single element with multiple col expressions in it
            # (e.g. has commas embedded) or there's some kind of statement
            # that is adding extra columns.
            # In all these cases we fall back to the "named" approach
            # that SQLAlchemy has used up through 0.9.

            if num_ctx_cols:
                result_map = self._create_result_map(
                    result_columns, case_sensitive)

            raw = []
            self.keys = []
            untranslated = None
            for idx, rec in enumerate(metadata):
                colname = rec[0]
                coltype = rec[1]

@ -216,61 +255,97 @@ class ResultMetaData(object):
                if dialect.requires_name_normalize:
                    colname = dialect.normalize_name(colname)

            if context.result_map:
                try:
                    name, obj, type_ = context.result_map[
                        colname if self.case_sensitive else colname.lower()]
                except KeyError:
                    name, obj, type_ = \
                        colname, None, typemap.get(coltype, sqltypes.NULLTYPE)
            else:
                name, obj, type_ = \
                    colname, None, typemap.get(coltype, sqltypes.NULLTYPE)

            processor = context.get_result_processor(type_, colname, coltype)

            processors.append(processor)
            rec = (processor, obj, i)

            # indexes as keys. This is only needed for the Python version of
            # RowProxy (the C version uses a faster path for integer indexes).
            primary_keymap[i] = rec

            # populate primary keymap, looking for conflicts.
            if primary_keymap.setdefault(
                    name if self.case_sensitive
                    else name.lower(),
                    rec) is not rec:
                # place a record that doesn't have the "index" - this
                # is interpreted later as an AmbiguousColumnError,
                # but only when actually accessed. Columns
                # colliding by name is not a problem if those names
                # aren't used; integer access is always
                # unambiguous.
                primary_keymap[name
                               if self.case_sensitive
                               else name.lower()] = rec = (None, obj, None)

            self.keys.append(colname)
            if obj:
                for o in obj:
                    keymap[o] = rec
                    # technically we should be doing this but we
                    # are saving on callcounts by not doing so.
                    # if keymap.setdefault(o, rec) is not rec:
                    #     keymap[o] = (None, obj, None)
                if not case_sensitive:
                    colname = colname.lower()

            if translate_colname and \
                    untranslated:
                keymap[untranslated] = rec
                if num_ctx_cols:
                    try:
                        ctx_rec = result_map[colname]
                    except KeyError:
                        mapped_type = typemap.get(coltype, sqltypes.NULLTYPE)
                        obj = None
                    else:
                        obj = ctx_rec[1]
                        mapped_type = ctx_rec[2]
                else:
                    mapped_type = typemap.get(coltype, sqltypes.NULLTYPE)
                    obj = None
                processor = context.get_result_processor(
                    mapped_type, colname, coltype)

        # overwrite keymap values with those of the
        # high precedence keymap.
        keymap.update(primary_keymap)
                raw.append(
                    (idx, colname, colname, processor, obj, untranslated)
                )

        if parent._echo:
            context.engine.logger.debug(
                "Col %r", tuple(x[0] for x in metadata))
        # keymap indexes by integer index...
        self._keymap = dict([
            (elem[0], (elem[3], elem[4], elem[0]))
            for elem in raw
        ])

        # processors in key order for certain per-row
        # views like __iter__ and slices
        self._processors = [elem[3] for elem in raw]

        if num_ctx_cols:
            # keymap by primary string...
            by_key = dict([
                (elem[2], (elem[3], elem[4], elem[0]))
                for elem in raw
            ])

            # if by-primary-string dictionary smaller (or bigger?!) than
            # number of columns, assume we have dupes, rewrite
            # dupe records with "None" for index which results in
            # ambiguous column exception when accessed.
            if len(by_key) != num_ctx_cols:
                seen = set()
                for rec in raw:
                    key = rec[1]
                    if key in seen:
                        by_key[key] = (None, by_key[key][1], None)
                    seen.add(key)

            # update keymap with secondary "object"-based keys
            self._keymap.update([
                (obj_elem, by_key[elem[2]])
                for elem in raw if elem[4]
                for obj_elem in elem[4]
            ])

            # update keymap with primary string names taking
            # precedence
            self._keymap.update(by_key)
        else:
            self._keymap.update([
                (elem[2], (elem[3], elem[4], elem[0]))
                for elem in raw
            ])
        # update keymap with "translated" names (sqlite-only thing)
        if translate_colname:
            self._keymap.update([
                (elem[5], self._keymap[elem[2]])
                for elem in raw if elem[5]
            ])

    @classmethod
    def _create_result_map(cls, result_columns, case_sensitive=True):
        d = {}
        for elem in result_columns:
            key, rec = elem[0], elem[1:]
            if not case_sensitive:
                key = key.lower()
            if key in d:
                # conflicting keyname, just double up the list
                # of objects. this will cause an "ambiguous name"
                # error if an attempt is made by the result set to
                # access.
                e_name, e_obj, e_type = d[key]
                d[key] = e_name, e_obj + rec[1], e_type
            else:
                d[key] = rec
        return d

    @util.pending_deprecation("0.8", "sqlite dialect uses "
                              "_translate_colname() now")
@ -335,12 +410,28 @@ class ResultMetaData(object):
            map[key] = result
        return result

    def _has_key(self, row, key):
    def _has_key(self, key):
        if key in self._keymap:
            return True
        else:
            return self._key_fallback(key, False) is not None

    def _getter(self, key):
        if key in self._keymap:
            processor, obj, index = self._keymap[key]
        else:
            ret = self._key_fallback(key, False)
            if ret is None:
                return None
            processor, obj, index = ret

        if index is None:
            raise exc.InvalidRequestError(
                "Ambiguous column name '%s' in result set! "
                "try 'use_labels' option on select statement." % key)

        return operator.itemgetter(index)

    def __getstate__(self):
        return {
            '_pickled_keymap': dict(

@ -391,21 +482,49 @@ class ResultProxy(object):
    out_parameters = None
    _can_close_connection = False
    _metadata = None
    _soft_closed = False
    closed = False

    def __init__(self, context):
        self.context = context
        self.dialect = context.dialect
        self.closed = False
        self.cursor = self._saved_cursor = context.cursor
        self.connection = context.root_connection
        self._echo = self.connection._echo and \
            context.engine._should_log_debug()
        self._init_metadata()

    def _getter(self, key):
        try:
            getter = self._metadata._getter
        except AttributeError:
            return self._non_result(None)
        else:
            return getter(key)

    def _has_key(self, key):
        try:
            has_key = self._metadata._has_key
        except AttributeError:
            return self._non_result(None)
        else:
            return has_key(key)

    def _init_metadata(self):
        metadata = self._cursor_description()
        if metadata is not None:
            if self.context.compiled and \
                    'compiled_cache' in self.context.execution_options:
                if self.context.compiled._cached_metadata:
                    self._metadata = self.context.compiled._cached_metadata
                else:
                    self._metadata = self.context.compiled._cached_metadata = \
                        ResultMetaData(self, metadata)
            else:
                self._metadata = ResultMetaData(self, metadata)
            if self._echo:
                self.context.engine.logger.debug(
                    "Col %r", tuple(x[0] for x in metadata))

    def keys(self):
        """Return the current set of string keys for rows."""
@ -515,39 +634,85 @@ class ResultProxy(object):

        return self._saved_cursor.description

    def close(self, _autoclose_connection=True):
        """Close this ResultProxy.
    def _soft_close(self, _autoclose_connection=True):
        """Soft close this :class:`.ResultProxy`.

        Closes the underlying DBAPI cursor corresponding to the execution.

        Note that any data cached within this ResultProxy is still available.
        For some types of results, this may include buffered rows.

        If this ResultProxy was generated from an implicit execution,
        the underlying Connection will also be closed (returns the
        underlying DBAPI connection to the connection pool.)
        This releases all DBAPI cursor resources, but leaves the
        ResultProxy "open" from a semantic perspective, meaning the
        fetchXXX() methods will continue to return empty results.

        This method is called automatically when:

        * all result rows are exhausted using the fetchXXX() methods.
        * cursor.description is None.

        """
        This method is **not public**, but is documented in order to clarify
        the "autoclose" process used.

        if not self.closed:
            self.closed = True
            self.connection._safe_close_cursor(self.cursor)
        .. versionadded:: 1.0.0

        .. seealso::

            :meth:`.ResultProxy.close`


        """
        if self._soft_closed:
            return
        self._soft_closed = True
        cursor = self.cursor
        self.connection._safe_close_cursor(cursor)
        if _autoclose_connection and \
                self.connection.should_close_with_result:
            self.connection.close()
        # allow consistent errors
        self.cursor = None

    def close(self):
        """Close this ResultProxy.

        This closes out the underlying DBAPI cursor corresponding
        to the statement execution, if one is still present. Note that the
        DBAPI cursor is automatically released when the :class:`.ResultProxy`
        exhausts all available rows. :meth:`.ResultProxy.close` is generally
        an optional method except in the case when discarding a
        :class:`.ResultProxy` that still has additional rows pending for fetch.

        In the case of a result that is the product of
        :ref:`connectionless execution <dbengine_implicit>`,
        the underlying :class:`.Connection` object is also closed, which
        :term:`releases` DBAPI connection resources.

        After this method is called, it is no longer valid to call upon
        the fetch methods, which will raise a :class:`.ResourceClosedError`
        on subsequent use.

        .. versionchanged:: 1.0.0 - the :meth:`.ResultProxy.close` method
           has been separated out from the process that releases the underlying
           DBAPI cursor resource. The "auto close" feature of the
           :class:`.Connection` now performs a so-called "soft close", which
           releases the underlying DBAPI cursor, but allows the
           :class:`.ResultProxy` to still behave as an open-but-exhausted
           result set; the actual :meth:`.ResultProxy.close` method is never
           called. It is still safe to discard a :class:`.ResultProxy`
           that has been fully exhausted without calling this method.

        .. seealso::

            :ref:`connections_toplevel`

            :meth:`.ResultProxy._soft_close`

        """

        if not self.closed:
            self._soft_close()
            self.closed = True

    def __iter__(self):
        while True:
            row = self.fetchone()
            if row is None:
                raise StopIteration
                return
            else:
                yield row
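A short sketch (editor's illustration, not part of this diff) of the resulting "soft close" behavior described above::

    from sqlalchemy import create_engine

    engine = create_engine("sqlite://")
    with engine.connect() as conn:
        result = conn.execute("SELECT 1 UNION SELECT 2")
        print(result.fetchall())  # [(1,), (2,)]; rows exhausted, soft-closed
        print(result.fetchall())  # []; still "open", returns empty results
        result.close()            # hard close
        # result.fetchall() would now raise ResourceClosedError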
@ -732,7 +897,7 @@ class ResultProxy(object):
        try:
            return self.cursor.fetchone()
        except AttributeError:
            self._non_result()
            return self._non_result(None)

    def _fetchmany_impl(self, size=None):
        try:

@ -741,22 +906,24 @@ class ResultProxy(object):
            else:
                return self.cursor.fetchmany(size)
        except AttributeError:
            self._non_result()
            return self._non_result([])

    def _fetchall_impl(self):
        try:
            return self.cursor.fetchall()
        except AttributeError:
            self._non_result()
            return self._non_result([])

    def _non_result(self):
    def _non_result(self, default):
        if self._metadata is None:
            raise exc.ResourceClosedError(
                "This result object does not return rows. "
                "It has been closed automatically.",
            )
        else:
        elif self.closed:
            raise exc.ResourceClosedError("This result object is closed.")
        else:
            return default

    def process_rows(self, rows):
        process_row = self._process_row
@ -775,11 +942,25 @@ class ResultProxy(object):
                for row in rows]

    def fetchall(self):
        """Fetch all rows, just like DB-API ``cursor.fetchall()``."""
        """Fetch all rows, just like DB-API ``cursor.fetchall()``.

        After all rows have been exhausted, the underlying DBAPI
        cursor resource is released, and the object may be safely
        discarded.

        Subsequent calls to :meth:`.ResultProxy.fetchall` will return
        an empty list. After the :meth:`.ResultProxy.close` method is
        called, the method will raise :class:`.ResourceClosedError`.

        .. versionchanged:: 1.0.0 - Added "soft close" behavior which
           allows the result to be used in an "exhausted" state prior to
           calling the :meth:`.ResultProxy.close` method.

        """

        try:
            l = self.process_rows(self._fetchall_impl())
            self.close()
            self._soft_close()
            return l
        except Exception as e:
            self.connection._handle_dbapi_exception(

@ -790,15 +971,25 @@ class ResultProxy(object):
        """Fetch many rows, just like DB-API
        ``cursor.fetchmany(size=cursor.arraysize)``.

        If rows are present, the cursor remains open after this is called.
        Else the cursor is automatically closed and an empty list is returned.
        After all rows have been exhausted, the underlying DBAPI
        cursor resource is released, and the object may be safely
        discarded.

        Calls to :meth:`.ResultProxy.fetchmany` after all rows have been
        exhausted will return
        an empty list. After the :meth:`.ResultProxy.close` method is
        called, the method will raise :class:`.ResourceClosedError`.

        .. versionchanged:: 1.0.0 - Added "soft close" behavior which
           allows the result to be used in an "exhausted" state prior to
           calling the :meth:`.ResultProxy.close` method.

        """

        try:
            l = self.process_rows(self._fetchmany_impl(size))
            if len(l) == 0:
                self.close()
                self._soft_close()
            return l
        except Exception as e:
            self.connection._handle_dbapi_exception(

@ -808,8 +999,18 @@ class ResultProxy(object):
    def fetchone(self):
        """Fetch one row, just like DB-API ``cursor.fetchone()``.

        If a row is present, the cursor remains open after this is called.
        Else the cursor is automatically closed and None is returned.
        After all rows have been exhausted, the underlying DBAPI
        cursor resource is released, and the object may be safely
        discarded.

        Calls to :meth:`.ResultProxy.fetchone` after all rows have
        been exhausted will return ``None``.
        After the :meth:`.ResultProxy.close` method is
        called, the method will raise :class:`.ResourceClosedError`.

        .. versionchanged:: 1.0.0 - Added "soft close" behavior which
           allows the result to be used in an "exhausted" state prior to
           calling the :meth:`.ResultProxy.close` method.

        """
        try:
@ -817,7 +1018,7 @@ class ResultProxy(object):
            if row is not None:
                return self.process_rows([row])[0]
            else:
                self.close()
                self._soft_close()
                return None
        except Exception as e:
            self.connection._handle_dbapi_exception(

@ -829,9 +1030,12 @@ class ResultProxy(object):

        Returns None if no row is present.

        After calling this method, the object is fully closed,
        e.g. the :meth:`.ResultProxy.close` method will have been called.

        """
        if self._metadata is None:
            self._non_result()
            return self._non_result(None)

        try:
            row = self._fetchone_impl()

@ -853,6 +1057,9 @@ class ResultProxy(object):

        Returns None if no row is present.

        After calling this method, the object is fully closed,
        e.g. the :meth:`.ResultProxy.close` method will have been called.

        """
        row = self.first()
        if row is not None:
@ -873,10 +1080,27 @@ class BufferedRowResultProxy(ResultProxy):

    The pre-fetching behavior fetches only one row initially, and then
    grows its buffer size by a fixed amount with each successive need
    for additional rows up to a size of 100.
    for additional rows up to a size of 1000.

    The size argument is configurable using the ``max_row_buffer``
    execution option::

        with psycopg2_engine.connect() as conn:

            result = conn.execution_options(
                stream_results=True, max_row_buffer=50
            ).execute("select * from table")

    .. versionadded:: 1.0.6 Added the ``max_row_buffer`` option.

    .. seealso::

        :ref:`psycopg2_execution_options`
    """

    def _init_metadata(self):
        self._max_row_buffer = self.context.execution_options.get(
            'max_row_buffer', None)
        self.__buffer_rows()
        super(BufferedRowResultProxy, self)._init_metadata()

@ -896,13 +1120,21 @@ class BufferedRowResultProxy(ResultProxy):
    }

    def __buffer_rows(self):
        if self.cursor is None:
            return
        size = getattr(self, '_bufsize', 1)
        self.__rowbuffer = collections.deque(self.cursor.fetchmany(size))
        self._bufsize = self.size_growth.get(size, size)
        if self._max_row_buffer is not None:
            self._bufsize = min(self._max_row_buffer, self._bufsize)

    def _soft_close(self, **kw):
        self.__rowbuffer.clear()
        super(BufferedRowResultProxy, self)._soft_close(**kw)

    def _fetchone_impl(self):
        if self.closed:
            return None
        if self.cursor is None:
            return self._non_result(None)
        if not self.__rowbuffer:
            self.__buffer_rows()
            if not self.__rowbuffer:

@ -921,6 +1153,8 @@ class BufferedRowResultProxy(ResultProxy):
        return result

    def _fetchall_impl(self):
        if self.cursor is None:
            return self._non_result([])
        self.__rowbuffer.extend(self.cursor.fetchall())
        ret = self.__rowbuffer
        self.__rowbuffer = collections.deque()

@ -943,11 +1177,15 @@ class FullyBufferedResultProxy(ResultProxy):
    def _buffer_rows(self):
        return collections.deque(self.cursor.fetchall())

    def _soft_close(self, **kw):
        self.__rowbuffer.clear()
        super(FullyBufferedResultProxy, self)._soft_close(**kw)

    def _fetchone_impl(self):
        if self.__rowbuffer:
            return self.__rowbuffer.popleft()
        else:
            return None
            return self._non_result(None)

    def _fetchmany_impl(self, size=None):
        if size is None:

@ -961,6 +1199,8 @@ class FullyBufferedResultProxy(ResultProxy):
        return result

    def _fetchall_impl(self):
        if not self.cursor:
            return self._non_result([])
        ret = self.__rowbuffer
        self.__rowbuffer = collections.deque()
        return ret
@ -1,5 +1,5 @@
# engine/strategies.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under

@ -48,7 +48,8 @@ class DefaultEngineStrategy(EngineStrategy):
        # create url.URL object
        u = url.make_url(name_or_url)

        dialect_cls = u.get_dialect()
        entrypoint = u._get_entrypoint()
        dialect_cls = entrypoint.get_dialect_cls(u)

        if kwargs.pop('_coerce_config', False):
            def pop_kwarg(key, default=None):
@ -81,21 +82,19 @@ class DefaultEngineStrategy(EngineStrategy):
        # assemble connection arguments
        (cargs, cparams) = dialect.create_connect_args(u)
        cparams.update(pop_kwarg('connect_args', {}))
        cargs = list(cargs)  # allow mutability

        # look for existing pool or create
        pool = pop_kwarg('pool', None)
        if pool is None:
            def connect():
                try:
            def connect(connection_record=None):
                if dialect._has_events:
                    for fn in dialect.dispatch.do_connect:
                        connection = fn(
                            dialect, connection_record, cargs, cparams)
                        if connection is not None:
                            return connection
                return dialect.connect(*cargs, **cparams)
                except dialect.dbapi.Error as e:
                    invalidated = dialect.is_disconnect(e, None, None)
                    util.raise_from_cause(
                        exc.DBAPIError.instance(
                            None, None, e, dialect.dbapi.Error,
                            connection_invalidated=invalidated
                        )
                    )

            creator = pop_kwarg('creator', connect)
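The ``dialect.dispatch.do_connect`` loop consumed by the new ``connect()`` closure above corresponds to the dialect-level ``do_connect`` event; a minimal registration sketch (editor's illustration, not part of this diff)::

    from sqlalchemy import create_engine, event

    engine = create_engine("sqlite://")

    @event.listens_for(engine, "do_connect")
    def receive_do_connect(dialect, conn_rec, cargs, cparams):
        # mutate cargs/cparams in place to alter the DBAPI connect call,
        # or return a DBAPI connection to replace the default entirely
        return None  # None falls through to dialect.connect(*cargs, **cparams)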
@ -162,9 +161,14 @@ class DefaultEngineStrategy(EngineStrategy):
        def first_connect(dbapi_connection, connection_record):
            c = base.Connection(engine, connection=dbapi_connection,
                                _has_events=False)
            c._execution_options = util.immutabledict()
            dialect.initialize(c)
        event.listen(pool, 'first_connect', first_connect, once=True)

        dialect_cls.engine_created(engine)
        if entrypoint is not dialect_cls:
            entrypoint.engine_created(engine)

        return engine
@ -1,5 +1,5 @@
# engine/threadlocal.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under

@ -59,7 +59,10 @@ class TLEngine(base.Engine):
            # guards against pool-level reapers, if desired.
            # or not connection.connection.is_valid:
            connection = self._tl_connection_cls(
                self, self.pool.connect(), **kw)
                self,
                self._wrap_pool_connect(
                    self.pool.connect, connection),
                **kw)
            self._connections.conn = weakref.ref(connection)

        return connection._increment_connect()
@ -1,5 +1,5 @@
# engine/url.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under

@ -105,11 +105,25 @@ class URL(object):
            self.database == other.database and \
            self.query == other.query

    def get_dialect(self):
        """Return the SQLAlchemy database dialect class corresponding
        to this URL's driver name.
        """
    def get_backend_name(self):
        if '+' not in self.drivername:
            return self.drivername
        else:
            return self.drivername.split('+')[0]

    def get_driver_name(self):
        if '+' not in self.drivername:
            return self.get_dialect().driver
        else:
            return self.drivername.split('+')[1]

    def _get_entrypoint(self):
        """Return the "entry point" dialect class.

        This is normally the dialect itself except in the case when the
        returned class implements the get_dialect_cls() method.

        """
        if '+' not in self.drivername:
            name = self.drivername
        else:
@ -125,6 +139,14 @@ class URL(object):
        else:
            return cls

    def get_dialect(self):
        """Return the SQLAlchemy database dialect class corresponding
        to this URL's driver name.
        """
        entrypoint = self._get_entrypoint()
        dialect_cls = entrypoint.get_dialect_cls(self)
        return dialect_cls

    def translate_connect_args(self, names=[], **kw):
        """Translate url attributes into a dictionary of connection arguments.
|
|||
# engine/util.py
|
||||
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
|
||||
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
# event/__init__.py
|
||||
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
|
||||
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
# event/api.py
|
||||
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
|
||||
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
|
@ -58,6 +58,32 @@ def listen(target, identifier, fn, *args, **kw):
    .. versionadded:: 0.9.4 Added ``once=True`` to :func:`.event.listen`
       and :func:`.event.listens_for`.

    .. note::

        The :func:`.listen` function cannot be called at the same time
        that the target event is being run. This has implications
        for thread safety, and also means an event cannot be added
        from inside the listener function for itself. The list of
        events to be run is present inside of a mutable collection
        that can't be changed during iteration.

        Event registration and removal is not intended to be a "high
        velocity" operation; it is a configurational operation. For
        systems that need to quickly associate and deassociate with
        events at high scale, use a mutable structure that is handled
        from inside of a single listener.

        .. versionchanged:: 1.0.0 - a ``collections.deque()`` object is now
           used as the container for the list of events, which explicitly
           disallows collection mutation while the collection is being
           iterated.

    .. seealso::

        :func:`.listens_for`

        :func:`.remove`

    """

    _event_key(target, identifier, fn).listen(*args, **kw)
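For orientation, a minimal registration sketch (not part of this diff) showing both the plain and decorator forms; the pool-level "connect"/"first_connect" hooks are illustrative targets::

    from sqlalchemy import create_engine, event

    engine = create_engine("sqlite://")

    def my_on_connect(dbapi_conn, connection_record):
        print("new DBAPI connection:", dbapi_conn)

    event.listen(engine, "connect", my_on_connect)

    # equivalent decorator form; once=True runs the listener at most once
    @event.listens_for(engine, "first_connect", once=True)
    def on_first_connect(dbapi_conn, connection_record):
        print("first connection established")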
@ -89,6 +115,10 @@ def listens_for(target, identifier, *args, **kw):
    .. versionadded:: 0.9.4 Added ``once=True`` to :func:`.event.listen`
       and :func:`.event.listens_for`.

    .. seealso::

        :func:`.listen` - general description of event listening

    """
    def decorate(fn):
        listen(target, identifier, fn, *args, **kw)

@ -120,6 +150,30 @@ def remove(target, identifier, fn):

    .. versionadded:: 0.9.0

    .. note::

        The :func:`.remove` function cannot be called at the same time
        that the target event is being run. This has implications
        for thread safety, and also means an event cannot be removed
        from inside the listener function for itself. The list of
        events to be run is present inside of a mutable collection
        that can't be changed during iteration.

        Event registration and removal is not intended to be a "high
        velocity" operation; it is a configurational operation. For
        systems that need to quickly associate and deassociate with
        events at high scale, use a mutable structure that is handled
        from inside of a single listener.

        .. versionchanged:: 1.0.0 - a ``collections.deque()`` object is now
           used as the container for the list of events, which explicitly
           disallows collection mutation while the collection is being
           iterated.

    .. seealso::

        :func:`.listen`

    """
    _event_key(target, identifier, fn).remove()
@ -1,5 +1,5 @@
# event/attr.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under

@ -37,19 +37,24 @@ from . import registry
from . import legacy
from itertools import chain
import weakref
import collections


class RefCollection(object):
    @util.memoized_property
    def ref(self):
class RefCollection(util.MemoizedSlots):
    __slots__ = 'ref',

    def _memoized_attr_ref(self):
        return weakref.ref(self, registry._collection_gced)


class _DispatchDescriptor(RefCollection):
    """Class-level attributes on :class:`._Dispatch` classes."""
class _ClsLevelDispatch(RefCollection):
    """Class-level events on :class:`._Dispatch` classes."""

    __slots__ = ('name', 'arg_names', 'has_kw',
                 'legacy_signatures', '_clslevel', '__weakref__')

    def __init__(self, parent_dispatch_cls, fn):
        self.__name__ = fn.__name__
        self.name = fn.__name__
        argspec = util.inspect_getargspec(fn)
        self.arg_names = argspec.args[1:]
        self.has_kw = bool(argspec.keywords)
@ -59,11 +64,9 @@ class _DispatchDescriptor(RefCollection):
                key=lambda s: s[0]
            )
        ))
        self.__doc__ = fn.__doc__ = legacy._augment_fn_docs(
            self, parent_dispatch_cls, fn)
        fn.__doc__ = legacy._augment_fn_docs(self, parent_dispatch_cls, fn)

        self._clslevel = weakref.WeakKeyDictionary()
        self._empty_listeners = weakref.WeakKeyDictionary()

    def _adjust_fn_spec(self, fn, named):
        if named:

@ -96,8 +99,8 @@ class _DispatchDescriptor(RefCollection):
            self.update_subclass(cls)
        else:
            if cls not in self._clslevel:
                self._clslevel[cls] = []
            self._clslevel[cls].insert(0, event_key._listen_fn)
                self._clslevel[cls] = collections.deque()
            self._clslevel[cls].appendleft(event_key._listen_fn)
        registry._stored_in_collection(event_key, self)

    def append(self, event_key, propagate):

@ -113,13 +116,13 @@ class _DispatchDescriptor(RefCollection):
            self.update_subclass(cls)
        else:
            if cls not in self._clslevel:
                self._clslevel[cls] = []
                self._clslevel[cls] = collections.deque()
            self._clslevel[cls].append(event_key._listen_fn)
        registry._stored_in_collection(event_key, self)

    def update_subclass(self, target):
        if target not in self._clslevel:
            self._clslevel[target] = []
            self._clslevel[target] = collections.deque()
        clslevel = self._clslevel[target]
        for cls in target.__mro__[1:]:
            if cls in self._clslevel:
@ -145,40 +148,29 @@ class _DispatchDescriptor(RefCollection):
        to_clear = set()
        for dispatcher in self._clslevel.values():
            to_clear.update(dispatcher)
            dispatcher[:] = []
            dispatcher.clear()
        registry._clear(self, to_clear)

    def for_modify(self, obj):
        """Return an event collection which can be modified.

        For _DispatchDescriptor at the class level of
        For _ClsLevelDispatch at the class level of
        a dispatcher, this returns self.

        """
        return self

    def __get__(self, obj, cls):
        if obj is None:
            return self
        elif obj._parent_cls in self._empty_listeners:
            ret = self._empty_listeners[obj._parent_cls]
        else:
            self._empty_listeners[obj._parent_cls] = ret = \
                _EmptyListener(self, obj._parent_cls)
            # assigning it to __dict__ means
            # memoized for fast re-access. but more memory.
            obj.__dict__[self.__name__] = ret
        return ret

class _InstanceLevelDispatch(RefCollection):
    __slots__ = ()

class _HasParentDispatchDescriptor(object):
    def _adjust_fn_spec(self, fn, named):
        return self.parent._adjust_fn_spec(fn, named)


class _EmptyListener(_HasParentDispatchDescriptor):
    """Serves as a class-level interface to the events
    served by a _DispatchDescriptor, when there are no
class _EmptyListener(_InstanceLevelDispatch):
    """Serves as a proxy interface to the events
    served by a _ClsLevelDispatch, when there are no
    instance-level events present.

    Is replaced by _ListenerCollection when instance-level

@ -186,14 +178,17 @@ class _EmptyListener(_HasParentDispatchDescriptor):

    """

    propagate = frozenset()
    listeners = ()

    __slots__ = 'parent', 'parent_listeners', 'name'

    def __init__(self, parent, target_cls):
        if target_cls not in parent._clslevel:
            parent.update_subclass(target_cls)
        self.parent = parent  # _DispatchDescriptor
        self.parent = parent  # _ClsLevelDispatch
        self.parent_listeners = parent._clslevel[target_cls]
        self.name = parent.__name__
        self.propagate = frozenset()
        self.listeners = ()
        self.name = parent.name

    def for_modify(self, obj):
        """Return an event collection which can be modified.
@ -204,9 +199,11 @@ class _EmptyListener(_HasParentDispatchDescriptor):
        and returns it.

        """
        result = _ListenerCollection(self.parent, obj._parent_cls)
        if obj.__dict__[self.name] is self:
            obj.__dict__[self.name] = result
        result = _ListenerCollection(self.parent, obj._instance_cls)
        if getattr(obj, self.name) is self:
            setattr(obj, self.name, result)
        else:
            assert isinstance(getattr(obj, self.name), _JoinedListener)
        return result

    def _needs_modify(self, *args, **kw):

@ -232,11 +229,10 @@ class _EmptyListener(_HasParentDispatchDescriptor):
    __nonzero__ = __bool__


class _CompoundListener(_HasParentDispatchDescriptor):
    _exec_once = False
class _CompoundListener(_InstanceLevelDispatch):
    __slots__ = '_exec_once_mutex', '_exec_once'

    @util.memoized_property
    def _exec_once_mutex(self):
    def _memoized_attr__exec_once_mutex(self):
        return threading.Lock()

    def exec_once(self, *args, **kw):

@ -271,7 +267,7 @@ class _CompoundListener(_HasParentDispatchDescriptor):
    __nonzero__ = __bool__


class _ListenerCollection(RefCollection, _CompoundListener):
class _ListenerCollection(_CompoundListener):
    """Instance-level attributes on instances of :class:`._Dispatch`.

    Represents a collection of listeners.
@ -281,13 +277,18 @@ class _ListenerCollection(RefCollection, _CompoundListener):
|
|||
|
||||
"""
|
||||
|
||||
__slots__ = (
|
||||
'parent_listeners', 'parent', 'name', 'listeners',
|
||||
'propagate', '__weakref__')
|
||||
|
||||
def __init__(self, parent, target_cls):
|
||||
if target_cls not in parent._clslevel:
|
||||
parent.update_subclass(target_cls)
|
||||
self._exec_once = False
|
||||
self.parent_listeners = parent._clslevel[target_cls]
|
||||
self.parent = parent
|
||||
self.name = parent.__name__
|
||||
self.listeners = []
|
||||
self.name = parent.name
|
||||
self.listeners = collections.deque()
|
||||
self.propagate = set()
|
||||
|
||||
def for_modify(self, obj):
|
||||
|
@ -318,14 +319,12 @@ class _ListenerCollection(RefCollection, _CompoundListener):
|
|||
registry._stored_in_collection_multi(self, other, to_associate)
|
||||
|
||||
def insert(self, event_key, propagate):
|
||||
if event_key._listen_fn not in self.listeners:
|
||||
event_key.prepend_to_list(self, self.listeners)
|
||||
if event_key.prepend_to_list(self, self.listeners):
|
||||
if propagate:
|
||||
self.propagate.add(event_key._listen_fn)
|
||||
|
||||
def append(self, event_key, propagate):
|
||||
if event_key._listen_fn not in self.listeners:
|
||||
event_key.append_to_list(self, self.listeners)
|
||||
if event_key.append_to_list(self, self.listeners):
|
||||
if propagate:
|
||||
self.propagate.add(event_key._listen_fn)
|
||||
|
||||
|
@ -337,28 +336,14 @@ class _ListenerCollection(RefCollection, _CompoundListener):
|
|||
def clear(self):
|
||||
registry._clear(self, self.listeners)
|
||||
self.propagate.clear()
|
||||
self.listeners[:] = []
|
||||
|
||||
|
||||
class _JoinedDispatchDescriptor(object):
|
||||
def __init__(self, name):
|
||||
self.name = name
|
||||
|
||||
def __get__(self, obj, cls):
|
||||
if obj is None:
|
||||
return self
|
||||
else:
|
||||
obj.__dict__[self.name] = ret = _JoinedListener(
|
||||
obj.parent, self.name,
|
||||
getattr(obj.local, self.name)
|
||||
)
|
||||
return ret
|
||||
self.listeners.clear()
|
||||
|
||||
|
||||
class _JoinedListener(_CompoundListener):
|
||||
_exec_once = False
|
||||
__slots__ = 'parent', 'name', 'local', 'parent_listeners'
|
||||
|
||||
def __init__(self, parent, name, local):
|
||||
self._exec_once = False
|
||||
self.parent = parent
|
||||
self.name = name
|
||||
self.local = local
|
||||
|
|
|
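The class-level/instance-level split above is visible from the public API: a dispatch attribute starts out as an ``_EmptyListener`` proxying the class-level listeners, and ``for_modify()`` swaps in a mutable ``_ListenerCollection`` the first time an instance-level listener is registered. A minimal sketch using the documented :func:`.listen` API (the engine and event name are illustrative only)::

    from sqlalchemy import create_engine, event

    engine = create_engine("sqlite://")

    @event.listens_for(engine, "connect")
    def on_connect(dbapi_connection, connection_record):
        # registering this listener is what triggers for_modify() above,
        # replacing the _EmptyListener with a _ListenerCollection
        print("new DBAPI connection")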
@@ -1,5 +1,5 @@
 # event/base.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under

@@ -17,9 +17,11 @@ instances of ``_Dispatch``.
 """
 from __future__ import absolute_import

+import weakref
+
 from .. import util
-from .attr import _JoinedDispatchDescriptor, \
-    _EmptyListener, _DispatchDescriptor
+from .attr import _JoinedListener, \
+    _EmptyListener, _ClsLevelDispatch

 _registrars = util.defaultdict(list)

@@ -34,10 +36,11 @@ class _UnpickleDispatch(object):

     """

-    def __call__(self, _parent_cls):
-        for cls in _parent_cls.__mro__:
+    def __call__(self, _instance_cls):
+        for cls in _instance_cls.__mro__:
             if 'dispatch' in cls.__dict__:
-                return cls.__dict__['dispatch'].dispatch_cls(_parent_cls)
+                return cls.__dict__['dispatch'].\
+                    dispatch_cls._for_class(_instance_cls)
         else:
             raise AttributeError("No class with a 'dispatch' member present.")

@@ -62,16 +65,53 @@ class _Dispatch(object):

     """

     _events = None
     """reference the :class:`.Events` class which this
     :class:`._Dispatch` is created for."""

+    # in one ORM edge case, an attribute is added to _Dispatch,
+    # so __dict__ is used in just that case and potentially others.
+    __slots__ = '_parent', '_instance_cls', '__dict__', '_empty_listeners'

-    def __init__(self, _parent_cls):
-        self._parent_cls = _parent_cls
+    _empty_listener_reg = weakref.WeakKeyDictionary()

-    @util.classproperty
-    def _listen(cls):
-        return cls._events._listen
+    def __init__(self, parent, instance_cls=None):
+        self._parent = parent
+        self._instance_cls = instance_cls
+        if instance_cls:
+            try:
+                self._empty_listeners = self._empty_listener_reg[instance_cls]
+            except KeyError:
+                self._empty_listeners = \
+                    self._empty_listener_reg[instance_cls] = dict(
+                        (ls.name, _EmptyListener(ls, instance_cls))
+                        for ls in parent._event_descriptors
+                    )
+        else:
+            self._empty_listeners = {}
+
+    def __getattr__(self, name):
+        # assign EmptyListeners as attributes on demand
+        # to reduce startup time for new dispatch objects
+        try:
+            ls = self._empty_listeners[name]
+        except KeyError:
+            raise AttributeError(name)
+        else:
+            setattr(self, ls.name, ls)
+            return ls
+
+    @property
+    def _event_descriptors(self):
+        for k in self._event_names:
+            yield getattr(self, k)
+
+    def _for_class(self, instance_cls):
+        return self.__class__(self, instance_cls)
+
+    def _for_instance(self, instance):
+        instance_cls = instance.__class__
+        return self._for_class(instance_cls)
+
+    @property
+    def _listen(self):
+        return self._events._listen

     def _join(self, other):
         """Create a 'join' of this :class:`._Dispatch` and another.

@@ -83,36 +123,27 @@ class _Dispatch(object):
         if '_joined_dispatch_cls' not in self.__class__.__dict__:
             cls = type(
                 "Joined%s" % self.__class__.__name__,
-                (_JoinedDispatcher, self.__class__), {}
+                (_JoinedDispatcher, ), {'__slots__': self._event_names}
             )
-            for ls in _event_descriptors(self):
-                setattr(cls, ls.name, _JoinedDispatchDescriptor(ls.name))

             self.__class__._joined_dispatch_cls = cls
         return self._joined_dispatch_cls(self, other)

     def __reduce__(self):
-        return _UnpickleDispatch(), (self._parent_cls, )
+        return _UnpickleDispatch(), (self._instance_cls, )

     def _update(self, other, only_propagate=True):
         """Populate from the listeners in another :class:`_Dispatch`
         object."""
-        for ls in _event_descriptors(other):
+        for ls in other._event_descriptors:
+            if isinstance(ls, _EmptyListener):
+                continue
             getattr(self, ls.name).\
                 for_modify(self)._update(ls, only_propagate=only_propagate)

-    @util.hybridmethod
     def _clear(self):
-        for attr in dir(self):
-            if _is_event_name(attr):
-                getattr(self, attr).for_modify(self).clear()
-
-
-def _event_descriptors(target):
-    return [getattr(target, k) for k in dir(target) if _is_event_name(k)]
+        for ls in self._event_descriptors:
+            ls.for_modify(self).clear()


 class _EventMeta(type):

@@ -131,23 +162,34 @@ def _create_dispatcher_class(cls, classname, bases, dict_):
     # there's all kinds of ways to do this,
     # i.e. make a Dispatch class that shares the '_listen' method
     # of the Event class, this is the straight monkeypatch.
-    dispatch_base = getattr(cls, 'dispatch', _Dispatch)
-    dispatch_cls = type("%sDispatch" % classname,
-                        (dispatch_base, ), {})
-    cls._set_dispatch(cls, dispatch_cls)
+    if hasattr(cls, 'dispatch'):
+        dispatch_base = cls.dispatch.__class__
+    else:
+        dispatch_base = _Dispatch

-    for k in dict_:
-        if _is_event_name(k):
-            setattr(dispatch_cls, k, _DispatchDescriptor(cls, dict_[k]))
+    event_names = [k for k in dict_ if _is_event_name(k)]
+    dispatch_cls = type("%sDispatch" % classname,
+                        (dispatch_base, ), {'__slots__': event_names})
+
+    dispatch_cls._event_names = event_names
+
+    dispatch_inst = cls._set_dispatch(cls, dispatch_cls)
+    for k in dispatch_cls._event_names:
+        setattr(dispatch_inst, k, _ClsLevelDispatch(cls, dict_[k]))
         _registrars[k].append(cls)
+
+    for super_ in dispatch_cls.__bases__:
+        if issubclass(super_, _Dispatch) and super_ is not _Dispatch:
+            for ls in super_._events.dispatch._event_descriptors:
+                setattr(dispatch_inst, ls.name, ls)
+                dispatch_cls._event_names.append(ls.name)

     if getattr(cls, '_dispatch_target', None):
         cls._dispatch_target.dispatch = dispatcher(cls)


 def _remove_dispatcher(cls):
-    for k in dir(cls):
-        if _is_event_name(k):
+    for k in cls.dispatch._event_names:
         _registrars[k].remove(cls)
         if not _registrars[k]:
             del _registrars[k]

@@ -163,17 +205,30 @@ class Events(util.with_metaclass(_EventMeta, object)):
         # "self.dispatch._events.<utilitymethod>"
         # @staticemethod to allow easy "super" calls while in a metaclass
         # constructor.
-        cls.dispatch = dispatch_cls
+        cls.dispatch = dispatch_cls(None)
         dispatch_cls._events = cls
+        return cls.dispatch

     @classmethod
     def _accept_with(cls, target):
         # Mapper, ClassManager, Session override this to
         # also accept classes, scoped_sessions, sessionmakers, etc.
         if hasattr(target, 'dispatch') and (
-                isinstance(target.dispatch, cls.dispatch) or
+                isinstance(target.dispatch, cls.dispatch.__class__) or
                 (
                     isinstance(target.dispatch, type) and
-                    issubclass(target.dispatch, cls.dispatch)
+                    isinstance(target.dispatch, cls.dispatch.__class__)
+                ) or
+                (
+                    isinstance(target.dispatch, _JoinedDispatcher) and
+                    isinstance(target.dispatch.parent, cls.dispatch.__class__)
                 )
         ):
             return target
         else:

@@ -195,10 +250,24 @@ class Events(util.with_metaclass(_EventMeta, object)):
 class _JoinedDispatcher(object):
     """Represent a connection between two _Dispatch objects."""

+    __slots__ = 'local', 'parent', '_instance_cls'
+
     def __init__(self, local, parent):
         self.local = local
         self.parent = parent
-        self._parent_cls = local._parent_cls
+        self._instance_cls = self.local._instance_cls
+
+    def __getattr__(self, name):
+        # assign _JoinedListeners as attributes on demand
+        # to reduce startup time for new dispatch objects
+        ls = getattr(self.local, name)
+        jl = _JoinedListener(self.parent, ls.name, ls)
+        setattr(self, ls.name, jl)
+        return jl
+
+    @property
+    def _listen(self):
+        return self.parent._listen


 class dispatcher(object):

@@ -216,5 +285,5 @@ class dispatcher(object):
     def __get__(self, obj, cls):
         if obj is None:
             return self.dispatch_cls
-        obj.__dict__['dispatch'] = disp = self.dispatch_cls(cls)
+        obj.__dict__['dispatch'] = disp = self.dispatch_cls._for_instance(obj)
         return disp
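The ``dispatcher`` descriptor above memoizes a per-instance ``_Dispatch`` on first access; a rough sketch of that behavior against a real target (the attribute names here are SQLAlchemy internals, shown only to illustrate the memoization)::

    from sqlalchemy import create_engine

    engine = create_engine("sqlite://")

    # first access invokes dispatcher.__get__, which now calls
    # dispatch_cls._for_instance(obj) and stores the result
    disp = engine.pool.dispatch
    assert "dispatch" in engine.pool.__dict__
    assert engine.pool.dispatch is disp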
@@ -1,5 +1,5 @@
 # event/legacy.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under

@@ -22,8 +22,8 @@ def _legacy_signature(since, argnames, converter=None):
     return leg


-def _wrap_fn_for_legacy(dispatch_descriptor, fn, argspec):
-    for since, argnames, conv in dispatch_descriptor.legacy_signatures:
+def _wrap_fn_for_legacy(dispatch_collection, fn, argspec):
+    for since, argnames, conv in dispatch_collection.legacy_signatures:
         if argnames[-1] == "**kw":
             has_kw = True
             argnames = argnames[0:-1]

@@ -40,7 +40,7 @@ def _wrap_fn_for_legacy(dispatch_descriptor, fn, argspec):
                 return fn(*conv(*args))
         else:
             def wrap_leg(*args, **kw):
-                argdict = dict(zip(dispatch_descriptor.arg_names, args))
+                argdict = dict(zip(dispatch_collection.arg_names, args))
                 args = [argdict[name] for name in argnames]
                 if has_kw:
                     return fn(*args, **kw)

@@ -58,16 +58,16 @@ def _indent(text, indent):
     )


-def _standard_listen_example(dispatch_descriptor, sample_target, fn):
+def _standard_listen_example(dispatch_collection, sample_target, fn):
     example_kw_arg = _indent(
         "\n".join(
             "%(arg)s = kw['%(arg)s']" % {"arg": arg}
-            for arg in dispatch_descriptor.arg_names[0:2]
+            for arg in dispatch_collection.arg_names[0:2]
         ),
         "    ")
-    if dispatch_descriptor.legacy_signatures:
+    if dispatch_collection.legacy_signatures:
         current_since = max(since for since, args, conv
-                            in dispatch_descriptor.legacy_signatures)
+                            in dispatch_collection.legacy_signatures)
     else:
         current_since = None
     text = (

@@ -80,7 +80,7 @@ def _standard_listen_example(dispatch_descriptor, sample_target, fn):
        "\n    # ... (event handling logic) ...\n"
     )

-    if len(dispatch_descriptor.arg_names) > 3:
+    if len(dispatch_collection.arg_names) > 3:
         text += (
            "\n# named argument style (new in 0.9)\n"

@@ -96,17 +96,17 @@ def _standard_listen_example(dispatch_descriptor, sample_target, fn):
         "current_since": " (arguments as of %s)" %
         current_since if current_since else "",
         "event_name": fn.__name__,
-        "has_kw_arguments": ", **kw" if dispatch_descriptor.has_kw else "",
-        "named_event_arguments": ", ".join(dispatch_descriptor.arg_names),
+        "has_kw_arguments": ", **kw" if dispatch_collection.has_kw else "",
+        "named_event_arguments": ", ".join(dispatch_collection.arg_names),
         "example_kw_arg": example_kw_arg,
         "sample_target": sample_target
     }
     return text


-def _legacy_listen_examples(dispatch_descriptor, sample_target, fn):
+def _legacy_listen_examples(dispatch_collection, sample_target, fn):
     text = ""
-    for since, args, conv in dispatch_descriptor.legacy_signatures:
+    for since, args, conv in dispatch_collection.legacy_signatures:
         text += (
             "\n# legacy calling style (pre-%(since)s)\n"
             "@event.listens_for(%(sample_target)s, '%(event_name)s')\n"

@@ -117,7 +117,7 @@ def _legacy_listen_examples(dispatch_descriptor, sample_target, fn):
                 "since": since,
                 "event_name": fn.__name__,
                 "has_kw_arguments": " **kw"
-                if dispatch_descriptor.has_kw else "",
+                if dispatch_collection.has_kw else "",
                 "named_event_arguments": ", ".join(args),
                 "sample_target": sample_target
             }

@@ -125,8 +125,8 @@ def _legacy_listen_examples(dispatch_descriptor, sample_target, fn):
     return text


-def _version_signature_changes(dispatch_descriptor):
-    since, args, conv = dispatch_descriptor.legacy_signatures[0]
+def _version_signature_changes(dispatch_collection):
+    since, args, conv = dispatch_collection.legacy_signatures[0]
     return (
         "\n.. versionchanged:: %(since)s\n"
         "    The ``%(event_name)s`` event now accepts the \n"

@@ -135,14 +135,14 @@ def _version_signature_changes(dispatch_descriptor):
         "    signature(s) listed above will be automatically \n"
         "    adapted to the new signature." % {
             "since": since,
-            "event_name": dispatch_descriptor.__name__,
-            "named_event_arguments": ", ".join(dispatch_descriptor.arg_names),
-            "has_kw_arguments": ", **kw" if dispatch_descriptor.has_kw else ""
+            "event_name": dispatch_collection.name,
+            "named_event_arguments": ", ".join(dispatch_collection.arg_names),
+            "has_kw_arguments": ", **kw" if dispatch_collection.has_kw else ""
         }
     )


-def _augment_fn_docs(dispatch_descriptor, parent_dispatch_cls, fn):
+def _augment_fn_docs(dispatch_collection, parent_dispatch_cls, fn):
     header = ".. container:: event_signatures\n\n"\
         "     Example argument forms::\n"\
         "\n"

@@ -152,16 +152,16 @@ def _augment_fn_docs(dispatch_descriptor, parent_dispatch_cls, fn):
         header +
         _indent(
             _standard_listen_example(
-                dispatch_descriptor, sample_target, fn),
+                dispatch_collection, sample_target, fn),
             " " * 8)
     )
-    if dispatch_descriptor.legacy_signatures:
+    if dispatch_collection.legacy_signatures:
         text += _indent(
             _legacy_listen_examples(
-                dispatch_descriptor, sample_target, fn),
+                dispatch_collection, sample_target, fn),
             " " * 8)

-        text += _version_signature_changes(dispatch_descriptor)
+        text += _version_signature_changes(dispatch_collection)

     return util.inject_docstring_text(fn.__doc__,
                                       text,
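The wrappers generated here are what let a single event name accept several historical signatures; the related ``named=True`` option routes through the same ``_adjust_fn_spec`` path so a listener can take all arguments as keywords. A minimal sketch with the documented API (engine and event name are illustrative)::

    from sqlalchemy import create_engine, event

    engine = create_engine("sqlite://")

    @event.listens_for(engine, "before_cursor_execute", named=True)
    def log_statement(**kw):
        # arguments arrive as keywords regardless of the event's
        # positional signature
        print(kw["statement"])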
@@ -1,5 +1,5 @@
 # event/registry.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under

@@ -37,7 +37,7 @@ listener collections and the listener fn contained

 _collection_to_key = collections.defaultdict(dict)
 """
-Given a _ListenerCollection or _DispatchDescriptor, can locate
+Given a _ListenerCollection or _ClsLevelListener, can locate
 all the original listen() arguments and the listener fn contained

 ref(listenercollection) -> {

@@ -71,13 +71,15 @@ def _stored_in_collection(event_key, owner):
     listen_ref = weakref.ref(event_key._listen_fn)

     if owner_ref in dispatch_reg:
-        assert dispatch_reg[owner_ref] == listen_ref
-    else:
+        return False

     dispatch_reg[owner_ref] = listen_ref

     listener_to_key = _collection_to_key[owner_ref]
     listener_to_key[listen_ref] = key

+    return True


 def _removed_from_collection(event_key, owner):
     key = event_key._key

@@ -138,6 +140,10 @@ class _EventKey(object):
     """Represent :func:`.listen` arguments.
     """

+    __slots__ = (
+        'target', 'identifier', 'fn', 'fn_key', 'fn_wrap', 'dispatch_target'
+    )
+
     def __init__(self, target, identifier,
                  fn, dispatch_target, _fn_wrap=None):
         self.target = target

@@ -180,6 +186,17 @@ class _EventKey(object):

     def listen(self, *args, **kw):
         once = kw.pop("once", False)
+        named = kw.pop("named", False)
+
+        target, identifier, fn = \
+            self.dispatch_target, self.identifier, self._listen_fn
+
+        dispatch_collection = getattr(target.dispatch, identifier)
+
+        adjusted_fn = dispatch_collection._adjust_fn_spec(fn, named)
+
+        self = self.with_wrapper(adjusted_fn)
+
         if once:
             self.with_wrapper(
                 util.only_once(self._listen_fn)).listen(*args, **kw)

@@ -213,34 +230,33 @@ class _EventKey(object):
         target, identifier, fn = \
             self.dispatch_target, self.identifier, self._listen_fn

-        dispatch_descriptor = getattr(target.dispatch, identifier)
-
-        fn = dispatch_descriptor._adjust_fn_spec(fn, named)
-        self = self.with_wrapper(fn)
+        dispatch_collection = getattr(target.dispatch, identifier)

         if insert:
-            dispatch_descriptor.\
+            dispatch_collection.\
                 for_modify(target.dispatch).insert(self, propagate)
         else:
-            dispatch_descriptor.\
+            dispatch_collection.\
                 for_modify(target.dispatch).append(self, propagate)

     @property
     def _listen_fn(self):
         return self.fn_wrap or self.fn

-    def append_value_to_list(self, owner, list_, value):
-        _stored_in_collection(self, owner)
-        list_.append(value)
-
     def append_to_list(self, owner, list_):
-        _stored_in_collection(self, owner)
-        list_.append(self._listen_fn)
+        if _stored_in_collection(self, owner):
+            list_.append(self._listen_fn)
+            return True
+        else:
+            return False

     def remove_from_list(self, owner, list_):
         _removed_from_collection(self, owner)
         list_.remove(self._listen_fn)

     def prepend_to_list(self, owner, list_):
-        _stored_in_collection(self, owner)
-        list_.insert(0, self._listen_fn)
+        if _stored_in_collection(self, owner):
+            list_.appendleft(self._listen_fn)
+            return True
+        else:
+            return False
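The ``_EventKey`` bookkeeping above is what makes listeners discoverable and removable after the fact; with the new return values, a function already present in a collection is also silently skipped rather than double-registered. The corresponding public API, as documented::

    from sqlalchemy import create_engine, event

    engine = create_engine("sqlite://")

    def my_listener(dbapi_connection, connection_record):
        pass

    event.listen(engine, "connect", my_listener)
    assert event.contains(engine, "connect", my_listener)

    # removal resolves the same _EventKey and pops it from every
    # collection it was stored in
    event.remove(engine, "connect", my_listener)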
@@ -1,5 +1,5 @@
 # sqlalchemy/events.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under

@@ -338,7 +338,7 @@ class PoolEvents(event.Events):

     """

-    def reset(self, dbapi_connnection, connection_record):
+    def reset(self, dbapi_connection, connection_record):
         """Called before the "reset" action occurs for a pooled connection.

         This event represents

@@ -371,7 +371,9 @@ class PoolEvents(event.Events):
         """Called when a DBAPI connection is to be "invalidated".

         This event is called any time the :meth:`._ConnectionRecord.invalidate`
-        method is invoked, either from API usage or via "auto-invalidation".
+        method is invoked, either from API usage or via "auto-invalidation",
+        without the ``soft`` flag.

         The event occurs before a final attempt to call ``.close()`` on the
         connection occurs.

@@ -392,6 +394,21 @@ class PoolEvents(event.Events):

         """

+    def soft_invalidate(self, dbapi_connection, connection_record, exception):
+        """Called when a DBAPI connection is to be "soft invalidated".
+
+        This event is called any time the :meth:`._ConnectionRecord.invalidate`
+        method is invoked with the ``soft`` flag.
+
+        Soft invalidation refers to when the connection record that tracks
+        this connection will force a reconnect after the current connection
+        is checked in.  It does not actively close the dbapi_connection
+        at the point at which it is called.
+
+        .. versionadded:: 1.0.3
+
+        """
+
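A sketch of hooking the new event; any :class:`.Engine` or :class:`.Pool` should work as the target, per the usual pool event targeting rules::

    from sqlalchemy import create_engine, event

    engine = create_engine("sqlite://")

    @event.listens_for(engine, "soft_invalidate")
    def on_soft_invalidate(dbapi_connection, connection_record, exception):
        # the DBAPI connection stays open for its current checkout;
        # the record reconnects only after the connection is returned
        print("soft invalidation:", exception)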
 class ConnectionEvents(event.Events):
     """Available events for :class:`.Connectable`, which includes

@@ -420,6 +437,12 @@ class ConnectionEvents(event.Events):
                 context, executemany):
             log.info("Received statement: %s" % statement)

+    When the methods are called with a `statement` parameter, such as in
+    :meth:`.after_cursor_execute`, :meth:`.before_cursor_execute` and
+    :meth:`.dbapi_error`, the statement is the exact SQL string that was
+    prepared for transmission to the DBAPI ``cursor`` in the connection's
+    :class:`.Dialect`.
+
     The :meth:`.before_execute` and :meth:`.before_cursor_execute`
     events can also be established with the ``retval=True`` flag, which
     allows modification of the statement and parameters to be sent

@@ -470,7 +493,8 @@ class ConnectionEvents(event.Events):
     @classmethod
     def _listen(cls, event_key, retval=False):
         target, identifier, fn = \
-            event_key.dispatch_target, event_key.identifier, event_key.fn
+            event_key.dispatch_target, event_key.identifier, \
+            event_key._listen_fn

         target._has_events = True

@@ -548,9 +572,8 @@ class ConnectionEvents(event.Events):
     def before_cursor_execute(self, conn, cursor, statement,
                               parameters, context, executemany):
         """Intercept low-level cursor execute() events before execution,
-        receiving the string
-        SQL statement and DBAPI-specific parameter list to be invoked
-        against a cursor.
+        receiving the string SQL statement and DBAPI-specific parameter list to
+        be invoked against a cursor.

         This event is a good choice for logging as well as late modifications
         to the SQL string.  It's less ideal for parameter modifications except

@@ -570,7 +593,7 @@ class ConnectionEvents(event.Events):

         :param conn: :class:`.Connection` object
         :param cursor: DBAPI cursor object
-        :param statement: string SQL statement
+        :param statement: string SQL statement, as to be passed to the DBAPI
         :param parameters: Dictionary, tuple, or list of parameters being
          passed to the ``execute()`` or ``executemany()`` method of the
          DBAPI ``cursor``.  In some cases may be ``None``.

@@ -595,7 +618,7 @@ class ConnectionEvents(event.Events):
         :param cursor: DBAPI cursor object.  Will have results pending
          if the statement was a SELECT, but these should not be consumed
          as they will be needed by the :class:`.ResultProxy`.
-        :param statement: string SQL statement
+        :param statement: string SQL statement, as passed to the DBAPI
         :param parameters: Dictionary, tuple, or list of parameters being
          passed to the ``execute()`` or ``executemany()`` method of the
          DBAPI ``cursor``.  In some cases may be ``None``.

@@ -639,7 +662,7 @@ class ConnectionEvents(event.Events):

         :param conn: :class:`.Connection` object
         :param cursor: DBAPI cursor object
-        :param statement: string SQL statement
+        :param statement: string SQL statement, as passed to the DBAPI
         :param parameters: Dictionary, tuple, or list of parameters being
          passed to the ``execute()`` or ``executemany()`` method of the
          DBAPI ``cursor``.  In some cases may be ``None``.

@@ -701,6 +724,16 @@ class ConnectionEvents(event.Events):
                     "failed" in str(context.original_exception):
                 raise MySpecialException("failed operation")

+        .. warning:: Because the :meth:`.ConnectionEvents.handle_error`
+           event specifically provides for exceptions to be re-thrown as
+           the ultimate exception raised by the failed statement,
+           **stack traces will be misleading** if the user-defined event
+           handler itself fails and throws an unexpected exception;
+           the stack trace may not illustrate the actual code line that
+           failed!  It is advised to code carefully here and use
+           logging and/or inline debugging if unexpected exceptions are
+           occurring.
+
         Alternatively, a "chained" style of event handling can be
         used, by configuring the handler with the ``retval=True``
         modifier and returning the new exception instance from the

@@ -733,6 +766,22 @@ class ConnectionEvents(event.Events):
         .. versionadded:: 0.9.7 Added the
             :meth:`.ConnectionEvents.handle_error` hook.

+        .. versionchanged:: 1.0.0 The :meth:`.handle_error` event is now
+           invoked when an :class:`.Engine` fails during the initial
+           call to :meth:`.Engine.connect`, as well as when a
+           :class:`.Connection` object encounters an error during a
+           reconnect operation.
+
+        .. versionchanged:: 1.0.0 The :meth:`.handle_error` event is
+           not fired off when a dialect makes use of the
+           ``skip_user_error_events`` execution option.  This is used
+           by dialects which intend to catch SQLAlchemy-specific exceptions
+           within specific operations, such as when the MySQL dialect detects
+           a table not present within the ``has_table()`` dialect method.
+           Prior to 1.0.0, code which implements :meth:`.handle_error` needs
+           to ensure that exceptions thrown in these scenarios are re-raised
+           without modification.
+
         """
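For the ``retval=True`` flag described above, the listener's return value replaces what is sent to the cursor; a minimal sketch::

    from sqlalchemy import create_engine, event

    engine = create_engine("sqlite://")

    @event.listens_for(engine, "before_cursor_execute", retval=True)
    def add_comment(conn, cursor, statement, parameters,
                    context, executemany):
        # the returned (statement, parameters) pair is what the DBAPI
        # cursor actually receives
        return "/* traced */ " + statement, parameters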
     def engine_connect(self, conn, branch):

@@ -770,6 +819,11 @@ class ConnectionEvents(event.Events):

         .. seealso::

+            :ref:`pool_disconnects_pessimistic` - illustrates how to use
+            :meth:`.ConnectionEvents.engine_connect`
+            to transparently ensure pooled connections are connected to the
+            database.
+
             :meth:`.PoolEvents.checkout` the lower-level pool checkout event
             for an individual DBAPI connection

@@ -833,6 +887,23 @@ class ConnectionEvents(event.Events):

         """

+    def engine_disposed(self, engine):
+        """Intercept when the :meth:`.Engine.dispose` method is called.
+
+        The :meth:`.Engine.dispose` method instructs the engine to
+        "dispose" of it's connection pool (e.g. :class:`.Pool`), and
+        replaces it with a new one.  Disposing of the old pool has the
+        effect that existing checked-in connections are closed.  The new
+        pool does not establish any new connections until it is first used.
+
+        This event can be used to indicate that resources related to the
+        :class:`.Engine` should also be cleaned up, keeping in mind that the
+        :class:`.Engine` can still be used for new requests in which case
+        it re-acquires connection resources.
+
+        .. versionadded:: 1.0.5
+
+        """
     def begin(self, conn):
         """Intercept begin() events.

@@ -985,6 +1056,23 @@ class DialectEvents(event.Events):
         else:
             return target

+    def do_connect(self, dialect, conn_rec, cargs, cparams):
+        """Receive connection arguments before a connection is made.
+
+        Return a DBAPI connection to halt further events from invoking;
+        the returned connection will be used.
+
+        Alternatively, the event can manipulate the cargs and/or cparams
+        collections; cargs will always be a Python list that can be mutated
+        in-place and cparams a Python dictionary.  Return None to
+        allow control to pass to the next event handler and ultimately
+        to allow the dialect to connect normally, given the updated
+        arguments.
+
+        .. versionadded:: 1.0.3
+
+        """
+
     def do_executemany(self, cursor, statement, parameters, context):
         """Receive a cursor to have executemany() called.
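A sketch of the new :meth:`.DialectEvents.do_connect` hook; the URL is a placeholder and ``get_password`` a hypothetical helper::

    from sqlalchemy import create_engine, event

    engine = create_engine("postgresql://scott@localhost/test")

    @event.listens_for(engine, "do_connect")
    def provide_password(dialect, conn_rec, cargs, cparams):
        # mutate cparams in place and return None so the dialect
        # proceeds to connect with the updated arguments
        cparams["password"] = get_password()  # hypothetical helper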
@@ -1,5 +1,5 @@
 # sqlalchemy/exc.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under

@@ -13,8 +13,6 @@ raised as a result of DBAPI exceptions are all subclasses of

 """

-import traceback
-

 class SQLAlchemyError(Exception):
     """Generic error class."""

@@ -54,8 +52,7 @@ class CircularDependencyError(SQLAlchemyError):
       or pre-deassociate one of the foreign key constrained values.
       The ``post_update`` flag described at :ref:`post_update` can resolve
       this cycle.
-    * In a :meth:`.MetaData.create_all`, :meth:`.MetaData.drop_all`,
-      :attr:`.MetaData.sorted_tables` operation, two :class:`.ForeignKey`
+    * In a :attr:`.MetaData.sorted_tables` operation, two :class:`.ForeignKey`
       or :class:`.ForeignKeyConstraint` objects mutually refer to each
      other.  Apply the ``use_alter=True`` flag to one or both,
      see :ref:`use_alter`.

@@ -63,7 +60,7 @@ class CircularDependencyError(SQLAlchemyError):
     """
     def __init__(self, message, cycles, edges, msg=None):
         if msg is None:
-            message += " Cycles: %r all edges: %r" % (cycles, edges)
+            message += " (%s)" % ", ".join(repr(s) for s in cycles)
         else:
             message = msg
         SQLAlchemyError.__init__(self, message)

@@ -238,14 +235,16 @@ class StatementError(SQLAlchemyError):

     def __str__(self):
         from sqlalchemy.sql import util
-        params_repr = util._repr_params(self.params, 10)

+        details = [SQLAlchemyError.__str__(self)]
+        if self.statement:
+            details.append("[SQL: %r]" % self.statement)
+        if self.params:
+            params_repr = util._repr_params(self.params, 10)
+            details.append("[parameters: %r]" % params_repr)
         return ' '.join([
             "(%s)" % det for det in self.detail
-        ] + [
-            SQLAlchemyError.__str__(self),
-            repr(self.statement), repr(params_repr)
-        ])
+        ] + details)

     def __unicode__(self):
         return self.__str__()

@@ -277,26 +276,35 @@ class DBAPIError(StatementError):
     @classmethod
     def instance(cls, statement, params,
                  orig, dbapi_base_err,
-                 connection_invalidated=False):
+                 connection_invalidated=False,
+                 dialect=None):
         # Don't ever wrap these, just return them directly as if
         # DBAPIError didn't exist.
-        if isinstance(orig, (KeyboardInterrupt, SystemExit, DontWrapMixin)):
+        if (isinstance(orig, BaseException) and
+                not isinstance(orig, Exception)) or \
+                isinstance(orig, DontWrapMixin):
             return orig

         if orig is not None:
             # not a DBAPI error, statement is present.
             # raise a StatementError
             if not isinstance(orig, dbapi_base_err) and statement:
-                msg = traceback.format_exception_only(
-                    orig.__class__, orig)[-1].strip()
                 return StatementError(
-                    "%s (original cause: %s)" % (str(orig), msg),
+                    "(%s.%s) %s" %
+                    (orig.__class__.__module__, orig.__class__.__name__,
+                     orig),
                     statement, params, orig
                 )

-            name, glob = orig.__class__.__name__, globals()
+            glob = globals()
+            for super_ in orig.__class__.__mro__:
+                name = super_.__name__
+                if dialect:
+                    name = dialect.dbapi_exception_translation_map.get(
+                        name, name)
                 if name in glob and issubclass(glob[name], DBAPIError):
                     cls = glob[name]
+                    break

         return cls(statement, params, orig, connection_invalidated)

@@ -307,13 +315,12 @@ class DBAPIError(StatementError):
     def __init__(self, statement, params, orig, connection_invalidated=False):
         try:
             text = str(orig)
-        except (KeyboardInterrupt, SystemExit):
-            raise
         except Exception as e:
             text = 'Error in str() of DB-API-generated exception: ' + str(e)
         StatementError.__init__(
             self,
-            '(%s) %s' % (orig.__class__.__name__, text),
+            '(%s.%s) %s' % (
+                orig.__class__.__module__, orig.__class__.__name__, text, ),
             statement,
             params,
             orig
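The net effect of the exc.py changes shows up in error messages: wrapped DBAPI exceptions now carry the driver module in the prefix, and the statement and parameters render as separate bracketed sections. For example::

    from sqlalchemy import create_engine, exc

    engine = create_engine("sqlite://")

    try:
        engine.execute("SELECT * FROM missing_table")
    except exc.DBAPIError as err:
        # e.g. "(sqlite3.OperationalError) no such table: missing_table
        #       [SQL: 'SELECT * FROM missing_table']"
        print(str(err))
        print(err.orig)  # the original DBAPI exception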
@@ -1,6 +1,11 @@
 # ext/__init__.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under
 # the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+from .. import util as _sa_util
+
+_sa_util.dependencies.resolve_all("sqlalchemy.ext")
@@ -1,5 +1,5 @@
 # ext/associationproxy.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under

@@ -77,16 +77,16 @@ def association_proxy(target_collection, attr, **kw):


 ASSOCIATION_PROXY = util.symbol('ASSOCIATION_PROXY')
-"""Symbol indicating an :class:`_InspectionAttr` that's
+"""Symbol indicating an :class:`InspectionAttr` that's
 of type :class:`.AssociationProxy`.

-Is assigned to the :attr:`._InspectionAttr.extension_type`
+Is assigned to the :attr:`.InspectionAttr.extension_type`
 attibute.

 """


-class AssociationProxy(interfaces._InspectionAttr):
+class AssociationProxy(interfaces.InspectionAttrInfo):
     """A descriptor that presents a read/write view of an object attribute."""

     is_attribute = False

@@ -94,7 +94,7 @@ class AssociationProxy(interfaces._InspectionAttr):

     def __init__(self, target_collection, attr, creator=None,
                  getset_factory=None, proxy_factory=None,
-                 proxy_bulk_set=None):
+                 proxy_bulk_set=None, info=None):
         """Construct a new :class:`.AssociationProxy`.

         The :func:`.association_proxy` function is provided as the usual

@@ -138,6 +138,11 @@ class AssociationProxy(interfaces._InspectionAttr):
         :param proxy_bulk_set: Optional, use with proxy_factory.  See
           the _set() method for details.

+        :param info: optional, will be assigned to
+         :attr:`.AssociationProxy.info` if present.
+
+         .. versionadded:: 1.0.9
+
         """
         self.target_collection = target_collection
         self.value_attr = attr

@@ -150,6 +155,8 @@ class AssociationProxy(interfaces._InspectionAttr):
         self.key = '_%s_%s_%s' % (
             type(self).__name__, target_collection, id(self))
         self.collection_class = None
+        if info:
+            self.info = info

     @property
     def remote_attr(self):

@@ -365,13 +372,17 @@ class AssociationProxy(interfaces._InspectionAttr):
         operators of the underlying proxied attributes.

         """

         if self._target_is_object:
             if self._value_is_scalar:
                 value_expr = getattr(
-                    self.target_class, self.value_attr).has(criterion, **kwargs)
+                    self.target_class, self.value_attr).has(
+                    criterion, **kwargs)
             else:
                 value_expr = getattr(
-                    self.target_class, self.value_attr).any(criterion, **kwargs)
+                    self.target_class, self.value_attr).any(
+                    criterion, **kwargs)
         else:
             value_expr = criterion

         # check _value_is_scalar here, otherwise
         # we're scalar->scalar - call .any() so that

@@ -527,7 +538,10 @@ class _AssociationList(_AssociationCollection):
         return self.setter(object, value)

     def __getitem__(self, index):
-        return self._get(self.col[index])
+        if not isinstance(index, slice):
+            return self._get(self.col[index])
+        else:
+            return [self._get(member) for member in self.col[index]]

     def __setitem__(self, index, value):
         if not isinstance(index, slice):

@@ -589,7 +603,7 @@ class _AssociationList(_AssociationCollection):

         for member in self.col:
             yield self._get(member)
-        raise StopIteration
+        return

     def append(self, value):
         item = self._create(value)

@@ -893,7 +907,7 @@ class _AssociationSet(_AssociationCollection):
         """
         for member in self.col:
             yield self._get(member)
-        raise StopIteration
+        return

     def add(self, value):
         if value not in self:
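The new ``info`` parameter rides along with the proxy and is reachable like the ``.info`` of other :class:`.InspectionAttrInfo` objects. A self-contained sketch (model names are invented for illustration)::

    from sqlalchemy import Column, ForeignKey, Integer, String
    from sqlalchemy.orm import relationship
    from sqlalchemy.ext.associationproxy import association_proxy
    from sqlalchemy.ext.declarative import declarative_base

    Base = declarative_base()

    class Keyword(Base):
        __tablename__ = 'keyword'
        id = Column(Integer, primary_key=True)
        user_id = Column(ForeignKey('user.id'))
        word = Column(String(50))

        def __init__(self, word):
            self.word = word

    class User(Base):
        __tablename__ = 'user'
        id = Column(Integer, primary_key=True)
        kws = relationship(Keyword)

        # proxy the 'word' attribute off of the 'kws' collection;
        # 'info' is the parameter added in 1.0.9 per the diff above
        keywords = association_proxy('kws', 'word',
                                     info={'description': 'tags'})

    print(User.keywords.info)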
@@ -1,5 +1,5 @@
 # ext/automap.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under

@@ -11,12 +11,6 @@ schema, typically though not necessarily one which is reflected.

 .. versionadded:: 0.9.1 Added :mod:`sqlalchemy.ext.automap`.

-.. note::
-
-    The :mod:`sqlalchemy.ext.automap` extension should be considered
-    **experimental** as of 0.9.1.   Featureset and API stability is
-    not guaranteed at this time.
-
 It is hoped that the :class:`.AutomapBase` system provides a quick
 and modernized solution to the problem that the very famous
 `SQLSoup <https://sqlsoup.readthedocs.org/en/latest/>`_

@@ -67,7 +61,7 @@ asking it to reflect the schema and produce mappings::
 Above, calling :meth:`.AutomapBase.prepare` while passing along the
 :paramref:`.AutomapBase.prepare.reflect` parameter indicates that the
 :meth:`.MetaData.reflect` method will be called on this declarative base
-classes' :class:`.MetaData` collection; then, each viable
+classes' :class:`.MetaData` collection; then, each **viable**
 :class:`.Table` within the :class:`.MetaData` will get a new mapped class
 generated automatically.  The :class:`.ForeignKeyConstraint` objects which
 link the various tables together will be used to produce new, bidirectional

@@ -76,6 +70,12 @@ follow along a default naming scheme that we can customize.  At this point,
 our basic mapping consisting of related ``User`` and ``Address`` classes is
 ready to use in the traditional way.

+.. note:: By **viable**, we mean that for a table to be mapped, it must
+   specify a primary key.  Additionally, if the table is detected as being
+   a pure association table between two other tables, it will not be directly
+   mapped and will instead be configured as a many-to-many table between
+   the mappings for the two referring tables.
+
 Generating Mappings from an Existing MetaData
 =============================================

@@ -111,8 +111,8 @@ explicit table declaration::
     User, Address, Order = Base.classes.user, Base.classes.address,\
         Base.classes.user_order

-Specifying Classes Explcitly
-============================
+Specifying Classes Explicitly
+=============================

 The :mod:`.sqlalchemy.ext.automap` extension allows classes to be defined
 explicitly, in a way similar to that of the :class:`.DeferredReflection` class.

@@ -188,7 +188,7 @@ scheme for class names and a "pluralizer" for collection names using the
         "'words_and_underscores' -> 'WordsAndUnderscores'"

         return str(tablename[0].upper() + \\
-                re.sub(r'_(\w)', lambda m: m.group(1).upper(), tablename[1:]))
+                re.sub(r'_([a-z])', lambda m: m.group(1).upper(), tablename[1:]))

     _pluralizer = inflect.engine()
     def pluralize_collection(base, local_cls, referred_cls, constraint):

@@ -196,10 +196,9 @@ scheme for class names and a "pluralizer" for collection names using the
         "'SomeTerm' -> 'some_terms'"

         referred_name = referred_cls.__name__
-        uncamelized = referred_name[0].lower() + \\
-            re.sub(r'\W',
+        uncamelized = re.sub(r'[A-Z]',
                    lambda m: "_%s" % m.group(0).lower(),
-                   referred_name[1:])
+                   referred_name)[1:]
         pluralized = _pluralizer.plural(uncamelized)
         return pluralized

@@ -243,7 +242,26 @@ follows:
    one-to-many backref will be created on the referred class referring
    to this class.

-4. The names of the relationships are determined using the
+4. If any of the columns that are part of the :class:`.ForeignKeyConstraint`
+   are not nullable (e.g. ``nullable=False``), a
+   :paramref:`~.relationship.cascade` keyword argument
+   of ``all, delete-orphan`` will be added to the keyword arguments to
+   be passed to the relationship or backref.  If the
+   :class:`.ForeignKeyConstraint` reports that
+   :paramref:`.ForeignKeyConstraint.ondelete`
+   is set to ``CASCADE`` for a not null or ``SET NULL`` for a nullable
+   set of columns, the option :paramref:`~.relationship.passive_deletes`
+   flag is set to ``True`` in the set of relationship keyword arguments.
+   Note that not all backends support reflection of ON DELETE.
+
+   .. versionadded:: 1.0.0 - automap will detect non-nullable foreign key
+      constraints when producing a one-to-many relationship and establish
+      a default cascade of ``all, delete-orphan`` if so; additionally,
+      if the constraint specifies :paramref:`.ForeignKeyConstraint.ondelete`
+      of ``CASCADE`` for non-nullable or ``SET NULL`` for nullable columns,
+      the ``passive_deletes=True`` option is also added.
+
+5. The names of the relationships are determined using the
    :paramref:`.AutomapBase.prepare.name_for_scalar_relationship` and
    :paramref:`.AutomapBase.prepare.name_for_collection_relationship`
    callable functions.  It is important to note that the default relationship

@@ -252,18 +270,18 @@ follows:
    alternate class naming scheme, that's the name from which the relationship
    name will be derived.

-5. The classes are inspected for an existing mapped property matching these
+6. The classes are inspected for an existing mapped property matching these
    names.  If one is detected on one side, but none on the other side,
    :class:`.AutomapBase` attempts to create a relationship on the missing side,
    then uses the :paramref:`.relationship.back_populates` parameter in order to
    point the new relationship to the other side.

-6. In the usual case where no relationship is on either side,
+7. In the usual case where no relationship is on either side,
    :meth:`.AutomapBase.prepare` produces a :func:`.relationship` on the
    "many-to-one" side and matches it to the other using the
    :paramref:`.relationship.backref` parameter.

-7. Production of the :func:`.relationship` and optionally the :func:`.backref`
+8. Production of the :func:`.relationship` and optionally the :func:`.backref`
    is handed off to the :paramref:`.AutomapBase.prepare.generate_relationship`
    function, which can be supplied by the end-user in order to augment
    the arguments passed to :func:`.relationship` or :func:`.backref` or to

@@ -606,7 +624,7 @@ def generate_relationship(
     :param base: the :class:`.AutomapBase` class doing the prepare.

     :param direction: indicate the "direction" of the relationship; this will
-     be one of :data:`.ONETOMANY`, :data:`.MANYTOONE`, :data:`.MANYTOONE`.
+     be one of :data:`.ONETOMANY`, :data:`.MANYTOONE`, :data:`.MANYTOMANY`.

     :param return_fn: the function that is used by default to create the
      relationship.  This will be either :func:`.relationship` or

@@ -877,6 +895,19 @@ def _relationships_for_fks(automap_base, map_config, table_to_map_config,
                 constraint
             )

+            o2m_kws = {}
+            nullable = False not in set([fk.parent.nullable for fk in fks])
+            if not nullable:
+                o2m_kws['cascade'] = "all, delete-orphan"
+
+                if constraint.ondelete and \
+                        constraint.ondelete.lower() == "cascade":
+                    o2m_kws['passive_deletes'] = True
+            else:
+                if constraint.ondelete and \
+                        constraint.ondelete.lower() == "set null":
+                    o2m_kws['passive_deletes'] = True
+
             create_backref = backref_name not in referred_cfg.properties

             if relationship_name not in map_config.properties:

@@ -885,7 +916,8 @@ def _relationships_for_fks(automap_base, map_config, table_to_map_config,
                     automap_base,
                     interfaces.ONETOMANY, backref,
                     backref_name, referred_cls, local_cls,
-                    collection_class=collection_class)
+                    collection_class=collection_class,
+                    **o2m_kws)
             else:
                 backref_obj = None
             rel = generate_relationship(automap_base,

@@ -916,7 +948,8 @@ def _relationships_for_fks(automap_base, map_config, table_to_map_config,
                         fk.parent
                         for fk in constraint.elements],
                     back_populates=relationship_name,
-                    collection_class=collection_class)
+                    collection_class=collection_class,
+                    **o2m_kws)
             if rel is not None:
                 referred_cfg.properties[backref_name] = rel
                 map_config.properties[
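Taken together, the automap changes are visible from the documented entry point; a sketch (the database URL and table name are assumptions)::

    from sqlalchemy import create_engine
    from sqlalchemy.ext.automap import automap_base

    Base = automap_base()
    engine = create_engine("sqlite:///mydatabase.db")

    # reflect and map; one-to-many relationships generated from
    # non-nullable foreign keys now carry cascade="all, delete-orphan"
    Base.prepare(engine, reflect=True)

    User = Base.classes.user  # assumes a 'user' table with a primary key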
lib/python3.5/site-packages/sqlalchemy/ext/baked.py (new file, 523 lines)

@@ -0,0 +1,523 @@
# sqlalchemy/ext/baked.py
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Baked query extension.

Provides a creational pattern for the :class:`.query.Query` object which
allows the fully constructed object, Core select statement, and string
compiled result to be fully cached.


"""

from ..orm.query import Query
from ..orm import strategies, attributes, properties, \
    strategy_options, util as orm_util, interfaces
from .. import log as sqla_log
from ..sql import util as sql_util
from ..orm import exc as orm_exc
from .. import exc as sa_exc
from .. import util

import copy
import logging

log = logging.getLogger(__name__)


class BakedQuery(object):
    """A builder object for :class:`.query.Query` objects."""

    __slots__ = 'steps', '_bakery', '_cache_key', '_spoiled'

    def __init__(self, bakery, initial_fn, args=()):
        self._cache_key = ()
        self._update_cache_key(initial_fn, args)
        self.steps = [initial_fn]
        self._spoiled = False
        self._bakery = bakery

    @classmethod
    def bakery(cls, size=200):
        """Construct a new bakery."""

        _bakery = util.LRUCache(size)

        def call(initial_fn, *args):
            return cls(_bakery, initial_fn, args)

        return call

    def _clone(self):
        b1 = BakedQuery.__new__(BakedQuery)
        b1._cache_key = self._cache_key
        b1.steps = list(self.steps)
        b1._bakery = self._bakery
        b1._spoiled = self._spoiled
        return b1

    def _update_cache_key(self, fn, args=()):
        self._cache_key += (fn.__code__,) + args

    def __iadd__(self, other):
        if isinstance(other, tuple):
            self.add_criteria(*other)
        else:
            self.add_criteria(other)
        return self

    def __add__(self, other):
        if isinstance(other, tuple):
            return self.with_criteria(*other)
        else:
            return self.with_criteria(other)

    def add_criteria(self, fn, *args):
        """Add a criteria function to this :class:`.BakedQuery`.

        This is equivalent to using the ``+=`` operator to
        modify a :class:`.BakedQuery` in-place.

        """
        self._update_cache_key(fn, args)
        self.steps.append(fn)
        return self

    def with_criteria(self, fn, *args):
        """Add a criteria function to a :class:`.BakedQuery` cloned from this one.

        This is equivalent to using the ``+`` operator to
        produce a new :class:`.BakedQuery` with modifications.

        """
        return self._clone().add_criteria(fn, *args)

    def for_session(self, session):
        """Return a :class:`.Result` object for this :class:`.BakedQuery`.

        This is equivalent to calling the :class:`.BakedQuery` as a
        Python callable, e.g. ``result = my_baked_query(session)``.

        """
        return Result(self, session)

    def __call__(self, session):
        return self.for_session(session)
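Typical use of the API defined so far, following the extension's documented pattern (``User`` is an assumed mapped class)::

    from sqlalchemy import bindparam
    from sqlalchemy.ext import baked

    bakery = baked.bakery()

    def lookup_by_name(session, name):
        # each lambda's __code__ contributes to the cache key, so the
        # fully built Query and its compiled SQL are computed only once
        bq = bakery(lambda s: s.query(User))
        bq += lambda q: q.filter(User.name == bindparam('name'))
        return bq(session).params(name=name).all()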
    def spoil(self, full=False):
        """Cancel any query caching that will occur on this BakedQuery object.

        The BakedQuery can continue to be used normally, however additional
        creational functions will not be cached; they will be called
        on every invocation.

        This is to support the case where a particular step in constructing
        a baked query disqualifies the query from being cacheable, such
        as a variant that relies upon some uncacheable value.

        :param full: if False, only functions added to this
         :class:`.BakedQuery` object subsequent to the spoil step will be
         non-cached; the state of the :class:`.BakedQuery` up until
         this point will be pulled from the cache.  If True, then the
         entire :class:`.Query` object is built from scratch each
         time, with all creational functions being called on each
         invocation.

        """
        if not full:
            _spoil_point = self._clone()
            _spoil_point._cache_key += ('_query_only', )
            self.steps = [_spoil_point._retrieve_baked_query]
        self._spoiled = True
        return self

    def _retrieve_baked_query(self, session):
        query = self._bakery.get(self._cache_key, None)
        if query is None:
            query = self._as_query(session)
            self._bakery[self._cache_key] = query.with_session(None)
        return query.with_session(session)

    def _bake(self, session):
        query = self._as_query(session)

        context = query._compile_context()
        self._bake_subquery_loaders(session, context)
        context.session = None
        context.query = query = context.query.with_session(None)
        query._execution_options = query._execution_options.union(
            {"compiled_cache": self._bakery}
        )
        # we'll be holding onto the query for some of its state,
        # so delete some compilation-use-only attributes that can take up
        # space
        for attr in (
                '_correlate', '_from_obj', '_mapper_adapter_map',
                '_joinpath', '_joinpoint'):
            query.__dict__.pop(attr, None)
        self._bakery[self._cache_key] = context
        return context

    def _as_query(self, session):
        query = self.steps[0](session)

        for step in self.steps[1:]:
            query = step(query)
        return query

    def _bake_subquery_loaders(self, session, context):
        """convert subquery eager loaders in the cache into baked queries.

        For subquery eager loading to work, all we need here is that the
        Query point to the correct session when it is run.  However, since
        we are "baking" anyway, we may as well also turn the query into
        a "baked" query so that we save on performance too.

        """
        context.attributes['baked_queries'] = baked_queries = []
        for k, v in list(context.attributes.items()):
            if isinstance(v, Query):
                if 'subquery' in k:
                    bk = BakedQuery(self._bakery, lambda *args: v)
                    bk._cache_key = self._cache_key + k
                    bk._bake(session)
                    baked_queries.append((k, bk._cache_key, v))
                del context.attributes[k]

    def _unbake_subquery_loaders(self, session, context, params):
        """Retrieve subquery eager loaders stored by _bake_subquery_loaders
        and turn them back into Result objects that will iterate just
        like a Query object.

        """
        for k, cache_key, query in context.attributes["baked_queries"]:
            bk = BakedQuery(self._bakery, lambda sess: query.with_session(sess))
            bk._cache_key = cache_key
            context.attributes[k] = bk.for_session(session).params(**params)


class Result(object):
    """Invokes a :class:`.BakedQuery` against a :class:`.Session`.

    The :class:`.Result` object is where the actual :class:`.query.Query`
    object gets created, or retrieved from the cache,
    against a target :class:`.Session`, and is then invoked for results.

    """
    __slots__ = 'bq', 'session', '_params'

    def __init__(self, bq, session):
        self.bq = bq
        self.session = session
        self._params = {}

    def params(self, *args, **kw):
        """Specify parameters to be replaced into the string SQL statement."""

        if len(args) == 1:
            kw.update(args[0])
        elif len(args) > 0:
            raise sa_exc.ArgumentError(
                "params() takes zero or one positional argument, "
                "which is a dictionary.")
        self._params.update(kw)
        return self

    def _as_query(self):
        return self.bq._as_query(self.session).params(self._params)

    def __str__(self):
        return str(self._as_query())

    def __iter__(self):
        bq = self.bq
        if bq._spoiled:
            return iter(self._as_query())

        baked_context = bq._bakery.get(bq._cache_key, None)
        if baked_context is None:
            baked_context = bq._bake(self.session)

        context = copy.copy(baked_context)
        context.session = self.session
        context.attributes = context.attributes.copy()

        bq._unbake_subquery_loaders(self.session, context, self._params)

        context.statement.use_labels = True
        if context.autoflush and not context.populate_existing:
            self.session._autoflush()
        return context.query.params(self._params).\
            with_session(self.session)._execute_and_instances(context)

    def first(self):
        """Return the first row.

        Equivalent to :meth:`.Query.first`.

        """
        bq = self.bq.with_criteria(lambda q: q.slice(0, 1))
        ret = list(bq.for_session(self.session).params(self._params))
        if len(ret) > 0:
            return ret[0]
        else:
            return None

    def one(self):
        """Return exactly one result or raise an exception.

        Equivalent to :meth:`.Query.one`.

        """
        ret = list(self)

        l = len(ret)
        if l == 1:
            return ret[0]
        elif l == 0:
            raise orm_exc.NoResultFound("No row was found for one()")
        else:
            raise orm_exc.MultipleResultsFound(
                "Multiple rows were found for one()")

    def one_or_none(self):
        """Return one or zero results, or raise an exception for multiple
        rows.

        Equivalent to :meth:`.Query.one_or_none`.

        .. versionadded:: 1.0.9

        """
        ret = list(self)

        l = len(ret)
        if l == 1:
            return ret[0]
        elif l == 0:
            return None
        else:
            raise orm_exc.MultipleResultsFound(
                "Multiple rows were found for one_or_none()")

    def all(self):
        """Return all rows.

        Equivalent to :meth:`.Query.all`.

        """
        return list(self)

    def get(self, ident):
        """Retrieve an object based on identity.

        Equivalent to :meth:`.Query.get`.

        """
        query = self.bq.steps[0](self.session)
        return query._get_impl(ident, self._load_on_ident)

    def _load_on_ident(self, query, key):
        """Load the given identity key from the database."""

        ident = key[1]

        mapper = query._mapper_zero()

        _get_clause, _get_params = mapper._get_clause

        def setup(query):
            _lcl_get_clause = _get_clause
            q = query._clone()
            q._get_condition()
            q._order_by = None

            # None present in ident - turn those comparisons
            # into "IS NULL"
            if None in ident:
                nones = set([
                    _get_params[col].key for col, value in
                    zip(mapper.primary_key, ident) if value is None
                ])
                _lcl_get_clause = sql_util.adapt_criterion_to_null(
                    _lcl_get_clause, nones)

            _lcl_get_clause = q._adapt_clause(_lcl_get_clause, True, False)
            q._criterion = _lcl_get_clause
            return q

        # cache the query against a key that includes
        # which positions in the primary key are NULL
        # (remember, we can map to an OUTER JOIN)
        bq = self.bq

        # add the clause we got from mapper._get_clause to the cache
        # key so that if a race causes multiple calls to _get_clause,
        # we've cached on ours
        bq = bq._clone()
        bq._cache_key += (_get_clause, )

        bq = bq.with_criteria(setup, tuple(elem is None for elem in ident))

        params = dict([
            (_get_params[primary_key].key, id_val)
            for id_val, primary_key in zip(ident, mapper.primary_key)
        ])

        result = list(bq.for_session(self.session).params(**params))
        l = len(result)
        if l > 1:
            raise orm_exc.MultipleResultsFound()
        elif l:
            return result[0]
        else:
            return None


def bake_lazy_loaders():
    """Enable the use of baked queries for all lazyloaders systemwide.

    This operation should be safe for all lazy loaders, and will reduce
    Python overhead for these operations.

    """
    BakedLazyLoader._strategy_keys[:] = []

    properties.RelationshipProperty.strategy_for(
        lazy="select")(BakedLazyLoader)
    properties.RelationshipProperty.strategy_for(
        lazy=True)(BakedLazyLoader)
    properties.RelationshipProperty.strategy_for(
        lazy="baked_select")(BakedLazyLoader)

    strategies.LazyLoader._strategy_keys[:] = BakedLazyLoader._strategy_keys[:]


def unbake_lazy_loaders():
    """Disable the use of baked queries for all lazyloaders systemwide.

    This operation reverts the changes produced by :func:`.bake_lazy_loaders`.
|
||||
|
||||
"""
|
||||
strategies.LazyLoader._strategy_keys[:] = []
|
||||
BakedLazyLoader._strategy_keys[:] = []
|
||||
|
||||
properties.RelationshipProperty.strategy_for(
|
||||
lazy="select")(strategies.LazyLoader)
|
||||
properties.RelationshipProperty.strategy_for(
|
||||
lazy=True)(strategies.LazyLoader)
|
||||
properties.RelationshipProperty.strategy_for(
|
||||
lazy="baked_select")(BakedLazyLoader)
|
||||
assert strategies.LazyLoader._strategy_keys
|
||||
|
||||
|
||||
@sqla_log.class_logger
|
||||
@properties.RelationshipProperty.strategy_for(lazy="baked_select")
|
||||
class BakedLazyLoader(strategies.LazyLoader):
|
||||
|
||||
def _emit_lazyload(self, session, state, ident_key, passive):
|
||||
q = BakedQuery(
|
||||
self.mapper._compiled_cache,
|
||||
lambda session: session.query(self.mapper))
|
||||
q.add_criteria(
|
||||
lambda q: q._adapt_all_clauses()._with_invoke_all_eagers(False),
|
||||
self.parent_property)
|
||||
|
||||
if not self.parent_property.bake_queries:
|
||||
q.spoil(full=True)
|
||||
|
||||
if self.parent_property.secondary is not None:
|
||||
q.add_criteria(
|
||||
lambda q:
|
||||
q.select_from(self.mapper, self.parent_property.secondary))
|
||||
|
||||
pending = not state.key
|
||||
|
||||
# don't autoflush on pending
|
||||
if pending or passive & attributes.NO_AUTOFLUSH:
|
||||
q.add_criteria(lambda q: q.autoflush(False))
|
||||
|
||||
if state.load_path:
|
||||
q.spoil()
|
||||
q.add_criteria(
|
||||
lambda q:
|
||||
q._with_current_path(state.load_path[self.parent_property]))
|
||||
|
||||
if state.load_options:
|
||||
q.spoil()
|
||||
q.add_criteria(
|
||||
lambda q: q._conditional_options(*state.load_options))
|
||||
|
||||
if self.use_get:
|
||||
return q(session)._load_on_ident(
|
||||
session.query(self.mapper), ident_key)
|
||||
|
||||
if self.parent_property.order_by:
|
||||
q.add_criteria(
|
||||
lambda q:
|
||||
q.order_by(*util.to_list(self.parent_property.order_by)))
|
||||
|
||||
for rev in self.parent_property._reverse_property:
|
||||
# reverse props that are MANYTOONE are loading *this*
|
||||
# object from get(), so don't need to eager out to those.
|
||||
if rev.direction is interfaces.MANYTOONE and \
|
||||
rev._use_get and \
|
||||
not isinstance(rev.strategy, strategies.LazyLoader):
|
||||
q.add_criteria(
|
||||
lambda q:
|
||||
q.options(
|
||||
strategy_options.Load(
|
||||
rev.parent).baked_lazyload(rev.key)))
|
||||
|
||||
lazy_clause, params = self._generate_lazy_clause(state, passive)
|
||||
|
||||
if pending:
|
||||
if orm_util._none_set.intersection(params.values()):
|
||||
return None
|
||||
|
||||
q.add_criteria(lambda q: q.filter(lazy_clause))
|
||||
result = q(session).params(**params).all()
|
||||
if self.uselist:
|
||||
return result
|
||||
else:
|
||||
l = len(result)
|
||||
if l:
|
||||
if l > 1:
|
||||
util.warn(
|
||||
"Multiple rows returned with "
|
||||
"uselist=False for lazily-loaded attribute '%s' "
|
||||
% self.parent_property)
|
||||
|
||||
return result[0]
|
||||
else:
|
||||
return None
|
||||
|
||||
|
||||
@strategy_options.loader_option()
|
||||
def baked_lazyload(loadopt, attr):
|
||||
"""Indicate that the given attribute should be loaded using "lazy"
|
||||
loading with a "baked" query used in the load.
|
||||
|
||||
"""
|
||||
return loadopt.set_relationship_strategy(attr, {"lazy": "baked_select"})
|
||||
|
||||
|
||||
@baked_lazyload._add_unbound_fn
|
||||
def baked_lazyload(*keys):
|
||||
return strategy_options._UnboundLoad._from_keys(
|
||||
strategy_options._UnboundLoad.baked_lazyload, keys, False, {})
|
||||
|
||||
|
||||
@baked_lazyload._add_unbound_all_fn
|
||||
def baked_lazyload_all(*keys):
|
||||
return strategy_options._UnboundLoad._from_keys(
|
||||
strategy_options._UnboundLoad.baked_lazyload, keys, True, {})
|
||||
|
||||
baked_lazyload = baked_lazyload._unbound_fn
|
||||
baked_lazyload_all = baked_lazyload_all._unbound_all_fn
|
||||
|
||||
bakery = BakedQuery.bakery
|
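The file above (the new ``sqlalchemy/ext/baked.py``) is shown in full. As a quick orientation, a caching round trip through ``bakery``, ``BakedQuery`` and the ``Result`` class defined above looks roughly like the following sketch; this is an illustration, not part of the commit, and the ``User`` model and session are hypothetical.

# Illustrative only: exercises the public surface added above.
from sqlalchemy import bindparam
from sqlalchemy.ext import baked

bakery = baked.bakery()  # the BakedQuery.bakery factory exported above

def lookup(session, username):
    # 'User' is a hypothetical mapped class.  Each lambda contributes
    # to the cache key; the constructed Query and its compiled form
    # are stored in the bakery on first use and reused afterwards.
    bq = bakery(lambda s: s.query(User))
    bq += lambda q: q.filter(User.name == bindparam('username'))
    # bq(session) returns the Result object defined above
    return bq(session).params(username=username).one_or_none()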
@@ -1,5 +1,5 @@
 # ext/compiler.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under
@@ -121,9 +121,19 @@ below where we generate a CHECK constraint that embeds a SQL expression::
     def compile_my_constraint(constraint, ddlcompiler, **kw):
         return "CONSTRAINT %s CHECK (%s)" % (
             constraint.name,
-            ddlcompiler.sql_compiler.process(constraint.expression)
+            ddlcompiler.sql_compiler.process(
+                constraint.expression, literal_binds=True)
         )
 
+Above, we add an additional flag to the process step as called by
+:meth:`.SQLCompiler.process`, which is the ``literal_binds`` flag.  This
+indicates that any SQL expression which refers to a :class:`.BindParameter`
+object or other "literal" object such as those which refer to strings or
+integers should be rendered **in-place**, rather than being referred to as
+a bound parameter; when emitting DDL, bound parameters are typically not
+supported.
+
 
 .. _enabling_compiled_autocommit:
 
 Enabling Autocommit on a Construct
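For context, a self-contained version of the pattern this hunk documents might look like the sketch below. The ``MyConstraint`` class here is a hypothetical stand-in for the construct built earlier in the module's docs; the point is that ``literal_binds=True`` makes the embedded expression render its literal values in-place, which DDL requires.

# Sketch under stated assumptions; not the library's own example verbatim.
from sqlalchemy import column
from sqlalchemy.ext.compiler import compiles
from sqlalchemy.schema import Constraint

class MyConstraint(Constraint):
    """Hypothetical constraint carrying an embedded SQL expression."""
    def __init__(self, expression, **kw):
        super(MyConstraint, self).__init__(**kw)
        self.expression = expression

@compiles(MyConstraint)
def compile_my_constraint(constraint, ddlcompiler, **kw):
    # literal_binds=True renders bound values in-place, as DDL cannot
    # reference bound parameters
    return "CONSTRAINT %s CHECK (%s)" % (
        constraint.name,
        ddlcompiler.sql_compiler.process(
            constraint.expression, literal_binds=True)
    )

# e.g. MyConstraint(column('x') > 5, name='x_gt_5') would render as
# CONSTRAINT x_gt_5 CHECK (x > 5)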
File diff suppressed because it is too large
@@ -1,5 +1,5 @@
 # ext/declarative/api.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under
@@ -7,13 +7,14 @@
 """Public API functions and helpers for declarative."""
 
 
-from ...schema import Table, MetaData
-from ...orm import synonym as _orm_synonym, mapper,\
+from ...schema import Table, MetaData, Column
+from ...orm import synonym as _orm_synonym, \
     comparable_property,\
-    interfaces, properties
+    interfaces, properties, attributes
 from ...orm.util import polymorphic_union
 from ...orm.base import _mapper_or_none
-from ...util import OrderedDict
+from ...util import OrderedDict, hybridmethod, hybridproperty
 from ... import util
 from ... import exc
 import weakref
@@ -21,7 +22,6 @@ from .base import _as_declarative, \
     _declarative_constructor,\
     _DeferredMapperConfig, _add_attribute
-from .clsregistry import _class_resolver
 from . import clsregistry
 
 
 def instrument_declarative(cls, registry, metadata):
@@ -157,13 +157,91 @@ class declared_attr(interfaces._MappedAttribute, property):
 
     """
 
-    def __init__(self, fget, *arg, **kw):
-        super(declared_attr, self).__init__(fget, *arg, **kw)
+    def __init__(self, fget, cascading=False):
+        super(declared_attr, self).__init__(fget)
         self.__doc__ = fget.__doc__
+        self._cascading = cascading
 
     def __get__(desc, self, cls):
-        return desc.fget(cls)
+        reg = cls.__dict__.get('_sa_declared_attr_reg', None)
+        if reg is None:
+            manager = attributes.manager_of_class(cls)
+            if manager is None:
+                util.warn(
+                    "Unmanaged access of declarative attribute %s from "
+                    "non-mapped class %s" %
+                    (desc.fget.__name__, cls.__name__))
+            return desc.fget(cls)
+
+        if reg is None:
+            return desc.fget(cls)
+        elif desc in reg:
+            return reg[desc]
+        else:
+            reg[desc] = obj = desc.fget(cls)
+            return obj
+
+    @hybridmethod
+    def _stateful(cls, **kw):
+        return _stateful_declared_attr(**kw)
+
+    @hybridproperty
+    def cascading(cls):
+        """Mark a :class:`.declared_attr` as cascading.
+
+        This is a special-use modifier which indicates that a column
+        or MapperProperty-based declared attribute should be configured
+        distinctly per mapped subclass, within a mapped-inheritance scenario.
+
+        Below, both MyClass as well as MySubClass will have a distinct
+        ``id`` Column object established::
+
+            class HasSomeAttribute(object):
+                @declared_attr.cascading
+                def some_id(cls):
+                    if has_inherited_table(cls):
+                        return Column(
+                            ForeignKey('myclass.id'), primary_key=True)
+                    else:
+                        return Column(Integer, primary_key=True)
+
+                    return Column('id', Integer, primary_key=True)
+
+            class MyClass(HasSomeAttribute, Base):
+                ""
+                # ...
+
+            class MySubClass(MyClass):
+                ""
+                # ...
+
+        The behavior of the above configuration is that ``MySubClass``
+        will refer to both its own ``id`` column as well as that of
+        ``MyClass`` underneath the attribute named ``some_id``.
+
+        .. seealso::
+
+            :ref:`declarative_inheritance`
+
+            :ref:`mixin_inheritance_columns`
+
+
+        """
+        return cls._stateful(cascading=True)
+
+
+class _stateful_declared_attr(declared_attr):
+    def __init__(self, **kw):
+        self.kw = kw
+
+    def _stateful(self, **kw):
+        new_kw = self.kw.copy()
+        new_kw.update(kw)
+        return _stateful_declared_attr(**new_kw)
+
+    def __call__(self, fn):
+        return declared_attr(fn, **self.kw)
+
 
 def declarative_base(bind=None, metadata=None, mapper=None, cls=object,
                      name='Base', constructor=_declarative_constructor,
@@ -319,6 +397,15 @@ class ConcreteBase(object):
                 'polymorphic_identity':'manager',
                 'concrete':True}
 
+    .. seealso::
+
+        :class:`.AbstractConcreteBase`
+
+        :ref:`concrete_inheritance`
+
+        :ref:`inheritance_concrete_helpers`
+
+
     """
 
     @classmethod
@@ -349,9 +436,11 @@ class AbstractConcreteBase(ConcreteBase):
     ``__declare_last__()`` function, which is essentially
     a hook for the :meth:`.after_configured` event.
 
-    :class:`.AbstractConcreteBase` does not produce a mapped
-    table for the class itself.  Compare to :class:`.ConcreteBase`,
-    which does.
+    :class:`.AbstractConcreteBase` does produce a mapped class
+    for the base class, however it is not persisted to any table; it
+    is instead mapped directly to the "polymorphic" selectable and is
+    only used for selecting.  Compare to :class:`.ConcreteBase`,
+    which does create a persisted table for the base class.
 
     Example::
 
@@ -365,20 +454,79 @@ class AbstractConcreteBase(ConcreteBase):
             employee_id = Column(Integer, primary_key=True)
             name = Column(String(50))
             manager_data = Column(String(40))
 
             __mapper_args__ = {
                 'polymorphic_identity':'manager',
                 'concrete':True}
 
+    The abstract base class is handled by declarative in a special way;
+    at class configuration time, it behaves like a declarative mixin
+    or an ``__abstract__`` base class.  Once classes are configured
+    and mappings are produced, it then gets mapped itself, but
+    after all of its descendants.  This is a unique system of mapping
+    not found in any other SQLAlchemy system.
+
+    Using this approach, we can specify columns and properties
+    that will take place on mapped subclasses, in the way that
+    we normally do as in :ref:`declarative_mixins`::
+
+        class Company(Base):
+            __tablename__ = 'company'
+            id = Column(Integer, primary_key=True)
+
+        class Employee(AbstractConcreteBase, Base):
+            employee_id = Column(Integer, primary_key=True)
+
+            @declared_attr
+            def company_id(cls):
+                return Column(ForeignKey('company.id'))
+
+            @declared_attr
+            def company(cls):
+                return relationship("Company")
+
+        class Manager(Employee):
+            __tablename__ = 'manager'
+
+            name = Column(String(50))
+            manager_data = Column(String(40))
+
+            __mapper_args__ = {
+                'polymorphic_identity':'manager',
+                'concrete':True}
+
+    When we make use of our mappings however, both ``Manager`` and
+    ``Employee`` will have an independently usable ``.company`` attribute::
+
+        session.query(Employee).filter(Employee.company.has(id=5))
+
+    .. versionchanged:: 1.0.0 - The mechanics of :class:`.AbstractConcreteBase`
+       have been reworked to support relationships established directly
+       on the abstract base, without any special configurational steps.
+
     .. seealso::
 
+        :class:`.ConcreteBase`
+
         :ref:`concrete_inheritance`
 
        :ref:`inheritance_concrete_helpers`
 
     """
 
     __abstract__ = True
+    __no_table__ = True
 
     @classmethod
     def __declare_first__(cls):
-        if hasattr(cls, '__mapper__'):
+        cls._sa_decl_prepare_nocascade()
+
+    @classmethod
+    def _sa_decl_prepare_nocascade(cls):
+        if getattr(cls, '__mapper__', None):
             return
 
         clsregistry.add_class(cls.__name__, cls)
+        to_map = _DeferredMapperConfig.config_for_cls(cls)
 
         # can't rely on 'self_and_descendants' here
         # since technically an immediate subclass
         # might not be mapped, but a subclass
@@ -392,11 +540,33 @@ class AbstractConcreteBase(ConcreteBase):
             if mn is not None:
                 mappers.append(mn)
         pjoin = cls._create_polymorphic_union(mappers)
-        cls.__mapper__ = m = mapper(cls, pjoin, polymorphic_on=pjoin.c.type)
+
+        # For columns that were declared on the class, these
+        # are normally ignored with the "__no_table__" mapping,
+        # unless they have a different attribute key vs. col name
+        # and are in the properties argument.
+        # In that case, ensure we update the properties entry
+        # to the correct column from the pjoin target table.
+        declared_cols = set(to_map.declared_columns)
+        for k, v in list(to_map.properties.items()):
+            if v in declared_cols:
+                to_map.properties[k] = pjoin.c[v.key]
+
+        to_map.local_table = pjoin
+
+        m_args = to_map.mapper_args_fn or dict
+
+        def mapper_args():
+            args = m_args()
+            args['polymorphic_on'] = pjoin.c.type
+            return args
+        to_map.mapper_args_fn = mapper_args
+
+        m = to_map.map()
 
         for scls in cls.__subclasses__():
             sm = _mapper_or_none(scls)
-            if sm.concrete and cls in scls.__bases__:
+            if sm and sm.concrete and cls in scls.__bases__:
                 sm._set_concrete_base(m)
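One practical effect of the ``_sa_declared_attr_reg`` memoization introduced above, sketched for illustration (the ``Base`` and model here are hypothetical, not taken from the diff):

# Illustrative sketch: during the declarative scan, a declared_attr
# function is now invoked only once per mapped class; the result is
# memoized in cls._sa_declared_attr_reg until the mapper is ready.
from sqlalchemy import Column, Integer
from sqlalchemy.ext.declarative import declarative_base, declared_attr

Base = declarative_base()

class HasId(object):
    @declared_attr
    def id(cls):
        # evaluated once per subclass; repeated access during
        # configuration returns the same Column object
        return Column(Integer, primary_key=True)

class Widget(HasId, Base):
    __tablename__ = 'widget'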
@@ -1,5 +1,5 @@
 # ext/declarative/base.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under
@@ -19,6 +19,9 @@ from ... import event
 from . import clsregistry
 import collections
 import weakref
+from sqlalchemy.orm import instrumentation
+
+declared_attr = declarative_props = None
 
 
 def _declared_mapping_info(cls):
@@ -32,38 +35,135 @@ def _declared_mapping_info(cls):
         return None
 
 
+def _resolve_for_abstract(cls):
+    if cls is object:
+        return None
+
+    if _get_immediate_cls_attr(cls, '__abstract__', strict=True):
+        for sup in cls.__bases__:
+            sup = _resolve_for_abstract(sup)
+            if sup is not None:
+                return sup
+        else:
+            return None
+    else:
+        return cls
+
+
+def _get_immediate_cls_attr(cls, attrname, strict=False):
+    """return an attribute of the class that is either present directly
+    on the class, e.g. not on a superclass, or is from a superclass but
+    this superclass is a mixin, that is, not a descendant of
+    the declarative base.
+
+    This is used to detect attributes that indicate something about
+    a mapped class independently from any mapped classes that it may
+    inherit from.
+
+    """
+    if not issubclass(cls, object):
+        return None
+
+    for base in cls.__mro__:
+        _is_declarative_inherits = hasattr(base, '_decl_class_registry')
+        if attrname in base.__dict__ and (
+            base is cls or
+            ((base in cls.__bases__ if strict else True)
+                and not _is_declarative_inherits)
+        ):
+            return getattr(base, attrname)
+    else:
+        return None
+
+
 def _as_declarative(cls, classname, dict_):
+    global declared_attr, declarative_props
+    if declared_attr is None:
+        from .api import declared_attr
+        declarative_props = (declared_attr, util.classproperty)
+
+    if _get_immediate_cls_attr(cls, '__abstract__', strict=True):
+        return
+
+    _MapperConfig.setup_mapping(cls, classname, dict_)
+
+
+class _MapperConfig(object):
+
+    @classmethod
+    def setup_mapping(cls, cls_, classname, dict_):
+        defer_map = _get_immediate_cls_attr(
+            cls_, '_sa_decl_prepare_nocascade', strict=True) or \
+            hasattr(cls_, '_sa_decl_prepare')
+
+        if defer_map:
+            cfg_cls = _DeferredMapperConfig
+        else:
+            cfg_cls = _MapperConfig
+        cfg_cls(cls_, classname, dict_)
+
+    def __init__(self, cls_, classname, dict_):
+
+        self.cls = cls_
 
         # dict_ will be a dictproxy, which we can't write to, and we need to!
-    dict_ = dict(dict_)
+        self.dict_ = dict(dict_)
+        self.classname = classname
+        self.mapped_table = None
+        self.properties = util.OrderedDict()
+        self.declared_columns = set()
+        self.column_copies = {}
+        self._setup_declared_events()
 
-    column_copies = {}
-    potential_columns = {}
+        # temporary registry.  While early 1.0 versions
+        # set up the ClassManager here, by API contract
+        # we can't do that until there's a mapper.
+        self.cls._sa_declared_attr_reg = {}
+
+        self._scan_attributes()
+
+        clsregistry.add_class(self.classname, self.cls)
+
+        self._extract_mappable_attributes()
+
+        self._extract_declared_columns()
+
+        self._setup_table()
+
+        self._setup_inheritance()
+
+        self._early_mapping()
+
+    def _early_mapping(self):
+        self.map()
+
+    def _setup_declared_events(self):
+        if _get_immediate_cls_attr(self.cls, '__declare_last__'):
+            @event.listens_for(mapper, "after_configured")
+            def after_configured():
+                self.cls.__declare_last__()
+
+        if _get_immediate_cls_attr(self.cls, '__declare_first__'):
+            @event.listens_for(mapper, "before_configured")
+            def before_configured():
+                self.cls.__declare_first__()
+
+    def _scan_attributes(self):
+        cls = self.cls
+        dict_ = self.dict_
+        column_copies = self.column_copies
+        mapper_args_fn = None
+        table_args = inherited_table_args = None
+        tablename = None
 
-    declarative_props = (declared_attr, util.classproperty)
-
     for base in cls.__mro__:
-        _is_declarative_inherits = hasattr(base, '_decl_class_registry')
+            class_mapped = base is not cls and \
+                _declared_mapping_info(base) is not None and \
+                not _get_immediate_cls_attr(
+                    base, '_sa_decl_prepare_nocascade', strict=True)
 
-        if '__declare_last__' in base.__dict__:
-            @event.listens_for(mapper, "after_configured")
-            def go():
-                cls.__declare_last__()
-        if '__declare_first__' in base.__dict__:
-            @event.listens_for(mapper, "before_configured")
-            def go():
-                cls.__declare_first__()
-        if '__abstract__' in base.__dict__ and base.__abstract__:
-            if (base is cls or
-                    (base in cls.__bases__ and not _is_declarative_inherits)):
-                return
-
-        class_mapped = _declared_mapping_info(base) is not None
+            if not class_mapped and base is not cls:
+                self._produce_column_copies(base)
 
             for name, obj in vars(base).items():
                 if name == '__mapper_args__':
@@ -90,7 +190,8 @@ def _as_declarative(cls, classname, dict_):
                         isinstance(obj, declarative_props)
                     ):
                         table_args = cls.__table_args__
-                        if not isinstance(table_args, (tuple, dict, type(None))):
+                        if not isinstance(
+                                table_args, (tuple, dict, type(None))):
                             raise exc.ArgumentError(
                                 "__table_args__ value must be a tuple, "
                                 "dict, or None")
@@ -105,27 +206,11 @@ def _as_declarative(cls, classname, dict_):
                             % (base.__name__, name, base, cls))
                     continue
                 elif base is not cls:
-                    # we're a mixin.
+                    # we're a mixin, abstract base, or something that is
+                    # acting like that for now.
                     if isinstance(obj, Column):
-                        if getattr(cls, name) is not obj:
-                            # if column has been overridden
-                            # (like by the InstrumentedAttribute of the
-                            # superclass), skip
+                        # already copied columns to the mapped class.
                         continue
-                        if obj.foreign_keys:
-                            raise exc.InvalidRequestError(
-                                "Columns with foreign keys to other columns "
-                                "must be declared as @declared_attr callables "
-                                "on declarative mixin classes. ")
-                        if name not in dict_ and not (
-                            '__table__' in dict_ and
-                            (obj.name or name) in dict_['__table__'].c
-                        ) and name not in potential_columns:
-                            potential_columns[name] = \
-                                column_copies[obj] = \
-                                obj.copy()
-                            column_copies[obj]._creation_order = \
-                                obj._creation_order
                     elif isinstance(obj, MapperProperty):
                         raise exc.InvalidRequestError(
                             "Mapper properties (i.e. deferred,"
@@ -133,25 +218,63 @@ def _as_declarative(cls, classname, dict_):
                             "be declared as @declared_attr callables "
                             "on declarative mixin classes.")
                 elif isinstance(obj, declarative_props):
-                    dict_[name] = ret = \
-                        column_copies[obj] = getattr(cls, name)
+                    oldclassprop = isinstance(obj, util.classproperty)
+                    if not oldclassprop and obj._cascading:
+                        dict_[name] = column_copies[obj] = \
+                            ret = obj.__get__(obj, cls)
+                        setattr(cls, name, ret)
+                    else:
+                        if oldclassprop:
+                            util.warn_deprecated(
+                                "Use of sqlalchemy.util.classproperty on "
+                                "declarative classes is deprecated.")
+                        dict_[name] = column_copies[obj] = \
+                            ret = getattr(cls, name)
                     if isinstance(ret, (Column, MapperProperty)) and \
                             ret.doc is None:
                         ret.doc = obj.__doc__
 
-    # apply inherited columns as we should
-    for k, v in potential_columns.items():
-        dict_[k] = v
-
         if inherited_table_args and not tablename:
             table_args = None
 
-    clsregistry.add_class(classname, cls)
-    our_stuff = util.OrderedDict()
+        self.table_args = table_args
+        self.tablename = tablename
+        self.mapper_args_fn = mapper_args_fn
+
+    def _produce_column_copies(self, base):
+        cls = self.cls
+        dict_ = self.dict_
+        column_copies = self.column_copies
+        # copy mixin columns to the mapped class
+        for name, obj in vars(base).items():
+            if isinstance(obj, Column):
+                if getattr(cls, name) is not obj:
+                    # if column has been overridden
+                    # (like by the InstrumentedAttribute of the
+                    # superclass), skip
+                    continue
+                elif obj.foreign_keys:
+                    raise exc.InvalidRequestError(
+                        "Columns with foreign keys to other columns "
+                        "must be declared as @declared_attr callables "
+                        "on declarative mixin classes. ")
+                elif name not in dict_ and not (
+                    '__table__' in dict_ and
+                    (obj.name or name) in dict_['__table__'].c
+                ):
+                    column_copies[obj] = copy_ = obj.copy()
+                    copy_._creation_order = obj._creation_order
+                    setattr(cls, name, copy_)
+                    dict_[name] = copy_
+
+    def _extract_mappable_attributes(self):
+        cls = self.cls
+        dict_ = self.dict_
+
+        our_stuff = self.properties
+
+        for k in list(dict_):
+
+            # TODO: improve this ? all dunders ?
+            if k in ('__table__', '__tablename__', '__mapper_args__'):
+                continue
+
@@ -173,12 +296,18 @@ def _as_declarative(cls, classname, dict_):
                     "%s: possibly a copy-and-paste error with a comma "
                     "left at the end of the line?" % k)
                 continue
-            if not isinstance(value, (Column, MapperProperty)):
+            elif not isinstance(value, (Column, MapperProperty)):
+                # using @declared_attr for some object that
+                # isn't Column/MapperProperty; remove from the dict_
+                # and place the evaluated value onto the class.
+                if not k.startswith('__'):
+                    dict_.pop(k)
+                    setattr(cls, k, value)
                 continue
-            if k == 'metadata':
+            # we expect to see the name 'metadata' in some valid cases;
+            # however at this point we see it's assigned to something trying
+            # to be mapped, so raise for that.
+            elif k == 'metadata':
                 raise exc.InvalidRequestError(
                     "Attribute name 'metadata' is reserved "
                     "for the MetaData instance when using a "
@@ -187,11 +316,14 @@ def _as_declarative(cls, classname, dict_):
             prop = clsregistry._deferred_relationship(cls, value)
             our_stuff[k] = prop
 
+    def _extract_declared_columns(self):
+        our_stuff = self.properties
+
         # set up attributes in the order they were created
         our_stuff.sort(key=lambda key: our_stuff[key]._creation_order)
 
         # extract columns from the class dict
-        declared_columns = set()
+        declared_columns = self.declared_columns
         name_to_prop_key = collections.defaultdict(set)
         for key, c in list(our_stuff.items()):
            if isinstance(c, (ColumnProperty, CompositeProperty)):
@@ -217,12 +349,20 @@ def _as_declarative(cls, classname, dict_):
         for name, keys in name_to_prop_key.items():
             if len(keys) > 1:
                 util.warn(
-                    "On class %r, Column object %r named directly multiple times, "
+                    "On class %r, Column object %r named "
+                    "directly multiple times, "
                     "only one will be used: %s" %
-                    (classname, name, (", ".join(sorted(keys))))
+                    (self.classname, name, (", ".join(sorted(keys))))
                 )
 
-        declared_columns = sorted(
+    def _setup_table(self):
+        cls = self.cls
+        tablename = self.tablename
+        table_args = self.table_args
+        dict_ = self.dict_
+        declared_columns = self.declared_columns
+
+        declared_columns = self.declared_columns = sorted(
             declared_columns, key=lambda c: c._creation_order)
         table = None
 
@@ -261,27 +401,35 @@ def _as_declarative(cls, classname, dict_):
                     "Can't add additional column %r when "
                     "specifying __table__" % c.key
                 )
+        self.local_table = table
 
-    if hasattr(cls, '__mapper_cls__'):
-        mapper_cls = util.unbound_method_to_callable(cls.__mapper_cls__)
-    else:
-        mapper_cls = mapper
-
+    def _setup_inheritance(self):
+        table = self.local_table
+        cls = self.cls
+        table_args = self.table_args
+        declared_columns = self.declared_columns
         for c in cls.__bases__:
-            if _declared_mapping_info(c) is not None:
-                inherits = c
+            c = _resolve_for_abstract(c)
+            if c is None:
+                continue
+            if _declared_mapping_info(c) is not None and \
+                    not _get_immediate_cls_attr(
+                        c, '_sa_decl_prepare_nocascade', strict=True):
+                self.inherits = c
                 break
         else:
-            inherits = None
+            self.inherits = None
+
+        if table is None and self.inherits is None and \
+                not _get_immediate_cls_attr(cls, '__no_table__'):
 
-        if table is None and inherits is None:
             raise exc.InvalidRequestError(
                 "Class %r does not have a __table__ or __tablename__ "
                 "specified and does not inherit from an existing "
                 "table-mapped class." % cls
             )
-        elif inherits:
-            inherited_mapper = _declared_mapping_info(inherits)
+        elif self.inherits:
+            inherited_mapper = _declared_mapping_info(self.inherits)
             inherited_table = inherited_mapper.local_table
             inherited_mapped_table = inherited_mapper.mapped_table
 
@@ -313,42 +461,6 @@ def _as_declarative(cls, classname, dict_):
                     inherited_mapped_table is not inherited_table:
                 inherited_mapped_table._refresh_for_new_column(c)
 
-    defer_map = hasattr(cls, '_sa_decl_prepare')
-    if defer_map:
-        cfg_cls = _DeferredMapperConfig
-    else:
-        cfg_cls = _MapperConfig
-    mt = cfg_cls(mapper_cls,
-                 cls, table,
-                 inherits,
-                 declared_columns,
-                 column_copies,
-                 our_stuff,
-                 mapper_args_fn)
-    if not defer_map:
-        mt.map()
-
-
-class _MapperConfig(object):
-
-    mapped_table = None
-
-    def __init__(self, mapper_cls,
-                 cls,
-                 table,
-                 inherits,
-                 declared_columns,
-                 column_copies,
-                 properties, mapper_args_fn):
-        self.mapper_cls = mapper_cls
-        self.cls = cls
-        self.local_table = table
-        self.inherits = inherits
-        self.properties = properties
-        self.mapper_args_fn = mapper_args_fn
-        self.declared_columns = declared_columns
-        self.column_copies = column_copies
 
     def _prepare_mapper_arguments(self):
         properties = self.properties
         if self.mapper_args_fn:
@@ -401,20 +513,31 @@ class _MapperConfig(object):
                 properties[k] = [col] + p.columns
         result_mapper_args = mapper_args.copy()
         result_mapper_args['properties'] = properties
-        return result_mapper_args
+        self.mapper_args = result_mapper_args
 
     def map(self):
-        mapper_args = self._prepare_mapper_arguments()
-        self.cls.__mapper__ = self.mapper_cls(
+        self._prepare_mapper_arguments()
+        if hasattr(self.cls, '__mapper_cls__'):
+            mapper_cls = util.unbound_method_to_callable(
+                self.cls.__mapper_cls__)
+        else:
+            mapper_cls = mapper
+
+        self.cls.__mapper__ = mp_ = mapper_cls(
            self.cls,
             self.local_table,
-            **mapper_args
+            **self.mapper_args
         )
+        del self.cls._sa_declared_attr_reg
+        return mp_
 
 
 class _DeferredMapperConfig(_MapperConfig):
     _configs = util.OrderedDict()
 
+    def _early_mapping(self):
+        pass
+
     @property
     def cls(self):
         return self._cls()
@@ -466,7 +589,7 @@ class _DeferredMapperConfig(_MapperConfig):
 
     def map(self):
         self._configs.pop(self._cls, None)
-        super(_DeferredMapperConfig, self).map()
+        return super(_DeferredMapperConfig, self).map()
 
 
 def _add_attribute(cls, key, value):
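For orientation, the ``__declare_first__`` and ``__declare_last__`` hooks that ``_setup_declared_events`` above wires to the ``before_configured`` and ``after_configured`` mapper events are used like this (hypothetical model, illustration only):

# Sketch of the hook API handled by _setup_declared_events above.
from sqlalchemy import Column, Integer
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()

class MyModel(Base):
    __tablename__ = 'my_model'
    id = Column(Integer, primary_key=True)

    @classmethod
    def __declare_first__(cls):
        # invoked via the "before_configured" mapper event
        pass

    @classmethod
    def __declare_last__(cls):
        # invoked via the "after_configured" mapper event
        pass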
@@ -1,5 +1,5 @@
 # ext/declarative/clsregistry.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under
@@ -71,6 +71,8 @@ class _MultipleClassMarker(object):
 
     """
 
+    __slots__ = 'on_remove', 'contents', '__weakref__'
+
     def __init__(self, classes, on_remove=None):
         self.on_remove = on_remove
         self.contents = set([
@@ -103,7 +105,12 @@ class _MultipleClassMarker(object):
             self.on_remove()
 
     def add_item(self, item):
-        modules = set([cls().__module__ for cls in self.contents])
+        # protect against class registration race condition against
+        # asynchronous garbage collection calling _remove_item,
+        # [ticket:3208]
+        modules = set([
+            cls.__module__ for cls in
+            [ref() for ref in self.contents] if cls is not None])
         if item.__module__ in modules:
             util.warn(
                 "This declarative base already contains a class with the "
@@ -122,6 +129,8 @@ class _ModuleMarker(object):
 
     """
 
+    __slots__ = 'parent', 'name', 'contents', 'mod_ns', 'path', '__weakref__'
+
     def __init__(self, name, parent):
         self.parent = parent
         self.name = name
@@ -167,6 +176,8 @@ class _ModuleMarker(object):
 
 
 class _ModNS(object):
+    __slots__ = '__parent',
+
     def __init__(self, parent):
         self.__parent = parent
 
@@ -188,6 +199,8 @@ class _ModNS(object):
 
 
 class _GetColumns(object):
+    __slots__ = 'cls',
+
     def __init__(self, cls):
         self.cls = cls
 
@@ -216,6 +229,8 @@ inspection._inspects(_GetColumns)(
 
 
 class _GetTable(object):
+    __slots__ = 'key', 'metadata'
+
     def __init__(self, key, metadata):
         self.key = key
         self.metadata = metadata
@@ -306,7 +321,8 @@ def _deferred_relationship(cls, prop):
         key, kwargs = prop.backref
         for attr in ('primaryjoin', 'secondaryjoin', 'secondary',
                      'foreign_keys', 'remote_side', 'order_by'):
-            if attr in kwargs and isinstance(kwargs[attr], str):
+            if attr in kwargs and isinstance(kwargs[attr],
+                                             util.string_types):
                 kwargs[attr] = resolve_arg(kwargs[attr])
 
     return prop
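The reworked ``add_item`` above guards against weak references whose referents were garbage collected between registration and lookup ([ticket:3208]). A standalone sketch of the failure mode it protects against (illustrative only, plain Python):

# Why the dereference-and-filter is needed: a weakref whose target has
# been collected returns None when called.
import weakref

class Dummy(object):
    pass

obj = Dummy()
ref = weakref.ref(obj)
del obj  # referent collected (immediately, in CPython)
assert ref() is None

contents = set([ref])
# the pattern used in add_item: dereference first, then skip dead refs
modules = set([
    cls.__module__ for cls in
    [r() for r in contents] if cls is not None])
assert modules == set()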
@@ -1,5 +1,5 @@
 # ext/horizontal_shard.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under
@@ -1,5 +1,5 @@
 # ext/hybrid.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under
@@ -46,7 +46,7 @@ as the class itself::
 
     @hybrid_method
     def contains(self, point):
-        return (self.start <= point) & (point < self.end)
+        return (self.start <= point) & (point <= self.end)
 
     @hybrid_method
     def intersects(self, other):
@@ -145,7 +145,7 @@ usage of the absolute value function::
         return func.abs(cls.length) / 2
 
 Above the Python function ``abs()`` is used for instance-level
-operations, the SQL function ``ABS()`` is used via the :attr:`.func`
+operations, the SQL function ``ABS()`` is used via the :data:`.func`
 object for class-level expressions::
 
     >>> i1.radius
@@ -634,10 +634,10 @@ from .. import util
 from ..orm import attributes, interfaces
 
 HYBRID_METHOD = util.symbol('HYBRID_METHOD')
-"""Symbol indicating an :class:`_InspectionAttr` that's
+"""Symbol indicating an :class:`InspectionAttr` that's
 of type :class:`.hybrid_method`.
 
-Is assigned to the :attr:`._InspectionAttr.extension_type`
+Is assigned to the :attr:`.InspectionAttr.extension_type`
 attribute.
 
 .. seealso::
@@ -647,10 +647,10 @@ HYBRID_METHOD = util.symbol('HYBRID_METHOD')
 """
 
 HYBRID_PROPERTY = util.symbol('HYBRID_PROPERTY')
-"""Symbol indicating an :class:`_InspectionAttr` that's
+"""Symbol indicating an :class:`InspectionAttr` that's
 of type :class:`.hybrid_property`.
 
-Is assigned to the :attr:`._InspectionAttr.extension_type`
+Is assigned to the :attr:`.InspectionAttr.extension_type`
 attribute.
 
 .. seealso::
@@ -660,7 +660,7 @@ HYBRID_PROPERTY = util.symbol('HYBRID_PROPERTY')
 """
 
 
-class hybrid_method(interfaces._InspectionAttr):
+class hybrid_method(interfaces.InspectionAttrInfo):
     """A decorator which allows definition of a Python object method with both
     instance-level and class-level behavior.
 
@@ -703,7 +703,7 @@ class hybrid_method(interfaces._InspectionAttr):
         return self
 
 
-class hybrid_property(interfaces._InspectionAttr):
+class hybrid_property(interfaces.InspectionAttrInfo):
     """A decorator which allows definition of a Python descriptor with both
     instance-level and class-level behavior.
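As a reminder of the API these docstrings describe, a minimal hybrid attribute looks like this; the model is hypothetical, adapted from the module's own ``Interval`` example:

from sqlalchemy import Column, Integer
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.ext.hybrid import hybrid_property

Base = declarative_base()

class Interval(Base):
    __tablename__ = 'interval'
    id = Column(Integer, primary_key=True)
    start = Column(Integer, nullable=False)
    end = Column(Integer, nullable=False)

    @hybrid_property
    def length(self):
        # on an instance: plain Python arithmetic;
        # on the class: a SQL expression, e.g.
        # session.query(Interval).filter(Interval.length > 10)
        return self.end - self.start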
@@ -166,7 +166,13 @@ class ExtendedInstrumentationRegistry(InstrumentationFactory):
     def manager_of_class(self, cls):
         if cls is None:
             return None
-        return self._manager_finders.get(cls, _default_manager_getter)(cls)
+        try:
+            finder = self._manager_finders.get(cls, _default_manager_getter)
+        except TypeError:
+            # due to weakref lookup on invalid object
+            return None
+        else:
+            return finder(cls)
 
     def state_of(self, instance):
         if instance is None:
@@ -392,6 +398,7 @@ def _reinstall_default_lookups():
             manager_of_class=_default_manager_getter
         )
     )
+    _instrumentation_factory._extended = False
 
 
 def _install_lookups(lookups):
@@ -1,5 +1,5 @@
 # ext/mutable.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under
@@ -402,6 +402,27 @@ class MutableBase(object):
             msg = "Attribute '%s' does not accept objects of type %s"
             raise ValueError(msg % (key, type(value)))
 
+    @classmethod
+    def _get_listen_keys(cls, attribute):
+        """Given a descriptor attribute, return a ``set()`` of the attribute
+        keys which indicate a change in the state of this attribute.
+
+        This is normally just ``set([attribute.key])``, but can be overridden
+        to provide for additional keys.  E.g. a :class:`.MutableComposite`
+        augments this set with the attribute keys associated with the columns
+        that comprise the composite value.
+
+        This collection is consulted in the case of intercepting the
+        :meth:`.InstanceEvents.refresh` and
+        :meth:`.InstanceEvents.refresh_flush` events, which pass along a list
+        of attribute names that have been refreshed; the list is compared
+        against this set to determine if action needs to be taken.
+
+        .. versionadded:: 1.0.5
+
+        """
+        return set([attribute.key])
+
     @classmethod
     def _listen_on_attribute(cls, attribute, coerce, parent_cls):
         """Establish this type as a mutation listener for the given
@@ -415,6 +436,8 @@ class MutableBase(object):
             # rely on "propagate" here
             parent_cls = attribute.class_
 
+        listen_keys = cls._get_listen_keys(attribute)
+
         def load(state, *args):
             """Listen for objects loaded or refreshed.
 
@@ -429,6 +452,10 @@ class MutableBase(object):
                 state.dict[key] = val
                 val._parents[state.obj()] = key
 
+        def load_attrs(state, ctx, attrs):
+            if not attrs or listen_keys.intersection(attrs):
+                load(state)
+
         def set(target, value, oldvalue, initiator):
             """Listen for set/replace events on the target
             data member.
@@ -463,7 +490,9 @@ class MutableBase(object):
 
         event.listen(parent_cls, 'load', load,
                      raw=True, propagate=True)
-        event.listen(parent_cls, 'refresh', load,
+        event.listen(parent_cls, 'refresh', load_attrs,
                      raw=True, propagate=True)
+        event.listen(parent_cls, 'refresh_flush', load_attrs,
+                     raw=True, propagate=True)
         event.listen(attribute, 'set', set,
                      raw=True, retval=True, propagate=True)
@@ -574,6 +603,10 @@ class MutableComposite(MutableBase):
 
     """
 
+    @classmethod
+    def _get_listen_keys(cls, attribute):
+        return set([attribute.key]).union(attribute.property._attribute_keys)
+
     def changed(self):
         """Subclasses should call this method whenever change events occur."""
 
@@ -602,6 +635,18 @@ _setup_composite_listener()
 class MutableDict(Mutable, dict):
     """A dictionary type that implements :class:`.Mutable`.
 
+    The :class:`.MutableDict` object implements a dictionary that will
+    emit change events to the underlying mapping when the contents of
+    the dictionary are altered, including when values are added or removed.
+
+    Note that :class:`.MutableDict` does **not** apply mutable tracking to
+    the *values themselves* inside the dictionary.  Therefore it is not a
+    sufficient solution for the use case of tracking deep changes to a
+    *recursive* dictionary structure, such as a JSON structure.  To support
+    this use case, build a subclass of :class:`.MutableDict` that provides
+    appropriate coercion to the values placed in the dictionary so that they
+    too are "mutable", and emit events up to their parent structure.
+
     .. versionadded:: 0.8
 
     """
@@ -621,16 +666,30 @@ class MutableDict(Mutable, dict):
         dict.__delitem__(self, key)
         self.changed()
 
+    def update(self, *a, **kw):
+        dict.update(self, *a, **kw)
+        self.changed()
+
     def pop(self, *arg):
         result = dict.pop(self, *arg)
         self.changed()
         return result
 
+    def popitem(self):
+        result = dict.popitem(self)
+        self.changed()
+        return result
+
+    def clear(self):
+        dict.clear(self)
+        self.changed()
+
     @classmethod
     def coerce(cls, key, value):
-        """Convert plain dictionary to MutableDict."""
-        if not isinstance(value, MutableDict):
+        """Convert plain dictionary to instance of this class."""
+        if not isinstance(value, cls):
             if isinstance(value, dict):
-                return MutableDict(value)
+                return cls(value)
             return Mutable.coerce(key, value)
         else:
             return value
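The expanded ``MutableDict`` docstring above is explicit that value-level (deep) changes are not tracked. A quick sketch of that boundary; the ``Config`` model is hypothetical and ``PickleType`` merely stands in for any dict-bearing column type:

from sqlalchemy import Column, Integer, PickleType
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.ext.mutable import MutableDict

Base = declarative_base()

class Config(Base):
    __tablename__ = 'config'
    id = Column(Integer, primary_key=True)
    data = Column(MutableDict.as_mutable(PickleType), default=dict)

# cfg.data['key'] = 'value'            -> change event emitted
# cfg.data['nested']['key'] = 'value'  -> NOT tracked; deep tracking
# requires a MutableDict subclass that coerces nested values, as the
# docstring above suggests.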
@@ -1,5 +1,5 @@
 # ext/orderinglist.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under
@@ -119,7 +119,7 @@ start numbering at 1 or some other integer, provide ``count_from=1``.
 
 
 """
-from ..orm.collections import collection
+from ..orm.collections import collection, collection_adapter
 from .. import util
 
 __all__ = ['ordering_list']
@@ -319,6 +319,9 @@ class OrderingList(list):
 
     def remove(self, entity):
         super(OrderingList, self).remove(entity)
+
+        adapter = collection_adapter(self)
+        if adapter and adapter._referenced_by_owner:
+            self._reorder()
 
     def pop(self, index=-1):
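Finally, the ``ordering_list`` helper whose ``remove()`` synchronization is adjusted above is used as follows (models hypothetical, following the module's documented slide/bullet pattern):

from sqlalchemy import Column, ForeignKey, Integer, String
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.ext.orderinglist import ordering_list
from sqlalchemy.orm import relationship

Base = declarative_base()

class Slide(Base):
    __tablename__ = 'slide'
    id = Column(Integer, primary_key=True)
    bullets = relationship(
        "Bullet", order_by="Bullet.position",
        collection_class=ordering_list('position'))

class Bullet(Base):
    __tablename__ = 'bullet'
    id = Column(Integer, primary_key=True)
    slide_id = Column(Integer, ForeignKey('slide.id'))
    position = Column(Integer)
    text = Column(String(100))

# slide.bullets.remove(b) now renumbers the remaining bullets via the
# _reorder() call added in OrderingList.remove() above.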
Some files were not shown because too many files have changed in this diff