update sqlalchemy

Jan Gerber 2016-02-22 13:17:39 +01:00
commit a4267212e4
192 changed files with 17429 additions and 9601 deletions

View file: plugin/bootstrap.py

@@ -0,0 +1,44 @@
"""
Bootstrapper for nose/pytest plugins.
The entire rationale for this system is to get the modules in plugin/
imported without importing all of the supporting library, so that we can
set up things for testing before coverage starts.
The rationale for all of plugin/ being *in* the supporting library in the
first place is so that the testing and plugin suite is available to other
libraries, mainly external SQLAlchemy and Alembic dialects, to make use
of the same test environment and standard suites available to
SQLAlchemy/Alembic themselves without the need to ship/install a separate
package outside of SQLAlchemy.
NOTE: copied/adapted from SQLAlchemy master for backwards compatibility;
this should be removable when Alembic targets SQLAlchemy 1.0.0.
"""
import os
import sys
bootstrap_file = locals()['bootstrap_file']
to_bootstrap = locals()['to_bootstrap']
def load_file_as_module(name):
    path = os.path.join(os.path.dirname(bootstrap_file), "%s.py" % name)
    if sys.version_info >= (3, 3):
        from importlib import machinery
        mod = machinery.SourceFileLoader(name, path).load_module()
    else:
        import imp
        mod = imp.load_source(name, path)
    return mod


if to_bootstrap == "pytest":
    sys.modules["sqla_plugin_base"] = load_file_as_module("plugin_base")
    sys.modules["sqla_pytestplugin"] = load_file_as_module("pytestplugin")
elif to_bootstrap == "nose":
    sys.modules["sqla_plugin_base"] = load_file_as_module("plugin_base")
    sys.modules["sqla_noseplugin"] = load_file_as_module("noseplugin")
else:
    raise Exception("unknown bootstrap: %s" % to_bootstrap)  # noqa
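
The bootstrapper above is exec()'d rather than imported: the caller is expected to pre-seed bootstrap_file and to_bootstrap in the execution namespace, which is why both are read back out of locals(). A minimal sketch of a conftest.py driving it that way; the directory layout below is an assumption, not the actual layout of this repository:

# Hypothetical conftest.py; only the exec protocol matters here.
import os

bootstrap_file = os.path.join(
    os.path.dirname(__file__),
    "lib", "sqlalchemy", "testing", "plugin", "bootstrap.py")  # assumed path

with open(bootstrap_file) as f:
    code = compile(f.read(), "bootstrap.py", "exec")

to_bootstrap = "pytest"
exec(code, globals(), locals())

# bootstrap.py has now put sqla_plugin_base / sqla_pytestplugin into
# sys.modules without importing the rest of the library; re-export the
# pytest hooks so this conftest acts as the plugin.
from sqla_pytestplugin import *  # noqa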

View file: plugin/noseplugin.py

@@ -1,5 +1,5 @@
# plugin/noseplugin.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -12,22 +12,22 @@ way (e.g. as a package-less import).
"""
try:
    # installed by bootstrap.py
    import sqla_plugin_base as plugin_base
except ImportError:
    # assume we're a package, use traditional import
    from . import plugin_base
import os
import sys
from nose.plugins import Plugin
import nose
fixtures = None
# no package imports yet! this prevents us from tripping coverage
# too soon.
path = os.path.join(os.path.dirname(__file__), "plugin_base.py")
if sys.version_info >= (3, 3):
    from importlib import machinery
    plugin_base = machinery.SourceFileLoader(
        "plugin_base", path).load_module()
else:
    import imp
    plugin_base = imp.load_source("plugin_base", path)
py3k = sys.version_info >= (3, 0)
class NoseSQLAlchemy(Plugin):
@@ -57,28 +57,39 @@ class NoseSQLAlchemy(Plugin):
plugin_base.set_coverage_flag(options.enable_plugin_coverage)
global fixtures
from sqlalchemy.testing import fixtures
plugin_base.set_skip_test(nose.SkipTest)
def begin(self):
global fixtures
from sqlalchemy.testing import fixtures # noqa
plugin_base.post_begin()
def describeTest(self, test):
return ""
def wantFunction(self, fn):
if fn.__module__ is None:
return False
if fn.__module__.startswith('sqlalchemy.testing'):
return False
return False
def wantMethod(self, fn):
    if py3k:
        if not hasattr(fn.__self__, 'cls'):
            return False
        cls = fn.__self__.cls
    else:
        cls = fn.im_class
    return plugin_base.want_method(cls, fn)
def wantClass(self, cls):
return plugin_base.want_class(cls)
def beforeTest(self, test):
plugin_base.before_test(test,
test.test.cls.__module__,
test.test.cls, test.test.method.__name__)
if not hasattr(test.test, 'cls'):
return
plugin_base.before_test(
test,
test.test.cls.__module__,
test.test.cls, test.test.method.__name__)
def afterTest(self, test):
plugin_base.after_test(test)
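
How NoseSQLAlchemy gets registered with nose is outside this diff. One programmatic way to run a suite with it; the import path follows upstream SQLAlchemy and the test directory is an assumption, so a vendored copy may need different names:

# Illustrative only: hand the plugin to nose directly instead of relying
# on an entry-point registration.
import nose
from sqlalchemy.testing.plugin.noseplugin import NoseSQLAlchemy

nose.main(addplugins=[NoseSQLAlchemy()], argv=["nosetests", "test/"])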

View file: plugin/plugin_base.py

@@ -1,5 +1,5 @@
# plugin/plugin_base.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -14,12 +14,6 @@ functionality via py.test.
"""
from __future__ import absolute_import
try:
    # unittest has a SkipTest also but pytest doesn't
    # honor it unless nose is imported too...
    from nose import SkipTest
except ImportError:
    from _pytest.runner import Skipped as SkipTest
import sys
import re
@@ -31,7 +25,6 @@ if py3k:
else:
    import ConfigParser as configparser
# late imports
fixtures = None
engines = None
@@ -47,7 +40,8 @@ file_config = None
logging = None
db_opts = {}
include_tags = set()
exclude_tags = set()
options = None
@@ -69,8 +63,6 @@ def setup_options(make_option):
help="Drop all tables in the target database first")
make_option("--backend-only", action="store_true", dest="backend_only",
help="Run only tests marked with __backend__")
make_option("--mockpool", action="store_true", dest="mockpool",
help="Use mock pool (asserts only one connection used)")
make_option("--low-connections", action="store_true",
dest="low_connections",
help="Use a low number of distinct connections - "
@@ -86,18 +78,56 @@ def setup_options(make_option):
dest="cdecimal", default=False,
help="Monkeypatch the cdecimal library into Python 'decimal' "
"for all tests")
make_option("--serverside", action="callback",
callback=_server_side_cursors,
help="Turn on server side cursors for PG")
make_option("--mysql-engine", action="store",
dest="mysql_engine", default=None,
help="Use the specified MySQL storage engine for all tables, "
"default is a db-default/InnoDB combo.")
make_option("--tableopts", action="append", dest="tableopts", default=[],
help="Add a dialect-specific table option, key=value")
make_option("--include-tag", action="callback", callback=_include_tag,
type="string",
help="Include tests with tag <tag>")
make_option("--exclude-tag", action="callback", callback=_exclude_tag,
type="string",
help="Exclude tests with tag <tag>")
make_option("--write-profiles", action="store_true",
dest="write_profiles", default=False,
help="Write/update profiling data.")
help="Write/update failing profiling data.")
make_option("--force-write-profiles", action="store_true",
dest="force_write_profiles", default=False,
help="Unconditionally write/update profiling data.")
def configure_follower(follower_ident):
    """Configure required state for a follower.

    This is invoked in the parent process and typically includes
    database creation.

    """
    from sqlalchemy.testing import provision
    provision.FOLLOWER_IDENT = follower_ident


def memoize_important_follower_config(dict_):
    """Store important configuration we will need to send to a follower.

    This is invoked in the parent process after normal config is set up.

    It is necessary because py.test does not appear to use forking, so we
    start with nothing in memory, *but* it also isn't running our argparse
    callables, so we have to copy all of that over explicitly.

    """
    dict_['memoized_config'] = {
        'include_tags': include_tags,
        'exclude_tags': exclude_tags
    }


def restore_important_follower_config(dict_):
    """Restore important configuration needed by a follower.

    This is invoked in the follower process.

    """
    global include_tags, exclude_tags
    include_tags.update(dict_['memoized_config']['include_tags'])
    exclude_tags.update(dict_['memoized_config']['exclude_tags'])
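
Taken together, the three helpers above form a small hand-off protocol: the master stashes the tag sets into the dict that pytest-xdist ships to each worker, and each worker merges them back into its own, otherwise empty, module globals. An illustrative round trip, with a plain dict standing in for node.slaveinput and assuming the module is importable under the name bootstrap.py registers:

import copy
import sqla_plugin_base as plugin_base

plugin_base.include_tags.add("memory_intensive")    # what argparse did in the master

slaveinput = {}
plugin_base.memoize_important_follower_config(slaveinput)   # master side

# pytest-xdist pickles slaveinput across the process boundary; emulate that
slaveinput = copy.deepcopy(slaveinput)

plugin_base.include_tags.clear()                     # a follower starts out empty
plugin_base.restore_important_follower_config(slaveinput)   # follower side
assert "memory_intensive" in plugin_base.include_tags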
def read_config():
@@ -117,6 +147,13 @@ def pre_begin(opt):
def set_coverage_flag(value):
options.has_coverage = value
_skip_test_exception = None
def set_skip_test(exc):
global _skip_test_exception
_skip_test_exception = exc
def post_begin():
"""things to set up later, once we know coverage is running."""
@@ -129,10 +166,12 @@ def post_begin():
    global util, fixtures, engines, exclusions, \
        assertions, warnings, profiling, \
        config, testing
    from sqlalchemy import testing
    from sqlalchemy.testing import fixtures, engines, exclusions, \
        assertions, warnings, profiling, config
    from sqlalchemy import util
    from sqlalchemy import testing  # noqa
    from sqlalchemy.testing import fixtures, engines, exclusions  # noqa
    from sqlalchemy.testing import assertions, warnings, profiling  # noqa
    from sqlalchemy.testing import config  # noqa
    from sqlalchemy import util  # noqa

    warnings.setup_filters()
def _log(opt_str, value, parser):
@@ -154,14 +193,17 @@ def _list_dbs(*args):
sys.exit(0)
def _server_side_cursors(opt_str, value, parser):
db_opts['server_side_cursors'] = True
def _requirements_opt(opt_str, value, parser):
_setup_requirements(value)
def _exclude_tag(opt_str, value, parser):
exclude_tags.add(value.replace('-', '_'))
def _include_tag(opt_str, value, parser):
include_tags.add(value.replace('-', '_'))
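
Both tag callbacks normalize hyphens to underscores, so CLI-style tag spellings match the Python-identifier-style tag names used in the suite. For example (tag value illustrative):

# "--exclude-tag memory-intensive" on the command line ends up as
exclude_tags.add("memory-intensive".replace('-', '_'))  # stored as "memory_intensive"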
pre_configure = []
post_configure = []
@@ -189,10 +231,18 @@ def _monkeypatch_cdecimal(options, file_config):
sys.modules['decimal'] = cdecimal
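
The --cdecimal switch is nothing more than module aliasing, as the surviving line above shows. A standalone sketch of the same trick; it assumes the third-party cdecimal package is installed and is only relevant on Python 2, since Python 3.3+ already ships a C implementation of decimal:

import sys
import cdecimal

# Alias the accelerated module under the stdlib name *before* anything
# does "import decimal"; every later import then receives cdecimal.
sys.modules['decimal'] = cdecimal

import decimal
assert decimal is cdecimal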
@post
def _init_skiptest(options, file_config):
from sqlalchemy.testing import config
config._skip_test_exception = _skip_test_exception
@post
def _engine_uri(options, file_config):
from sqlalchemy.testing import engines, config
from sqlalchemy.testing import config
from sqlalchemy import testing
from sqlalchemy.testing import provision
if options.dburi:
db_urls = list(options.dburi)
@@ -214,18 +264,11 @@ def _engine_uri(options, file_config):
db_urls.append(file_config.get('db', 'default'))
for db_url in db_urls:
eng = engines.testing_engine(db_url, db_opts)
eng.connect().close()
config.Config.register(eng, db_opts, options, file_config, testing)
cfg = provision.setup_config(
db_url, options, file_config, provision.FOLLOWER_IDENT)
config.db_opts = db_opts
@post
def _engine_pool(options, file_config):
if options.mockpool:
from sqlalchemy import pool
db_opts['poolclass'] = pool.AssertionPool
if not config._current:
cfg.set_as_current(cfg, testing)
@post
@@ -256,7 +299,8 @@ def _setup_requirements(argument):
@post
def _prep_testing_database(options, file_config):
from sqlalchemy.testing import config
from sqlalchemy.testing import config, util
from sqlalchemy.testing.exclusions import against
from sqlalchemy import schema, inspect
if options.dropfirst:
@@ -286,32 +330,18 @@ def _prep_testing_database(options, file_config):
schema="test_schema")
))
for tname in reversed(inspector.get_table_names(
order_by="foreign_key")):
e.execute(schema.DropTable(
schema.Table(tname, schema.MetaData())
))
util.drop_all_tables(e, inspector)
if config.requirements.schemas.enabled_for_config(cfg):
for tname in reversed(inspector.get_table_names(
order_by="foreign_key", schema="test_schema")):
e.execute(schema.DropTable(
schema.Table(tname, schema.MetaData(),
schema="test_schema")
))
util.drop_all_tables(e, inspector, schema=cfg.test_schema)
@post
def _set_table_options(options, file_config):
from sqlalchemy.testing import schema
table_options = schema.table_options
for spec in options.tableopts:
key, value = spec.split('=')
table_options[key] = value
if options.mysql_engine:
table_options['mysql_engine'] = options.mysql_engine
if against(cfg, "postgresql"):
from sqlalchemy.dialects import postgresql
for enum in inspector.get_enums("*"):
e.execute(postgresql.DropEnumType(
postgresql.ENUM(
name=enum['name'],
schema=enum['schema'])))
@post
@@ -347,6 +377,30 @@ def want_class(cls):
return True
def want_method(cls, fn):
    if not fn.__name__.startswith("test_"):
        return False
    elif fn.__module__ is None:
        return False
    elif include_tags:
        return (
            hasattr(cls, '__tags__') and
            exclusions.tags(cls.__tags__).include_test(
                include_tags, exclude_tags)
        ) or (
            hasattr(fn, '_sa_exclusion_extend') and
            fn._sa_exclusion_extend.include_test(
                include_tags, exclude_tags)
        )
    elif exclude_tags and hasattr(cls, '__tags__'):
        return exclusions.tags(cls.__tags__).include_test(
            include_tags, exclude_tags)
    elif exclude_tags and hasattr(fn, '_sa_exclusion_extend'):
        return fn._sa_exclusion_extend.include_test(include_tags, exclude_tags)
    else:
        return True
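
want_method() only consults tag metadata that suite classes declare themselves. A sketch of the class-level form it looks for; the class and tag names here are made up:

from sqlalchemy.testing import fixtures


class HugeResultSetTest(fixtures.TestBase):
    # with "--exclude-tag memory-intensive" these tests are filtered out;
    # with "--include-tag memory-intensive" only tagged tests are collected
    __tags__ = ('memory_intensive',)

    def test_fetch_one_million_rows(self):
        pass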
def generate_sub_tests(cls, module):
if getattr(cls, '__backend__', False):
for cfg in _possible_configs_for_cls(cls):
@@ -356,7 +410,7 @@ def generate_sub_tests(cls, module):
(cls, ),
{
"__only_on__": ("%s+%s" % (cfg.db.name, cfg.db.driver)),
"__backend__": False}
}
)
setattr(module, name, subcls)
yield subcls
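
generate_sub_tests() is what fans a single __backend__ class out into one copy per configured database. Roughly, for a hypothetical FooTest collected while a SQLite and a PostgreSQL URL are both configured, it yields something equivalent to the following; the generated class name format is not shown in this hunk, so the names are illustrative:

class FooTest(object):      # stand-in for a real __backend__ suite class
    __backend__ = True

FooTest_sqlite_pysqlite = type(
    "FooTest_sqlite_pysqlite", (FooTest,),
    {"__only_on__": "sqlite+pysqlite"})
FooTest_postgresql_psycopg2 = type(
    "FooTest_postgresql_psycopg2", (FooTest,),
    {"__only_on__": "postgresql+psycopg2"})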
@@ -370,6 +424,8 @@ def start_test_class(cls):
def stop_test_class(cls):
#from sqlalchemy import inspect
#assert not inspect(testing.db).get_table_names()
engines.testing_reaper._stop_test_ctx()
if not options.low_connections:
assertions.global_cleanup_assertions()
@@ -398,33 +454,27 @@ def before_test(test, test_module_name, test_class, test_name):
id_ = "%s.%s.%s" % (test_module_name, name, test_name)
warnings.resetwarnings()
profiling._current_test = id_
def after_test(test):
engines.testing_reaper._after_test_ctx()
warnings.resetwarnings()
def _possible_configs_for_cls(cls):
def _possible_configs_for_cls(cls, reasons=None):
    all_configs = set(config.Config.all_configs())

    if cls.__unsupported_on__:
        spec = exclusions.db_spec(*cls.__unsupported_on__)
        for config_obj in list(all_configs):
            if spec(config_obj):
                all_configs.remove(config_obj)

    if getattr(cls, '__only_on__', None):
        spec = exclusions.db_spec(*util.to_list(cls.__only_on__))
        for config_obj in list(all_configs):
            if not spec(config_obj):
                all_configs.remove(config_obj)

    return all_configs
def _do_skips(cls):
all_configs = _possible_configs_for_cls(cls)
reasons = []
if hasattr(cls, '__requires__'):
requirements = config.requirements
@@ -432,10 +482,11 @@ def _do_skips(cls):
for requirement in cls.__requires__:
check = getattr(requirements, requirement)
if check.predicate(config_obj):
skip_reasons = check.matching_config_reasons(config_obj)
if skip_reasons:
all_configs.remove(config_obj)
if check.reason:
reasons.append(check.reason)
if reasons is not None:
reasons.extend(skip_reasons)
break
if hasattr(cls, '__prefer_requires__'):
@@ -445,36 +496,45 @@ def _do_skips(cls):
for requirement in cls.__prefer_requires__:
check = getattr(requirements, requirement)
if check.predicate(config_obj):
if not check.enabled_for_config(config_obj):
non_preferred.add(config_obj)
if all_configs.difference(non_preferred):
all_configs.difference_update(non_preferred)
return all_configs
def _do_skips(cls):
reasons = []
all_configs = _possible_configs_for_cls(cls, reasons)
if getattr(cls, '__skip_if__', False):
for c in getattr(cls, '__skip_if__'):
if c():
raise SkipTest("'%s' skipped by %s" % (
config.skip_test("'%s' skipped by %s" % (
cls.__name__, c.__name__)
)
for db_spec, op, spec in getattr(cls, '__excluded_on__', ()):
for config_obj in list(all_configs):
if exclusions.skip_if(
exclusions.SpecPredicate(db_spec, op, spec)
).predicate(config_obj):
all_configs.remove(config_obj)
if not all_configs:
raise SkipTest(
"'%s' unsupported on DB implementation %s%s" % (
if getattr(cls, '__backend__', False):
msg = "'%s' unsupported for implementation '%s'" % (
cls.__name__, cls.__only_on__)
else:
msg = "'%s' unsupported on any DB implementation %s%s" % (
cls.__name__,
", ".join("'%s' = %s"
% (config_obj.db.name,
config_obj.db.dialect.server_version_info)
for config_obj in config.Config.all_configs()
),
", ".join(
"'%s(%s)+%s'" % (
config_obj.db.name,
".".join(
str(dig) for dig in
config_obj.db.dialect.server_version_info),
config_obj.db.driver
)
for config_obj in config.Config.all_configs()
),
", ".join(reasons)
)
)
config.skip_test(msg)
elif hasattr(cls, '__prefer_backends__'):
non_preferred = set()
spec = exclusions.db_spec(*util.to_list(cls.__prefer_backends__))

View file: plugin/pytestplugin.py

@@ -1,8 +1,21 @@
try:
    # installed by bootstrap.py
    import sqla_plugin_base as plugin_base
except ImportError:
    # assume we're a package, use traditional import
    from . import plugin_base
import pytest
import argparse
import inspect
from . import plugin_base
import collections
import itertools
try:
    import xdist  # noqa
    has_xdist = True
except ImportError:
    has_xdist = False
def pytest_addoption(parser):
@@ -24,13 +37,40 @@ def pytest_addoption(parser):
def pytest_configure(config):
    if hasattr(config, "slaveinput"):
        plugin_base.restore_important_follower_config(config.slaveinput)
        plugin_base.configure_follower(
            config.slaveinput["follower_ident"]
        )

    plugin_base.pre_begin(config.option)

    plugin_base.set_coverage_flag(bool(getattr(config.option,
                                               "cov_source", False)))

    plugin_base.set_skip_test(pytest.skip.Exception)
def pytest_sessionstart(session):
plugin_base.post_begin()
if has_xdist:
    import uuid

    def pytest_configure_node(node):
        # the master for each node fills the slaveinput dictionary,
        # which pytest-xdist will transfer to the subprocess
        plugin_base.memoize_important_follower_config(node.slaveinput)
        node.slaveinput["follower_ident"] = "test_%s" % uuid.uuid4().hex[0:12]
        from sqlalchemy.testing import provision
        provision.create_follower_db(node.slaveinput["follower_ident"])

    def pytest_testnodedown(node, error):
        from sqlalchemy.testing import provision
        provision.drop_follower_db(node.slaveinput["follower_ident"])
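
The two xdist hooks above give every worker a private, throwaway database keyed by a random ident. The same provision helpers can be driven by hand; a hedged sketch, assuming a test engine has already been configured and that the call signatures are exactly as used above:

import uuid
from sqlalchemy.testing import provision

ident = "test_%s" % uuid.uuid4().hex[0:12]
provision.create_follower_db(ident)   # e.g. creates a scratch database for the ident
try:
    pass  # run whatever needs the scratch database here
finally:
    provision.drop_follower_db(ident)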
def pytest_collection_modifyitems(session, config, items):
# look for all those classes that specify __backend__ and
@@ -44,6 +84,10 @@ def pytest_collection_modifyitems(session, config, items):
# new classes to a module on the fly.
rebuilt_items = collections.defaultdict(list)
items[:] = [
item for item in
items if isinstance(item.parent, pytest.Instance)
and not item.parent.parent.name.startswith("_")]
test_classes = set(item.parent for item in items)
for test_class in test_classes:
for sub_cls in plugin_base.generate_sub_tests(
@@ -74,12 +118,11 @@ def pytest_pycollect_makeitem(collector, name, obj):
def pytest_pycollect_makeitem(collector, name, obj):
if inspect.isclass(obj) and plugin_base.want_class(obj):
return pytest.Class(name, parent=collector)
elif inspect.isfunction(obj) and \
name.startswith("test_") and \
isinstance(collector, pytest.Instance):
isinstance(collector, pytest.Instance) and \
plugin_base.want_method(collector.cls, obj):
return pytest.Function(name, parent=collector)
else:
return []
@@ -97,16 +140,18 @@ def pytest_runtest_setup(item):
return
# ... so we're doing a little dance here to figure it out...
if item.parent.parent is not _current_class:
if _current_class is None:
class_setup(item.parent.parent)
_current_class = item.parent.parent
# this is needed for the class-level, to ensure that the
# teardown runs after the class is completed with its own
# class-level teardown...
item.parent.parent.addfinalizer(
lambda: class_teardown(item.parent.parent))
def finalize():
global _current_class
class_teardown(item.parent.parent)
_current_class = None
item.parent.parent.addfinalizer(finalize)
test_setup(item)