git.ipfire.org Git - thirdparty/sqlalchemy/sqlalchemy.git/commitdiff
remove metadata.bind use from test suite
author Mike Bayer <mike_mp@zzzcomputing.com>
Mon, 21 Dec 2020 15:22:43 +0000 (10:22 -0500)
committer Mike Bayer <mike_mp@zzzcomputing.com>
Sun, 3 Jan 2021 18:22:29 +0000 (13:22 -0500)
importantly this means we can remove bound metadata from
the fixtures that are used by Alembic's test suite.

hopefully this is the last one that has to happen to allow
Alembic to be fully 1.4/2.0.

Start moving from @testing.provide_metadata to a pytest
metadata fixture.  This does not seem to have any negative
effects even though TablesTest uses a "self.metadata" attribute.

Change-Id: Iae6ab95938a7e92b6d42086aec534af27b5577d3

50 files changed:
lib/sqlalchemy/dialects/postgresql/provision.py
lib/sqlalchemy/testing/assertions.py
lib/sqlalchemy/testing/engines.py
lib/sqlalchemy/testing/fixtures.py
lib/sqlalchemy/testing/pickleable.py
lib/sqlalchemy/testing/plugin/plugin_base.py
lib/sqlalchemy/testing/provision.py
lib/sqlalchemy/testing/suite/test_reflection.py
lib/sqlalchemy/testing/suite/test_types.py
lib/sqlalchemy/testing/util.py
lib/sqlalchemy/testing/warnings.py
test/aaa_profiling/test_memusage.py
test/aaa_profiling/test_resultset.py
test/dialect/mssql/test_engine.py
test/dialect/mssql/test_reflection.py
test/dialect/mssql/test_types.py
test/dialect/mysql/test_reflection.py
test/dialect/mysql/test_types.py
test/dialect/oracle/test_dialect.py
test/dialect/oracle/test_reflection.py
test/dialect/oracle/test_types.py
test/dialect/postgresql/test_dialect.py
test/dialect/postgresql/test_reflection.py
test/dialect/postgresql/test_types.py
test/dialect/test_sqlite.py
test/engine/test_ddlevents.py
test/engine/test_deprecations.py
test/engine/test_execute.py
test/engine/test_reflection.py
test/ext/declarative/test_reflection.py
test/ext/test_associationproxy.py
test/ext/test_automap.py
test/ext/test_deprecations.py
test/ext/test_horizontal_shard.py
test/ext/test_orderinglist.py
test/orm/declarative/test_basic.py
test/orm/declarative/test_mixin.py
test/orm/inheritance/_poly_fixtures.py
test/orm/test_bind.py
test/orm/test_compile.py
test/orm/test_lazy_relations.py
test/orm/test_mapper.py
test/orm/test_query.py
test/orm/test_relationships.py
test/orm/test_session.py
test/sql/test_constraints.py
test/sql/test_query.py
test/sql/test_quote.py
test/sql/test_sequences.py
test/sql/test_types.py

index 575316c61dee8a459e63274dca6c4fe5b892e33b..d345cdfdfecf6eeab2879526cf63cb64a83551da 100644 (file)
@@ -1,8 +1,11 @@
 import time
 
 from ... import exc
+from ... import inspect
 from ... import text
 from ...testing.provision import create_db
+from ...testing.provision import drop_all_schema_objects_post_tables
+from ...testing.provision import drop_all_schema_objects_pre_tables
 from ...testing.provision import drop_db
 from ...testing.provision import log
 from ...testing.provision import set_default_schema_on_connection
@@ -78,3 +81,24 @@ def _postgresql_set_default_schema_on_connection(
     cursor.execute("SET SESSION search_path='%s'" % schema_name)
     cursor.close()
     dbapi_connection.autocommit = existing_autocommit
+
+
+@drop_all_schema_objects_pre_tables.for_db("postgresql")
+def drop_all_schema_objects_pre_tables(cfg, eng):
+    with eng.connect().execution_options(isolation_level="AUTOCOMMIT") as conn:
+        for xid in conn.execute("select gid from pg_prepared_xacts").scalars():
+            conn.execute("ROLLBACK PREPARED '%s'" % xid)
+
+
+@drop_all_schema_objects_post_tables.for_db("postgresql")
+def drop_all_schema_objects_post_tables(cfg, eng):
+    from sqlalchemy.dialects import postgresql
+
+    inspector = inspect(eng)
+    with eng.begin() as conn:
+        for enum in inspector.get_enums("*"):
+            conn.execute(
+                postgresql.DropEnumType(
+                    postgresql.ENUM(name=enum["name"], schema=enum["schema"])
+                )
+            )
index 17a0acf20d436b934095759a58b1934182da771b..8c22b047e789c05f0c10e96b905a698bc5ae826a 100644 (file)
@@ -225,11 +225,11 @@ def is_instance_of(a, b, msg=None):
 
 
 def is_true(a, msg=None):
-    is_(a, True, msg=msg)
+    is_(bool(a), True, msg=msg)
 
 
 def is_false(a, msg=None):
-    is_(a, False, msg=msg)
+    is_(bool(a), False, msg=msg)
 
 
 def is_(a, b, msg=None):
index 4d4563afb0c0dd82dfd4250b186fded05c760472..5ab2fa02ab50d9ad83ee5c569d768cd28a2d1fd9 100644 (file)
@@ -275,7 +275,7 @@ def testing_engine(url=None, options=None, future=False, asyncio=False):
 
     if isinstance(engine.pool, pool.QueuePool):
         engine.pool._timeout = 0
-        engine.pool._max_overflow = 0
+        engine.pool._max_overflow = 5
     if use_reaper:
         testing_reaper.add_engine(engine)
 
index 0ede25176a4c7bf439ec488b28378e36e61fd40e..4eccf89c7f1384ba5a0fe16f8fad5ab60e9bd33e 100644 (file)
@@ -13,6 +13,7 @@ from . import assertions
 from . import config
 from . import schema
 from .engines import drop_all_tables
+from .engines import testing_engine
 from .entities import BasicEntity
 from .entities import ComparableEntity
 from .entities import ComparableMixin  # noqa
@@ -24,7 +25,6 @@ from ..orm import registry
 from ..orm.decl_api import DeclarativeMeta
 from ..schema import sort_tables_and_constraints
 
-
 # whether or not we use unittest changes things dramatically,
 # as far as how pytest collection works.
 
@@ -73,21 +73,31 @@ class TestBase(object):
                 trans.rollback()
             conn.close()
 
-    # propose a replacement for @testing.provide_metadata.
-    # the problem with this is that TablesTest below has a ".metadata"
-    # attribute already which is accessed directly as part of the
-    # @testing.provide_metadata pattern.  Might need to call this _metadata
-    # for it to be useful.
-    # @config.fixture()
-    # def metadata(self):
-    #    """Provide bound MetaData for a single test, dropping afterwards."""
-    #
-    #    from . import engines
-    #    metadata = schema.MetaData(config.db)
-    #    try:
-    #        yield metadata
-    #    finally:
-    #       engines.drop_all_tables(metadata, config.db)
+    @config.fixture()
+    def future_connection(self):
+
+        eng = testing_engine(future=True)
+        conn = eng.connect()
+        trans = conn.begin()
+        try:
+            yield conn
+        finally:
+            if trans.is_active:
+                trans.rollback()
+            conn.close()
+
+    @config.fixture()
+    def metadata(self):
+        """Provide bound MetaData for a single test, dropping afterwards."""
+
+        from . import engines
+        from ..sql import schema
+
+        metadata = schema.MetaData()
+        try:
+            yield metadata
+        finally:
+            engines.drop_all_tables(metadata, config.db)
 
 
 class FutureEngineMixin(object):
@@ -136,11 +146,15 @@ class TablesTest(TestBase):
     run_dispose_bind = None
 
     bind = None
-    metadata = None
+    _tables_metadata = None
     tables = None
     other = None
     sequences = None
 
+    @property
+    def tables_test_metadata(self):
+        return self._tables_metadata
+
     @classmethod
     def setup_class(cls):
         cls._init_class()
@@ -161,8 +175,8 @@ class TablesTest(TestBase):
         cls.sequences = adict()
 
         cls.bind = cls.setup_bind()
-        cls.metadata = sa.MetaData()
-        cls.metadata.bind = cls.bind
+        cls._tables_metadata = sa.MetaData()
+        cls._tables_metadata.bind = cls.bind
 
     @classmethod
     def _setup_once_inserts(cls):
@@ -174,21 +188,21 @@ class TablesTest(TestBase):
     @classmethod
     def _setup_once_tables(cls):
         if cls.run_define_tables == "once":
-            cls.define_tables(cls.metadata)
+            cls.define_tables(cls._tables_metadata)
             if cls.run_create_tables == "once":
-                cls.metadata.create_all(cls.bind)
-            cls.tables.update(cls.metadata.tables)
-            cls.sequences.update(cls.metadata._sequences)
+                cls._tables_metadata.create_all(cls.bind)
+            cls.tables.update(cls._tables_metadata.tables)
+            cls.sequences.update(cls._tables_metadata._sequences)
 
     def _setup_each_tables(self):
         if self.run_define_tables == "each":
-            self.define_tables(self.metadata)
+            self.define_tables(self._tables_metadata)
             if self.run_create_tables == "each":
-                self.metadata.create_all(self.bind)
-            self.tables.update(self.metadata.tables)
-            self.sequences.update(self.metadata._sequences)
+                self._tables_metadata.create_all(self.bind)
+            self.tables.update(self._tables_metadata.tables)
+            self.sequences.update(self._tables_metadata._sequences)
         elif self.run_create_tables == "each":
-            self.metadata.create_all(self.bind)
+            self._tables_metadata.create_all(self.bind)
 
     def _setup_each_inserts(self):
         if self.run_inserts == "each":
@@ -200,10 +214,10 @@ class TablesTest(TestBase):
         if self.run_define_tables == "each":
             self.tables.clear()
             if self.run_create_tables == "each":
-                drop_all_tables(self.metadata, self.bind)
-            self.metadata.clear()
+                drop_all_tables(self._tables_metadata, self.bind)
+            self._tables_metadata.clear()
         elif self.run_create_tables == "each":
-            drop_all_tables(self.metadata, self.bind)
+            drop_all_tables(self._tables_metadata, self.bind)
 
         # no need to run deletes if tables are recreated on setup
         if (
@@ -216,7 +230,7 @@ class TablesTest(TestBase):
                     [
                         t
                         for (t, fks) in sort_tables_and_constraints(
-                            self.metadata.tables.values()
+                            self._tables_metadata.tables.values()
                         )
                         if t is not None
                     ]
@@ -239,12 +253,12 @@ class TablesTest(TestBase):
     @classmethod
     def _teardown_once_metadata_bind(cls):
         if cls.run_create_tables:
-            drop_all_tables(cls.metadata, cls.bind)
+            drop_all_tables(cls._tables_metadata, cls.bind)
 
         if cls.run_dispose_bind == "once":
             cls.dispose_bind(cls.bind)
 
-        cls.metadata.bind = None
+        cls._tables_metadata.bind = None
 
         if cls.run_setup_bind is not None:
             cls.bind = None
@@ -294,7 +308,7 @@ class TablesTest(TestBase):
             headers[table] = data[0]
             rows[table] = data[1:]
         for table, fks in sort_tables_and_constraints(
-            cls.metadata.tables.values()
+            cls._tables_metadata.tables.values()
         ):
             if table is None:
                 continue
@@ -480,7 +494,7 @@ class DeclarativeMappedTest(MappedTest):
             __table_cls__ = schema.Table
 
         _DeclBase = declarative_base(
-            metadata=cls.metadata,
+            metadata=cls._tables_metadata,
             metaclass=FindFixtureDeclarative,
             cls=DeclarativeBasic,
         )
@@ -490,8 +504,8 @@ class DeclarativeMappedTest(MappedTest):
         # classes
         super(DeclarativeMappedTest, cls)._with_register_classes(fn)
 
-        if cls.metadata.tables and cls.run_create_tables:
-            cls.metadata.create_all(config.db)
+        if cls._tables_metadata.tables and cls.run_create_tables:
+            cls._tables_metadata.create_all(config.db)
 
 
 class ComputedReflectionFixtureTest(TablesTest):
index 8f8e26913adddab6f99df9392343dcda7ee4599c..d8c6d5fdf2ae31ed11fe677fc5d9343f6171825d 100644 (file)
@@ -52,9 +52,9 @@ class Screen(object):
 
 
 class Foo(object):
-    def __init__(self, moredata):
+    def __init__(self, moredata, stuff="im stuff"):
         self.data = "im data"
-        self.stuff = "im stuff"
+        self.stuff = stuff
         self.moredata = moredata
 
     __hash__ = object.__hash__
index 8b6a7d68ab3973e2114bd0724fec4f34a92528bd..d200b262eb51286987cf408239aacf69c486e181 100644 (file)
@@ -461,73 +461,13 @@ def _setup_requirements(argument):
 
 @post
 def _prep_testing_database(options, file_config):
-    from sqlalchemy.testing import config, util
-    from sqlalchemy.testing.exclusions import against
-    from sqlalchemy import schema, inspect
+    from sqlalchemy.testing import config
 
     if options.dropfirst:
-        for cfg in config.Config.all_configs():
-            e = cfg.db
-
-            # TODO: this has to be part of provision.py in postgresql
-            if against(cfg, "postgresql"):
-                with e.connect().execution_options(
-                    isolation_level="AUTOCOMMIT"
-                ) as conn:
-                    for xid in conn.execute(
-                        "select gid from pg_prepared_xacts"
-                    ).scalars():
-                        conn.execute("ROLLBACK PREPARED '%s'" % xid)
-
-            inspector = inspect(e)
-            try:
-                view_names = inspector.get_view_names()
-            except NotImplementedError:
-                pass
-            else:
-                for vname in view_names:
-                    e.execute(
-                        schema._DropView(
-                            schema.Table(vname, schema.MetaData())
-                        )
-                    )
+        from sqlalchemy.testing import provision
 
-            if config.requirements.schemas.enabled_for_config(cfg):
-                try:
-                    view_names = inspector.get_view_names(schema="test_schema")
-                except NotImplementedError:
-                    pass
-                else:
-                    for vname in view_names:
-                        e.execute(
-                            schema._DropView(
-                                schema.Table(
-                                    vname,
-                                    schema.MetaData(),
-                                    schema="test_schema",
-                                )
-                            )
-                        )
-
-            util.drop_all_tables(e, inspector)
-
-            if config.requirements.schemas.enabled_for_config(cfg):
-                util.drop_all_tables(e, inspector, schema=cfg.test_schema)
-
-            # TODO: this has to be part of provision.py in postgresql
-            if against(cfg, "postgresql"):
-                from sqlalchemy.dialects import postgresql
-
-                for enum in inspector.get_enums("*"):
-                    e.execute(
-                        postgresql.DropEnumType(
-                            postgresql.ENUM(
-                                name=enum["name"], schema=enum["schema"]
-                            )
-                        )
-                    )
-
-            # TODO: need to do a get_sequences and drop them also after tables
+        for cfg in config.Config.all_configs():
+            provision.drop_all_schema_objects(cfg, cfg.db)
 
 
 @post
index 589045453590acbb8336e590f8af586ae8ef058a..4ee0567f22402220e7781d36b1b470a96b62a232 100644 (file)
@@ -3,10 +3,15 @@ import logging
 
 from . import config
 from . import engines
+from . import util
 from .. import exc
+from .. import inspect
 from ..engine import url as sa_url
+from ..sql import ddl
+from ..sql import schema
 from ..util import compat
 
+
 log = logging.getLogger(__name__)
 
 FOLLOWER_IDENT = None
@@ -211,6 +216,63 @@ def _configs_for_db_operation():
         cfg.db.dispose()
 
 
+@register.init
+def drop_all_schema_objects_pre_tables(cfg, eng):
+    pass
+
+
+@register.init
+def drop_all_schema_objects_post_tables(cfg, eng):
+    pass
+
+
+def drop_all_schema_objects(cfg, eng):
+
+    drop_all_schema_objects_pre_tables(cfg, eng)
+
+    inspector = inspect(eng)
+    try:
+        view_names = inspector.get_view_names()
+    except NotImplementedError:
+        pass
+    else:
+        with eng.begin() as conn:
+            for vname in view_names:
+                conn.execute(
+                    ddl._DropView(schema.Table(vname, schema.MetaData()))
+                )
+
+    if config.requirements.schemas.enabled_for_config(cfg):
+        try:
+            view_names = inspector.get_view_names(schema="test_schema")
+        except NotImplementedError:
+            pass
+        else:
+            with eng.begin() as conn:
+                for vname in view_names:
+                    conn.execute(
+                        ddl._DropView(
+                            schema.Table(
+                                vname,
+                                schema.MetaData(),
+                                schema="test_schema",
+                            )
+                        )
+                    )
+
+    util.drop_all_tables(eng, inspector)
+
+    if config.requirements.schemas.enabled_for_config(cfg):
+        util.drop_all_tables(eng, inspector, schema=cfg.test_schema)
+
+    drop_all_schema_objects_post_tables(cfg, eng)
+
+    if config.requirements.sequences.enabled_for_config(cfg):
+        with eng.begin() as conn:
+            for seq in inspector.get_sequence_names():
+                conn.execute(ddl.DropSequence(schema.Sequence(seq)))
+
+
 @register.init
 def create_db(cfg, eng, ident):
     """Dynamically create a database for testing.
index bef3abb5996a2966e5a3f12028cf9d7369ac0143..6c3c1005ab54b196e5f92199a43e42103d498d2d 100644 (file)
@@ -207,10 +207,10 @@ class QuotedNameArgumentTest(fixtures.TablesTest):
                 ]
             for name in names:
                 query = "CREATE VIEW %s AS SELECT * FROM %s" % (
-                    testing.db.dialect.identifier_preparer.quote(
+                    config.db.dialect.identifier_preparer.quote(
                         "view %s" % name
                     ),
-                    testing.db.dialect.identifier_preparer.quote(name),
+                    config.db.dialect.identifier_preparer.quote(name),
                 )
 
                 event.listen(metadata, "after_create", DDL(query))
@@ -219,7 +219,7 @@ class QuotedNameArgumentTest(fixtures.TablesTest):
                     "before_drop",
                     DDL(
                         "DROP VIEW %s"
-                        % testing.db.dialect.identifier_preparer.quote(
+                        % config.db.dialect.identifier_preparer.quote(
                             "view %s" % name
                         )
                     ),
@@ -233,52 +233,52 @@ class QuotedNameArgumentTest(fixtures.TablesTest):
 
     @quote_fixtures
     def test_get_table_options(self, name):
-        insp = inspect(testing.db)
+        insp = inspect(config.db)
 
         insp.get_table_options(name)
 
     @quote_fixtures
     @testing.requires.view_column_reflection
     def test_get_view_definition(self, name):
-        insp = inspect(testing.db)
+        insp = inspect(config.db)
         assert insp.get_view_definition("view %s" % name)
 
     @quote_fixtures
     def test_get_columns(self, name):
-        insp = inspect(testing.db)
+        insp = inspect(config.db)
         assert insp.get_columns(name)
 
     @quote_fixtures
     def test_get_pk_constraint(self, name):
-        insp = inspect(testing.db)
+        insp = inspect(config.db)
         assert insp.get_pk_constraint(name)
 
     @quote_fixtures
     def test_get_foreign_keys(self, name):
-        insp = inspect(testing.db)
+        insp = inspect(config.db)
         assert insp.get_foreign_keys(name)
 
     @quote_fixtures
     def test_get_indexes(self, name):
-        insp = inspect(testing.db)
+        insp = inspect(config.db)
         assert insp.get_indexes(name)
 
     @quote_fixtures
     @testing.requires.unique_constraint_reflection
     def test_get_unique_constraints(self, name):
-        insp = inspect(testing.db)
+        insp = inspect(config.db)
         assert insp.get_unique_constraints(name)
 
     @quote_fixtures
     @testing.requires.comment_reflection
     def test_get_table_comment(self, name):
-        insp = inspect(testing.db)
+        insp = inspect(config.db)
         assert insp.get_table_comment(name)
 
     @quote_fixtures
     @testing.requires.check_constraint_reflection
     def test_get_check_constraints(self, name):
-        insp = inspect(testing.db)
+        insp = inspect(config.db)
         assert insp.get_check_constraints(name)
 
 
@@ -508,7 +508,7 @@ class ComponentReflectionTest(fixtures.TablesTest):
 
     @testing.requires.schema_reflection
     def test_get_schema_names(self):
-        insp = inspect(testing.db)
+        insp = inspect(self.bind)
 
         self.assert_(testing.config.test_schema in insp.get_schema_names())
 
@@ -520,13 +520,28 @@ class ComponentReflectionTest(fixtures.TablesTest):
 
     @testing.requires.schema_reflection
     def test_get_default_schema_name(self):
-        insp = inspect(testing.db)
-        eq_(insp.default_schema_name, testing.db.dialect.default_schema_name)
-
-    @testing.provide_metadata
-    def _test_get_table_names(
-        self, schema=None, table_type="table", order_by=None
+        insp = inspect(self.bind)
+        eq_(insp.default_schema_name, self.bind.dialect.default_schema_name)
+
+    @testing.combinations(
+        (None, True, False, False),
+        (None, True, False, True, testing.requires.schemas),
+        ("foreign_key", True, False, False),
+        (None, False, True, False),
+        (None, False, True, True, testing.requires.schemas),
+        (None, True, True, False),
+        (None, True, True, True, testing.requires.schemas),
+        argnames="order_by,include_plain,include_views,use_schema",
+    )
+    def test_get_table_names(
+        self, connection, order_by, include_plain, include_views, use_schema
     ):
+
+        if use_schema:
+            schema = config.test_schema
+        else:
+            schema = None
+
         _ignore_tables = [
             "comment_test",
             "noncol_idx_test_pk",
@@ -535,16 +550,16 @@ class ComponentReflectionTest(fixtures.TablesTest):
             "remote_table",
             "remote_table_2",
         ]
-        meta = self.metadata
 
-        insp = inspect(meta.bind)
+        insp = inspect(connection)
 
-        if table_type == "view":
+        if include_views:
             table_names = insp.get_view_names(schema)
             table_names.sort()
             answer = ["email_addresses_v", "users_v"]
             eq_(sorted(table_names), answer)
-        else:
+
+        if include_plain:
             if order_by:
                 tables = [
                     rec[0]
@@ -576,15 +591,6 @@ class ComponentReflectionTest(fixtures.TablesTest):
         temp_table_names = insp.get_temp_view_names()
         eq_(sorted(temp_table_names), ["user_tmp_v"])
 
-    @testing.requires.table_reflection
-    def test_get_table_names(self):
-        self._test_get_table_names()
-
-    @testing.requires.table_reflection
-    @testing.requires.foreign_key_constraint_reflection
-    def test_get_table_names_fks(self):
-        self._test_get_table_names(order_by="foreign_key")
-
     @testing.requires.comment_reflection
     def test_get_comments(self):
         self._test_get_comments()
@@ -595,7 +601,7 @@ class ComponentReflectionTest(fixtures.TablesTest):
         self._test_get_comments(testing.config.test_schema)
 
     def _test_get_comments(self, schema=None):
-        insp = inspect(testing.db)
+        insp = inspect(self.bind)
 
         eq_(
             insp.get_table_comment("comment_test", schema=schema),
@@ -621,35 +627,27 @@ class ComponentReflectionTest(fixtures.TablesTest):
             ],
         )
 
-    @testing.requires.table_reflection
-    @testing.requires.schemas
-    def test_get_table_names_with_schema(self):
-        self._test_get_table_names(testing.config.test_schema)
-
-    @testing.requires.view_column_reflection
-    def test_get_view_names(self):
-        self._test_get_table_names(table_type="view")
-
-    @testing.requires.view_column_reflection
-    @testing.requires.schemas
-    def test_get_view_names_with_schema(self):
-        self._test_get_table_names(
-            testing.config.test_schema, table_type="view"
-        )
-
-    @testing.requires.table_reflection
-    @testing.requires.view_column_reflection
-    def test_get_tables_and_views(self):
-        self._test_get_table_names()
-        self._test_get_table_names(table_type="view")
+    @testing.combinations(
+        (False, False),
+        (False, True, testing.requires.schemas),
+        (True, False),
+        (False, True, testing.requires.schemas),
+        argnames="use_views,use_schema",
+    )
+    def test_get_columns(self, connection, use_views, use_schema):
+
+        if use_schema:
+            schema = config.test_schema
+        else:
+            schema = None
 
-    def _test_get_columns(self, schema=None, table_type="table"):
-        meta = MetaData(testing.db)
         users, addresses = (self.tables.users, self.tables.email_addresses)
-        table_names = ["users", "email_addresses"]
-        if table_type == "view":
+        if use_views:
             table_names = ["users_v", "email_addresses_v"]
-        insp = inspect(meta.bind)
+        else:
+            table_names = ["users", "email_addresses"]
+
+        insp = inspect(connection)
         for table_name, table in zip(table_names, (users, addresses)):
             schema_name = schema
             cols = insp.get_columns(table_name, schema=schema_name)
@@ -699,67 +697,13 @@ class ComponentReflectionTest(fixtures.TablesTest):
                 if not col.primary_key:
                     assert cols[i]["default"] is None
 
-    @testing.requires.table_reflection
-    def test_get_columns(self):
-        self._test_get_columns()
-
-    @testing.provide_metadata
-    def _type_round_trip(self, *types):
-        t = Table(
-            "t",
-            self.metadata,
-            *[Column("t%d" % i, type_) for i, type_ in enumerate(types)]
-        )
-        t.create()
-
-        return [
-            c["type"] for c in inspect(self.metadata.bind).get_columns("t")
-        ]
-
-    @testing.requires.table_reflection
-    def test_numeric_reflection(self):
-        for typ in self._type_round_trip(sql_types.Numeric(18, 5)):
-            assert isinstance(typ, sql_types.Numeric)
-            eq_(typ.precision, 18)
-            eq_(typ.scale, 5)
-
-    @testing.requires.table_reflection
-    def test_varchar_reflection(self):
-        typ = self._type_round_trip(sql_types.String(52))[0]
-        assert isinstance(typ, sql_types.String)
-        eq_(typ.length, 52)
-
-    @testing.requires.table_reflection
-    @testing.provide_metadata
-    def test_nullable_reflection(self):
-        t = Table(
-            "t",
-            self.metadata,
-            Column("a", Integer, nullable=True),
-            Column("b", Integer, nullable=False),
-        )
-        t.create()
-        eq_(
-            dict(
-                (col["name"], col["nullable"])
-                for col in inspect(self.metadata.bind).get_columns("t")
-            ),
-            {"a": True, "b": False},
-        )
-
-    @testing.requires.table_reflection
-    @testing.requires.schemas
-    def test_get_columns_with_schema(self):
-        self._test_get_columns(schema=testing.config.test_schema)
-
     @testing.requires.temp_table_reflection
     def test_get_temp_table_columns(self):
         table_name = get_temp_table_name(
-            config, config.db, "user_tmp_%s" % config.ident
+            config, self.bind, "user_tmp_%s" % config.ident
         )
-        meta = MetaData(self.bind)
         user_tmp = self.tables[table_name]
-        insp = inspect(meta.bind)
+        insp = inspect(self.bind)
         cols = insp.get_columns(table_name)
         self.assert_(len(cols) > 0, len(cols))
 
@@ -774,22 +718,18 @@ class ComponentReflectionTest(fixtures.TablesTest):
         cols = insp.get_columns("user_tmp_v")
         eq_([col["name"] for col in cols], ["id", "name", "foo"])
 
-    @testing.requires.view_column_reflection
-    def test_get_view_columns(self):
-        self._test_get_columns(table_type="view")
-
-    @testing.requires.view_column_reflection
-    @testing.requires.schemas
-    def test_get_view_columns_with_schema(self):
-        self._test_get_columns(
-            schema=testing.config.test_schema, table_type="view"
-        )
+    @testing.combinations(
+        (False,), (True, testing.requires.schemas), argnames="use_schema"
+    )
+    @testing.requires.primary_key_constraint_reflection
+    def test_get_pk_constraint(self, connection, use_schema):
+        if use_schema:
+            schema = testing.config.test_schema
+        else:
+            schema = None
 
-    @testing.provide_metadata
-    def _test_get_pk_constraint(self, schema=None):
-        meta = self.metadata
         users, addresses = self.tables.users, self.tables.email_addresses
-        insp = inspect(meta.bind)
+        insp = inspect(connection)
 
         users_cons = insp.get_pk_constraint(users.name, schema=schema)
         users_pkeys = users_cons["constrained_columns"]
@@ -802,21 +742,18 @@ class ComponentReflectionTest(fixtures.TablesTest):
         with testing.requires.reflects_pk_names.fail_if():
             eq_(addr_cons["name"], "email_ad_pk")
 
-    @testing.requires.primary_key_constraint_reflection
-    def test_get_pk_constraint(self):
-        self._test_get_pk_constraint()
-
-    @testing.requires.table_reflection
-    @testing.requires.primary_key_constraint_reflection
-    @testing.requires.schemas
-    def test_get_pk_constraint_with_schema(self):
-        self._test_get_pk_constraint(schema=testing.config.test_schema)
+    @testing.combinations(
+        (False,), (True, testing.requires.schemas), argnames="use_schema"
+    )
+    @testing.requires.foreign_key_constraint_reflection
+    def test_get_foreign_keys(self, connection, use_schema):
+        if use_schema:
+            schema = config.test_schema
+        else:
+            schema = None
 
-    @testing.provide_metadata
-    def _test_get_foreign_keys(self, schema=None):
-        meta = self.metadata
         users, addresses = (self.tables.users, self.tables.email_addresses)
-        insp = inspect(meta.bind)
+        insp = inspect(connection)
         expected_schema = schema
         # users
 
@@ -845,25 +782,16 @@ class ComponentReflectionTest(fixtures.TablesTest):
         eq_(fkey1["referred_columns"], ["user_id"])
         eq_(fkey1["constrained_columns"], ["remote_user_id"])
 
-    @testing.requires.foreign_key_constraint_reflection
-    def test_get_foreign_keys(self):
-        self._test_get_foreign_keys()
-
-    @testing.requires.foreign_key_constraint_reflection
-    @testing.requires.schemas
-    def test_get_foreign_keys_with_schema(self):
-        self._test_get_foreign_keys(schema=testing.config.test_schema)
-
     @testing.requires.cross_schema_fk_reflection
     @testing.requires.schemas
     def test_get_inter_schema_foreign_keys(self):
         local_table, remote_table, remote_table_2 = self.tables(
-            "%s.local_table" % testing.db.dialect.default_schema_name,
+            "%s.local_table" % self.bind.dialect.default_schema_name,
             "%s.remote_table" % testing.config.test_schema,
             "%s.remote_table_2" % testing.config.test_schema,
         )
 
-        insp = inspect(config.db)
+        insp = inspect(self.bind)
 
         local_fkeys = insp.get_foreign_keys(local_table.name)
         eq_(len(local_fkeys), 1)
@@ -883,85 +811,12 @@ class ComponentReflectionTest(fixtures.TablesTest):
 
         assert fkey2["referred_schema"] in (
             None,
-            testing.db.dialect.default_schema_name,
+            self.bind.dialect.default_schema_name,
         )
         eq_(fkey2["referred_table"], local_table.name)
         eq_(fkey2["referred_columns"], ["id"])
         eq_(fkey2["constrained_columns"], ["local_id"])
 
-    @testing.requires.foreign_key_constraint_option_reflection_ondelete
-    def test_get_foreign_key_options_ondelete(self):
-        self._test_get_foreign_key_options(ondelete="CASCADE")
-
-    @testing.requires.foreign_key_constraint_option_reflection_onupdate
-    def test_get_foreign_key_options_onupdate(self):
-        self._test_get_foreign_key_options(onupdate="SET NULL")
-
-    @testing.requires.foreign_key_constraint_option_reflection_onupdate
-    def test_get_foreign_key_options_onupdate_noaction(self):
-        self._test_get_foreign_key_options(onupdate="NO ACTION", expected={})
-
-    @testing.requires.fk_constraint_option_reflection_ondelete_noaction
-    def test_get_foreign_key_options_ondelete_noaction(self):
-        self._test_get_foreign_key_options(ondelete="NO ACTION", expected={})
-
-    @testing.requires.fk_constraint_option_reflection_onupdate_restrict
-    def test_get_foreign_key_options_onupdate_restrict(self):
-        self._test_get_foreign_key_options(onupdate="RESTRICT")
-
-    @testing.requires.fk_constraint_option_reflection_ondelete_restrict
-    def test_get_foreign_key_options_ondelete_restrict(self):
-        self._test_get_foreign_key_options(ondelete="RESTRICT")
-
-    @testing.provide_metadata
-    def _test_get_foreign_key_options(self, expected=None, **options):
-        meta = self.metadata
-
-        if expected is None:
-            expected = options
-
-        Table(
-            "x",
-            meta,
-            Column("id", Integer, primary_key=True),
-            test_needs_fk=True,
-        )
-
-        Table(
-            "table",
-            meta,
-            Column("id", Integer, primary_key=True),
-            Column("x_id", Integer, sa.ForeignKey("x.id", name="xid")),
-            Column("test", String(10)),
-            test_needs_fk=True,
-        )
-
-        Table(
-            "user",
-            meta,
-            Column("id", Integer, primary_key=True),
-            Column("name", String(50), nullable=False),
-            Column("tid", Integer),
-            sa.ForeignKeyConstraint(
-                ["tid"], ["table.id"], name="myfk", **options
-            ),
-            test_needs_fk=True,
-        )
-
-        meta.create_all()
-
-        insp = inspect(meta.bind)
-
-        # test 'options' is always present for a backend
-        # that can reflect these, since alembic looks for this
-        opts = insp.get_foreign_keys("table")[0]["options"]
-
-        eq_(dict((k, opts[k]) for k in opts if opts[k]), {})
-
-        opts = insp.get_foreign_keys("user")[0]["options"]
-        eq_(opts, expected)
-        # eq_(dict((k, opts[k]) for k in opts if opts[k]), expected)
-
     def _assert_insp_indexes(self, indexes, expected_indexes):
         index_names = [d["name"] for d in indexes]
         for e_index in expected_indexes:
@@ -970,13 +825,19 @@ class ComponentReflectionTest(fixtures.TablesTest):
             for key in e_index:
                 eq_(e_index[key], index[key])
 
-    @testing.provide_metadata
-    def _test_get_indexes(self, schema=None):
-        meta = self.metadata
+    @testing.combinations(
+        (False,), (True, testing.requires.schemas), argnames="use_schema"
+    )
+    def test_get_indexes(self, connection, use_schema):
+
+        if use_schema:
+            schema = config.test_schema
+        else:
+            schema = None
 
         # The database may decide to create indexes for foreign keys, etc.
         # so there may be more indexes than expected.
-        insp = inspect(meta.bind)
+        insp = inspect(self.bind)
         indexes = insp.get_indexes("users", schema=schema)
         expected_indexes = [
             {
@@ -992,19 +853,15 @@ class ComponentReflectionTest(fixtures.TablesTest):
         ]
         self._assert_insp_indexes(indexes, expected_indexes)
 
+    @testing.combinations(
+        ("noncol_idx_test_nopk", "noncol_idx_nopk"),
+        ("noncol_idx_test_pk", "noncol_idx_pk"),
+        argnames="tname,ixname",
+    )
     @testing.requires.index_reflection
-    def test_get_indexes(self):
-        self._test_get_indexes()
-
-    @testing.requires.index_reflection
-    @testing.requires.schemas
-    def test_get_indexes_with_schema(self):
-        self._test_get_indexes(schema=testing.config.test_schema)
-
-    @testing.provide_metadata
-    def _test_get_noncol_index(self, tname, ixname):
-        meta = self.metadata
-        insp = inspect(meta.bind)
+    @testing.requires.indexes_with_ascdesc
+    def test_get_noncol_index(self, connection, tname, ixname):
+        insp = inspect(connection)
         indexes = insp.get_indexes(tname)
 
         # reflecting an index that has "x DESC" in it as the column.
@@ -1013,85 +870,11 @@ class ComponentReflectionTest(fixtures.TablesTest):
         expected_indexes = [{"unique": False, "name": ixname}]
         self._assert_insp_indexes(indexes, expected_indexes)
 
-        t = Table(tname, meta, autoload_with=meta.bind)
+        t = Table(tname, MetaData(), autoload_with=connection)
         eq_(len(t.indexes), 1)
         is_(list(t.indexes)[0].table, t)
         eq_(list(t.indexes)[0].name, ixname)
 
-    @testing.requires.index_reflection
-    @testing.requires.indexes_with_ascdesc
-    def test_get_noncol_index_no_pk(self):
-        self._test_get_noncol_index("noncol_idx_test_nopk", "noncol_idx_nopk")
-
-    @testing.requires.index_reflection
-    @testing.requires.indexes_with_ascdesc
-    def test_get_noncol_index_pk(self):
-        self._test_get_noncol_index("noncol_idx_test_pk", "noncol_idx_pk")
-
-    @testing.requires.indexes_with_expressions
-    @testing.provide_metadata
-    def test_reflect_expression_based_indexes(self):
-        t = Table(
-            "t",
-            self.metadata,
-            Column("x", String(30)),
-            Column("y", String(30)),
-        )
-
-        Index("t_idx", func.lower(t.c.x), func.lower(t.c.y))
-
-        Index("t_idx_2", t.c.x)
-
-        self.metadata.create_all(testing.db)
-
-        insp = inspect(testing.db)
-
-        expected = [
-            {"name": "t_idx_2", "column_names": ["x"], "unique": False}
-        ]
-        if testing.requires.index_reflects_included_columns.enabled:
-            expected[0]["include_columns"] = []
-
-        with expect_warnings(
-            "Skipped unsupported reflection of expression-based index t_idx"
-        ):
-            eq_(
-                insp.get_indexes("t"),
-                expected,
-            )
-
-    @testing.requires.index_reflects_included_columns
-    @testing.provide_metadata
-    def test_reflect_covering_index(self):
-        t = Table(
-            "t",
-            self.metadata,
-            Column("x", String(30)),
-            Column("y", String(30)),
-        )
-        idx = Index("t_idx", t.c.x)
-        idx.dialect_options[testing.db.name]["include"] = ["y"]
-
-        self.metadata.create_all(testing.db)
-
-        insp = inspect(testing.db)
-
-        eq_(
-            insp.get_indexes("t"),
-            [
-                {
-                    "name": "t_idx",
-                    "column_names": ["x"],
-                    "include_columns": ["y"],
-                    "unique": False,
-                }
-            ],
-        )
-
-    @testing.requires.unique_constraint_reflection
-    def test_get_unique_constraints(self):
-        self._test_get_unique_constraints()
-
     @testing.requires.temp_table_reflection
     @testing.requires.unique_constraint_reflection
     def test_get_temp_table_unique_constraints(self):
@@ -1130,19 +913,22 @@ class ComponentReflectionTest(fixtures.TablesTest):
             expected,
         )
 
+    @testing.combinations(
+        (True, testing.requires.schemas), (False,), argnames="use_schema"
+    )
     @testing.requires.unique_constraint_reflection
-    @testing.requires.schemas
-    def test_get_unique_constraints_with_schema(self):
-        self._test_get_unique_constraints(schema=testing.config.test_schema)
-
-    @testing.provide_metadata
-    def _test_get_unique_constraints(self, schema=None):
+    def test_get_unique_constraints(self, metadata, connection, use_schema):
         # SQLite dialect needs to parse the names of the constraints
         # separately from what it gets from PRAGMA index_list(), and
         # then matches them up.  so same set of column_names in two
         # constraints will confuse it.    Perhaps we should no longer
         # bother with index_list() here since we have the whole
         # CREATE TABLE?
+
+        if use_schema:
+            schema = config.test_schema
+        else:
+            schema = None
         uniques = sorted(
             [
                 {"name": "unique_a", "column_names": ["a"]},
@@ -1154,10 +940,9 @@ class ComponentReflectionTest(fixtures.TablesTest):
             ],
             key=operator.itemgetter("name"),
         )
-        orig_meta = self.metadata
         table = Table(
             "testtbl",
-            orig_meta,
+            metadata,
             Column("a", sa.String(20)),
             Column("b", sa.String(30)),
             Column("c", sa.Integer),
@@ -1170,9 +955,9 @@ class ComponentReflectionTest(fixtures.TablesTest):
             table.append_constraint(
                 sa.UniqueConstraint(*uc["column_names"], name=uc["name"])
             )
-        orig_meta.create_all()
+        table.create(connection)
 
-        inspector = inspect(orig_meta.bind)
+        inspector = inspect(connection)
         reflected = sorted(
             inspector.get_unique_constraints("testtbl", schema=schema),
             key=operator.itemgetter("name"),
@@ -1192,7 +977,7 @@ class ComponentReflectionTest(fixtures.TablesTest):
         reflected = Table(
             "testtbl",
             reflected_metadata,
-            autoload_with=orig_meta.bind,
+            autoload_with=connection,
             schema=schema,
         )
 
@@ -1214,30 +999,90 @@ class ComponentReflectionTest(fixtures.TablesTest):
             eq_(names_that_duplicate_index, idx_names)
             eq_(uq_names, set())
 
-    @testing.requires.check_constraint_reflection
-    def test_get_check_constraints(self):
-        self._test_get_check_constraints()
+    @testing.combinations(
+        (False,), (True, testing.requires.schemas), argnames="use_schema"
+    )
+    def test_get_view_definition(self, connection, use_schema):
+        if use_schema:
+            schema = config.test_schema
+        else:
+            schema = None
+        view_name1 = "users_v"
+        view_name2 = "email_addresses_v"
+        insp = inspect(connection)
+        v1 = insp.get_view_definition(view_name1, schema=schema)
+        self.assert_(v1)
+        v2 = insp.get_view_definition(view_name2, schema=schema)
+        self.assert_(v2)
+
+    # why is this here if it's PG specific ?
+    @testing.combinations(
+        ("users", False),
+        ("users", True, testing.requires.schemas),
+        argnames="table_name,use_schema",
+    )
+    @testing.only_on("postgresql", "PG specific feature")
+    def test_get_table_oid(self, connection, table_name, use_schema):
+        if use_schema:
+            schema = config.test_schema
+        else:
+            schema = None
+        insp = inspect(connection)
+        oid = insp.get_table_oid(table_name, schema)
+        self.assert_(isinstance(oid, int))
+
+    @testing.requires.table_reflection
+    def test_autoincrement_col(self):
+        """test that 'autoincrement' is reflected according to sqla's policy.
+
+        Don't mark this test as unsupported for any backend !
+
+        (technically it fails with MySQL InnoDB since "id" comes before "id2")
+
+        A backend is better off not returning "autoincrement" at all,
+        instead of potentially returning "False" for an auto-incrementing
+        primary key column.
+
+        """
+
+        insp = inspect(self.bind)
+
+        for tname, cname in [
+            ("users", "user_id"),
+            ("email_addresses", "address_id"),
+            ("dingalings", "dingaling_id"),
+        ]:
+            cols = insp.get_columns(tname)
+            id_ = {c["name"]: c for c in cols}[cname]
+            assert id_.get("autoincrement", True)
+
+
+class ComponentReflectionTestExtra(fixtures.TestBase):
 
+    __backend__ = True
+
+    @testing.combinations(
+        (True, testing.requires.schemas), (False,), argnames="use_schema"
+    )
     @testing.requires.check_constraint_reflection
-    @testing.requires.schemas
-    def test_get_check_constraints_schema(self):
-        self._test_get_check_constraints(schema=testing.config.test_schema)
+    def test_get_check_constraints(self, metadata, connection, use_schema):
+        if use_schema:
+            schema = config.test_schema
+        else:
+            schema = None
 
-    @testing.provide_metadata
-    def _test_get_check_constraints(self, schema=None):
-        orig_meta = self.metadata
         Table(
             "sa_cc",
-            orig_meta,
+            metadata,
             Column("a", Integer()),
             sa.CheckConstraint("a > 1 AND a < 5", name="cc1"),
             sa.CheckConstraint("a = 1 OR (a > 2 AND a < 5)", name="cc2"),
             schema=schema,
         )
 
-        orig_meta.create_all()
+        metadata.create_all(connection)
 
-        inspector = inspect(orig_meta.bind)
+        inspector = inspect(connection)
         reflected = sorted(
             inspector.get_check_constraints("sa_cc", schema=schema),
             key=operator.itemgetter("name"),
@@ -1263,67 +1108,200 @@ class ComponentReflectionTest(fixtures.TablesTest):
             ],
         )
 
-    @testing.provide_metadata
-    def _test_get_view_definition(self, schema=None):
-        meta = self.metadata
-        view_name1 = "users_v"
-        view_name2 = "email_addresses_v"
-        insp = inspect(meta.bind)
-        v1 = insp.get_view_definition(view_name1, schema=schema)
-        self.assert_(v1)
-        v2 = insp.get_view_definition(view_name2, schema=schema)
-        self.assert_(v2)
+    @testing.requires.indexes_with_expressions
+    def test_reflect_expression_based_indexes(self, metadata, connection):
+        t = Table(
+            "t",
+            metadata,
+            Column("x", String(30)),
+            Column("y", String(30)),
+        )
 
-    @testing.requires.view_reflection
-    def test_get_view_definition(self):
-        self._test_get_view_definition()
+        Index("t_idx", func.lower(t.c.x), func.lower(t.c.y))
 
-    @testing.requires.view_reflection
-    @testing.requires.schemas
-    def test_get_view_definition_with_schema(self):
-        self._test_get_view_definition(schema=testing.config.test_schema)
+        Index("t_idx_2", t.c.x)
 
-    @testing.only_on("postgresql", "PG specific feature")
-    @testing.provide_metadata
-    def _test_get_table_oid(self, table_name, schema=None):
-        meta = self.metadata
-        insp = inspect(meta.bind)
-        oid = insp.get_table_oid(table_name, schema)
-        self.assert_(isinstance(oid, int))
+        metadata.create_all(connection)
 
-    def test_get_table_oid(self):
-        self._test_get_table_oid("users")
+        insp = inspect(connection)
 
-    @testing.requires.schemas
-    def test_get_table_oid_with_schema(self):
-        self._test_get_table_oid("users", schema=testing.config.test_schema)
+        expected = [
+            {"name": "t_idx_2", "column_names": ["x"], "unique": False}
+        ]
+        if testing.requires.index_reflects_included_columns.enabled:
+            expected[0]["include_columns"] = []
+
+        with expect_warnings(
+            "Skipped unsupported reflection of expression-based index t_idx"
+        ):
+            eq_(
+                insp.get_indexes("t"),
+                expected,
+            )
+
+    @testing.requires.index_reflects_included_columns
+    def test_reflect_covering_index(self, metadata, connection):
+        t = Table(
+            "t",
+            metadata,
+            Column("x", String(30)),
+            Column("y", String(30)),
+        )
+        idx = Index("t_idx", t.c.x)
+        idx.dialect_options[connection.engine.name]["include"] = ["y"]
+
+        metadata.create_all(connection)
+
+        insp = inspect(connection)
+
+        eq_(
+            insp.get_indexes("t"),
+            [
+                {
+                    "name": "t_idx",
+                    "column_names": ["x"],
+                    "include_columns": ["y"],
+                    "unique": False,
+                }
+            ],
+        )
+
+    def _type_round_trip(self, connection, metadata, *types):
+        t = Table(
+            "t",
+            metadata,
+            *[Column("t%d" % i, type_) for i, type_ in enumerate(types)]
+        )
+        t.create(connection)
+
+        return [c["type"] for c in inspect(connection).get_columns("t")]
 
     @testing.requires.table_reflection
-    @testing.provide_metadata
-    def test_autoincrement_col(self):
-        """test that 'autoincrement' is reflected according to sqla's policy.
+    def test_numeric_reflection(self, connection, metadata):
+        for typ in self._type_round_trip(
+            connection, metadata, sql_types.Numeric(18, 5)
+        ):
+            assert isinstance(typ, sql_types.Numeric)
+            eq_(typ.precision, 18)
+            eq_(typ.scale, 5)
 
-        Don't mark this test as unsupported for any backend !
+    @testing.requires.table_reflection
+    def test_varchar_reflection(self, connection, metadata):
+        typ = self._type_round_trip(
+            connection, metadata, sql_types.String(52)
+        )[0]
+        assert isinstance(typ, sql_types.String)
+        eq_(typ.length, 52)
 
-        (technically it fails with MySQL InnoDB since "id" comes before "id2")
+    @testing.requires.table_reflection
+    def test_nullable_reflection(self, connection, metadata):
+        t = Table(
+            "t",
+            metadata,
+            Column("a", Integer, nullable=True),
+            Column("b", Integer, nullable=False),
+        )
+        t.create(connection)
+        eq_(
+            dict(
+                (col["name"], col["nullable"])
+                for col in inspect(connection).get_columns("t")
+            ),
+            {"a": True, "b": False},
+        )
 
-        A backend is better off not returning "autoincrement" at all,
-        instead of potentially returning "False" for an auto-incrementing
-        primary key column.
+    @testing.combinations(
+        (
+            None,
+            "CASCADE",
+            None,
+            testing.requires.foreign_key_constraint_option_reflection_ondelete,
+        ),
+        (
+            None,
+            None,
+            "SET NULL",
+            testing.requires.foreign_key_constraint_option_reflection_onupdate,
+        ),
+        (
+            {},
+            None,
+            "NO ACTION",
+            testing.requires.foreign_key_constraint_option_reflection_onupdate,
+        ),
+        (
+            {},
+            "NO ACTION",
+            None,
+            testing.requires.fk_constraint_option_reflection_ondelete_noaction,
+        ),
+        (
+            None,
+            None,
+            "RESTRICT",
+            testing.requires.fk_constraint_option_reflection_onupdate_restrict,
+        ),
+        (
+            None,
+            "RESTRICT",
+            None,
+            testing.requires.fk_constraint_option_reflection_ondelete_restrict,
+        ),
+        argnames="expected,ondelete,onupdate",
+    )
+    def test_get_foreign_key_options(
+        self, connection, metadata, expected, ondelete, onupdate
+    ):
+        options = {}
+        if ondelete:
+            options["ondelete"] = ondelete
+        if onupdate:
+            options["onupdate"] = onupdate
 
-        """
+        if expected is None:
+            expected = options
 
-        meta = self.metadata
-        insp = inspect(meta.bind)
+        Table(
+            "x",
+            metadata,
+            Column("id", Integer, primary_key=True),
+            test_needs_fk=True,
+        )
 
-        for tname, cname in [
-            ("users", "user_id"),
-            ("email_addresses", "address_id"),
-            ("dingalings", "dingaling_id"),
-        ]:
-            cols = insp.get_columns(tname)
-            id_ = {c["name"]: c for c in cols}[cname]
-            assert id_.get("autoincrement", True)
+        Table(
+            "table",
+            metadata,
+            Column("id", Integer, primary_key=True),
+            Column("x_id", Integer, sa.ForeignKey("x.id", name="xid")),
+            Column("test", String(10)),
+            test_needs_fk=True,
+        )
+
+        Table(
+            "user",
+            metadata,
+            Column("id", Integer, primary_key=True),
+            Column("name", String(50), nullable=False),
+            Column("tid", Integer),
+            sa.ForeignKeyConstraint(
+                ["tid"], ["table.id"], name="myfk", **options
+            ),
+            test_needs_fk=True,
+        )
+
+        metadata.create_all(connection)
+
+        insp = inspect(connection)
+
+        # test 'options' is always present for a backend
+        # that can reflect these, since alembic looks for this
+        opts = insp.get_foreign_keys("table")[0]["options"]
+
+        eq_(dict((k, opts[k]) for k in opts if opts[k]), {})
+
+        opts = insp.get_foreign_keys("user")[0]["options"]
+        eq_(opts, expected)
+        # eq_(dict((k, opts[k]) for k in opts if opts[k]), expected)
 
 
 class NormalizedNameTest(fixtures.TablesTest):
@@ -1348,21 +1326,21 @@ class NormalizedNameTest(fixtures.TablesTest):
 
         m2 = MetaData()
         t2_ref = Table(
-            quoted_name("t2", quote=True), m2, autoload_with=testing.db
+            quoted_name("t2", quote=True), m2, autoload_with=config.db
         )
         t1_ref = m2.tables["t1"]
         assert t2_ref.c.t1id.references(t1_ref.c.id)
 
         m3 = MetaData()
         m3.reflect(
-            testing.db, only=lambda name, m: name.lower() in ("t1", "t2")
+            config.db, only=lambda name, m: name.lower() in ("t1", "t2")
         )
         assert m3.tables["t2"].c.t1id.references(m3.tables["t1"].c.id)
 
     def test_get_table_names(self):
         tablenames = [
             t
-            for t in inspect(testing.db).get_table_names()
+            for t in inspect(config.db).get_table_names()
             if t.lower() in ("t1", "t2")
         ]
 
@@ -1637,20 +1615,16 @@ class CompositeKeyReflectionTest(fixtures.TablesTest):
         )
 
     @testing.requires.primary_key_constraint_reflection
-    @testing.provide_metadata
     def test_pk_column_order(self):
         # test for issue #5661
-        meta = self.metadata
-        insp = inspect(meta.bind)
+        insp = inspect(self.bind)
         primary_key = insp.get_pk_constraint(self.tables.tb1.name)
         eq_(primary_key.get("constrained_columns"), ["name", "id", "attr"])
 
     @testing.requires.foreign_key_constraint_reflection
-    @testing.provide_metadata
     def test_fk_column_order(self):
         # test for issue #5661
-        meta = self.metadata
-        insp = inspect(meta.bind)
+        insp = inspect(self.bind)
         foreign_keys = insp.get_foreign_keys(self.tables.tb2.name)
         eq_(len(foreign_keys), 1)
         fkey1 = foreign_keys[0]
@@ -1660,6 +1634,7 @@ class CompositeKeyReflectionTest(fixtures.TablesTest):
 
 __all__ = (
     "ComponentReflectionTest",
+    "ComponentReflectionTestExtra",
     "QuotedNameArgumentTest",
     "HasTableTest",
     "HasIndexTest",
index 43777239c606e5c5a4cab463fe8246c80fd8df2d..3a5e02c32bbe7595c7f29bf857102ff599cef611 100644 (file)
@@ -47,18 +47,19 @@ from ...util import u
 class _LiteralRoundTripFixture(object):
     supports_whereclause = True
 
-    @testing.provide_metadata
-    def _literal_round_trip(self, type_, input_, output, filter_=None):
+    @testing.fixture
+    def literal_round_trip(self, metadata, connection):
         """test literal rendering """
 
         # for literal, we test the literal render in an INSERT
         # into a typed column.  we can then SELECT it back as its
         # official type; ideally we'd be able to use CAST here
         # but MySQL in particular can't CAST fully
-        t = Table("t", self.metadata, Column("x", type_))
-        t.create()
 
-        with testing.db.begin() as conn:
+        def run(type_, input_, output, filter_=None):
+            t = Table("t", metadata, Column("x", type_))
+            t.create(connection)
+
             for value in input_:
                 ins = (
                     t.insert()
@@ -68,7 +69,7 @@ class _LiteralRoundTripFixture(object):
                         compile_kwargs=dict(literal_binds=True),
                     )
                 )
-                conn.execute(ins)
+                connection.execute(ins)
 
             if self.supports_whereclause:
                 stmt = t.select().where(t.c.x == literal(value))
@@ -79,12 +80,14 @@ class _LiteralRoundTripFixture(object):
                 dialect=testing.db.dialect,
                 compile_kwargs=dict(literal_binds=True),
             )
-            for row in conn.execute(stmt):
+            for row in connection.execute(stmt):
                 value = row[0]
                 if filter_ is not None:
                     value = filter_(value)
                 assert value in output
 
+        return run
+
 
 class _UnicodeFixture(_LiteralRoundTripFixture, fixtures.TestBase):
     __requires__ = ("unicode_data",)
@@ -149,11 +152,11 @@ class _UnicodeFixture(_LiteralRoundTripFixture, fixtures.TestBase):
         row = connection.execute(select(unicode_table.c.unicode_data)).first()
         eq_(row, (u(""),))
 
-    def test_literal(self):
-        self._literal_round_trip(self.datatype, [self.data], [self.data])
+    def test_literal(self, literal_round_trip):
+        literal_round_trip(self.datatype, [self.data], [self.data])
 
-    def test_literal_non_ascii(self):
-        self._literal_round_trip(
+    def test_literal_non_ascii(self, literal_round_trip):
+        literal_round_trip(
             self.datatype, [util.u("réve🐍 illé")], [util.u("réve🐍 illé")]
         )
 
@@ -227,25 +230,25 @@ class TextTest(_LiteralRoundTripFixture, fixtures.TablesTest):
         row = connection.execute(select(text_table.c.text_data)).first()
         eq_(row, (None,))
 
-    def test_literal(self):
-        self._literal_round_trip(Text, ["some text"], ["some text"])
+    def test_literal(self, literal_round_trip):
+        literal_round_trip(Text, ["some text"], ["some text"])
 
-    def test_literal_non_ascii(self):
-        self._literal_round_trip(
+    def test_literal_non_ascii(self, literal_round_trip):
+        literal_round_trip(
             Text, [util.u("réve🐍 illé")], [util.u("réve🐍 illé")]
         )
 
-    def test_literal_quoting(self):
+    def test_literal_quoting(self, literal_round_trip):
         data = """some 'text' hey "hi there" that's text"""
-        self._literal_round_trip(Text, [data], [data])
+        literal_round_trip(Text, [data], [data])
 
-    def test_literal_backslashes(self):
+    def test_literal_backslashes(self, literal_round_trip):
         data = r"backslash one \ backslash two \\ end"
-        self._literal_round_trip(Text, [data], [data])
+        literal_round_trip(Text, [data], [data])
 
-    def test_literal_percentsigns(self):
+    def test_literal_percentsigns(self, literal_round_trip):
         data = r"percent % signs %% percent"
-        self._literal_round_trip(Text, [data], [data])
+        literal_round_trip(Text, [data], [data])
 
 
 class StringTest(_LiteralRoundTripFixture, fixtures.TestBase):
@@ -259,23 +262,23 @@ class StringTest(_LiteralRoundTripFixture, fixtures.TestBase):
         foo.create(config.db)
         foo.drop(config.db)
 
-    def test_literal(self):
+    def test_literal(self, literal_round_trip):
         # note that in Python 3, this invokes the Unicode
         # datatype for the literal part because all strings are unicode
-        self._literal_round_trip(String(40), ["some text"], ["some text"])
+        literal_round_trip(String(40), ["some text"], ["some text"])
 
-    def test_literal_non_ascii(self):
-        self._literal_round_trip(
+    def test_literal_non_ascii(self, literal_round_trip):
+        literal_round_trip(
             String(40), [util.u("réve🐍 illé")], [util.u("réve🐍 illé")]
         )
 
-    def test_literal_quoting(self):
+    def test_literal_quoting(self, literal_round_trip):
         data = """some 'text' hey "hi there" that's text"""
-        self._literal_round_trip(String(40), [data], [data])
+        literal_round_trip(String(40), [data], [data])
 
-    def test_literal_backslashes(self):
+    def test_literal_backslashes(self, literal_round_trip):
         data = r"backslash one \ backslash two \\ end"
-        self._literal_round_trip(String(40), [data], [data])
+        literal_round_trip(String(40), [data], [data])
 
 
 class _DateFixture(_LiteralRoundTripFixture, fixtures.TestBase):
@@ -331,9 +334,9 @@ class _DateFixture(_LiteralRoundTripFixture, fixtures.TestBase):
         eq_(row, (None,))
 
     @testing.requires.datetime_literals
-    def test_literal(self):
+    def test_literal(self, literal_round_trip):
         compare = self.compare or self.data
-        self._literal_round_trip(self.datatype, [self.data], [compare])
+        literal_round_trip(self.datatype, [self.data], [compare])
 
     @testing.requires.standalone_null_binds_whereclause
     def test_null_bound_comparison(self):
@@ -430,36 +433,41 @@ class DateHistoricTest(_DateFixture, fixtures.TablesTest):
 class IntegerTest(_LiteralRoundTripFixture, fixtures.TestBase):
     __backend__ = True
 
-    def test_literal(self):
-        self._literal_round_trip(Integer, [5], [5])
+    def test_literal(self, literal_round_trip):
+        literal_round_trip(Integer, [5], [5])
 
-    def test_huge_int(self, connection):
-        self._round_trip(BigInteger, 1376537018368127, connection)
+    def test_huge_int(self, integer_round_trip):
+        integer_round_trip(BigInteger, 1376537018368127)
 
-    @testing.provide_metadata
-    def _round_trip(self, datatype, data, connection):
-        metadata = self.metadata
-        int_table = Table(
-            "integer_table",
-            metadata,
-            Column(
-                "id", Integer, primary_key=True, test_needs_autoincrement=True
-            ),
-            Column("integer_data", datatype),
-        )
+    @testing.fixture
+    def integer_round_trip(self, metadata, connection):
+        def run(datatype, data):
+            int_table = Table(
+                "integer_table",
+                metadata,
+                Column(
+                    "id",
+                    Integer,
+                    primary_key=True,
+                    test_needs_autoincrement=True,
+                ),
+                Column("integer_data", datatype),
+            )
 
-        metadata.create_all(config.db)
+            metadata.create_all(config.db)
 
-        connection.execute(int_table.insert(), {"integer_data": data})
+            connection.execute(int_table.insert(), {"integer_data": data})
 
-        row = connection.execute(select(int_table.c.integer_data)).first()
+            row = connection.execute(select(int_table.c.integer_data)).first()
 
-        eq_(row, (data,))
+            eq_(row, (data,))
 
-        if util.py3k:
-            assert isinstance(row[0], int)
-        else:
-            assert isinstance(row[0], (long, int))  # noqa
+            if util.py3k:
+                assert isinstance(row[0], int)
+            else:
+                assert isinstance(row[0], (long, int))  # noqa
+
+        return run
 
 
 class CastTypeDecoratorTest(_LiteralRoundTripFixture, fixtures.TestBase):
@@ -481,12 +489,10 @@ class CastTypeDecoratorTest(_LiteralRoundTripFixture, fixtures.TestBase):
 
         return StringAsInt()
 
-    @testing.provide_metadata
-    def test_special_type(self, connection, string_as_int):
+    def test_special_type(self, metadata, connection, string_as_int):
 
         type_ = string_as_int
 
-        metadata = self.metadata
         t = Table("t", metadata, Column("x", type_))
         t.create(connection)
 
@@ -504,42 +510,46 @@ class CastTypeDecoratorTest(_LiteralRoundTripFixture, fixtures.TestBase):
 class NumericTest(_LiteralRoundTripFixture, fixtures.TestBase):
     __backend__ = True
 
-    @testing.emits_warning(r".*does \*not\* support Decimal objects natively")
-    @testing.provide_metadata
-    def _do_test(self, type_, input_, output, filter_=None, check_scale=False):
-        metadata = self.metadata
-        t = Table("t", metadata, Column("x", type_))
-        t.create()
-        with config.db.begin() as conn:
-            conn.execute(t.insert(), [{"x": x} for x in input_])
-
-            result = {row[0] for row in conn.execute(t.select())}
-            output = set(output)
-            if filter_:
-                result = set(filter_(x) for x in result)
-                output = set(filter_(x) for x in output)
-            eq_(result, output)
-            if check_scale:
-                eq_([str(x) for x in result], [str(x) for x in output])
+    @testing.fixture
+    def do_numeric_test(self, metadata):
+        @testing.emits_warning(
+            r".*does \*not\* support Decimal objects natively"
+        )
+        def run(type_, input_, output, filter_=None, check_scale=False):
+            t = Table("t", metadata, Column("x", type_))
+            t.create(testing.db)
+            with config.db.begin() as conn:
+                conn.execute(t.insert(), [{"x": x} for x in input_])
+
+                result = {row[0] for row in conn.execute(t.select())}
+                output = set(output)
+                if filter_:
+                    result = set(filter_(x) for x in result)
+                    output = set(filter_(x) for x in output)
+                eq_(result, output)
+                if check_scale:
+                    eq_([str(x) for x in result], [str(x) for x in output])
+
+        return run
 
     @testing.emits_warning(r".*does \*not\* support Decimal objects natively")
-    def test_render_literal_numeric(self):
-        self._literal_round_trip(
+    def test_render_literal_numeric(self, literal_round_trip):
+        literal_round_trip(
             Numeric(precision=8, scale=4),
             [15.7563, decimal.Decimal("15.7563")],
             [decimal.Decimal("15.7563")],
         )
 
     @testing.emits_warning(r".*does \*not\* support Decimal objects natively")
-    def test_render_literal_numeric_asfloat(self):
-        self._literal_round_trip(
+    def test_render_literal_numeric_asfloat(self, literal_round_trip):
+        literal_round_trip(
             Numeric(precision=8, scale=4, asdecimal=False),
             [15.7563, decimal.Decimal("15.7563")],
             [15.7563],
         )
 
-    def test_render_literal_float(self):
-        self._literal_round_trip(
+    def test_render_literal_float(self, literal_round_trip):
+        literal_round_trip(
             Float(4),
             [15.7563, decimal.Decimal("15.7563")],
             [15.7563],
@@ -547,49 +557,49 @@ class NumericTest(_LiteralRoundTripFixture, fixtures.TestBase):
         )
 
     @testing.requires.precision_generic_float_type
-    def test_float_custom_scale(self):
-        self._do_test(
+    def test_float_custom_scale(self, do_numeric_test):
+        do_numeric_test(
             Float(None, decimal_return_scale=7, asdecimal=True),
             [15.7563827, decimal.Decimal("15.7563827")],
             [decimal.Decimal("15.7563827")],
             check_scale=True,
         )
 
-    def test_numeric_as_decimal(self):
-        self._do_test(
+    def test_numeric_as_decimal(self, do_numeric_test):
+        do_numeric_test(
             Numeric(precision=8, scale=4),
             [15.7563, decimal.Decimal("15.7563")],
             [decimal.Decimal("15.7563")],
         )
 
-    def test_numeric_as_float(self):
-        self._do_test(
+    def test_numeric_as_float(self, do_numeric_test):
+        do_numeric_test(
             Numeric(precision=8, scale=4, asdecimal=False),
             [15.7563, decimal.Decimal("15.7563")],
             [15.7563],
         )
 
     @testing.requires.fetch_null_from_numeric
-    def test_numeric_null_as_decimal(self):
-        self._do_test(Numeric(precision=8, scale=4), [None], [None])
+    def test_numeric_null_as_decimal(self, do_numeric_test):
+        do_numeric_test(Numeric(precision=8, scale=4), [None], [None])
 
     @testing.requires.fetch_null_from_numeric
-    def test_numeric_null_as_float(self):
-        self._do_test(
+    def test_numeric_null_as_float(self, do_numeric_test):
+        do_numeric_test(
             Numeric(precision=8, scale=4, asdecimal=False), [None], [None]
         )
 
     @testing.requires.floats_to_four_decimals
-    def test_float_as_decimal(self):
-        self._do_test(
+    def test_float_as_decimal(self, do_numeric_test):
+        do_numeric_test(
             Float(precision=8, asdecimal=True),
             [15.7563, decimal.Decimal("15.7563"), None],
             [decimal.Decimal("15.7563"), None],
             filter_=lambda n: n is not None and round(n, 4) or None,
         )
 
-    def test_float_as_float(self):
-        self._do_test(
+    def test_float_as_float(self, do_numeric_test):
+        do_numeric_test(
             Float(precision=8),
             [15.7563, decimal.Decimal("15.7563")],
             [15.7563],
@@ -621,7 +631,7 @@ class NumericTest(_LiteralRoundTripFixture, fixtures.TestBase):
         eq_(val, expr)
 
     @testing.requires.precision_numerics_general
-    def test_precision_decimal(self):
+    def test_precision_decimal(self, do_numeric_test):
         numbers = set(
             [
                 decimal.Decimal("54.234246451650"),
@@ -630,10 +640,10 @@ class NumericTest(_LiteralRoundTripFixture, fixtures.TestBase):
             ]
         )
 
-        self._do_test(Numeric(precision=18, scale=12), numbers, numbers)
+        do_numeric_test(Numeric(precision=18, scale=12), numbers, numbers)
 
     @testing.requires.precision_numerics_enotation_large
-    def test_enotation_decimal(self):
+    def test_enotation_decimal(self, do_numeric_test):
         """test exceedingly small decimals.
 
         Decimal reports values with E notation when the exponent
@@ -657,10 +667,10 @@ class NumericTest(_LiteralRoundTripFixture, fixtures.TestBase):
                 decimal.Decimal("696E-12"),
             ]
         )
-        self._do_test(Numeric(precision=18, scale=14), numbers, numbers)
+        do_numeric_test(Numeric(precision=18, scale=14), numbers, numbers)
 
     @testing.requires.precision_numerics_enotation_large
-    def test_enotation_decimal_large(self):
+    def test_enotation_decimal_large(self, do_numeric_test):
         """test exceedingly large decimals."""
 
         numbers = set(
@@ -671,10 +681,10 @@ class NumericTest(_LiteralRoundTripFixture, fixtures.TestBase):
                 decimal.Decimal("00000000000000.1E+12"),
             ]
         )
-        self._do_test(Numeric(precision=25, scale=2), numbers, numbers)
+        do_numeric_test(Numeric(precision=25, scale=2), numbers, numbers)
 
     @testing.requires.precision_numerics_many_significant_digits
-    def test_many_significant_digits(self):
+    def test_many_significant_digits(self, do_numeric_test):
         numbers = set(
             [
                 decimal.Decimal("31943874831932418390.01"),
@@ -682,12 +692,12 @@ class NumericTest(_LiteralRoundTripFixture, fixtures.TestBase):
                 decimal.Decimal("87673.594069654243"),
             ]
         )
-        self._do_test(Numeric(precision=38, scale=12), numbers, numbers)
+        do_numeric_test(Numeric(precision=38, scale=12), numbers, numbers)
 
     @testing.requires.precision_numerics_retains_significant_digits
-    def test_numeric_no_decimal(self):
+    def test_numeric_no_decimal(self, do_numeric_test):
         numbers = set([decimal.Decimal("1.000")])
-        self._do_test(
+        do_numeric_test(
             Numeric(precision=5, scale=3), numbers, numbers, check_scale=True
         )
 
@@ -705,8 +715,8 @@ class BooleanTest(_LiteralRoundTripFixture, fixtures.TablesTest):
             Column("unconstrained_value", Boolean(create_constraint=False)),
         )
 
-    def test_render_literal_bool(self):
-        self._literal_round_trip(Boolean(), [True, False], [True, False])
+    def test_render_literal_bool(self, literal_round_trip):
+        literal_round_trip(Boolean(), [True, False], [True, False])
 
     def test_round_trip(self, connection):
         boolean_table = self.tables.boolean_table
index bbaf5034f85953dd5c8505dfaca6d822c0d8ef1b..d28c36504e7a385a39b1344b28dc13de7be17a8f 100644 (file)
@@ -213,7 +213,7 @@ def provide_metadata(fn, *args, **kw):
     # in py3k this can be moved top level.
     from . import engines
 
-    metadata = schema.MetaData(config.db)
+    metadata = schema.MetaData()
     self = args[0]
     prev_meta = getattr(self, "metadata", None)
     self.metadata = metadata
index 735fb82e4ff1313b74542750ccfe8573eee2b07d..e37ca72dd1490677a72c72288bbef6869675bafc 100644 (file)
@@ -64,7 +64,6 @@ def setup_filters():
         #
         # bound metadaa
         #
-        r"The MetaData.bind argument is deprecated",
         r"The ``bind`` argument for schema methods that invoke SQL ",
         r"The Function.bind argument",
         r"The select.bind argument",
index 2284c1326c86c52a5f58477f14b8aa2ddb775802..7858e697d9345a90a7162dda64094263c821c57e 100644 (file)
@@ -360,7 +360,7 @@ class MemUsageWBackendTest(EnsureZeroed):
         go()
 
     def test_session(self):
-        metadata = MetaData(self.engine)
+        metadata = MetaData()
 
         table1 = Table(
             "mytable",
@@ -387,7 +387,7 @@ class MemUsageWBackendTest(EnsureZeroed):
             Column("col3", Integer, ForeignKey("mytable.col1")),
         )
 
-        metadata.create_all()
+        metadata.create_all(self.engine)
 
         m1 = mapper(
             A,
@@ -402,7 +402,7 @@ class MemUsageWBackendTest(EnsureZeroed):
 
         @profile_memory()
         def go():
-            with Session() as sess:
+            with Session(self.engine) as sess:
                 a1 = A(col2="a1")
                 a2 = A(col2="a2")
                 a3 = A(col2="a3")
@@ -429,7 +429,7 @@ class MemUsageWBackendTest(EnsureZeroed):
 
         go()
 
-        metadata.drop_all()
+        metadata.drop_all(self.engine)
         del m1, m2
         assert_no_mappers()
 
@@ -625,7 +625,7 @@ class MemUsageWBackendTest(EnsureZeroed):
 
     @testing.crashes("mysql+cymysql", "blocking")
     def test_unicode_warnings(self):
-        metadata = MetaData(self.engine)
+        metadata = MetaData()
         table1 = Table(
             "mytable",
             metadata,
@@ -637,7 +637,7 @@ class MemUsageWBackendTest(EnsureZeroed):
             ),
             Column("col2", Unicode(30)),
         )
-        metadata.create_all()
+        metadata.create_all(self.engine)
         i = [1]
 
         # the times here is cranked way up so that we can see
@@ -659,7 +659,7 @@ class MemUsageWBackendTest(EnsureZeroed):
         try:
             go()
         finally:
-            metadata.drop_all()
+            metadata.drop_all(self.engine)
 
     def test_warnings_util(self):
         counter = itertools.count()
@@ -677,7 +677,7 @@ class MemUsageWBackendTest(EnsureZeroed):
         go()
 
     def test_mapper_reset(self):
-        metadata = MetaData(self.engine)
+        metadata = MetaData()
 
         table1 = Table(
             "mytable",
@@ -713,7 +713,7 @@ class MemUsageWBackendTest(EnsureZeroed):
             )
             mapper(B, table2)
 
-            sess = create_session()
+            sess = create_session(self.engine)
             a1 = A(col2="a1")
             a2 = A(col2="a2")
             a3 = A(col2="a3")
@@ -741,15 +741,15 @@ class MemUsageWBackendTest(EnsureZeroed):
             sess.close()
             clear_mappers()
 
-        metadata.create_all()
+        metadata.create_all(self.engine)
         try:
             go()
         finally:
-            metadata.drop_all()
+            metadata.drop_all(self.engine)
         assert_no_mappers()
 
     def test_alias_pathing(self):
-        metadata = MetaData(self.engine)
+        metadata = MetaData()
 
         a = Table(
             "a",
@@ -779,8 +779,8 @@ class MemUsageWBackendTest(EnsureZeroed):
         mapper(ASub, asub, inherits=A, polymorphic_identity="asub")
         mapper(B, b, properties={"as_": relationship(A)})
 
-        metadata.create_all()
-        sess = Session()
+        metadata.create_all(self.engine)
+        sess = Session(self.engine)
         a1 = ASub(data="a1")
         a2 = ASub(data="a2")
         a3 = ASub(data="a3")
@@ -794,7 +794,7 @@ class MemUsageWBackendTest(EnsureZeroed):
         # "dip" again
         @profile_memory(maxtimes=120)
         def go():
-            sess = Session()
+            sess = Session(self.engine)
             sess.query(B).options(subqueryload(B.as_.of_type(ASub))).all()
             sess.close()
             del sess
@@ -802,7 +802,7 @@ class MemUsageWBackendTest(EnsureZeroed):
         try:
             go()
         finally:
-            metadata.drop_all()
+            metadata.drop_all(self.engine)
         clear_mappers()
 
     def test_path_registry(self):
@@ -832,7 +832,7 @@ class MemUsageWBackendTest(EnsureZeroed):
         clear_mappers()
 
     def test_with_inheritance(self):
-        metadata = MetaData(self.engine)
+        metadata = MetaData()
 
         table1 = Table(
             "mytable",
@@ -875,7 +875,7 @@ class MemUsageWBackendTest(EnsureZeroed):
             )
             mapper(B, table2, inherits=A, polymorphic_identity="b")
 
-            sess = create_session()
+            sess = create_session(self.engine)
             a1 = A()
             a2 = A()
             b1 = B(col3="b1")
@@ -896,15 +896,15 @@ class MemUsageWBackendTest(EnsureZeroed):
             del B
             del A
 
-        metadata.create_all()
+        metadata.create_all(self.engine)
         try:
             go()
         finally:
-            metadata.drop_all()
+            metadata.drop_all(self.engine)
         assert_no_mappers()
 
     def test_with_manytomany(self):
-        metadata = MetaData(self.engine)
+        metadata = MetaData()
 
         table1 = Table(
             "mytable",
@@ -956,7 +956,7 @@ class MemUsageWBackendTest(EnsureZeroed):
             )
             mapper(B, table2)
 
-            sess = create_session()
+            sess = create_session(self.engine)
             a1 = A(col2="a1")
             a2 = A(col2="a2")
             b1 = B(col2="b1")
@@ -981,11 +981,11 @@ class MemUsageWBackendTest(EnsureZeroed):
             del B
             del A
 
-        metadata.create_all()
+        metadata.create_all(self.engine)
         try:
             go()
         finally:
-            metadata.drop_all()
+            metadata.drop_all(self.engine)
         assert_no_mappers()
 
     @testing.uses_deprecated()
@@ -1043,7 +1043,7 @@ class MemUsageWBackendTest(EnsureZeroed):
 
     @testing.crashes("mysql+cymysql", "blocking")
     def test_join_cache_deprecated_coercion(self):
-        metadata = MetaData(self.engine)
+        metadata = MetaData()
         table1 = Table(
             "table1",
             metadata,
@@ -1071,8 +1071,8 @@ class MemUsageWBackendTest(EnsureZeroed):
         mapper(
             Foo, table1, properties={"bars": relationship(mapper(Bar, table2))}
         )
-        metadata.create_all()
-        session = sessionmaker()
+        metadata.create_all(self.engine)
+        session = sessionmaker(self.engine)
 
         @profile_memory()
         def go():
@@ -1087,11 +1087,11 @@ class MemUsageWBackendTest(EnsureZeroed):
         try:
             go()
         finally:
-            metadata.drop_all()
+            metadata.drop_all(self.engine)
 
     @testing.crashes("mysql+cymysql", "blocking")
     def test_join_cache(self):
-        metadata = MetaData(self.engine)
+        metadata = MetaData()
         table1 = Table(
             "table1",
             metadata,
@@ -1119,8 +1119,8 @@ class MemUsageWBackendTest(EnsureZeroed):
         mapper(
             Foo, table1, properties={"bars": relationship(mapper(Bar, table2))}
         )
-        metadata.create_all()
-        session = sessionmaker()
+        metadata.create_all(self.engine)
+        session = sessionmaker(self.engine)
 
         @profile_memory()
         def go():
@@ -1132,7 +1132,7 @@ class MemUsageWBackendTest(EnsureZeroed):
         try:
             go()
         finally:
-            metadata.drop_all()
+            metadata.drop_all(self.engine)
 
 
 class CycleTest(_fixtures.FixtureTest):
index d36a0c9e1b17b4594cb87dd3d2a9f9eca5853cb8..c48ff53d4a65577912b15c44aef57ceb871e3fd1 100644 (file)
@@ -3,7 +3,6 @@ import sys
 from sqlalchemy import Column
 from sqlalchemy import create_engine
 from sqlalchemy import Integer
-from sqlalchemy import MetaData
 from sqlalchemy import String
 from sqlalchemy import Table
 from sqlalchemy import testing
@@ -20,17 +19,13 @@ from sqlalchemy.util import u
 NUM_FIELDS = 10
 NUM_RECORDS = 1000
 
-t = t2 = metadata = None
 
-
-class ResultSetTest(fixtures.TestBase, AssertsExecutionResults):
+class ResultSetTest(fixtures.TablesTest, AssertsExecutionResults):
     __backend__ = True
 
     @classmethod
-    def setup_class(cls):
-        global t, t2, metadata
-        metadata = MetaData(testing.db)
-        t = Table(
+    def define_tables(cls, metadata):
+        Table(
             "table1",
             metadata,
             *[
@@ -38,7 +33,7 @@ class ResultSetTest(fixtures.TestBase, AssertsExecutionResults):
                 for fnum in range(NUM_FIELDS)
             ]
         )
-        t2 = Table(
+        Table(
             "table2",
             metadata,
             *[
@@ -47,48 +42,46 @@ class ResultSetTest(fixtures.TestBase, AssertsExecutionResults):
             ]
         )
 
-    def setup(self):
-        with testing.db.begin() as conn:
-            metadata.create_all(conn)
-            conn.execute(
-                t.insert(),
-                [
-                    dict(
-                        ("field%d" % fnum, u("value%d" % fnum))
-                        for fnum in range(NUM_FIELDS)
-                    )
-                    for r_num in range(NUM_RECORDS)
-                ],
-            )
-            conn.execute(
-                t2.insert(),
-                [
-                    dict(
-                        ("field%d" % fnum, u("value%d" % fnum))
-                        for fnum in range(NUM_FIELDS)
-                    )
-                    for r_num in range(NUM_RECORDS)
-                ],
-            )
+    @classmethod
+    def insert_data(cls, connection):
+        conn = connection
+        t, t2 = cls.tables("table1", "table2")
+        conn.execute(
+            t.insert(),
+            [
+                dict(
+                    ("field%d" % fnum, u("value%d" % fnum))
+                    for fnum in range(NUM_FIELDS)
+                )
+                for r_num in range(NUM_RECORDS)
+            ],
+        )
+        conn.execute(
+            t2.insert(),
+            [
+                dict(
+                    ("field%d" % fnum, u("value%d" % fnum))
+                    for fnum in range(NUM_FIELDS)
+                )
+                for r_num in range(NUM_RECORDS)
+            ],
+        )
 
         # warm up type caches
-        with testing.db.connect() as conn:
-            conn.execute(t.select()).fetchall()
-            conn.execute(t2.select()).fetchall()
-            conn.exec_driver_sql(
-                "SELECT %s FROM table1"
-                % (", ".join("field%d" % fnum for fnum in range(NUM_FIELDS)))
-            ).fetchall()
-            conn.exec_driver_sql(
-                "SELECT %s FROM table2"
-                % (", ".join("field%d" % fnum for fnum in range(NUM_FIELDS)))
-            ).fetchall()
-
-    def teardown(self):
-        metadata.drop_all()
+        conn.execute(t.select()).fetchall()
+        conn.execute(t2.select()).fetchall()
+        conn.exec_driver_sql(
+            "SELECT %s FROM table1"
+            % (", ".join("field%d" % fnum for fnum in range(NUM_FIELDS)))
+        ).fetchall()
+        conn.exec_driver_sql(
+            "SELECT %s FROM table2"
+            % (", ".join("field%d" % fnum for fnum in range(NUM_FIELDS)))
+        ).fetchall()
 
     @profiling.function_call_count(variance=0.15)
     def test_string(self):
+        t, t2 = self.tables("table1", "table2")
         with testing.db.connect().execution_options(
             compiled_cache=None
         ) as conn:
@@ -96,6 +89,8 @@ class ResultSetTest(fixtures.TestBase, AssertsExecutionResults):
 
     @profiling.function_call_count(variance=0.15)
     def test_unicode(self):
+        t, t2 = self.tables("table1", "table2")
+
         with testing.db.connect().execution_options(
             compiled_cache=None
         ) as conn:
@@ -119,6 +114,7 @@ class ResultSetTest(fixtures.TestBase, AssertsExecutionResults):
 
     @profiling.function_call_count()
     def test_fetch_by_key_legacy(self):
+        t, t2 = self.tables("table1", "table2")
         with testing.db.connect().execution_options(
             compiled_cache=None
         ) as conn:
@@ -127,6 +123,7 @@ class ResultSetTest(fixtures.TestBase, AssertsExecutionResults):
 
     @profiling.function_call_count()
     def test_fetch_by_key_mappings(self):
+        t, t2 = self.tables("table1", "table2")
         with testing.db.connect().execution_options(
             compiled_cache=None
         ) as conn:
@@ -142,6 +139,8 @@ class ResultSetTest(fixtures.TestBase, AssertsExecutionResults):
     def test_one_or_none(self, one_or_first, rows_present):
         # TODO: this is not testing the ORM level "scalar_mapping"
         # mode which has a different performance profile
+        t, t2 = self.tables("table1", "table2")
+
         with testing.db.connect().execution_options(
             compiled_cache=None
         ) as conn:
@@ -168,6 +167,8 @@ class ResultSetTest(fixtures.TestBase, AssertsExecutionResults):
                 result.close()
 
     def test_contains_doesnt_compile(self):
+        t, t2 = self.tables("table1", "table2")
+
         row = t.select().execute().first()
         c1 = Column("some column", Integer) + Column(
             "some other column", Integer
index 668df6ecbc83967075548e2c097f5a56cde1e351..bbdbe5cca4bbe0f6ad7eb866ac9af5865131191d 100644 (file)
@@ -363,15 +363,14 @@ class FastExecutemanyTest(fixtures.TestBase):
     __backend__ = True
     __requires__ = ("pyodbc_fast_executemany",)
 
-    @testing.provide_metadata
-    def test_flag_on(self):
+    def test_flag_on(self, metadata):
         t = Table(
             "t",
-            self.metadata,
+            metadata,
             Column("id", Integer, primary_key=True),
             Column("data", String(50)),
         )
-        t.create()
+        t.create(testing.db)
 
         eng = engines.testing_engine(options={"fast_executemany": True})
 
@@ -446,10 +445,9 @@ class RealIsolationLevelTest(fixtures.TestBase):
     __only_on__ = "mssql"
     __backend__ = True
 
-    @testing.provide_metadata
-    def test_isolation_level(self):
-        Table("test", self.metadata, Column("id", Integer)).create(
-            checkfirst=True
+    def test_isolation_level(self, metadata):
+        Table("test", metadata, Column("id", Integer)).create(
+            testing.db, checkfirst=True
         )
 
         with testing.db.connect() as c:
index 86c97316ad2a9143d2617dd2b3ae8d7eb197ab9c..86eff0fe488e11c9460c1634180f653797c985b6 100644 (file)
@@ -39,9 +39,8 @@ class ReflectionTest(fixtures.TestBase, ComparesTables, AssertsCompiledSQL):
     __only_on__ = "mssql"
     __backend__ = True
 
-    @testing.provide_metadata
-    def test_basic_reflection(self):
-        meta = self.metadata
+    def test_basic_reflection(self, metadata, connection):
+        meta = metadata
 
         users = Table(
             "engine_users",
@@ -77,59 +76,44 @@ class ReflectionTest(fixtures.TestBase, ComparesTables, AssertsCompiledSQL):
             ),
             Column("email_address", types.String(20)),
         )
-        meta.create_all()
+        meta.create_all(connection)
 
         meta2 = MetaData()
         reflected_users = Table(
-            "engine_users", meta2, autoload_with=testing.db
+            "engine_users", meta2, autoload_with=connection
         )
         reflected_addresses = Table(
             "engine_email_addresses",
             meta2,
-            autoload_with=testing.db,
+            autoload_with=connection,
         )
         self.assert_tables_equal(users, reflected_users)
         self.assert_tables_equal(addresses, reflected_addresses)
 
-    @testing.provide_metadata
-    def _test_specific_type(self, type_obj, ddl):
-        metadata = self.metadata
+    @testing.combinations(
+        (mssql.XML, "XML"),
+        (mssql.IMAGE, "IMAGE"),
+        (mssql.MONEY, "MONEY"),
+        (mssql.NUMERIC(10, 2), "NUMERIC(10, 2)"),
+        (mssql.FLOAT, "FLOAT(53)"),
+        (mssql.REAL, "REAL"),
+        # FLOAT(5) comes back as REAL
+        (mssql.FLOAT(5), "REAL"),
+        argnames="type_obj,ddl",
+    )
+    def test_assorted_types(self, metadata, connection, type_obj, ddl):
 
         table = Table("type_test", metadata, Column("col1", type_obj))
-        table.create()
+        table.create(connection)
 
         m2 = MetaData()
-        table2 = Table("type_test", m2, autoload_with=testing.db)
+        table2 = Table("type_test", m2, autoload_with=connection)
         self.assert_compile(
             schema.CreateTable(table2),
             "CREATE TABLE type_test (col1 %s NULL)" % ddl,
         )
 
-    def test_xml_type(self):
-        self._test_specific_type(mssql.XML, "XML")
-
-    def test_image_type(self):
-        self._test_specific_type(mssql.IMAGE, "IMAGE")
-
-    def test_money_type(self):
-        self._test_specific_type(mssql.MONEY, "MONEY")
-
-    def test_numeric_prec_scale(self):
-        self._test_specific_type(mssql.NUMERIC(10, 2), "NUMERIC(10, 2)")
-
-    def test_float(self):
-        self._test_specific_type(mssql.FLOAT, "FLOAT(53)")
-
-    def test_real(self):
-        self._test_specific_type(mssql.REAL, "REAL")
-
-    def test_float_as_real(self):
-        # FLOAT(5) comes back as REAL
-        self._test_specific_type(mssql.FLOAT(5), "REAL")
-
-    @testing.provide_metadata
-    def test_identity(self):
-        metadata = self.metadata
+    def test_identity(self, metadata, connection):
         table = Table(
             "identity_test",
             metadata,
@@ -144,10 +128,10 @@ class ReflectionTest(fixtures.TestBase, ComparesTables, AssertsCompiledSQL):
         with testing.expect_deprecated(
             "The dialect options 'mssql_identity_start' and"
         ):
-            table.create()
+            table.create(connection)
 
         meta2 = MetaData()
-        table2 = Table("identity_test", meta2, autoload_with=testing.db)
+        table2 = Table("identity_test", meta2, autoload_with=connection)
         eq_(table2.c["col1"].dialect_options["mssql"]["identity_start"], None)
         eq_(
             table2.c["col1"].dialect_options["mssql"]["identity_increment"],
@@ -156,7 +140,6 @@ class ReflectionTest(fixtures.TestBase, ComparesTables, AssertsCompiledSQL):
         eq_(table2.c["col1"].identity.start, 2)
         eq_(table2.c["col1"].identity.increment, 3)
 
-    @testing.provide_metadata
     def test_skip_types(self, connection):
         connection.exec_driver_sql(
             "create table foo (id integer primary key, data xml)"
@@ -189,10 +172,8 @@ class ReflectionTest(fixtures.TestBase, ComparesTables, AssertsCompiledSQL):
                     ],
                 )
 
-    @testing.provide_metadata
-    def test_cross_schema_fk_pk_name_overlaps(self):
+    def test_cross_schema_fk_pk_name_overlaps(self, metadata, connection):
         # test for issue #4228
-        metadata = self.metadata
 
         Table(
             "subject",
@@ -224,9 +205,9 @@ class ReflectionTest(fixtures.TestBase, ComparesTables, AssertsCompiledSQL):
             schema=testing.config.test_schema_2,
         )
 
-        metadata.create_all()
+        metadata.create_all(connection)
 
-        insp = inspect(testing.db)
+        insp = inspect(connection)
         eq_(
             insp.get_foreign_keys("referrer", testing.config.test_schema),
             [
@@ -240,9 +221,9 @@ class ReflectionTest(fixtures.TestBase, ComparesTables, AssertsCompiledSQL):
             ],
         )
 
-    @testing.provide_metadata
-    def test_table_name_that_is_greater_than_16_chars(self):
-        metadata = self.metadata
+    def test_table_name_that_is_greater_than_16_chars(
+        self, metadata, connection
+    ):
         Table(
             "ABCDEFGHIJKLMNOPQRSTUVWXYZ",
             metadata,
@@ -250,14 +231,13 @@ class ReflectionTest(fixtures.TestBase, ComparesTables, AssertsCompiledSQL):
             Column("foo", Integer),
             Index("foo_idx", "foo"),
         )
-        metadata.create_all()
+        metadata.create_all(connection)
 
         t = Table(
-            "ABCDEFGHIJKLMNOPQRSTUVWXYZ", MetaData(), autoload_with=testing.db
+            "ABCDEFGHIJKLMNOPQRSTUVWXYZ", MetaData(), autoload_with=connection
         )
         eq_(t.name, "ABCDEFGHIJKLMNOPQRSTUVWXYZ")
 
-    @testing.provide_metadata
     @testing.combinations(
         ("local_temp", "#tmp", True),
         ("global_temp", "##tmp", True),
@@ -265,12 +245,11 @@ class ReflectionTest(fixtures.TestBase, ComparesTables, AssertsCompiledSQL):
         id_="iaa",
         argnames="table_name, exists",
     )
-    def test_temporary_table(self, connection, table_name, exists):
-        metadata = self.metadata
+    def test_temporary_table(self, metadata, connection, table_name, exists):
         if exists:
             tt = Table(
                 table_name,
-                self.metadata,
+                metadata,
                 Column("id", Integer, primary_key=True),
                 Column("txt", mssql.NVARCHAR(50)),
                 Column("dt2", mssql.DATETIME2),
@@ -309,7 +288,6 @@ class ReflectionTest(fixtures.TestBase, ComparesTables, AssertsCompiledSQL):
                 [(2, "bar", datetime.datetime(2020, 2, 2, 2, 2, 2))],
             )
 
-    @testing.provide_metadata
     @testing.combinations(
         ("local_temp", "#tmp", True),
         ("global_temp", "##tmp", True),
@@ -317,11 +295,13 @@ class ReflectionTest(fixtures.TestBase, ComparesTables, AssertsCompiledSQL):
         id_="iaa",
         argnames="table_name, exists",
     )
-    def test_has_table_temporary(self, connection, table_name, exists):
+    def test_has_table_temporary(
+        self, metadata, connection, table_name, exists
+    ):
         if exists:
             tt = Table(
                 table_name,
-                self.metadata,
+                metadata,
                 Column("id", Integer),
             )
             tt.create(connection)
@@ -329,9 +309,7 @@ class ReflectionTest(fixtures.TestBase, ComparesTables, AssertsCompiledSQL):
         found_it = testing.db.dialect.has_table(connection, table_name)
         eq_(found_it, exists)
 
-    @testing.provide_metadata
-    def test_db_qualified_items(self):
-        metadata = self.metadata
+    def test_db_qualified_items(self, metadata, connection):
         Table("foo", metadata, Column("id", Integer, primary_key=True))
         Table(
             "bar",
@@ -339,17 +317,16 @@ class ReflectionTest(fixtures.TestBase, ComparesTables, AssertsCompiledSQL):
             Column("id", Integer, primary_key=True),
             Column("foo_id", Integer, ForeignKey("foo.id", name="fkfoo")),
         )
-        metadata.create_all()
+        metadata.create_all(connection)
 
-        with testing.db.connect() as c:
-            dbname = c.exec_driver_sql("select db_name()").scalar()
-            owner = c.exec_driver_sql("SELECT user_name()").scalar()
+        dbname = connection.exec_driver_sql("select db_name()").scalar()
+        owner = connection.exec_driver_sql("SELECT user_name()").scalar()
         referred_schema = "%(dbname)s.%(owner)s" % {
             "dbname": dbname,
             "owner": owner,
         }
 
-        inspector = inspect(testing.db)
+        inspector = inspect(connection)
         bar_via_db = inspector.get_foreign_keys("bar", schema=referred_schema)
         eq_(
             bar_via_db,
@@ -364,33 +341,29 @@ class ReflectionTest(fixtures.TestBase, ComparesTables, AssertsCompiledSQL):
             ],
         )
 
-        assert inspect(testing.db).has_table("bar", schema=referred_schema)
+        assert inspect(connection).has_table("bar", schema=referred_schema)
 
         m2 = MetaData()
         Table(
             "bar",
             m2,
             schema=referred_schema,
-            autoload_with=testing.db,
+            autoload_with=connection,
         )
         eq_(m2.tables["%s.foo" % referred_schema].schema, referred_schema)
 
-    @testing.provide_metadata
-    def test_indexes_cols(self):
-        metadata = self.metadata
+    def test_indexes_cols(self, metadata, connection):
 
         t1 = Table("t", metadata, Column("x", Integer), Column("y", Integer))
         Index("foo", t1.c.x, t1.c.y)
-        metadata.create_all()
+        metadata.create_all(connection)
 
         m2 = MetaData()
-        t2 = Table("t", m2, autoload_with=testing.db)
+        t2 = Table("t", m2, autoload_with=connection)
 
         eq_(set(list(t2.indexes)[0].columns), set([t2.c["x"], t2.c.y]))
 
-    @testing.provide_metadata
-    def test_indexes_cols_with_commas(self):
-        metadata = self.metadata
+    def test_indexes_cols_with_commas(self, metadata, connection):
 
         t1 = Table(
             "t",
@@ -399,16 +372,14 @@ class ReflectionTest(fixtures.TestBase, ComparesTables, AssertsCompiledSQL):
             Column("y", Integer),
         )
         Index("foo", t1.c.x, t1.c.y)
-        metadata.create_all()
+        metadata.create_all(connection)
 
         m2 = MetaData()
-        t2 = Table("t", m2, autoload_with=testing.db)
+        t2 = Table("t", m2, autoload_with=connection)
 
         eq_(set(list(t2.indexes)[0].columns), set([t2.c["x, col"], t2.c.y]))
 
-    @testing.provide_metadata
-    def test_indexes_cols_with_spaces(self):
-        metadata = self.metadata
+    def test_indexes_cols_with_spaces(self, metadata, connection):
 
         t1 = Table(
             "t",
@@ -417,16 +388,14 @@ class ReflectionTest(fixtures.TestBase, ComparesTables, AssertsCompiledSQL):
             Column("y", Integer),
         )
         Index("foo", t1.c.x, t1.c.y)
-        metadata.create_all()
+        metadata.create_all(connection)
 
         m2 = MetaData()
-        t2 = Table("t", m2, autoload_with=testing.db)
+        t2 = Table("t", m2, autoload_with=connection)
 
         eq_(set(list(t2.indexes)[0].columns), set([t2.c["x col"], t2.c.y]))
 
-    @testing.provide_metadata
-    def test_indexes_with_filtered(self, connection):
-        metadata = self.metadata
+    def test_indexes_with_filtered(self, metadata, connection):
 
         t1 = Table(
             "t",
@@ -454,8 +423,7 @@ class ReflectionTest(fixtures.TestBase, ComparesTables, AssertsCompiledSQL):
             CreateIndex(idx), "CREATE INDEX idx_x ON t (x) WHERE ([x]='test')"
         )
 
-    @testing.provide_metadata
-    def test_max_ident_in_varchar_not_present(self):
+    def test_max_ident_in_varchar_not_present(self, metadata, connection):
         """test [ticket:3504].
 
         Here we are testing not just that the "max" token comes back
@@ -464,7 +432,6 @@ class ReflectionTest(fixtures.TestBase, ComparesTables, AssertsCompiledSQL):
         pattern however is likely in common use.
 
         """
-        metadata = self.metadata
 
         Table(
             "t",
@@ -475,10 +442,10 @@ class ReflectionTest(fixtures.TestBase, ComparesTables, AssertsCompiledSQL):
             Column("t4", types.LargeBinary("max")),
             Column("t5", types.VARBINARY("max")),
         )
-        metadata.create_all()
-        for col in inspect(testing.db).get_columns("t"):
+        metadata.create_all(connection)
+        for col in inspect(connection).get_columns("t"):
             is_(col["type"].length, None)
-            in_("max", str(col["type"].compile(dialect=testing.db.dialect)))
+            in_("max", str(col["type"].compile(dialect=connection.dialect)))
 
 
 class InfoCoerceUnicodeTest(fixtures.TestBase, AssertsCompiledSQL):
@@ -510,42 +477,35 @@ class InfoCoerceUnicodeTest(fixtures.TestBase, AssertsCompiledSQL):
         )
 
 
-class ReflectHugeViewTest(fixtures.TestBase):
+class ReflectHugeViewTest(fixtures.TablesTest):
     __only_on__ = "mssql"
     __backend__ = True
 
     # crashes on freetds 0.91, not worth it
     __skip_if__ = (lambda: testing.requires.mssql_freetds.enabled,)
 
-    def setup(self):
-        self.col_num = 150
+    @classmethod
+    def define_tables(cls, metadata):
+        col_num = 150
 
-        self.metadata = MetaData(testing.db)
         t = Table(
             "base_table",
-            self.metadata,
+            metadata,
             *[
                 Column("long_named_column_number_%d" % i, Integer)
-                for i in range(self.col_num)
+                for i in range(col_num)
             ]
         )
-        self.view_str = (
+        cls.view_str = (
             view_str
         ) = "CREATE VIEW huge_named_view AS SELECT %s FROM base_table" % (
-            ",".join(
-                "long_named_column_number_%d" % i for i in range(self.col_num)
-            )
+            ",".join("long_named_column_number_%d" % i for i in range(col_num))
         )
         assert len(view_str) > 4000
 
         event.listen(t, "after_create", DDL(view_str))
         event.listen(t, "before_drop", DDL("DROP VIEW huge_named_view"))
 
-        self.metadata.create_all()
-
-    def teardown(self):
-        self.metadata.drop_all()
-
     def test_inspect_view_definition(self):
         inspector = inspect(testing.db)
         view_def = inspector.get_view_definition("huge_named_view")
@@ -712,10 +672,10 @@ class IdentityReflectionTest(fixtures.TablesTest):
         ):
             Table("t%s" % i, metadata, col)
 
-    def test_reflect_identity(self):
-        insp = inspect(testing.db)
+    def test_reflect_identity(self, connection):
+        insp = inspect(connection)
         cols = []
-        for t in self.metadata.tables.keys():
+        for t in self.tables_test_metadata.tables.keys():
             cols.extend(insp.get_columns(t))
         for col in cols:
             is_true("dialect_options" not in col)
index a4a3bedda3c26f67a142190a09b73e02438ec2f8..c2231d105bd6b48eb85d4878040211f8e873b8a3 100644 (file)
@@ -544,9 +544,7 @@ class TypeRoundTripTest(
 
     __backend__ = True
 
-    @testing.provide_metadata
-    def test_decimal_notation(self, connection):
-        metadata = self.metadata
+    def test_decimal_notation(self, metadata, connection):
         numeric_table = Table(
             "numeric_table",
             metadata,
@@ -635,9 +633,7 @@ class TypeRoundTripTest(
             )
             eq_(value, returned)
 
-    @testing.provide_metadata
-    def test_float(self, connection):
-        metadata = self.metadata
+    def test_float(self, metadata, connection):
 
         float_table = Table(
             "float_table",
@@ -693,10 +689,8 @@ class TypeRoundTripTest(
             )
             eq_(value, returned)
 
-    # todo this should suppress warnings, but it does not
     @emits_warning_on("mssql+mxodbc", r".*does not have any indexes.*")
-    @testing.provide_metadata
-    def test_dates(self):
+    def test_dates(self, metadata, connection):
         "Exercise type specification for date types."
 
         columns = [
@@ -727,8 +721,6 @@ class TypeRoundTripTest(
             (mssql.MSDateTime2, [1], {}, "DATETIME2(1)", [">=", (10,)]),
         ]
 
-        metadata = self.metadata
-
         table_args = ["test_mssql_dates", metadata]
         for index, spec in enumerate(columns):
             type_, args, kw, res, requires = spec[0:5]
@@ -738,11 +730,11 @@ class TypeRoundTripTest(
                 or not requires
             ):
                 c = Column("c%s" % index, type_(*args, **kw), nullable=None)
-                testing.db.dialect.type_descriptor(c.type)
+                connection.dialect.type_descriptor(c.type)
                 table_args.append(c)
         dates_table = Table(*table_args)
-        gen = testing.db.dialect.ddl_compiler(
-            testing.db.dialect, schema.CreateTable(dates_table)
+        gen = connection.dialect.ddl_compiler(
+            connection.dialect, schema.CreateTable(dates_table)
         )
         for col in dates_table.c:
             index = int(col.name[1:])
@@ -751,9 +743,9 @@ class TypeRoundTripTest(
                 "%s %s" % (col.name, columns[index][3]),
             )
             self.assert_(repr(col))
-        dates_table.create(checkfirst=True)
+        dates_table.create(connection)
         reflected_dates = Table(
-            "test_mssql_dates", MetaData(), autoload_with=testing.db
+            "test_mssql_dates", MetaData(), autoload_with=connection
         )
         for col in reflected_dates.c:
             self.assert_types_base(col, dates_table.c[col.key])
@@ -915,13 +907,13 @@ class TypeRoundTripTest(
             )
 
     @emits_warning_on("mssql+mxodbc", r".*does not have any indexes.*")
-    @testing.provide_metadata
     @testing.combinations(
         ("legacy_large_types", False),
         ("sql2012_large_types", True, lambda: testing.only_on("mssql >= 11")),
         id_="ia",
+        argnames="deprecate_large_types",
     )
-    def test_binary_reflection(self, deprecate_large_types):
+    def test_binary_reflection(self, metadata, deprecate_large_types):
         "Exercise type specification for binary types."
 
         columns = [
@@ -944,47 +936,45 @@ class TypeRoundTripTest(
             ),
         ]
 
-        metadata = self.metadata
-        metadata.bind = engines.testing_engine(
+        engine = engines.testing_engine(
             options={"deprecate_large_types": deprecate_large_types}
         )
-        table_args = ["test_mssql_binary", metadata]
-        for index, spec in enumerate(columns):
-            type_, args, kw, res = spec
-            table_args.append(
-                Column("c%s" % index, type_(*args, **kw), nullable=None)
+        with engine.begin() as conn:
+            table_args = ["test_mssql_binary", metadata]
+            for index, spec in enumerate(columns):
+                type_, args, kw, res = spec
+                table_args.append(
+                    Column("c%s" % index, type_(*args, **kw), nullable=None)
+                )
+            binary_table = Table(*table_args)
+            metadata.create_all(conn)
+            reflected_binary = Table(
+                "test_mssql_binary", MetaData(), autoload_with=conn
             )
-        binary_table = Table(*table_args)
-        metadata.create_all()
-        reflected_binary = Table(
-            "test_mssql_binary", MetaData(), autoload_with=testing.db
-        )
-        for col, spec in zip(reflected_binary.c, columns):
-            eq_(
-                col.type.compile(dialect=mssql.dialect()),
-                spec[3],
-                "column %s %s != %s"
-                % (
-                    col.key,
+            for col, spec in zip(reflected_binary.c, columns):
+                eq_(
                     col.type.compile(dialect=mssql.dialect()),
                     spec[3],
-                ),
-            )
-            c1 = testing.db.dialect.type_descriptor(col.type).__class__
-            c2 = testing.db.dialect.type_descriptor(
-                binary_table.c[col.name].type
-            ).__class__
-            assert issubclass(
-                c1, c2
-            ), "column %s: %r is not a subclass of %r" % (col.key, c1, c2)
-            if binary_table.c[col.name].type.length:
-                testing.eq_(
-                    col.type.length, binary_table.c[col.name].type.length
+                    "column %s %s != %s"
+                    % (
+                        col.key,
+                        col.type.compile(dialect=conn.dialect),
+                        spec[3],
+                    ),
                 )
+                c1 = conn.dialect.type_descriptor(col.type).__class__
+                c2 = conn.dialect.type_descriptor(
+                    binary_table.c[col.name].type
+                ).__class__
+                assert issubclass(
+                    c1, c2
+                ), "column %s: %r is not a subclass of %r" % (col.key, c1, c2)
+                if binary_table.c[col.name].type.length:
+                    testing.eq_(
+                        col.type.length, binary_table.c[col.name].type.length
+                    )
 
-    @testing.provide_metadata
-    def test_autoincrement(self):
-        metadata = self.metadata
+    def test_autoincrement(self, metadata, connection):
         Table(
             "ai_1",
             metadata,
@@ -1035,7 +1025,7 @@ class TypeRoundTripTest(
             Column("o1", String(1), DefaultClause("x"), primary_key=True),
             Column("o2", String(1), DefaultClause("x"), primary_key=True),
         )
-        metadata.create_all()
+        metadata.create_all(connection)
 
         table_names = [
             "ai_1",
@@ -1050,7 +1040,7 @@ class TypeRoundTripTest(
         mr = MetaData()
 
         for name in table_names:
-            tbl = Table(name, mr, autoload_with=testing.db)
+            tbl = Table(name, mr, autoload_with=connection)
             tbl = metadata.tables[name]
 
             # test that the flag itself reflects appropriately
@@ -1081,24 +1071,23 @@ class TypeRoundTripTest(
                 ]
 
             for counter, engine in enumerate(eng):
-                with engine.begin() as conn:
-                    conn.execute(tbl.insert())
-                    if "int_y" in tbl.c:
-                        eq_(
-                            conn.execute(select(tbl.c.int_y)).scalar(),
-                            counter + 1,
-                        )
-                        assert (
-                            list(conn.execute(tbl.select()).first()).count(
-                                counter + 1
-                            )
-                            == 1
-                        )
-                    else:
-                        assert 1 not in list(
-                            conn.execute(tbl.select()).first()
+                connection.execute(tbl.insert())
+                if "int_y" in tbl.c:
+                    eq_(
+                        connection.execute(select(tbl.c.int_y)).scalar(),
+                        counter + 1,
+                    )
+                    assert (
+                        list(connection.execute(tbl.select()).first()).count(
+                            counter + 1
                         )
-                    conn.execute(tbl.delete())
+                        == 1
+                    )
+                else:
+                    assert 1 not in list(
+                        connection.execute(tbl.select()).first()
+                    )
+                connection.execute(tbl.delete())
 
 
 class StringTest(fixtures.TestBase, AssertsCompiledSQL):
@@ -1144,17 +1133,87 @@ class StringTest(fixtures.TestBase, AssertsCompiledSQL):
         )
 
 
+class MyPickleType(types.TypeDecorator):
+    impl = PickleType
+
+    def process_bind_param(self, value, dialect):
+        if value:
+            value.stuff = "BIND" + value.stuff
+        return value
+
+    def process_result_value(self, value, dialect):
+        if value:
+            value.stuff = value.stuff + "RESULT"
+        return value
+
+
 class BinaryTest(fixtures.TestBase):
     __only_on__ = "mssql"
     __requires__ = ("non_broken_binary",)
     __backend__ = True
 
-    def test_character_binary(self):
-        self._test_round_trip(mssql.MSVarBinary(800), b("some normal data"))
-
-    @testing.provide_metadata
-    def _test_round_trip(
-        self, type_, data, deprecate_large_types=True, expected=None
+    @testing.combinations(
+        (
+            mssql.MSVarBinary(800),
+            b("some normal data"),
+            None,
+            True,
+            None,
+            False,
+        ),
+        (
+            mssql.VARBINARY("max"),
+            "binary_data_one.dat",
+            None,
+            False,
+            None,
+            False,
+        ),
+        (
+            mssql.VARBINARY("max"),
+            "binary_data_one.dat",
+            None,
+            True,
+            None,
+            False,
+        ),
+        (
+            sqltypes.LargeBinary,
+            "binary_data_one.dat",
+            None,
+            False,
+            None,
+            False,
+        ),
+        (sqltypes.LargeBinary, "binary_data_one.dat", None, True, None, False),
+        (mssql.MSImage, "binary_data_one.dat", None, True, None, False),
+        (PickleType, pickleable.Foo("im foo 1"), None, True, None, False),
+        (
+            MyPickleType,
+            pickleable.Foo("im foo 1"),
+            pickleable.Foo("im foo 1", stuff="BINDim stuffRESULT"),
+            True,
+            None,
+            False,
+        ),
+        (types.BINARY(100), "binary_data_one.dat", None, True, 100, False),
+        (types.VARBINARY(100), "binary_data_one.dat", None, True, 100, False),
+        (mssql.VARBINARY(100), "binary_data_one.dat", None, True, 100, False),
+        (types.BINARY(100), "binary_data_two.dat", None, True, 99, True),
+        (types.VARBINARY(100), "binary_data_two.dat", None, True, 99, False),
+        (mssql.VARBINARY(100), "binary_data_two.dat", None, True, 99, False),
+        argnames="type_, data, expected, deprecate_large_types, "
+        "slice_, zeropad",
+    )
+    def test_round_trip(
+        self,
+        metadata,
+        type_,
+        data,
+        expected,
+        deprecate_large_types,
+        slice_,
+        zeropad,
     ):
         if (
             testing.db.dialect.deprecate_large_types
@@ -1168,14 +1227,25 @@ class BinaryTest(fixtures.TestBase):
 
         binary_table = Table(
             "binary_table",
-            self.metadata,
+            metadata,
             Column("id", Integer, primary_key=True),
             Column("data", type_),
         )
         binary_table.create(engine)
 
+        if isinstance(data, str) and (
+            data == "binary_data_one.dat" or data == "binary_data_two.dat"
+        ):
+            data = self._load_stream(data)
+
+        if slice_ is not None:
+            data = data[0:slice_]
+
         if expected is None:
-            expected = data
+            if zeropad:
+                expected = data[0:slice_] + b"\x00"
+            else:
+                expected = data
 
         with engine.begin() as conn:
             conn.execute(binary_table.insert(), data=data)
@@ -1205,95 +1275,6 @@ class BinaryTest(fixtures.TestBase):
                 None,
             )
 
-    def test_plain_pickle(self):
-        self._test_round_trip(PickleType, pickleable.Foo("im foo 1"))
-
-    def test_custom_pickle(self):
-        class MyPickleType(types.TypeDecorator):
-            impl = PickleType
-
-            def process_bind_param(self, value, dialect):
-                if value:
-                    value.stuff = "BIND" + value.stuff
-                return value
-
-            def process_result_value(self, value, dialect):
-                if value:
-                    value.stuff = value.stuff + "RESULT"
-                return value
-
-        data = pickleable.Foo("im foo 1")
-        expected = pickleable.Foo("im foo 1")
-        expected.stuff = "BINDim stuffRESULT"
-
-        self._test_round_trip(MyPickleType, data, expected=expected)
-
-    def test_image(self):
-        stream1 = self._load_stream("binary_data_one.dat")
-        self._test_round_trip(mssql.MSImage, stream1)
-
-    def test_large_binary(self):
-        stream1 = self._load_stream("binary_data_one.dat")
-        self._test_round_trip(sqltypes.LargeBinary, stream1)
-
-    def test_large_legacy_types(self):
-        stream1 = self._load_stream("binary_data_one.dat")
-        self._test_round_trip(
-            sqltypes.LargeBinary, stream1, deprecate_large_types=False
-        )
-
-    def test_mssql_varbinary_max(self):
-        stream1 = self._load_stream("binary_data_one.dat")
-        self._test_round_trip(mssql.VARBINARY("max"), stream1)
-
-    def test_mssql_legacy_varbinary_max(self):
-        stream1 = self._load_stream("binary_data_one.dat")
-        self._test_round_trip(
-            mssql.VARBINARY("max"), stream1, deprecate_large_types=False
-        )
-
-    def test_binary_slice(self):
-        self._test_var_slice(types.BINARY)
-
-    def test_binary_slice_zeropadding(self):
-        self._test_var_slice_zeropadding(types.BINARY, True)
-
-    def test_varbinary_slice(self):
-        self._test_var_slice(types.VARBINARY)
-
-    def test_varbinary_slice_zeropadding(self):
-        self._test_var_slice_zeropadding(types.VARBINARY, False)
-
-    def test_mssql_varbinary_slice(self):
-        self._test_var_slice(mssql.VARBINARY)
-
-    def test_mssql_varbinary_slice_zeropadding(self):
-        self._test_var_slice_zeropadding(mssql.VARBINARY, False)
-
-    def _test_var_slice(self, type_):
-        stream1 = self._load_stream("binary_data_one.dat")
-
-        data = stream1[0:100]
-
-        self._test_round_trip(type_(100), data)
-
-    def _test_var_slice_zeropadding(
-        self, type_, pad, deprecate_large_types=True
-    ):
-        stream2 = self._load_stream("binary_data_two.dat")
-
-        data = stream2[0:99]
-
-        # the type we used here is 100 bytes
-        # so we will get 100 bytes zero-padded
-
-        if pad:
-            paddedstream = stream2[0:99] + b"\x00"
-        else:
-            paddedstream = stream2[0:99]
-
-        self._test_round_trip(type_(100), data, expected=paddedstream)
-
     def _load_stream(self, name, len_=3000):
         fp = open(
             os.path.join(os.path.dirname(__file__), "..", "..", name), "rb"
index 55d88957a31855805667a8df1be69af39d201d65..40617e59cedfe85d2b5f4a2d04fcd81c62b0b193 100644 (file)
@@ -44,15 +44,14 @@ class TypeReflectionTest(fixtures.TestBase):
     __only_on__ = "mysql", "mariadb"
     __backend__ = True
 
-    @testing.provide_metadata
-    def _run_test(self, specs, attributes):
+    def _run_test(self, metadata, connection, specs, attributes):
         columns = [Column("c%i" % (i + 1), t[0]) for i, t in enumerate(specs)]
 
         # Early 5.0 releases seem to report more "general" for columns
         # in a view, e.g. char -> varchar, tinyblob -> mediumblob
         use_views = testing.db.dialect.server_version_info > (5, 0, 10)
 
-        m = self.metadata
+        m = metadata
         Table("mysql_types", m, *columns)
 
         if use_views:
@@ -67,12 +66,12 @@ class TypeReflectionTest(fixtures.TestBase):
             event.listen(
                 m, "before_drop", DDL("DROP VIEW IF EXISTS mysql_types_v")
             )
-        m.create_all()
+        m.create_all(connection)
 
         m2 = MetaData()
-        tables = [Table("mysql_types", m2, autoload_with=testing.db)]
+        tables = [Table("mysql_types", m2, autoload_with=connection)]
         if use_views:
-            tables.append(Table("mysql_types_v", m2, autoload_with=testing.db))
+            tables.append(Table("mysql_types_v", m2, autoload_with=connection))
 
         for table in tables:
             for i, (reflected_col, spec) in enumerate(zip(table.c, specs)):
@@ -95,7 +94,7 @@ class TypeReflectionTest(fixtures.TestBase):
                         ),
                     )
 
-    def test_time_types(self):
+    def test_time_types(self, metadata, connection):
         specs = []
 
         if testing.requires.mysql_fsp.enabled:
@@ -118,20 +117,24 @@ class TypeReflectionTest(fixtures.TestBase):
         )
 
         # note 'timezone' should always be None on both
-        self._run_test(specs, ["fsp", "timezone"])
+        self._run_test(metadata, connection, specs, ["fsp", "timezone"])
 
-    def test_year_types(self):
+    def test_year_types(self, metadata, connection):
         specs = [
             (mysql.YEAR(), mysql.YEAR(display_width=4)),
             (mysql.YEAR(display_width=4), mysql.YEAR(display_width=4)),
         ]
 
         if testing.against("mysql>=8.0.19"):
-            self._run_test(specs, [])
+            self._run_test(metadata, connection, specs, [])
         else:
-            self._run_test(specs, ["display_width"])
+            self._run_test(metadata, connection, specs, ["display_width"])
 
-    def test_string_types(self):
+    def test_string_types(
+        self,
+        metadata,
+        connection,
+    ):
         specs = [
             (String(1), mysql.MSString(1)),
             (String(3), mysql.MSString(3)),
@@ -145,9 +148,9 @@ class TypeReflectionTest(fixtures.TestBase):
             (mysql.MSNChar(2), mysql.MSChar(2)),
             (mysql.MSNVarChar(22), mysql.MSString(22)),
         ]
-        self._run_test(specs, ["length"])
+        self._run_test(metadata, connection, specs, ["length"])
 
-    def test_integer_types(self):
+    def test_integer_types(self, metadata, connection):
         specs = []
         for type_ in [
             mysql.TINYINT,
@@ -201,11 +204,22 @@ class TypeReflectionTest(fixtures.TestBase):
         # on display_width.   need to test this more accurately though
         # for the cases where it does
         if testing.against("mysql >= 8.0.19"):
-            self._run_test(specs, ["unsigned", "zerofill"])
+            self._run_test(
+                metadata, connection, specs, ["unsigned", "zerofill"]
+            )
         else:
-            self._run_test(specs, ["display_width", "unsigned", "zerofill"])
+            self._run_test(
+                metadata,
+                connection,
+                specs,
+                ["display_width", "unsigned", "zerofill"],
+            )
 
-    def test_binary_types(self):
+    def test_binary_types(
+        self,
+        metadata,
+        connection,
+    ):
         specs = [
             (LargeBinary(3), mysql.TINYBLOB()),
             (LargeBinary(), mysql.BLOB()),
@@ -217,13 +231,17 @@ class TypeReflectionTest(fixtures.TestBase):
             (mysql.MSMediumBlob(), mysql.MSMediumBlob()),
             (mysql.MSLongBlob(), mysql.MSLongBlob()),
         ]
-        self._run_test(specs, [])
+        self._run_test(metadata, connection, specs, [])
 
-    def test_legacy_enum_types(self):
+    def test_legacy_enum_types(
+        self,
+        metadata,
+        connection,
+    ):
 
         specs = [(mysql.ENUM("", "fleem"), mysql.ENUM("", "fleem"))]
 
-        self._run_test(specs, ["enums"])
+        self._run_test(metadata, connection, specs, ["enums"])
 
 
 class ReflectionTest(fixtures.TestBase, AssertsCompiledSQL):
@@ -324,8 +342,7 @@ class ReflectionTest(fixtures.TestBase, AssertsCompiledSQL):
             str(reflected.c.c6.server_default.arg).upper(),
         )
 
-    @testing.provide_metadata
-    def test_reflection_with_table_options(self, connection):
+    def test_reflection_with_table_options(self, metadata, connection):
         comment = r"""Comment types type speedily ' " \ '' Fun!"""
         if testing.against("mariadb"):
             kwargs = dict(
@@ -348,7 +365,7 @@ class ReflectionTest(fixtures.TestBase, AssertsCompiledSQL):
 
         def_table = Table(
             "mysql_def",
-            self.metadata,
+            metadata,
             Column("c1", Integer()),
             comment=comment,
             **kwargs
@@ -403,11 +420,10 @@ class ReflectionTest(fixtures.TestBase, AssertsCompiledSQL):
             # This is explicitly ignored when reflecting schema.
             # assert reflected.kwargs['mysql_auto_increment'] == '5'
 
-    @testing.provide_metadata
-    def test_reflection_on_include_columns(self):
+    def test_reflection_on_include_columns(self, metadata, connection):
         """Test reflection of include_columns to be sure they respect case."""
 
-        meta = self.metadata
+        meta = metadata
         case_table = Table(
             "mysql_case",
             meta,
@@ -416,11 +432,11 @@ class ReflectionTest(fixtures.TestBase, AssertsCompiledSQL):
             Column("C3", String(10)),
         )
 
-        case_table.create(testing.db)
+        case_table.create(connection)
         reflected = Table(
             "mysql_case",
             MetaData(),
-            autoload_with=testing.db,
+            autoload_with=connection,
             include_columns=["c1", "C2"],
         )
         for t in case_table, reflected:
@@ -429,16 +445,15 @@ class ReflectionTest(fixtures.TestBase, AssertsCompiledSQL):
         reflected2 = Table(
             "mysql_case",
             MetaData(),
-            autoload_with=testing.db,
+            autoload_with=connection,
             include_columns=["c1", "c2"],
         )
         assert "c1" in reflected2.c.keys()
         for c in ["c2", "C2", "C3"]:
             assert c not in reflected2.c.keys()
 
-    @testing.provide_metadata
-    def test_autoincrement(self):
-        meta = self.metadata
+    def test_autoincrement(self, metadata, connection):
+        meta = metadata
         Table(
             "ai_1",
             meta,
@@ -520,7 +535,7 @@ class ReflectionTest(fixtures.TestBase, AssertsCompiledSQL):
             Column("o2", String(1), DefaultClause("x"), primary_key=True),
             mysql_engine="MyISAM",
         )
-        meta.create_all(testing.db)
+        meta.create_all(connection)
 
         table_names = [
             "ai_1",
@@ -533,30 +548,27 @@ class ReflectionTest(fixtures.TestBase, AssertsCompiledSQL):
             "ai_8",
         ]
         mr = MetaData()
-        mr.reflect(testing.db, only=table_names)
-
-        with testing.db.begin() as conn:
-            for tbl in [mr.tables[name] for name in table_names]:
-                for c in tbl.c:
-                    if c.name.startswith("int_y"):
-                        assert c.autoincrement
-                    elif c.name.startswith("int_n"):
-                        assert not c.autoincrement
-                conn.execute(tbl.insert())
-                if "int_y" in tbl.c:
-                    assert conn.scalar(select(tbl.c.int_y)) == 1
-                    assert (
-                        list(conn.execute(tbl.select()).first()).count(1) == 1
-                    )
-                else:
-                    assert 1 not in list(conn.execute(tbl.select()).first())
+        mr.reflect(connection, only=table_names)
+
+        for tbl in [mr.tables[name] for name in table_names]:
+            for c in tbl.c:
+                if c.name.startswith("int_y"):
+                    assert c.autoincrement
+                elif c.name.startswith("int_n"):
+                    assert not c.autoincrement
+            connection.execute(tbl.insert())
+            if "int_y" in tbl.c:
+                assert connection.scalar(select(tbl.c.int_y)) == 1
+                assert (
+                    list(connection.execute(tbl.select()).first()).count(1)
+                    == 1
+                )
+            else:
+                assert 1 not in list(connection.execute(tbl.select()).first())
 
-    @testing.provide_metadata
-    def test_view_reflection(self, connection):
-        Table(
-            "x", self.metadata, Column("a", Integer), Column("b", String(50))
-        )
-        self.metadata.create_all(connection)
+    def test_view_reflection(self, metadata, connection):
+        Table("x", metadata, Column("a", Integer), Column("b", String(50)))
+        metadata.create_all(connection)
 
         conn = connection
         conn.exec_driver_sql("CREATE VIEW v1 AS SELECT * FROM x")
@@ -570,7 +582,7 @@ class ReflectionTest(fixtures.TestBase, AssertsCompiledSQL):
             "CREATE DEFINER=CURRENT_USER VIEW v4 AS SELECT * FROM x"
         )
 
-        @event.listens_for(self.metadata, "before_drop")
+        @event.listens_for(metadata, "before_drop")
         def cleanup(*arg, **kw):
             with testing.db.begin() as conn:
                 for v in ["v1", "v2", "v3", "v4"]:
@@ -586,9 +598,8 @@ class ReflectionTest(fixtures.TestBase, AssertsCompiledSQL):
                 [("a", mysql.INTEGER), ("b", mysql.VARCHAR)],
             )
 
-    @testing.provide_metadata
-    def test_skip_not_describable(self, connection):
-        @event.listens_for(self.metadata, "before_drop")
+    def test_skip_not_describable(self, metadata, connection):
+        @event.listens_for(metadata, "before_drop")
         def cleanup(*arg, **kw):
             with testing.db.begin() as conn:
                 conn.exec_driver_sql("DROP TABLE IF EXISTS test_t1")
@@ -625,20 +636,18 @@ class ReflectionTest(fixtures.TestBase, AssertsCompiledSQL):
         view_names = dialect.get_view_names(connection, "information_schema")
         self.assert_("TABLES" in view_names)
 
-    @testing.provide_metadata
-    def test_nullable_reflection(self):
+    def test_nullable_reflection(self, metadata, connection):
         """test reflection of NULL/NOT NULL, in particular with TIMESTAMP
         defaults where MySQL is inconsistent in how it reports CREATE TABLE.
 
         """
-        meta = self.metadata
+        meta = metadata
 
         # this is ideally one table, but older MySQL versions choke
         # on the multiple TIMESTAMP columns
-        with testing.db.connect() as c:
-            row = c.exec_driver_sql(
-                "show variables like '%%explicit_defaults_for_timestamp%%'"
-            ).first()
+        row = connection.exec_driver_sql(
+            "show variables like '%%explicit_defaults_for_timestamp%%'"
+        ).first()
         explicit_defaults_for_timestamp = row[1].lower() in ("on", "1", "true")
 
         reflected = []
@@ -659,15 +668,14 @@ class ReflectionTest(fixtures.TestBase, AssertsCompiledSQL):
         ):
             Table("nn_t%d" % idx, meta)  # to allow DROP
 
-            with testing.db.begin() as c:
-                c.exec_driver_sql(
-                    """
-                        CREATE TABLE nn_t%d (
-                            %s
-                        )
-                    """
-                    % (idx, ", \n".join(cols))
-                )
+            connection.exec_driver_sql(
+                """
+                    CREATE TABLE nn_t%d (
+                        %s
+                    )
+                """
+                % (idx, ", \n".join(cols))
+            )
 
             reflected.extend(
                 {
@@ -675,10 +683,10 @@ class ReflectionTest(fixtures.TestBase, AssertsCompiledSQL):
                     "nullable": d["nullable"],
                     "default": d["default"],
                 }
-                for d in inspect(testing.db).get_columns("nn_t%d" % idx)
+                for d in inspect(connection).get_columns("nn_t%d" % idx)
             )
 
-        if testing.db.dialect._is_mariadb_102:
+        if connection.dialect._is_mariadb_102:
             current_timestamp = "current_timestamp()"
         else:
             current_timestamp = "CURRENT_TIMESTAMP"
@@ -726,11 +734,10 @@ class ReflectionTest(fixtures.TestBase, AssertsCompiledSQL):
             ],
         )
 
-    @testing.provide_metadata
-    def test_reflection_with_unique_constraint(self):
-        insp = inspect(testing.db)
+    def test_reflection_with_unique_constraint(self, metadata, connection):
+        insp = inspect(connection)
 
-        meta = self.metadata
+        meta = metadata
         uc_table = Table(
             "mysql_uc",
             meta,
@@ -738,7 +745,7 @@ class ReflectionTest(fixtures.TestBase, AssertsCompiledSQL):
             UniqueConstraint("a", name="uc_a"),
         )
 
-        uc_table.create()
+        uc_table.create(connection)
 
         # MySQL converts unique constraints into unique indexes.
         # separately we get both
@@ -762,11 +769,10 @@ class ReflectionTest(fixtures.TestBase, AssertsCompiledSQL):
         self.assert_(indexes["uc_a"].unique)
         self.assert_("uc_a" not in constraints)
 
-    @testing.provide_metadata
-    def test_reflect_fulltext(self):
+    def test_reflect_fulltext(self, metadata, connection):
         mt = Table(
             "mytable",
-            self.metadata,
+            metadata,
             Column("id", Integer, primary_key=True),
             Column("textdata", String(50)),
             mariadb_engine="InnoDB",
@@ -779,7 +785,7 @@ class ReflectionTest(fixtures.TestBase, AssertsCompiledSQL):
             mysql_prefix="FULLTEXT",
             mariadb_prefix="FULLTEXT",
         )
-        self.metadata.create_all(testing.db)
+        metadata.create_all(connection)
 
         mt = Table("mytable", MetaData(), autoload_with=testing.db)
         idx = list(mt.indexes)[0]
@@ -791,11 +797,14 @@ class ReflectionTest(fixtures.TestBase, AssertsCompiledSQL):
         )
 
     @testing.requires.mysql_ngram_fulltext
-    @testing.provide_metadata
-    def test_reflect_fulltext_comment(self):
+    def test_reflect_fulltext_comment(
+        self,
+        metadata,
+        connection,
+    ):
         mt = Table(
             "mytable",
-            self.metadata,
+            metadata,
             Column("id", Integer, primary_key=True),
             Column("textdata", String(50)),
             mysql_engine="InnoDB",
@@ -807,9 +816,9 @@ class ReflectionTest(fixtures.TestBase, AssertsCompiledSQL):
             mysql_with_parser="ngram",
         )
 
-        self.metadata.create_all(testing.db)
+        metadata.create_all(connection)
 
-        mt = Table("mytable", MetaData(), autoload_with=testing.db)
+        mt = Table("mytable", MetaData(), autoload_with=connection)
         idx = list(mt.indexes)[0]
         eq_(idx.name, "textdata_ix")
         eq_(idx.dialect_options["mysql"]["prefix"], "FULLTEXT")
@@ -820,16 +829,15 @@ class ReflectionTest(fixtures.TestBase, AssertsCompiledSQL):
             "(textdata) WITH PARSER ngram",
         )
 
-    @testing.provide_metadata
-    def test_non_column_index(self):
-        m1 = self.metadata
+    def test_non_column_index(self, metadata, connection):
+        m1 = metadata
         t1 = Table(
             "add_ix", m1, Column("x", String(50)), mysql_engine="InnoDB"
         )
         Index("foo_idx", t1.c.x.desc())
-        m1.create_all()
+        m1.create_all(connection)
 
-        insp = inspect(testing.db)
+        insp = inspect(connection)
         eq_(
             insp.get_indexes("add_ix"),
             [{"name": "foo_idx", "column_names": ["x"], "unique": False}],
@@ -950,12 +958,13 @@ class ReflectionTest(fixtures.TestBase, AssertsCompiledSQL):
                 ],
             )
 
-    @testing.provide_metadata
-    def test_case_sensitive_column_constraint_reflection(self):
+    def test_case_sensitive_column_constraint_reflection(
+        self, metadata, connection
+    ):
         # test for issue #4344 which works around
         # MySQL 8.0 bug https://bugs.mysql.com/bug.php?id=88718
 
-        m1 = self.metadata
+        m1 = metadata
 
         Table(
             "Track",
@@ -987,9 +996,9 @@ class ReflectionTest(fixtures.TestBase, AssertsCompiledSQL):
             ),
             mysql_engine="InnoDB",
         )
-        m1.create_all()
+        m1.create_all(connection)
 
-        if testing.db.dialect._casing in (1, 2):
+        if connection.dialect._casing in (1, 2):
             # the original test for the 88718 fix here in [ticket:4344]
             # actually set  referred_table='track', with the wrong casing!
             # this test was never run. with [ticket:4751], I've gone through
@@ -999,7 +1008,7 @@ class ReflectionTest(fixtures.TestBase, AssertsCompiledSQL):
             # lower case is also an 8.0 regression.
 
             eq_(
-                inspect(testing.db).get_foreign_keys("PlaylistTrack"),
+                inspect(connection).get_foreign_keys("PlaylistTrack"),
                 [
                     {
                         "name": "FK_PlaylistTTrackId",
@@ -1022,7 +1031,7 @@ class ReflectionTest(fixtures.TestBase, AssertsCompiledSQL):
         else:
             eq_(
                 sorted(
-                    inspect(testing.db).get_foreign_keys("PlaylistTrack"),
+                    inspect(connection).get_foreign_keys("PlaylistTrack"),
                     key=lambda elem: elem["name"],
                 ),
                 [
@@ -1046,12 +1055,13 @@ class ReflectionTest(fixtures.TestBase, AssertsCompiledSQL):
             )
 
     @testing.requires.mysql_fully_case_sensitive
-    @testing.provide_metadata
-    def test_case_sensitive_reflection_dual_case_references(self):
+    def test_case_sensitive_reflection_dual_case_references(
+        self, metadata, connection
+    ):
         # this tests that within the fix we do for MySQL bug
         # 88718, we don't do case-insensitive logic if the backend
         # is case sensitive
-        m = self.metadata
+        m = metadata
         Table(
             "t1",
             m,
@@ -1074,12 +1084,12 @@ class ReflectionTest(fixtures.TestBase, AssertsCompiledSQL):
             Column("cap_t1id", ForeignKey("T1.Some_Id", name="cap_t1id_fk")),
             mysql_engine="InnoDB",
         )
-        m.create_all(testing.db)
+        m.create_all(connection)
 
         eq_(
             dict(
                 (rec["name"], rec)
-                for rec in inspect(testing.db).get_foreign_keys("t2")
+                for rec in inspect(connection).get_foreign_keys("t2")
             ),
             {
                 "cap_t1id_fk": {
index f4621dce332596aedc8f0a3f204b7fe85efd6f7d..3e8aa0fb54f0680f7914a1e2545291de8876aef7 100644 (file)
@@ -474,11 +474,10 @@ class TypeRoundTripTest(fixtures.TestBase, AssertsExecutionResults):
 
     # fixed in mysql-connector as of 2.0.1,
     # see http://bugs.mysql.com/bug.php?id=73266
-    @testing.provide_metadata
-    def test_precision_float_roundtrip(self, connection):
+    def test_precision_float_roundtrip(self, metadata, connection):
         t = Table(
             "t",
-            self.metadata,
+            metadata,
             Column(
                 "scale_value",
                 mysql.DOUBLE(precision=15, scale=12, asdecimal=True),
@@ -503,11 +502,10 @@ class TypeRoundTripTest(fixtures.TestBase, AssertsExecutionResults):
         eq_(result, decimal.Decimal("45.768392065789"))
 
     @testing.only_if("mysql")
-    @testing.provide_metadata
-    def test_charset_collate_table(self, connection):
+    def test_charset_collate_table(self, metadata, connection):
         t = Table(
             "foo",
-            self.metadata,
+            metadata,
             Column("id", Integer),
             Column("data", UnicodeText),
             mysql_default_charset="utf8",
@@ -657,19 +655,21 @@ class TypeRoundTripTest(fixtures.TestBase, AssertsExecutionResults):
         impl = TIMESTAMP
 
     @testing.combinations(
-        (TIMESTAMP,), (MyTime(),), (String().with_variant(TIMESTAMP, "mysql"),)
+        (TIMESTAMP,),
+        (MyTime(),),
+        (String().with_variant(TIMESTAMP, "mysql"),),
+        argnames="type_",
     )
     @testing.requires.mysql_zero_date
-    @testing.provide_metadata
-    def test_timestamp_nullable(self, type_):
+    def test_timestamp_nullable(self, metadata, connection, type_):
         ts_table = Table(
             "mysql_timestamp",
-            self.metadata,
+            metadata,
             Column("t1", type_),
             Column("t2", type_, nullable=False),
             mysql_engine="InnoDB",
         )
-        self.metadata.create_all()
+        metadata.create_all(connection)
 
         # TIMESTAMP without NULL inserts current time when passed
         # NULL.  when not passed, generates 0000-00-00 quite
@@ -687,25 +687,23 @@ class TypeRoundTripTest(fixtures.TestBase, AssertsExecutionResults):
             else:
                 return dt
 
-        with testing.db.begin() as conn:
-            now = conn.exec_driver_sql("select now()").scalar()
-            conn.execute(ts_table.insert(), {"t1": now, "t2": None})
-            conn.execute(ts_table.insert(), {"t1": None, "t2": None})
-            conn.execute(ts_table.insert(), {"t2": None})
+        now = connection.exec_driver_sql("select now()").scalar()
+        connection.execute(ts_table.insert(), {"t1": now, "t2": None})
+        connection.execute(ts_table.insert(), {"t1": None, "t2": None})
+        connection.execute(ts_table.insert(), {"t2": None})
 
-            new_now = conn.exec_driver_sql("select now()").scalar()
+        new_now = connection.exec_driver_sql("select now()").scalar()
 
-            eq_(
-                [
-                    tuple([normalize(dt) for dt in row])
-                    for row in conn.execute(ts_table.select())
-                ],
-                [(now, now), (None, now), (None, now)],
-            )
+        eq_(
+            [
+                tuple([normalize(dt) for dt in row])
+                for row in connection.execute(ts_table.select())
+            ],
+            [(now, now), (None, now), (None, now)],
+        )
 
-    @testing.provide_metadata
-    def test_time_roundtrip(self, connection):
-        t = Table("mysql_time", self.metadata, Column("t1", mysql.TIME()))
+    def test_time_roundtrip(self, metadata, connection):
+        t = Table("mysql_time", metadata, Column("t1", mysql.TIME()))
 
         t.create(connection)
 
@@ -715,13 +713,12 @@ class TypeRoundTripTest(fixtures.TestBase, AssertsExecutionResults):
             datetime.time(8, 37, 35),
         )
 
-    @testing.provide_metadata
-    def test_year(self, connection):
+    def test_year(self, metadata, connection):
         """Exercise YEAR."""
 
         year_table = Table(
             "mysql_year",
-            self.metadata,
+            metadata,
             Column("y1", mysql.MSYear),
             Column("y2", mysql.MSYear),
             Column("y3", mysql.MSYear),
@@ -748,26 +745,22 @@ class JSONTest(fixtures.TestBase):
     __only_on__ = "mysql", "mariadb"
     __backend__ = True
 
-    @testing.provide_metadata
     @testing.requires.reflects_json_type
-    def test_reflection(self, connection):
+    def test_reflection(self, metadata, connection):
 
-        Table("mysql_json", self.metadata, Column("foo", mysql.JSON))
-        self.metadata.create_all(connection)
+        Table("mysql_json", metadata, Column("foo", mysql.JSON))
+        metadata.create_all(connection)
 
         reflected = Table("mysql_json", MetaData(), autoload_with=connection)
         is_(reflected.c.foo.type._type_affinity, sqltypes.JSON)
         assert isinstance(reflected.c.foo.type, mysql.JSON)
 
-    @testing.provide_metadata
-    def test_rudimental_round_trip(self, connection):
+    def test_rudimental_round_trip(self, metadata, connection):
         # note that test_suite has many more JSON round trip tests
         # using the backend-agnostic JSON type
 
-        mysql_json = Table(
-            "mysql_json", self.metadata, Column("foo", mysql.JSON)
-        )
-        self.metadata.create_all(connection)
+        mysql_json = Table("mysql_json", metadata, Column("foo", mysql.JSON))
+        metadata.create_all(connection)
 
         value = {"json": {"foo": "bar"}, "recs": ["one", "two"]}
 
@@ -804,8 +797,7 @@ class EnumSetTest(
     def get_enum_string_values(some_enum):
         return [str(v.value) for v in some_enum.__members__.values()]
 
-    @testing.provide_metadata
-    def test_enum(self, connection):
+    def test_enum(self, metadata, connection):
         """Exercise the ENUM type."""
 
         e1 = mysql.ENUM("a", "b")
@@ -815,7 +807,7 @@ class EnumSetTest(
 
         enum_table = Table(
             "mysql_enum",
-            self.metadata,
+            metadata,
             Column("e1", e1),
             Column("e2", e2, nullable=False),
             Column(
@@ -857,11 +849,14 @@ class EnumSetTest(
 
         assert_raises(
             exc.DBAPIError,
-            enum_table.insert().execute,
-            e1=None,
-            e2=None,
-            e3=None,
-            e4=None,
+            connection.execute,
+            enum_table.insert(),
+            dict(
+                e1=None,
+                e2=None,
+                e3=None,
+                e4=None,
+            ),
         )
 
         assert enum_table.c.e2generic.type.validate_strings
@@ -948,7 +943,7 @@ class EnumSetTest(
 
         eq_(res, expected)
 
-    def _set_fixture_one(self):
+    def _set_fixture_one(self, metadata):
         e1 = mysql.SET("a", "b")
         e2 = mysql.SET("a", "b")
         e4 = mysql.SET("'a'", "b")
@@ -956,7 +951,7 @@ class EnumSetTest(
 
         set_table = Table(
             "mysql_set",
-            self.metadata,
+            metadata,
             Column("e1", e1),
             Column("e2", e2, nullable=False),
             Column("e3", mysql.SET("a", "b")),
@@ -965,18 +960,16 @@ class EnumSetTest(
         )
         return set_table
 
-    def test_set_colspec(self):
-        self.metadata = MetaData()
-        set_table = self._set_fixture_one()
+    def test_set_colspec(self, metadata):
+        set_table = self._set_fixture_one(metadata)
         eq_(colspec(set_table.c.e1), "e1 SET('a','b')")
         eq_(colspec(set_table.c.e2), "e2 SET('a','b') NOT NULL")
         eq_(colspec(set_table.c.e3), "e3 SET('a','b')")
         eq_(colspec(set_table.c.e4), "e4 SET('''a''','b')")
         eq_(colspec(set_table.c.e5), "e5 SET('a','b')")
 
-    @testing.provide_metadata
-    def test_no_null(self, connection):
-        set_table = self._set_fixture_one()
+    def test_no_null(self, metadata, connection):
+        set_table = self._set_fixture_one(metadata)
         set_table.create(connection)
         assert_raises(
             exc.DBAPIError,
@@ -986,11 +979,10 @@ class EnumSetTest(
         )
 
     @testing.requires.mysql_non_strict
-    @testing.provide_metadata
-    def test_empty_set_no_empty_string(self, connection):
+    def test_empty_set_no_empty_string(self, metadata, connection):
         t = Table(
             "t",
-            self.metadata,
+            metadata,
             Column("id", Integer),
             Column("data", mysql.SET("a", "b")),
         )
@@ -1020,11 +1012,10 @@ class EnumSetTest(
             "",
         )
 
-    @testing.provide_metadata
-    def test_empty_set_empty_string(self, connection):
+    def test_empty_set_empty_string(self, metadata, connection):
         t = Table(
             "t",
-            self.metadata,
+            metadata,
             Column("id", Integer),
             Column("data", mysql.SET("a", "b", "", retrieve_as_bitwise=True)),
         )
@@ -1048,9 +1039,8 @@ class EnumSetTest(
             ],
         )
 
-    @testing.provide_metadata
-    def test_string_roundtrip(self, connection):
-        set_table = self._set_fixture_one()
+    def test_string_roundtrip(self, metadata, connection):
+        set_table = self._set_fixture_one(metadata)
         set_table.create(connection)
         connection.execute(
             set_table.insert(),
@@ -1081,11 +1071,10 @@ class EnumSetTest(
 
         eq_(res, expected)
 
-    @testing.provide_metadata
-    def test_unicode_roundtrip(self, connection):
+    def test_unicode_roundtrip(self, metadata, connection):
         set_table = Table(
             "t",
-            self.metadata,
+            metadata,
             Column("id", Integer, primary_key=True),
             Column("data", mysql.SET(u("réveillé"), u("drôle"), u("S’il"))),
         )
@@ -1099,9 +1088,8 @@ class EnumSetTest(
 
         eq_(row, (1, set([u("réveillé"), u("drôle")])))
 
-    @testing.provide_metadata
-    def test_int_roundtrip(self, connection):
-        set_table = self._set_fixture_one()
+    def test_int_roundtrip(self, metadata, connection):
+        set_table = self._set_fixture_one(metadata)
         set_table.create(connection)
         connection.execute(
             set_table.insert(), dict(e1=1, e2=2, e3=3, e4=3, e5=0)
@@ -1118,11 +1106,10 @@ class EnumSetTest(
             ),
         )
 
-    @testing.provide_metadata
-    def test_set_roundtrip_plus_reflection(self, connection):
+    def test_set_roundtrip_plus_reflection(self, metadata, connection):
         set_table = Table(
             "mysql_set",
-            self.metadata,
+            metadata,
             Column("s1", mysql.SET("dq", "sq")),
             Column("s2", mysql.SET("a")),
             Column("s3", mysql.SET("5", "7", "9")),
@@ -1166,9 +1153,7 @@ class EnumSetTest(
 
         eq_(list(rows), [({"5"},), ({"7", "5"},)])
 
-    @testing.provide_metadata
-    def test_unicode_enum(self, connection):
-        metadata = self.metadata
+    def test_unicode_enum(self, metadata, connection):
         t1 = Table(
             "table",
             metadata,
@@ -1232,12 +1217,11 @@ class EnumSetTest(
             "'y', 'z')))",
         )
 
-    @testing.provide_metadata
-    def test_enum_parse(self, connection):
+    def test_enum_parse(self, metadata, connection):
 
         enum_table = Table(
             "mysql_enum",
-            self.metadata,
+            metadata,
             Column("e1", mysql.ENUM("a")),
             Column("e2", mysql.ENUM("")),
             Column("e3", mysql.ENUM("a")),
@@ -1261,11 +1245,10 @@ class EnumSetTest(
             eq_(t.c.e6.type.enums, ["", "a"])
             eq_(t.c.e7.type.enums, ["", "'a'", "b'b", "'"])
 
-    @testing.provide_metadata
-    def test_set_parse(self, connection):
+    def test_set_parse(self, metadata, connection):
         set_table = Table(
             "mysql_set",
-            self.metadata,
+            metadata,
             Column("e1", mysql.SET("a")),
             Column("e2", mysql.SET("", retrieve_as_bitwise=True)),
             Column("e3", mysql.SET("a")),
@@ -1301,11 +1284,10 @@ class EnumSetTest(
             eq_(t.c.e7.type.values, ("", "'a'", "b'b", "'"))
 
     @testing.requires.mysql_non_strict
-    @testing.provide_metadata
-    def test_broken_enum_returns_blanks(self, connection):
+    def test_broken_enum_returns_blanks(self, metadata, connection):
         t = Table(
             "enum_missing",
-            self.metadata,
+            metadata,
             Column("id", Integer, primary_key=True),
             Column("e1", sqltypes.Enum("one", "two", "three")),
             Column("e2", mysql.ENUM("one", "two", "three")),
index fa66a64d5671e8cb5f6d1cb960fe709d63bc9db5..df87fe89fc864b3babb630bf29f22a9f68a433c9 100644 (file)
@@ -481,11 +481,9 @@ class QuotedBindRoundTripTest(fixtures.TestBase):
     __only_on__ = "oracle"
     __backend__ = True
 
-    @testing.provide_metadata
-    def test_table_round_trip(self, connection):
+    def test_table_round_trip(self, metadata, connection):
         oracle.RESERVED_WORDS.discard("UNION")
 
-        metadata = self.metadata
         table = Table(
             "t1",
             metadata,
@@ -496,7 +494,7 @@ class QuotedBindRoundTripTest(fixtures.TestBase):
             # is set
             Column("union", Integer, quote=True),
         )
-        metadata.create_all()
+        metadata.create_all(connection)
 
         connection.execute(
             table.insert(), {"option": 1, "plain": 1, "union": 1}
@@ -516,17 +514,15 @@ class QuotedBindRoundTripTest(fixtures.TestBase):
             4,
         )
 
-    @testing.provide_metadata
-    def test_numeric_bind_in_crud(self, connection):
-        t = Table("asfd", self.metadata, Column("100K", Integer))
+    def test_numeric_bind_in_crud(self, metadata, connection):
+        t = Table("asfd", metadata, Column("100K", Integer))
         t.create(connection)
 
         connection.execute(t.insert(), {"100K": 10})
         eq_(connection.scalar(t.select()), 10)
 
-    @testing.provide_metadata
-    def test_expanding_quote_roundtrip(self, connection):
-        t = Table("asfd", self.metadata, Column("foo", Integer))
+    def test_expanding_quote_roundtrip(self, metadata, connection):
+        t = Table("asfd", metadata, Column("foo", Integer))
         t.create(connection)
 
         connection.execute(
@@ -747,9 +743,7 @@ class ExecuteTest(fixtures.TestBase):
         finally:
             seq.drop(connection)
 
-    @testing.provide_metadata
-    def test_limit_offset_for_update(self, connection):
-        metadata = self.metadata
+    def test_limit_offset_for_update(self, metadata, connection):
         # oracle can't actually do the ROWNUM thing with FOR UPDATE
         # very well.
 
@@ -794,15 +788,13 @@ class UnicodeSchemaTest(fixtures.TestBase):
     __only_on__ = "oracle"
     __backend__ = True
 
-    @testing.provide_metadata
-    def test_quoted_column_non_unicode(self, connection):
-        metadata = self.metadata
+    def test_quoted_column_non_unicode(self, metadata, connection):
         table = Table(
             "atable",
             metadata,
             Column("_underscorecolumn", Unicode(255), primary_key=True),
         )
-        metadata.create_all()
+        metadata.create_all(connection)
 
         connection.execute(table.insert(), {"_underscorecolumn": u("’é")})
         result = connection.execute(
@@ -810,15 +802,13 @@ class UnicodeSchemaTest(fixtures.TestBase):
         ).scalar()
         eq_(result, u("’é"))
 
-    @testing.provide_metadata
-    def test_quoted_column_unicode(self, connection):
-        metadata = self.metadata
+    def test_quoted_column_unicode(self, metadata, connection):
         table = Table(
             "atable",
             metadata,
             Column(u("méil"), Unicode(255), primary_key=True),
         )
-        metadata.create_all()
+        metadata.create_all(connection)
 
         connection.execute(table.insert(), {u("méil"): u("’é")})
         result = connection.execute(
index 2e515556f37db4330c561a7698a51e77543548a8..81e4e4ab5aa3b0de6ddd861fefd4ffe2b0c7f1e3 100644 (file)
@@ -103,10 +103,9 @@ class MultiSchemaTest(fixtures.TestBase, AssertsCompiledSQL):
                 if stmt.strip():
                     conn.exec_driver_sql(stmt)
 
-    @testing.provide_metadata
-    def test_create_same_names_explicit_schema(self):
+    def test_create_same_names_explicit_schema(self, metadata, connection):
         schema = testing.db.dialect.default_schema_name
-        meta = self.metadata
+        meta = metadata
         parent = Table(
             "parent",
             meta,
@@ -120,11 +119,10 @@ class MultiSchemaTest(fixtures.TestBase, AssertsCompiledSQL):
             Column("pid", Integer, ForeignKey("%s.parent.pid" % schema)),
             schema=schema,
         )
-        with testing.db.begin() as conn:
-            meta.create_all(conn)
-            conn.execute(parent.insert(), {"pid": 1})
-            conn.execute(child.insert(), {"cid": 1, "pid": 1})
-            eq_(conn.execute(child.select()).fetchall(), [(1, 1)])
+        meta.create_all(connection)
+        connection.execute(parent.insert(), {"pid": 1})
+        connection.execute(child.insert(), {"cid": 1, "pid": 1})
+        eq_(connection.execute(child.select()).fetchall(), [(1, 1)])
 
     def test_reflect_alt_table_owner_local_synonym(self):
         meta = MetaData()
@@ -158,9 +156,8 @@ class MultiSchemaTest(fixtures.TestBase, AssertsCompiledSQL):
             % {"test_schema": testing.config.test_schema},
         )
 
-    @testing.provide_metadata
-    def test_create_same_names_implicit_schema(self, connection):
-        meta = self.metadata
+    def test_create_same_names_implicit_schema(self, metadata, connection):
+        meta = metadata
         parent = Table(
             "parent", meta, Column("pid", Integer, primary_key=True)
         )
@@ -205,18 +202,17 @@ class MultiSchemaTest(fixtures.TestBase, AssertsCompiledSQL):
         # check table comment (#5146)
         eq_(parent.comment, "my table comment")
 
-    @testing.provide_metadata
-    def test_reflect_table_comment(self):
+    def test_reflect_table_comment(self, metadata, connection):
         local_parent = Table(
             "parent",
-            self.metadata,
+            metadata,
             Column("q", Integer),
             comment="my local comment",
         )
 
-        local_parent.create(testing.db)
+        local_parent.create(connection)
 
-        insp = inspect(testing.db)
+        insp = inspect(connection)
         eq_(
             insp.get_table_comment(
                 "parent", schema=testing.config.test_schema
@@ -231,7 +227,7 @@ class MultiSchemaTest(fixtures.TestBase, AssertsCompiledSQL):
         )
         eq_(
             insp.get_table_comment(
-                "parent", schema=testing.db.dialect.default_schema_name
+                "parent", schema=connection.dialect.default_schema_name
             ),
             {"text": "my local comment"},
         )
@@ -347,28 +343,28 @@ class ConstraintTest(fixtures.TablesTest):
     def define_tables(cls, metadata):
         Table("foo", metadata, Column("id", Integer, primary_key=True))
 
-    def test_oracle_has_no_on_update_cascade(self):
+    def test_oracle_has_no_on_update_cascade(self, connection):
         bar = Table(
             "bar",
-            self.metadata,
+            self.tables_test_metadata,
             Column("id", Integer, primary_key=True),
             Column(
                 "foo_id", Integer, ForeignKey("foo.id", onupdate="CASCADE")
             ),
         )
-        assert_raises(exc.SAWarning, bar.create)
+        assert_raises(exc.SAWarning, bar.create, connection)
 
         bat = Table(
             "bat",
-            self.metadata,
+            self.tables_test_metadata,
             Column("id", Integer, primary_key=True),
             Column("foo_id", Integer),
             ForeignKeyConstraint(["foo_id"], ["foo.id"], onupdate="CASCADE"),
         )
-        assert_raises(exc.SAWarning, bat.create)
+        assert_raises(exc.SAWarning, bat.create, connection)
 
-    def test_reflect_check_include_all(self):
-        insp = inspect(testing.db)
+    def test_reflect_check_include_all(self, connection):
+        insp = inspect(connection)
         eq_(insp.get_check_constraints("foo"), [])
         eq_(
             [
@@ -446,9 +442,9 @@ class DontReflectIOTTest(fixtures.TestBase):
         with testing.db.begin() as conn:
             conn.exec_driver_sql("drop table admin_docindex")
 
-    def test_reflect_all(self):
-        m = MetaData(testing.db)
-        m.reflect()
+    def test_reflect_all(self, connection):
+        m = MetaData()
+        m.reflect(connection)
         eq_(set(t.name for t in m.tables.values()), set(["admin_docindex"]))
 
 
@@ -477,10 +473,8 @@ class TableReflectionTest(fixtures.TestBase):
     __only_on__ = "oracle"
     __backend__ = True
 
-    @testing.provide_metadata
     @testing.fails_if(all_tables_compression_missing)
-    def test_reflect_basic_compression(self):
-        metadata = self.metadata
+    def test_reflect_basic_compression(self, metadata, connection):
 
         tbl = Table(
             "test_compress",
@@ -488,30 +482,27 @@ class TableReflectionTest(fixtures.TestBase):
             Column("data", Integer, primary_key=True),
             oracle_compress=True,
         )
-        metadata.create_all()
+        metadata.create_all(connection)
 
         m2 = MetaData()
 
-        tbl = Table("test_compress", m2, autoload_with=testing.db)
+        tbl = Table("test_compress", m2, autoload_with=connection)
         # Don't hardcode the exact value, but it must be non-empty
         assert tbl.dialect_options["oracle"]["compress"]
 
-    @testing.provide_metadata
     @testing.fails_if(all_tables_compress_for_missing)
-    def test_reflect_oltp_compression(self):
-        metadata = self.metadata
-
+    def test_reflect_oltp_compression(self, metadata, connection):
         tbl = Table(
             "test_compress",
             metadata,
             Column("data", Integer, primary_key=True),
             oracle_compress="OLTP",
         )
-        metadata.create_all()
+        metadata.create_all(connection)
 
         m2 = MetaData()
 
-        tbl = Table("test_compress", m2, autoload_with=testing.db)
+        tbl = Table("test_compress", m2, autoload_with=connection)
         assert tbl.dialect_options["oracle"]["compress"] == "OLTP"
 
 
@@ -519,10 +510,7 @@ class RoundTripIndexTest(fixtures.TestBase):
     __only_on__ = "oracle"
     __backend__ = True
 
-    @testing.provide_metadata
-    def test_no_pk(self):
-        metadata = self.metadata
-
+    def test_no_pk(self, metadata, connection):
         Table(
             "sometable",
             metadata,
@@ -531,9 +519,9 @@ class RoundTripIndexTest(fixtures.TestBase):
             Index("pk_idx_1", "id_a", "id_b", unique=True),
             Index("pk_idx_2", "id_b", "id_a", unique=True),
         )
-        metadata.create_all()
+        metadata.create_all(connection)
 
-        insp = inspect(testing.db)
+        insp = inspect(connection)
         eq_(
             insp.get_indexes("sometable"),
             [
@@ -552,10 +540,10 @@ class RoundTripIndexTest(fixtures.TestBase):
             ],
         )
 
-    @testing.combinations((True,), (False,))
-    @testing.provide_metadata
-    def test_include_indexes_resembling_pk(self, explicit_pk):
-        metadata = self.metadata
+    @testing.combinations((True,), (False,), argnames="explicit_pk")
+    def test_include_indexes_resembling_pk(
+        self, metadata, connection, explicit_pk
+    ):
 
         t = Table(
             "sometable",
@@ -575,9 +563,9 @@ class RoundTripIndexTest(fixtures.TestBase):
                     "id_a", "id_b", "group", name="some_primary_key"
                 )
             )
-        metadata.create_all()
+        metadata.create_all(connection)
 
-        insp = inspect(testing.db)
+        insp = inspect(connection)
         eq_(
             insp.get_indexes("sometable"),
             [
@@ -596,8 +584,7 @@ class RoundTripIndexTest(fixtures.TestBase):
             ],
         )
 
-    @testing.provide_metadata
-    def test_reflect_fn_index(self, connection):
+    def test_reflect_fn_index(self, metadata, connection):
         """test reflection of a functional index.
 
         it appears this emitted a warning at some point but does not right now.
@@ -606,7 +593,6 @@ class RoundTripIndexTest(fixtures.TestBase):
 
         """
 
-        metadata = self.metadata
         s_table = Table(
             "sometable",
             metadata,
@@ -630,9 +616,7 @@ class RoundTripIndexTest(fixtures.TestBase):
             ],
         )
 
-    @testing.provide_metadata
-    def test_basic(self):
-        metadata = self.metadata
+    def test_basic(self, metadata, connection):
 
         s_table = Table(
             "sometable",
@@ -657,16 +641,16 @@ class RoundTripIndexTest(fixtures.TestBase):
             oracle_compress=1,
         )
 
-        metadata.create_all()
+        metadata.create_all(connection)
 
-        mirror = MetaData(testing.db)
-        mirror.reflect()
+        mirror = MetaData()
+        mirror.reflect(connection)
 
-        metadata.drop_all()
-        mirror.create_all()
+        metadata.drop_all(connection)
+        mirror.create_all(connection)
 
-        inspect = MetaData(testing.db)
-        inspect.reflect()
+        inspect = MetaData()
+        inspect.reflect(connection)
 
         def obj_definition(obj):
             return (
@@ -676,7 +660,7 @@ class RoundTripIndexTest(fixtures.TestBase):
             )
 
         # find what the primary k constraint name should be
-        primaryconsname = testing.db.scalar(
+        primaryconsname = connection.scalar(
             text(
                 """SELECT constraint_name
                FROM all_constraints
@@ -773,14 +757,13 @@ class TypeReflectionTest(fixtures.TestBase):
     __only_on__ = "oracle"
     __backend__ = True
 
-    @testing.provide_metadata
-    def _run_test(self, specs, attributes):
+    def _run_test(self, metadata, connection, specs, attributes):
         columns = [Column("c%i" % (i + 1), t[0]) for i, t in enumerate(specs)]
-        m = self.metadata
+        m = metadata
         Table("oracle_types", m, *columns)
-        m.create_all()
+        m.create_all(connection)
         m2 = MetaData()
-        table = Table("oracle_types", m2, autoload_with=testing.db)
+        table = Table("oracle_types", m2, autoload_with=connection)
         for i, (reflected_col, spec) in enumerate(zip(table.c, specs)):
             expected_spec = spec[1]
             reflected_type = reflected_col.type
@@ -800,15 +783,23 @@ class TypeReflectionTest(fixtures.TestBase):
                     ),
                 )
 
-    def test_integer_types(self):
+    def test_integer_types(self, metadata, connection):
         specs = [(Integer, INTEGER()), (Numeric, INTEGER())]
-        self._run_test(specs, [])
+        self._run_test(metadata, connection, specs, [])
 
-    def test_number_types(self):
+    def test_number_types(
+        self,
+        metadata,
+        connection,
+    ):
         specs = [(Numeric(5, 2), NUMBER(5, 2)), (NUMBER, NUMBER())]
-        self._run_test(specs, ["precision", "scale"])
+        self._run_test(metadata, connection, specs, ["precision", "scale"])
 
-    def test_float_types(self):
+    def test_float_types(
+        self,
+        metadata,
+        connection,
+    ):
         specs = [
             (DOUBLE_PRECISION(), FLOAT()),
             # when binary_precision is supported
@@ -822,7 +813,7 @@ class TypeReflectionTest(fixtures.TestBase):
             # when binary_precision is supported
             # (FLOAT(5), oracle.FLOAT(binary_precision=126),),
         ]
-        self._run_test(specs, ["precision"])
+        self._run_test(metadata, connection, specs, ["precision"])
 
 
 class IdentityReflectionTest(fixtures.TablesTest):
index db3825d13750e4603d66475e19bc085c138822b0..60f47bf30e8809fa6aa2083a9199353a4deac317 100644 (file)
@@ -188,10 +188,9 @@ class TypesTest(fixtures.TestBase):
     __dialect__ = oracle.OracleDialect()
     __backend__ = True
 
-    @testing.combinations((CHAR,), (NCHAR,))
-    @testing.provide_metadata
-    def test_fixed_char(self, char_type):
-        m = self.metadata
+    @testing.combinations((CHAR,), (NCHAR,), argnames="char_type")
+    def test_fixed_char(self, metadata, connection, char_type):
+        m = metadata
         t = Table(
             "t1",
             m,
@@ -204,32 +203,30 @@ class TypesTest(fixtures.TestBase):
         else:
             v1, v2, v3 = "value 1", "value 2", "value 3"
 
-        with testing.db.begin() as conn:
-            t.create(conn)
-            conn.execute(
-                t.insert(),
-                dict(id=1, data=v1),
-                dict(id=2, data=v2),
-                dict(id=3, data=v3),
-            )
+        t.create(connection)
+        connection.execute(
+            t.insert(),
+            dict(id=1, data=v1),
+            dict(id=2, data=v2),
+            dict(id=3, data=v3),
+        )
 
-            eq_(
-                conn.execute(t.select().where(t.c.data == v2)).fetchall(),
-                [(2, "value 2                       ")],
-            )
+        eq_(
+            connection.execute(t.select().where(t.c.data == v2)).fetchall(),
+            [(2, "value 2                       ")],
+        )
 
-            m2 = MetaData()
-            t2 = Table("t1", m2, autoload_with=conn)
-            is_(type(t2.c.data.type), char_type)
-            eq_(
-                conn.execute(t2.select().where(t2.c.data == v2)).fetchall(),
-                [(2, "value 2                       ")],
-            )
+        m2 = MetaData()
+        t2 = Table("t1", m2, autoload_with=connection)
+        is_(type(t2.c.data.type), char_type)
+        eq_(
+            connection.execute(t2.select().where(t2.c.data == v2)).fetchall(),
+            [(2, "value 2                       ")],
+        )
 
     @testing.requires.returning
-    @testing.provide_metadata
-    def test_int_not_float(self, connection):
-        m = self.metadata
+    def test_int_not_float(self, metadata, connection):
+        m = metadata
         t1 = Table("t1", m, Column("foo", Integer))
         t1.create(connection)
         r = connection.execute(t1.insert().values(foo=5).returning(t1.c.foo))
@@ -242,14 +239,13 @@ class TypesTest(fixtures.TestBase):
         assert isinstance(x, int)
 
     @testing.requires.returning
-    @testing.provide_metadata
-    def test_int_not_float_no_coerce_decimal(self):
+    def test_int_not_float_no_coerce_decimal(self, metadata):
         engine = testing_engine(options=dict(coerce_to_decimal=False))
 
-        m = self.metadata
+        m = metadata
         t1 = Table("t1", m, Column("foo", Integer))
         with engine.begin() as conn:
-            t1.create()
+            t1.create(conn)
             r = conn.execute(t1.insert().values(foo=5).returning(t1.c.foo))
             x = r.scalar()
             assert x == 5
@@ -259,30 +255,25 @@ class TypesTest(fixtures.TestBase):
             assert x == 5
             assert isinstance(x, int)
 
-    @testing.provide_metadata
-    def test_rowid(self):
-        metadata = self.metadata
+    def test_rowid(self, metadata, connection):
         t = Table("t1", metadata, Column("x", Integer))
 
-        with testing.db.begin() as conn:
-            t.create(conn)
-            conn.execute(t.insert(), {"x": 5})
-            s1 = select(t).subquery()
-            s2 = select(column("rowid")).select_from(s1)
-            rowid = conn.scalar(s2)
-
-            # the ROWID type is not really needed here,
-            # as cx_oracle just treats it as a string,
-            # but we want to make sure the ROWID works...
-            rowid_col = column("rowid", oracle.ROWID)
-            s3 = select(t.c.x, rowid_col).where(
-                rowid_col == cast(rowid, oracle.ROWID)
-            )
-            eq_(conn.execute(s3).fetchall(), [(5, rowid)])
+        t.create(connection)
+        connection.execute(t.insert(), {"x": 5})
+        s1 = select(t).subquery()
+        s2 = select(column("rowid")).select_from(s1)
+        rowid = connection.scalar(s2)
+
+        # the ROWID type is not really needed here,
+        # as cx_oracle just treats it as a string,
+        # but we want to make sure the ROWID works...
+        rowid_col = column("rowid", oracle.ROWID)
+        s3 = select(t.c.x, rowid_col).where(
+            rowid_col == cast(rowid, oracle.ROWID)
+        )
+        eq_(connection.execute(s3).fetchall(), [(5, rowid)])
 
-    @testing.provide_metadata
-    def test_interval(self, connection):
-        metadata = self.metadata
+    def test_interval(self, metadata, connection):
         interval_table = Table(
             "intervaltable",
             metadata,
@@ -299,9 +290,8 @@ class TypesTest(fixtures.TestBase):
         row = connection.execute(interval_table.select()).first()
         eq_(row["day_interval"], datetime.timedelta(days=35, seconds=5743))
 
-    @testing.provide_metadata
-    def test_numerics(self):
-        m = self.metadata
+    def test_numerics(self, metadata, connection):
+        m = metadata
         t1 = Table(
             "t1",
             m,
@@ -314,51 +304,48 @@ class TypesTest(fixtures.TestBase):
             Column("numbercol2", oracle.NUMBER(9, 3)),
             Column("numbercol3", oracle.NUMBER),
         )
-        with testing.db.begin() as conn:
-            t1.create(conn)
-            conn.execute(
-                t1.insert(),
-                dict(
-                    intcol=1,
-                    numericcol=5.2,
-                    floatcol1=6.5,
-                    floatcol2=8.5,
-                    doubleprec=9.5,
-                    numbercol1=12,
-                    numbercol2=14.85,
-                    numbercol3=15.76,
-                ),
-            )
+        t1.create(connection)
+        connection.execute(
+            t1.insert(),
+            dict(
+                intcol=1,
+                numericcol=5.2,
+                floatcol1=6.5,
+                floatcol2=8.5,
+                doubleprec=9.5,
+                numbercol1=12,
+                numbercol2=14.85,
+                numbercol3=15.76,
+            ),
+        )
 
         m2 = MetaData()
-        t2 = Table("t1", m2, autoload_with=testing.db)
+        t2 = Table("t1", m2, autoload_with=connection)
 
-        with testing.db.connect() as conn:
-            for row in (
-                conn.execute(t1.select()).first(),
-                conn.execute(t2.select()).first(),
+        for row in (
+            connection.execute(t1.select()).first(),
+            connection.execute(t2.select()).first(),
+        ):
+            for i, (val, type_) in enumerate(
+                (
+                    (1, int),
+                    (decimal.Decimal("5.2"), decimal.Decimal),
+                    (6.5, float),
+                    (8.5, float),
+                    (9.5, float),
+                    (12, int),
+                    (decimal.Decimal("14.85"), decimal.Decimal),
+                    (15.76, float),
+                )
             ):
-                for i, (val, type_) in enumerate(
-                    (
-                        (1, int),
-                        (decimal.Decimal("5.2"), decimal.Decimal),
-                        (6.5, float),
-                        (8.5, float),
-                        (9.5, float),
-                        (12, int),
-                        (decimal.Decimal("14.85"), decimal.Decimal),
-                        (15.76, float),
-                    )
-                ):
-                    eq_(row[i], val)
-                    assert isinstance(row[i], type_), "%r is not %r" % (
-                        row[i],
-                        type_,
-                    )
+                eq_(row[i], val)
+                assert isinstance(row[i], type_), "%r is not %r" % (
+                    row[i],
+                    type_,
+                )
 
-    @testing.provide_metadata
-    def test_numeric_infinity_float(self, connection):
-        m = self.metadata
+    def test_numeric_infinity_float(self, metadata, connection):
+        m = metadata
         t1 = Table(
             "t1",
             m,
@@ -388,9 +375,8 @@ class TypesTest(fixtures.TestBase):
             [(float("inf"),), (float("-inf"),)],
         )
 
-    @testing.provide_metadata
-    def test_numeric_infinity_decimal(self, connection):
-        m = self.metadata
+    def test_numeric_infinity_decimal(self, metadata, connection):
+        m = metadata
         t1 = Table(
             "t1",
             m,
@@ -420,9 +406,8 @@ class TypesTest(fixtures.TestBase):
             [(decimal.Decimal("Infinity"),), (decimal.Decimal("-Infinity"),)],
         )
 
-    @testing.provide_metadata
-    def test_numeric_nan_float(self, connection):
-        m = self.metadata
+    def test_numeric_nan_float(self, metadata, connection):
+        m = metadata
         t1 = Table(
             "t1",
             m,
@@ -460,9 +445,8 @@ class TypesTest(fixtures.TestBase):
 
     # needs https://github.com/oracle/python-cx_Oracle/
     # issues/184#issuecomment-391399292
-    @testing.provide_metadata
-    def _dont_test_numeric_nan_decimal(self, connection):
-        m = self.metadata
+    def _dont_test_numeric_nan_decimal(self, metadata, connection):
+        m = metadata
         t1 = Table(
             "t1",
             m,
@@ -489,16 +473,13 @@ class TypesTest(fixtures.TestBase):
             [(decimal.Decimal("NaN"),), (decimal.Decimal("NaN"),)],
         )
 
-    @testing.provide_metadata
-    def test_numerics_broken_inspection(self, connection):
+    def test_numerics_broken_inspection(self, metadata, connection):
         """Numeric scenarios where Oracle type info is 'broken',
         returning us precision, scale of the form (0, 0) or (0, -127).
         We convert to Decimal and let int()/float() processors take over.
 
         """
 
-        metadata = self.metadata
-
         # this test requires cx_oracle 5
 
         foo = Table(
@@ -743,9 +724,7 @@ class TypesTest(fixtures.TestBase):
         value = exec_sql(connection, "SELECT 'hello' FROM DUAL").scalar()
         assert isinstance(value, util.text_type)
 
-    @testing.provide_metadata
-    def test_reflect_dates(self):
-        metadata = self.metadata
+    def test_reflect_dates(self, metadata, connection):
         Table(
             "date_types",
             metadata,
@@ -755,9 +734,9 @@ class TypesTest(fixtures.TestBase):
             Column("d4", TIMESTAMP(timezone=True)),
             Column("d5", oracle.INTERVAL(second_precision=5)),
         )
-        metadata.create_all()
+        metadata.create_all(connection)
         m = MetaData()
-        t1 = Table("date_types", m, autoload_with=testing.db)
+        t1 = Table("date_types", m, autoload_with=connection)
         assert isinstance(t1.c.d1.type, oracle.DATE)
         assert isinstance(t1.c.d1.type, DateTime)
         assert isinstance(t1.c.d2.type, oracle.DATE)
@@ -780,22 +759,18 @@ class TypesTest(fixtures.TestBase):
         for row in types_table.select().execute().fetchall():
             [row[k] for k in row.keys()]
 
-    @testing.provide_metadata
-    def test_raw_roundtrip(self, connection):
-        metadata = self.metadata
+    def test_raw_roundtrip(self, metadata, connection):
         raw_table = Table(
             "raw",
             metadata,
             Column("id", Integer, primary_key=True),
             Column("data", oracle.RAW(35)),
         )
-        metadata.create_all()
+        metadata.create_all(connection)
         connection.execute(raw_table.insert(), id=1, data=b("ABCDEF"))
         eq_(connection.execute(raw_table.select()).first(), (1, b("ABCDEF")))
 
-    @testing.provide_metadata
-    def test_reflect_nvarchar(self, connection):
-        metadata = self.metadata
+    def test_reflect_nvarchar(self, metadata, connection):
         Table(
             "tnv",
             metadata,
@@ -827,31 +802,26 @@ class TypesTest(fixtures.TestBase):
         assert isinstance(nv_data, util.text_type)
         assert isinstance(c_data, util.text_type)
 
-    @testing.provide_metadata
-    def test_reflect_unicode_no_nvarchar(self):
-        metadata = self.metadata
+    def test_reflect_unicode_no_nvarchar(self, metadata, connection):
         Table("tnv", metadata, Column("data", sqltypes.Unicode(255)))
-        metadata.create_all()
+        metadata.create_all(connection)
         m2 = MetaData()
-        t2 = Table("tnv", m2, autoload_with=testing.db)
+        t2 = Table("tnv", m2, autoload_with=connection)
         assert isinstance(t2.c.data.type, sqltypes.VARCHAR)
 
         if testing.against("oracle+cx_oracle"):
             assert isinstance(
-                t2.c.data.type.dialect_impl(testing.db.dialect),
+                t2.c.data.type.dialect_impl(connection.dialect),
                 cx_oracle._OracleString,
             )
 
         data = u("m’a réveillé.")
-        with testing.db.begin() as conn:
-            conn.execute(t2.insert(), {"data": data})
-            res = conn.execute(t2.select()).first().data
-            eq_(res, data)
-            assert isinstance(res, util.text_type)
+        connection.execute(t2.insert(), {"data": data})
+        res = connection.execute(t2.select()).first().data
+        eq_(res, data)
+        assert isinstance(res, util.text_type)
 
-    @testing.provide_metadata
-    def test_char_length(self):
-        metadata = self.metadata
+    def test_char_length(self, metadata, connection):
         t1 = Table(
             "t1",
             metadata,
@@ -860,26 +830,22 @@ class TypesTest(fixtures.TestBase):
             Column("c3", CHAR(200)),
             Column("c4", NCHAR(180)),
         )
-        t1.create()
+        t1.create(connection)
         m2 = MetaData()
-        t2 = Table("t1", m2, autoload_with=testing.db)
+        t2 = Table("t1", m2, autoload_with=connection)
         eq_(t2.c.c1.type.length, 50)
         eq_(t2.c.c2.type.length, 250)
         eq_(t2.c.c3.type.length, 200)
         eq_(t2.c.c4.type.length, 180)
 
-    @testing.provide_metadata
-    def test_long_type(self, connection):
-        metadata = self.metadata
+    def test_long_type(self, metadata, connection):
 
         t = Table("t", metadata, Column("data", oracle.LONG))
-        metadata.create_all(testing.db)
+        metadata.create_all(connection)
         connection.execute(t.insert(), data="xyz")
         eq_(connection.scalar(select(t.c.data)), "xyz")
 
-    @testing.provide_metadata
-    def test_longstring(self, connection):
-        metadata = self.metadata
+    def test_longstring(self, metadata, connection):
         exec_sql(
             connection,
             """
@@ -1140,10 +1106,10 @@ class SetInputSizesTest(fixtures.TestBase):
         (CHAR(30), "test", "FIXED_CHAR", False),
         (NCHAR(30), u("test"), "FIXED_NCHAR", False),
         (oracle.LONG(), "test", None, False),
+        argnames="datatype, value, sis_value_text, set_nchar_flag",
     )
-    @testing.provide_metadata
     def test_setinputsizes(
-        self, datatype, value, sis_value_text, set_nchar_flag
+        self, metadata, datatype, value, sis_value_text, set_nchar_flag
     ):
         if isinstance(sis_value_text, str):
             sis_value = getattr(testing.db.dialect.dbapi, sis_value_text)
@@ -1159,7 +1125,7 @@ class SetInputSizesTest(fixtures.TestBase):
                 else:
                     return self.impl
 
-        m = self.metadata
+        m = metadata
         # Oracle can have only one column of type LONG so we make three
         # tables rather than one table w/ three columns
         t1 = Table("t1", m, Column("foo", datatype))
@@ -1167,7 +1133,7 @@ class SetInputSizesTest(fixtures.TestBase):
             "t2", m, Column("foo", NullType().with_variant(datatype, "oracle"))
         )
         t3 = Table("t3", m, Column("foo", TestTypeDec()))
-        m.create_all()
+        m.create_all(testing.db)
 
         class CursorWrapper(object):
             # cx_oracle cursor can't be modified so we have to
@@ -1211,7 +1177,7 @@ class SetInputSizesTest(fixtures.TestBase):
                         [mock.call.setinputsizes()],
                     )
 
-    def test_event_no_native_float(self):
+    def test_event_no_native_float(self, metadata):
         def _remove_type(inputsizes, cursor, statement, parameters, context):
             for param, dbapitype in list(inputsizes.items()):
                 if dbapitype is testing.db.dialect.dbapi.NATIVE_FLOAT:
@@ -1219,6 +1185,8 @@ class SetInputSizesTest(fixtures.TestBase):
 
         event.listen(testing.db, "do_setinputsizes", _remove_type)
         try:
-            self.test_setinputsizes(oracle.BINARY_FLOAT, 25.34534, None, False)
+            self.test_setinputsizes(
+                metadata, oracle.BINARY_FLOAT, 25.34534, None, False
+            )
         finally:
             event.remove(testing.db, "do_setinputsizes", _remove_type)
index 3bd8e9da0b214d9db1269f85862b2f93b5e19c0a..f760a309b429b8485e4228c647862086a0775d4b 100644 (file)
@@ -757,7 +757,7 @@ class MiscBackendTest(
             Column("date1", DateTime(timezone=True)),
             Column("date2", DateTime(timezone=False)),
         )
-        metadata.create_all()
+        metadata.create_all(testing.db)
         m2 = MetaData()
         t2 = Table("pgdate", m2, autoload_with=testing.db)
         assert t2.c.date1.type.timezone is True
index 824f6cd36dcab61948d7f2fa6a604599f9db1f3c..754eff25a0bfc82776035a2cdf1cf33afbe8f918 100644 (file)
@@ -457,12 +457,8 @@ class ReflectionTest(AssertsCompiledSQL, fixtures.TestBase):
     __only_on__ = "postgresql"
     __backend__ = True
 
-    @testing.fails_if(
-        "postgresql < 8.4", "Better int2vector functions not available"
-    )
-    @testing.provide_metadata
-    def test_reflected_primary_key_order(self):
-        meta1 = self.metadata
+    def test_reflected_primary_key_order(self, metadata, connection):
+        meta1 = metadata
         subject = Table(
             "subject",
             meta1,
@@ -470,9 +466,9 @@ class ReflectionTest(AssertsCompiledSQL, fixtures.TestBase):
             Column("p2", Integer, primary_key=True),
             PrimaryKeyConstraint("p2", "p1"),
         )
-        meta1.create_all()
+        meta1.create_all(connection)
         meta2 = MetaData()
-        subject = Table("subject", meta2, autoload_with=testing.db)
+        subject = Table("subject", meta2, autoload_with=connection)
         eq_(subject.primary_key.columns.keys(), ["p2", "p1"])
 
     @testing.provide_metadata
@@ -583,10 +579,9 @@ class ReflectionTest(AssertsCompiledSQL, fixtures.TestBase):
         user_tmp.create(testing.db)
         assert inspect(testing.db).has_table("some_temp_table")
 
-    @testing.provide_metadata
-    def test_cross_schema_reflection_one(self):
+    def test_cross_schema_reflection_one(self, metadata, connection):
 
-        meta1 = self.metadata
+        meta1 = metadata
 
         users = Table(
             "users",
@@ -603,12 +598,12 @@ class ReflectionTest(AssertsCompiledSQL, fixtures.TestBase):
             Column("email_address", String(20)),
             schema="test_schema",
         )
-        meta1.create_all()
+        meta1.create_all(connection)
         meta2 = MetaData()
         addresses = Table(
             "email_addresses",
             meta2,
-            autoload_with=testing.db,
+            autoload_with=connection,
             schema="test_schema",
         )
         users = Table("users", meta2, must_exist=True, schema="test_schema")
@@ -617,9 +612,8 @@ class ReflectionTest(AssertsCompiledSQL, fixtures.TestBase):
             (users.c.user_id == addresses.c.remote_user_id).compare(j.onclause)
         )
 
-    @testing.provide_metadata
-    def test_cross_schema_reflection_two(self):
-        meta1 = self.metadata
+    def test_cross_schema_reflection_two(self, metadata, connection):
+        meta1 = metadata
         subject = Table(
             "subject", meta1, Column("id", Integer, primary_key=True)
         )
@@ -630,11 +624,11 @@ class ReflectionTest(AssertsCompiledSQL, fixtures.TestBase):
             Column("ref", Integer, ForeignKey("subject.id")),
             schema="test_schema",
         )
-        meta1.create_all()
+        meta1.create_all(connection)
         meta2 = MetaData()
-        subject = Table("subject", meta2, autoload_with=testing.db)
+        subject = Table("subject", meta2, autoload_with=connection)
         referer = Table(
-            "referer", meta2, schema="test_schema", autoload_with=testing.db
+            "referer", meta2, schema="test_schema", autoload_with=connection
         )
         self.assert_(
             (subject.c.id == referer.c.ref).compare(
@@ -642,9 +636,8 @@ class ReflectionTest(AssertsCompiledSQL, fixtures.TestBase):
             )
         )
 
-    @testing.provide_metadata
-    def test_cross_schema_reflection_three(self):
-        meta1 = self.metadata
+    def test_cross_schema_reflection_three(self, metadata, connection):
+        meta1 = metadata
         subject = Table(
             "subject",
             meta1,
@@ -658,13 +651,13 @@ class ReflectionTest(AssertsCompiledSQL, fixtures.TestBase):
             Column("ref", Integer, ForeignKey("test_schema_2.subject.id")),
             schema="test_schema",
         )
-        meta1.create_all()
+        meta1.create_all(connection)
         meta2 = MetaData()
         subject = Table(
-            "subject", meta2, autoload_with=testing.db, schema="test_schema_2"
+            "subject", meta2, autoload_with=connection, schema="test_schema_2"
         )
         referer = Table(
-            "referer", meta2, autoload_with=testing.db, schema="test_schema"
+            "referer", meta2, autoload_with=connection, schema="test_schema"
         )
         self.assert_(
             (subject.c.id == referer.c.ref).compare(
@@ -672,9 +665,8 @@ class ReflectionTest(AssertsCompiledSQL, fixtures.TestBase):
             )
         )
 
-    @testing.provide_metadata
-    def test_cross_schema_reflection_four(self):
-        meta1 = self.metadata
+    def test_cross_schema_reflection_four(self, metadata, connection):
+        meta1 = metadata
         subject = Table(
             "subject",
             meta1,
@@ -688,23 +680,24 @@ class ReflectionTest(AssertsCompiledSQL, fixtures.TestBase):
             Column("ref", Integer, ForeignKey("test_schema_2.subject.id")),
             schema="test_schema",
         )
-        meta1.create_all()
+        meta1.create_all(connection)
 
-        conn = testing.db.connect()
-        conn.detach()
-        conn.exec_driver_sql("SET search_path TO test_schema, test_schema_2")
-        meta2 = MetaData(bind=conn)
+        connection.detach()
+        connection.exec_driver_sql(
+            "SET search_path TO test_schema, test_schema_2"
+        )
+        meta2 = MetaData()
         subject = Table(
             "subject",
             meta2,
-            autoload_with=testing.db,
+            autoload_with=connection,
             schema="test_schema_2",
             postgresql_ignore_search_path=True,
         )
         referer = Table(
             "referer",
             meta2,
-            autoload_with=testing.db,
+            autoload_with=connection,
             schema="test_schema",
             postgresql_ignore_search_path=True,
         )
@@ -713,14 +706,12 @@ class ReflectionTest(AssertsCompiledSQL, fixtures.TestBase):
                 subject.join(referer).onclause
             )
         )
-        conn.close()
 
-    @testing.provide_metadata
-    def test_cross_schema_reflection_five(self):
-        meta1 = self.metadata
+    def test_cross_schema_reflection_five(self, metadata, connection):
+        meta1 = metadata
 
         # we assume 'public'
-        default_schema = testing.db.dialect.default_schema_name
+        default_schema = connection.dialect.default_schema_name
         subject = Table(
             "subject", meta1, Column("id", Integer, primary_key=True)
         )
@@ -730,20 +721,20 @@ class ReflectionTest(AssertsCompiledSQL, fixtures.TestBase):
             Column("id", Integer, primary_key=True),
             Column("ref", Integer, ForeignKey("subject.id")),
         )
-        meta1.create_all()
+        meta1.create_all(connection)
 
         meta2 = MetaData()
         subject = Table(
             "subject",
             meta2,
-            autoload_with=testing.db,
+            autoload_with=connection,
             schema=default_schema,
             postgresql_ignore_search_path=True,
         )
         referer = Table(
             "referer",
             meta2,
-            autoload_with=testing.db,
+            autoload_with=connection,
             schema=default_schema,
             postgresql_ignore_search_path=True,
         )
@@ -754,11 +745,10 @@ class ReflectionTest(AssertsCompiledSQL, fixtures.TestBase):
             )
         )
 
-    @testing.provide_metadata
-    def test_cross_schema_reflection_six(self):
+    def test_cross_schema_reflection_six(self, metadata, connection):
         # test that the search path *is* taken into account
         # by default
-        meta1 = self.metadata
+        meta1 = metadata
 
         Table(
             "some_table",
@@ -773,60 +763,58 @@ class ReflectionTest(AssertsCompiledSQL, fixtures.TestBase):
             Column("sid", Integer, ForeignKey("test_schema.some_table.id")),
             schema="test_schema_2",
         )
-        meta1.create_all()
-        with testing.db.connect() as conn:
-            conn.detach()
+        meta1.create_all(connection)
+        connection.detach()
 
-            conn.exec_driver_sql(
-                "set search_path to test_schema_2, test_schema, public"
-            )
+        connection.exec_driver_sql(
+            "set search_path to test_schema_2, test_schema, public"
+        )
 
-            m1 = MetaData()
+        m1 = MetaData()
 
-            Table("some_table", m1, schema="test_schema", autoload_with=conn)
-            t2_schema = Table(
-                "some_other_table",
-                m1,
-                schema="test_schema_2",
-                autoload_with=conn,
-            )
+        Table("some_table", m1, schema="test_schema", autoload_with=connection)
+        t2_schema = Table(
+            "some_other_table",
+            m1,
+            schema="test_schema_2",
+            autoload_with=connection,
+        )
 
-            t2_no_schema = Table("some_other_table", m1, autoload_with=conn)
+        t2_no_schema = Table("some_other_table", m1, autoload_with=connection)
 
-            t1_no_schema = Table("some_table", m1, autoload_with=conn)
+        t1_no_schema = Table("some_table", m1, autoload_with=connection)
 
-            m2 = MetaData()
-            t1_schema_isp = Table(
-                "some_table",
-                m2,
-                schema="test_schema",
-                autoload_with=conn,
-                postgresql_ignore_search_path=True,
-            )
-            t2_schema_isp = Table(
-                "some_other_table",
-                m2,
-                schema="test_schema_2",
-                autoload_with=conn,
-                postgresql_ignore_search_path=True,
-            )
+        m2 = MetaData()
+        t1_schema_isp = Table(
+            "some_table",
+            m2,
+            schema="test_schema",
+            autoload_with=connection,
+            postgresql_ignore_search_path=True,
+        )
+        t2_schema_isp = Table(
+            "some_other_table",
+            m2,
+            schema="test_schema_2",
+            autoload_with=connection,
+            postgresql_ignore_search_path=True,
+        )
 
-            # t2_schema refers to t1_schema, but since "test_schema"
-            # is in the search path, we instead link to t2_no_schema
-            assert t2_schema.c.sid.references(t1_no_schema.c.id)
+        # t2_schema refers to t1_schema, but since "test_schema"
+        # is in the search path, we instead link to t2_no_schema
+        assert t2_schema.c.sid.references(t1_no_schema.c.id)
 
-            # the two no_schema tables refer to each other also.
-            assert t2_no_schema.c.sid.references(t1_no_schema.c.id)
+        # the two no_schema tables refer to each other also.
+        assert t2_no_schema.c.sid.references(t1_no_schema.c.id)
 
-            # but if we're ignoring search path, then we maintain
-            # those explicit schemas vs. what the "default" schema is
-            assert t2_schema_isp.c.sid.references(t1_schema_isp.c.id)
+        # but if we're ignoring search path, then we maintain
+        # those explicit schemas vs. what the "default" schema is
+        assert t2_schema_isp.c.sid.references(t1_schema_isp.c.id)
 
-    @testing.provide_metadata
-    def test_cross_schema_reflection_seven(self):
+    def test_cross_schema_reflection_seven(self, metadata, connection):
         # test that the search path *is* taken into account
         # by default
-        meta1 = self.metadata
+        meta1 = metadata
 
         Table(
             "some_table",
@@ -841,42 +829,42 @@ class ReflectionTest(AssertsCompiledSQL, fixtures.TestBase):
             Column("sid", Integer, ForeignKey("test_schema.some_table.id")),
             schema="test_schema_2",
         )
-        meta1.create_all()
-        with testing.db.connect() as conn:
-            conn.detach()
+        meta1.create_all(connection)
+        connection.detach()
 
-            conn.exec_driver_sql(
-                "set search_path to test_schema_2, test_schema, public"
-            )
-            meta2 = MetaData(conn)
-            meta2.reflect(schema="test_schema_2")
+        connection.exec_driver_sql(
+            "set search_path to test_schema_2, test_schema, public"
+        )
+        meta2 = MetaData()
+        meta2.reflect(connection, schema="test_schema_2")
 
-            eq_(
-                set(meta2.tables),
-                set(["test_schema_2.some_other_table", "some_table"]),
-            )
+        eq_(
+            set(meta2.tables),
+            set(["test_schema_2.some_other_table", "some_table"]),
+        )
 
-            meta3 = MetaData(conn)
-            meta3.reflect(
-                schema="test_schema_2", postgresql_ignore_search_path=True
-            )
+        meta3 = MetaData()
+        meta3.reflect(
+            connection,
+            schema="test_schema_2",
+            postgresql_ignore_search_path=True,
+        )
 
-            eq_(
-                set(meta3.tables),
-                set(
-                    [
-                        "test_schema_2.some_other_table",
-                        "test_schema.some_table",
-                    ]
-                ),
-            )
+        eq_(
+            set(meta3.tables),
+            set(
+                [
+                    "test_schema_2.some_other_table",
+                    "test_schema.some_table",
+                ]
+            ),
+        )
 
-    @testing.provide_metadata
-    def test_cross_schema_reflection_metadata_uses_schema(self):
+    def test_cross_schema_reflection_metadata_uses_schema(
+        self, metadata, connection
+    ):
         # test [ticket:3716]
 
-        metadata = self.metadata
-
         Table(
             "some_table",
             metadata,
@@ -890,28 +878,25 @@ class ReflectionTest(AssertsCompiledSQL, fixtures.TestBase):
             Column("id", Integer, primary_key=True),
             schema=None,
         )
-        metadata.create_all()
-        with testing.db.connect() as conn:
-            meta2 = MetaData(conn, schema="test_schema")
-            meta2.reflect()
+        metadata.create_all(connection)
+        meta2 = MetaData(schema="test_schema")
+        meta2.reflect(connection)
 
-            eq_(
-                set(meta2.tables),
-                set(["some_other_table", "test_schema.some_table"]),
-            )
+        eq_(
+            set(meta2.tables),
+            set(["some_other_table", "test_schema.some_table"]),
+        )
 
-    @testing.provide_metadata
-    def test_uppercase_lowercase_table(self):
-        metadata = self.metadata
+    def test_uppercase_lowercase_table(self, metadata, connection):
 
         a_table = Table("a", metadata, Column("x", Integer))
         A_table = Table("A", metadata, Column("x", Integer))
 
-        a_table.create()
-        assert inspect(testing.db).has_table("a")
-        assert not inspect(testing.db).has_table("A")
-        A_table.create(checkfirst=True)
-        assert inspect(testing.db).has_table("A")
+        a_table.create(connection)
+        assert inspect(connection).has_table("a")
+        assert not inspect(connection).has_table("A")
+        A_table.create(connection, checkfirst=True)
+        assert inspect(connection).has_table("A")
 
     def test_uppercase_lowercase_sequence(self):
 
@@ -927,12 +912,9 @@ class ReflectionTest(AssertsCompiledSQL, fixtures.TestBase):
         a_seq.drop(testing.db)
         A_seq.drop(testing.db)
 
-    @testing.provide_metadata
-    def test_index_reflection(self):
+    def test_index_reflection(self, metadata, connection):
         """Reflecting expression-based indexes should warn"""
 
-        metadata = self.metadata
-
         Table(
             "party",
             metadata,
@@ -940,22 +922,21 @@ class ReflectionTest(AssertsCompiledSQL, fixtures.TestBase):
             Column("name", String(20), index=True),
             Column("aname", String(20)),
         )
-        metadata.create_all(testing.db)
-        with testing.db.begin() as conn:
-            conn.exec_driver_sql("create index idx1 on party ((id || name))")
-            conn.exec_driver_sql(
-                "create unique index idx2 on party (id) where name = 'test'"
-            )
-            conn.exec_driver_sql(
-                """
-                create index idx3 on party using btree
-                    (lower(name::text), lower(aname::text))
-                """
-            )
+        metadata.create_all(connection)
+        connection.exec_driver_sql("create index idx1 on party ((id || name))")
+        connection.exec_driver_sql(
+            "create unique index idx2 on party (id) where name = 'test'"
+        )
+        connection.exec_driver_sql(
+            """
+            create index idx3 on party using btree
+                (lower(name::text), lower(aname::text))
+            """
+        )
 
         def go():
             m2 = MetaData()
-            t2 = Table("party", m2, autoload_with=testing.db)
+            t2 = Table("party", m2, autoload_with=connection)
             assert len(t2.indexes) == 2
 
             # Make sure indexes are in the order we expect them in
@@ -1020,51 +1001,46 @@ class ReflectionTest(AssertsCompiledSQL, fixtures.TestBase):
             "WHERE ((name)::text = 'test'::text)",
         )
 
-    @testing.fails_if("postgresql < 8.3", "index ordering not supported")
-    @testing.provide_metadata
-    def test_index_reflection_with_sorting(self):
+    def test_index_reflection_with_sorting(self, metadata, connection):
         """reflect indexes with sorting options set"""
 
         t1 = Table(
             "party",
-            self.metadata,
+            metadata,
             Column("id", String(10), nullable=False),
             Column("name", String(20)),
             Column("aname", String(20)),
         )
 
-        with testing.db.begin() as conn:
-
-            t1.create(conn)
+        t1.create(connection)
 
-            # check ASC, DESC options alone
-            conn.exec_driver_sql(
-                """
-                create index idx1 on party
-                    (id, name ASC, aname DESC)
+        # check ASC, DESC options alone
+        connection.exec_driver_sql(
             """
-            )
+            create index idx1 on party
+                (id, name ASC, aname DESC)
+        """
+        )
 
-            # check DESC w/ NULLS options
-            conn.exec_driver_sql(
-                """
-              create index idx2 on party
-                    (name DESC NULLS FIRST, aname DESC NULLS LAST)
+        # check DESC w/ NULLS options
+        connection.exec_driver_sql(
             """
-            )
+          create index idx2 on party
+                (name DESC NULLS FIRST, aname DESC NULLS LAST)
+        """
+        )
 
-            # check ASC w/ NULLS options
-            conn.exec_driver_sql(
-                """
-              create index idx3 on party
-                    (name ASC NULLS FIRST, aname ASC NULLS LAST)
+        # check ASC w/ NULLS options
+        connection.exec_driver_sql(
             """
-            )
+          create index idx3 on party
+                (name ASC NULLS FIRST, aname ASC NULLS LAST)
+        """
+        )
 
         # reflect data
-        with testing.db.connect() as conn:
-            m2 = MetaData(conn)
-            t2 = Table("party", m2, autoload_with=testing.db)
+        m2 = MetaData()
+        t2 = Table("party", m2, autoload_with=connection)
 
         eq_(len(t2.indexes), 3)
 
@@ -1206,12 +1182,9 @@ class ReflectionTest(AssertsCompiledSQL, fixtures.TestBase):
             )
 
     @testing.skip_if("postgresql < 11.0", "indnkeyatts not supported")
-    @testing.provide_metadata
-    def test_index_reflection_with_include(self):
+    def test_index_reflection_with_include(self, metadata, connection):
         """reflect indexes with include set"""
 
-        metadata = self.metadata
-
         Table(
             "t",
             metadata,
@@ -1219,30 +1192,27 @@ class ReflectionTest(AssertsCompiledSQL, fixtures.TestBase):
             Column("x", ARRAY(Integer)),
             Column("name", String(20)),
         )
-        metadata.create_all()
-        with testing.db.begin() as conn:
-            conn.exec_driver_sql("CREATE INDEX idx1 ON t (x) INCLUDE (name)")
+        metadata.create_all(connection)
+        connection.exec_driver_sql("CREATE INDEX idx1 ON t (x) INCLUDE (name)")
 
-            # prior to #5205, this would return:
-            # [{'column_names': ['x', 'name'],
-            #  'name': 'idx1', 'unique': False}]
+        # prior to #5205, this would return:
+        # [{'column_names': ['x', 'name'],
+        #  'name': 'idx1', 'unique': False}]
 
-            ind = testing.db.dialect.get_indexes(conn, "t", None)
-            eq_(
-                ind,
-                [
-                    {
-                        "unique": False,
-                        "column_names": ["x"],
-                        "include_columns": ["name"],
-                        "name": "idx1",
-                    }
-                ],
-            )
+        ind = testing.db.dialect.get_indexes(connection, "t", None)
+        eq_(
+            ind,
+            [
+                {
+                    "unique": False,
+                    "column_names": ["x"],
+                    "include_columns": ["name"],
+                    "name": "idx1",
+                }
+            ],
+        )
 
-    @testing.provide_metadata
-    def test_foreign_key_option_inspection(self):
-        metadata = self.metadata
+    def test_foreign_key_option_inspection(self, metadata, connection):
         Table(
             "person",
             metadata,
@@ -1308,8 +1278,8 @@ class ReflectionTest(AssertsCompiledSQL, fixtures.TestBase):
                 "options": {"onupdate": "CASCADE", "ondelete": "CASCADE"},
             },
         }
-        metadata.create_all()
-        inspector = inspect(testing.db)
+        metadata.create_all(connection)
+        inspector = inspect(connection)
         fks = inspector.get_foreign_keys(
             "person"
         ) + inspector.get_foreign_keys("company")
@@ -1543,12 +1513,10 @@ class ReflectionTest(AssertsCompiledSQL, fixtures.TestBase):
         t = Table("t", MetaData(), autoload_with=testing.db)
         eq_(t.c.x.type.enums, [])
 
-    @testing.provide_metadata
-    @testing.only_on("postgresql >= 8.5")
-    def test_reflection_with_unique_constraint(self):
-        insp = inspect(testing.db)
+    def test_reflection_with_unique_constraint(self, metadata, connection):
+        insp = inspect(connection)
 
-        meta = self.metadata
+        meta = metadata
         uc_table = Table(
             "pgsql_uc",
             meta,
@@ -1556,7 +1524,7 @@ class ReflectionTest(AssertsCompiledSQL, fixtures.TestBase):
             UniqueConstraint("a", name="uc_a"),
         )
 
-        uc_table.create()
+        uc_table.create(connection)
 
         # PostgreSQL will create an implicit index for a unique
         # constraint.   Separately we get both
@@ -1569,7 +1537,7 @@ class ReflectionTest(AssertsCompiledSQL, fixtures.TestBase):
         self.assert_("uc_a" in constraints)
 
         # reflection corrects for the dupe
-        reflected = Table("pgsql_uc", MetaData(), autoload_with=testing.db)
+        reflected = Table("pgsql_uc", MetaData(), autoload_with=connection)
 
         indexes = set(i.name for i in reflected.indexes)
         constraints = set(uc.name for uc in reflected.constraints)
@@ -1578,9 +1546,8 @@ class ReflectionTest(AssertsCompiledSQL, fixtures.TestBase):
         self.assert_("uc_a" in constraints)
 
     @testing.requires.btree_gist
-    @testing.provide_metadata
-    def test_reflection_with_exclude_constraint(self):
-        m = self.metadata
+    def test_reflection_with_exclude_constraint(self, metadata, connection):
+        m = metadata
         Table(
             "t",
             m,
@@ -1589,9 +1556,9 @@ class ReflectionTest(AssertsCompiledSQL, fixtures.TestBase):
             ExcludeConstraint(("period", "&&"), name="quarters_period_excl"),
         )
 
-        m.create_all()
+        m.create_all(connection)
 
-        insp = inspect(testing.db)
+        insp = inspect(connection)
 
         # PostgreSQL will create an implicit index for an exclude constraint.
         # we don't reflect the EXCLUDE yet.
@@ -1610,15 +1577,14 @@ class ReflectionTest(AssertsCompiledSQL, fixtures.TestBase):
         eq_(insp.get_indexes("t"), expected)
 
         # reflection corrects for the dupe
-        reflected = Table("t", MetaData(), autoload_with=testing.db)
+        reflected = Table("t", MetaData(), autoload_with=connection)
 
         eq_(set(reflected.indexes), set())
 
-    @testing.provide_metadata
-    def test_reflect_unique_index(self):
-        insp = inspect(testing.db)
+    def test_reflect_unique_index(self, metadata, connection):
+        insp = inspect(connection)
 
-        meta = self.metadata
+        meta = metadata
 
         # a unique index OTOH we are able to detect is an index
         # and not a unique constraint
@@ -1629,7 +1595,7 @@ class ReflectionTest(AssertsCompiledSQL, fixtures.TestBase):
             Index("ix_a", "a", unique=True),
         )
 
-        uc_table.create()
+        uc_table.create(connection)
 
         indexes = dict((i["name"], i) for i in insp.get_indexes("pgsql_uc"))
         constraints = set(
@@ -1640,7 +1606,7 @@ class ReflectionTest(AssertsCompiledSQL, fixtures.TestBase):
         assert indexes["ix_a"]["unique"]
         self.assert_("ix_a" not in constraints)
 
-        reflected = Table("pgsql_uc", MetaData(), autoload_with=testing.db)
+        reflected = Table("pgsql_uc", MetaData(), autoload_with=connection)
 
         indexes = dict((i.name, i) for i in reflected.indexes)
         constraints = set(uc.name for uc in reflected.constraints)
@@ -1649,9 +1615,8 @@ class ReflectionTest(AssertsCompiledSQL, fixtures.TestBase):
         assert indexes["ix_a"].unique
         self.assert_("ix_a" not in constraints)
 
-    @testing.provide_metadata
-    def test_reflect_check_constraint(self):
-        meta = self.metadata
+    def test_reflect_check_constraint(self, metadata, connection):
+        meta = metadata
 
         udf_create = """\
             CREATE OR REPLACE FUNCTION is_positive(
@@ -1666,7 +1631,9 @@ class ReflectionTest(AssertsCompiledSQL, fixtures.TestBase):
         """
         sa.event.listen(meta, "before_create", sa.DDL(udf_create))
         sa.event.listen(
-            meta, "after_drop", sa.DDL("DROP FUNCTION is_positive(integer)")
+            meta,
+            "after_drop",
+            sa.DDL("DROP FUNCTION IF EXISTS is_positive(integer)"),
         )
 
         Table(
@@ -1680,9 +1647,9 @@ class ReflectionTest(AssertsCompiledSQL, fixtures.TestBase):
             CheckConstraint("b != 'hi\nim a name   \nyup\n'", name="cc4"),
         )
 
-        meta.create_all()
+        meta.create_all(connection)
 
-        reflected = Table("pgsql_cc", MetaData(), autoload_with=testing.db)
+        reflected = Table("pgsql_cc", MetaData(), autoload_with=connection)
 
         check_constraints = dict(
             (uc.name, uc.sqltext.text)
index 38da60a434056edb9373a47090f883a016cfafab..e8a1876c7a3852e933389cba2cf50d6d69ff8331 100644 (file)
@@ -63,9 +63,6 @@ from sqlalchemy.testing.suite import test_types as suite
 from sqlalchemy.testing.util import round_decimal
 
 
-tztable = notztable = metadata = table = None
-
-
 class FloatCoercionTest(fixtures.TablesTest, AssertsExecutionResults):
     __only_on__ = "postgresql"
     __dialect__ = postgresql.dialect()
@@ -121,9 +118,7 @@ class FloatCoercionTest(fixtures.TablesTest, AssertsExecutionResults):
             ).scalar()
             eq_(round_decimal(ret, 9), result)
 
-    @testing.provide_metadata
-    def test_arrays_pg(self, connection):
-        metadata = self.metadata
+    def test_arrays_pg(self, connection, metadata):
         t1 = Table(
             "t",
             metadata,
@@ -132,16 +127,14 @@ class FloatCoercionTest(fixtures.TablesTest, AssertsExecutionResults):
             Column("z", postgresql.ARRAY(postgresql.DOUBLE_PRECISION)),
             Column("q", postgresql.ARRAY(Numeric)),
         )
-        metadata.create_all()
+        metadata.create_all(connection)
         connection.execute(
             t1.insert(), x=[5], y=[5], z=[6], q=[decimal.Decimal("6.4")]
         )
         row = connection.execute(t1.select()).first()
         eq_(row, ([5], [5], [6], [decimal.Decimal("6.4")]))
 
-    @testing.provide_metadata
-    def test_arrays_base(self, connection):
-        metadata = self.metadata
+    def test_arrays_base(self, connection, metadata):
         t1 = Table(
             "t",
             metadata,
@@ -150,7 +143,7 @@ class FloatCoercionTest(fixtures.TablesTest, AssertsExecutionResults):
             Column("z", sqltypes.ARRAY(postgresql.DOUBLE_PRECISION)),
             Column("q", sqltypes.ARRAY(Numeric)),
         )
-        metadata.create_all()
+        metadata.create_all(connection)
         connection.execute(
             t1.insert(), x=[5], y=[5], z=[6], q=[decimal.Decimal("6.4")]
         )
@@ -236,17 +229,14 @@ class EnumTest(fixtures.TestBase, AssertsExecutionResults):
             ]
             t1.drop(conn, checkfirst=True)
 
-    def test_name_required(self):
-        metadata = MetaData(testing.db)
+    def test_name_required(self, metadata, connection):
         etype = Enum("four", "five", "six", metadata=metadata)
-        assert_raises(exc.CompileError, etype.create)
+        assert_raises(exc.CompileError, etype.create, connection)
         assert_raises(
-            exc.CompileError, etype.compile, dialect=postgresql.dialect()
+            exc.CompileError, etype.compile, dialect=connection.dialect
         )
 
-    @testing.provide_metadata
-    def test_unicode_labels(self, connection):
-        metadata = self.metadata
+    def test_unicode_labels(self, connection, metadata):
         t1 = Table(
             "table",
             metadata,
@@ -261,7 +251,7 @@ class EnumTest(fixtures.TestBase, AssertsExecutionResults):
                 ),
             ),
         )
-        metadata.create_all()
+        metadata.create_all(connection)
         connection.execute(t1.insert(), value=util.u("drôle"))
         connection.execute(t1.insert(), value=util.u("réveillé"))
         connection.execute(t1.insert(), value=util.u("S’il"))
@@ -274,7 +264,7 @@ class EnumTest(fixtures.TestBase, AssertsExecutionResults):
             ],
         )
         m2 = MetaData()
-        t2 = Table("table", m2, autoload_with=testing.db)
+        t2 = Table("table", m2, autoload_with=connection)
         eq_(
             t2.c.value.type.enums,
             [util.u("réveillé"), util.u("drôle"), util.u("S’il")],
@@ -408,8 +398,7 @@ class EnumTest(fixtures.TestBase, AssertsExecutionResults):
             RegexSQL("DROP TYPE myenum", dialect="postgresql"),
         )
 
-    @testing.provide_metadata
-    def test_generate_multiple(self):
+    def test_generate_multiple(self, metadata, connection):
         """Test that the same enum twice only generates once
         for the create_all() call, without using checkfirst.
 
@@ -417,21 +406,18 @@ class EnumTest(fixtures.TestBase, AssertsExecutionResults):
         now handles this.
 
         """
-        metadata = self.metadata
-
         e1 = Enum("one", "two", "three", name="myenum")
         Table("e1", metadata, Column("c1", e1))
 
         Table("e2", metadata, Column("c1", e1))
 
-        metadata.create_all(checkfirst=False)
-        metadata.drop_all(checkfirst=False)
+        metadata.create_all(connection, checkfirst=False)
+        metadata.drop_all(connection, checkfirst=False)
         assert "myenum" not in [
-            e["name"] for e in inspect(testing.db).get_enums()
+            e["name"] for e in inspect(connection).get_enums()
         ]
 
-    @testing.provide_metadata
-    def test_generate_alone_on_metadata(self):
+    def test_generate_alone_on_metadata(self, connection, metadata):
         """Test that the same enum twice only generates once
         for the create_all() call, without using checkfirst.
 
@@ -439,20 +425,17 @@ class EnumTest(fixtures.TestBase, AssertsExecutionResults):
         now handles this.
 
         """
-        metadata = self.metadata
 
-        Enum("one", "two", "three", name="myenum", metadata=self.metadata)
+        Enum("one", "two", "three", name="myenum", metadata=metadata)
 
-        metadata.create_all(checkfirst=False)
-        assert "myenum" in [e["name"] for e in inspect(testing.db).get_enums()]
-        metadata.drop_all(checkfirst=False)
+        metadata.create_all(connection, checkfirst=False)
+        assert "myenum" in [e["name"] for e in inspect(connection).get_enums()]
+        metadata.drop_all(connection, checkfirst=False)
         assert "myenum" not in [
-            e["name"] for e in inspect(testing.db).get_enums()
+            e["name"] for e in inspect(connection).get_enums()
         ]
 
-    @testing.provide_metadata
-    def test_generate_multiple_on_metadata(self):
-        metadata = self.metadata
+    def test_generate_multiple_on_metadata(self, connection, metadata):
 
         e1 = Enum("one", "two", "three", name="myenum", metadata=metadata)
 
@@ -460,20 +443,20 @@ class EnumTest(fixtures.TestBase, AssertsExecutionResults):
 
         t2 = Table("e2", metadata, Column("c1", e1))
 
-        metadata.create_all(checkfirst=False)
-        assert "myenum" in [e["name"] for e in inspect(testing.db).get_enums()]
-        metadata.drop_all(checkfirst=False)
+        metadata.create_all(connection, checkfirst=False)
+        assert "myenum" in [e["name"] for e in inspect(connection).get_enums()]
+        metadata.drop_all(connection, checkfirst=False)
         assert "myenum" not in [
-            e["name"] for e in inspect(testing.db).get_enums()
+            e["name"] for e in inspect(connection).get_enums()
         ]
 
-        e1.create()  # creates ENUM
-        t1.create()  # does not create ENUM
-        t2.create()  # does not create ENUM
+        e1.create(connection)  # creates ENUM
+        t1.create(connection)  # does not create ENUM
+        t2.create(connection)  # does not create ENUM
 
-    @testing.provide_metadata
-    def test_generate_multiple_schemaname_on_metadata(self):
-        metadata = self.metadata
+    def test_generate_multiple_schemaname_on_metadata(
+        self, metadata, connection
+    ):
 
         Enum("one", "two", "three", name="myenum", metadata=metadata)
         Enum(
@@ -485,38 +468,36 @@ class EnumTest(fixtures.TestBase, AssertsExecutionResults):
             schema="test_schema",
         )
 
-        metadata.create_all(checkfirst=False)
-        assert "myenum" in [e["name"] for e in inspect(testing.db).get_enums()]
+        metadata.create_all(connection, checkfirst=False)
+        assert "myenum" in [e["name"] for e in inspect(connection).get_enums()]
         assert "myenum" in [
             e["name"]
-            for e in inspect(testing.db).get_enums(schema="test_schema")
+            for e in inspect(connection).get_enums(schema="test_schema")
         ]
-        metadata.drop_all(checkfirst=False)
+        metadata.drop_all(connection, checkfirst=False)
         assert "myenum" not in [
-            e["name"] for e in inspect(testing.db).get_enums()
+            e["name"] for e in inspect(connection).get_enums()
         ]
         assert "myenum" not in [
             e["name"]
-            for e in inspect(testing.db).get_enums(schema="test_schema")
+            for e in inspect(connection).get_enums(schema="test_schema")
         ]
 
-    @testing.provide_metadata
-    def test_drops_on_table(self):
-        metadata = self.metadata
+    def test_drops_on_table(self, connection, metadata):
 
         e1 = Enum("one", "two", "three", name="myenum")
         table = Table("e1", metadata, Column("c1", e1))
 
-        table.create()
-        table.drop()
+        table.create(connection)
+        table.drop(connection)
         assert "myenum" not in [
-            e["name"] for e in inspect(testing.db).get_enums()
+            e["name"] for e in inspect(connection).get_enums()
         ]
-        table.create()
-        assert "myenum" in [e["name"] for e in inspect(testing.db).get_enums()]
-        table.drop()
+        table.create(connection)
+        assert "myenum" in [e["name"] for e in inspect(connection).get_enums()]
+        table.drop(connection)
         assert "myenum" not in [
-            e["name"] for e in inspect(testing.db).get_enums()
+            e["name"] for e in inspect(connection).get_enums()
         ]
 
     def test_create_drop_schema_translate_map(self, connection):
@@ -554,9 +535,8 @@ class EnumTest(fixtures.TestBase, AssertsExecutionResults):
 
         assert_raises(exc.ProgrammingError, e1.drop, conn, checkfirst=False)
 
-    @testing.provide_metadata
-    def test_remain_on_table_metadata_wide(self):
-        metadata = self.metadata
+    def test_remain_on_table_metadata_wide(self, metadata, future_connection):
+        connection = future_connection
 
         e1 = Enum("one", "two", "three", name="myenum", metadata=metadata)
         table = Table("e1", metadata, Column("c1", e1))
@@ -566,15 +546,18 @@ class EnumTest(fixtures.TestBase, AssertsExecutionResults):
             sa.exc.ProgrammingError,
             '.*type "myenum" does not exist',
             table.create,
+            connection,
         )
-        table.create(checkfirst=True)
-        table.drop()
-        table.create(checkfirst=True)
-        table.drop()
-        assert "myenum" in [e["name"] for e in inspect(testing.db).get_enums()]
-        metadata.drop_all()
+        connection.rollback()
+
+        table.create(connection, checkfirst=True)
+        table.drop(connection)
+        table.create(connection, checkfirst=True)
+        table.drop(connection)
+        assert "myenum" in [e["name"] for e in inspect(connection).get_enums()]
+        metadata.drop_all(connection)
         assert "myenum" not in [
-            e["name"] for e in inspect(testing.db).get_enums()
+            e["name"] for e in inspect(connection).get_enums()
         ]
 
     def test_non_native_dialect(self):
@@ -616,26 +599,25 @@ class EnumTest(fixtures.TestBase, AssertsExecutionResults):
         finally:
             metadata.drop_all(engine)
 
-    def test_standalone_enum(self):
-        metadata = MetaData(testing.db)
+    def test_standalone_enum(self, connection, metadata):
         etype = Enum(
             "four", "five", "six", name="fourfivesixtype", metadata=metadata
         )
-        etype.create()
+        etype.create(connection)
         try:
-            assert testing.db.dialect.has_type(testing.db, "fourfivesixtype")
+            assert testing.db.dialect.has_type(connection, "fourfivesixtype")
         finally:
-            etype.drop()
+            etype.drop(connection)
             assert not testing.db.dialect.has_type(
-                testing.db, "fourfivesixtype"
+                connection, "fourfivesixtype"
             )
-        metadata.create_all()
+        metadata.create_all(connection)
         try:
-            assert testing.db.dialect.has_type(testing.db, "fourfivesixtype")
+            assert testing.db.dialect.has_type(connection, "fourfivesixtype")
         finally:
-            metadata.drop_all()
+            metadata.drop_all(connection)
             assert not testing.db.dialect.has_type(
-                testing.db, "fourfivesixtype"
+                connection, "fourfivesixtype"
             )
 
     def test_no_support(self):
@@ -655,9 +637,7 @@ class EnumTest(fixtures.TestBase, AssertsExecutionResults):
         e.connect()
         assert not dialect.supports_native_enum
 
-    @testing.provide_metadata
-    def test_reflection(self):
-        metadata = self.metadata
+    def test_reflection(self, metadata, connection):
         etype = Enum(
             "four", "five", "six", name="fourfivesixtype", metadata=metadata
         )
@@ -670,17 +650,15 @@ class EnumTest(fixtures.TestBase, AssertsExecutionResults):
             ),
             Column("value2", etype),
         )
-        metadata.create_all()
+        metadata.create_all(connection)
         m2 = MetaData()
-        t2 = Table("table", m2, autoload_with=testing.db)
+        t2 = Table("table", m2, autoload_with=connection)
         eq_(t2.c.value.type.enums, ["one", "two", "three"])
         eq_(t2.c.value.type.name, "onetwothreetype")
         eq_(t2.c.value2.type.enums, ["four", "five", "six"])
         eq_(t2.c.value2.type.name, "fourfivesixtype")
 
-    @testing.provide_metadata
-    def test_schema_reflection(self):
-        metadata = self.metadata
+    def test_schema_reflection(self, metadata, connection):
         etype = Enum(
             "four",
             "five",
@@ -705,9 +683,9 @@ class EnumTest(fixtures.TestBase, AssertsExecutionResults):
             ),
             Column("value2", etype),
         )
-        metadata.create_all()
+        metadata.create_all(connection)
         m2 = MetaData()
-        t2 = Table("table", m2, autoload_with=testing.db)
+        t2 = Table("table", m2, autoload_with=connection)
         eq_(t2.c.value.type.enums, ["one", "two", "three"])
         eq_(t2.c.value.type.name, "onetwothreetype")
         eq_(t2.c.value2.type.enums, ["four", "five", "six"])
@@ -810,21 +788,19 @@ class OIDTest(fixtures.TestBase):
     __only_on__ = "postgresql"
     __backend__ = True
 
-    @testing.provide_metadata
-    def test_reflection(self):
-        metadata = self.metadata
+    def test_reflection(self, connection, metadata):
         Table(
             "table",
             metadata,
             Column("x", Integer),
             Column("y", postgresql.OID),
         )
-        metadata.create_all()
+        metadata.create_all(connection)
         m2 = MetaData()
         t2 = Table(
             "table",
             m2,
-            autoload_with=testing.db,
+            autoload_with=connection,
         )
         assert isinstance(t2.c.y.type, postgresql.OID)
 
@@ -858,19 +834,18 @@ class RegClassTest(fixtures.TestBase):
             "pg_class",
         )
 
-    def test_cast_whereclause(self):
+    def test_cast_whereclause(self, connection):
         pga = Table(
             "pg_attribute",
-            MetaData(testing.db),
+            MetaData(),
             Column("attrelid", postgresql.OID),
             Column("attname", String(64)),
         )
-        with testing.db.connect() as conn:
-            oid = conn.scalar(
-                select(pga.c.attrelid).where(
-                    pga.c.attrelid == cast("pg_class", postgresql.REGCLASS)
-                )
+        oid = connection.scalar(
+            select(pga.c.attrelid).where(
+                pga.c.attrelid == cast("pg_class", postgresql.REGCLASS)
             )
+        )
         assert isinstance(oid, int)
 
 
@@ -904,9 +879,7 @@ class NumericInterpretationTest(fixtures.TestBase):
                     val = proc(val)
                 assert val in (23.7, decimal.Decimal("23.7"))
 
-    @testing.provide_metadata
-    def test_numeric_default(self, connection):
-        metadata = self.metadata
+    def test_numeric_default(self, connection, metadata):
         # pg8000 appears to fail when the value is 0,
         # returns an int instead of decimal.
         t = Table(
@@ -918,7 +891,7 @@ class NumericInterpretationTest(fixtures.TestBase):
             Column("fd", Float(asdecimal=True), default=1),
             Column("ff", Float(asdecimal=False), default=1),
         )
-        metadata.create_all()
+        metadata.create_all(connection)
         connection.execute(t.insert())
 
         row = connection.execute(t.select()).first()
@@ -934,7 +907,7 @@ class PythonTypeTest(fixtures.TestBase):
         is_(postgresql.INTERVAL().python_type, datetime.timedelta)
 
 
-class TimezoneTest(fixtures.TestBase):
+class TimezoneTest(fixtures.TablesTest):
     __backend__ = True
 
     """Test timezone-aware datetimes.
@@ -948,14 +921,11 @@ class TimezoneTest(fixtures.TestBase):
     __only_on__ = "postgresql"
 
     @classmethod
-    def setup_class(cls):
-        global tztable, notztable, metadata
-        metadata = MetaData(testing.db)
-
+    def define_tables(cls, metadata):
         # current_timestamp() in postgresql is assumed to return
         # TIMESTAMP WITH TIMEZONE
 
-        tztable = Table(
+        Table(
             "tztable",
             metadata,
             Column("id", Integer, primary_key=True),
@@ -966,7 +936,7 @@ class TimezoneTest(fixtures.TestBase):
             ),
             Column("name", String(20)),
         )
-        notztable = Table(
+        Table(
             "notztable",
             metadata,
             Column("id", Integer, primary_key=True),
@@ -979,19 +949,12 @@ class TimezoneTest(fixtures.TestBase):
             ),
             Column("name", String(20)),
         )
-        metadata.create_all()
-
-    @classmethod
-    def teardown_class(cls):
-        metadata.drop_all()
 
     def test_with_timezone(self, connection):
-
+        tztable, notztable = self.tables("tztable", "notztable")
         # get a date with a tzinfo
 
-        somedate = testing.db.connect().scalar(
-            func.current_timestamp().select()
-        )
+        somedate = connection.scalar(func.current_timestamp().select())
         assert somedate.tzinfo
         connection.execute(tztable.insert(), id=1, name="row1", date=somedate)
         row = connection.execute(
@@ -1012,6 +975,7 @@ class TimezoneTest(fixtures.TestBase):
     def test_without_timezone(self, connection):
 
         # get a date without a tzinfo
+        tztable, notztable = self.tables("tztable", "notztable")
 
         somedate = datetime.datetime(2005, 10, 20, 11, 52, 0)
         assert not somedate.tzinfo
@@ -1056,14 +1020,10 @@ class TimePrecisionCompileTest(fixtures.TestBase, AssertsCompiledSQL):
 
 class TimePrecisionTest(fixtures.TestBase):
 
-    __dialect__ = postgresql.dialect()
-    __prefer__ = "postgresql"
+    __only_on__ = "postgresql"
     __backend__ = True
 
-    @testing.only_on("postgresql", "DB specific feature")
-    @testing.provide_metadata
-    def test_reflection(self):
-        metadata = self.metadata
+    def test_reflection(self, metadata, connection):
         t1 = Table(
             "t1",
             metadata,
@@ -1074,9 +1034,9 @@ class TimePrecisionTest(fixtures.TestBase):
             Column("c5", postgresql.TIMESTAMP(precision=5)),
             Column("c6", postgresql.TIMESTAMP(timezone=True, precision=5)),
         )
-        t1.create()
+        t1.create(connection)
         m2 = MetaData()
-        t2 = Table("t1", m2, autoload_with=testing.db)
+        t2 = Table("t1", m2, autoload_with=connection)
         eq_(t2.c.c1.type.precision, None)
         eq_(t2.c.c2.type.precision, 5)
         eq_(t2.c.c3.type.precision, 5)
@@ -1391,22 +1351,18 @@ class ArrayRoundTripTest(object):
         assert isinstance(tbl.c.intarr.type.item_type, Integer)
         assert isinstance(tbl.c.strarr.type.item_type, String)
 
-    @testing.provide_metadata
-    def test_array_str_collation(self):
-        m = self.metadata
-
+    def test_array_str_collation(self, metadata, connection):
         t = Table(
             "t",
-            m,
+            metadata,
             Column("data", sqltypes.ARRAY(String(50, collation="en_US"))),
         )
 
-        t.create()
+        t.create(connection)
 
-    @testing.provide_metadata
-    def test_array_agg(self, connection):
-        values_table = Table("values", self.metadata, Column("value", Integer))
-        self.metadata.create_all(testing.db)
+    def test_array_agg(self, metadata, connection):
+        values_table = Table("values", metadata, Column("value", Integer))
+        metadata.create_all(connection)
         connection.execute(
             values_table.insert(), [{"value": i} for i in range(1, 10)]
         )
@@ -1658,9 +1614,7 @@ class ArrayRoundTripTest(object):
             [4, 5, 6],
         )
 
-    @testing.provide_metadata
-    def test_tuple_flag(self, connection):
-        metadata = self.metadata
+    def test_tuple_flag(self, connection, metadata):
 
         t1 = Table(
             "t1",
@@ -1671,7 +1625,7 @@ class ArrayRoundTripTest(object):
                 "data2", self.ARRAY(Numeric(asdecimal=False), as_tuple=True)
             ),
         )
-        metadata.create_all()
+        metadata.create_all(connection)
         connection.execute(
             t1.insert(), id=1, data=["1", "2", "3"], data2=[5.4, 5.6]
         )
@@ -2168,10 +2122,9 @@ class SpecialTypesTest(fixtures.TablesTest, ComparesTables):
         assert t.c.precision_interval.type.precision == 3
         assert t.c.bitstring.type.length == 4
 
-    @testing.provide_metadata
-    def test_tsvector_round_trip(self, connection):
-        t = Table("t1", self.metadata, Column("data", postgresql.TSVECTOR))
-        t.create()
+    def test_tsvector_round_trip(self, connection, metadata):
+        t = Table("t1", metadata, Column("data", postgresql.TSVECTOR))
+        t.create(connection)
         connection.execute(t.insert(), data="a fat cat sat")
         eq_(connection.scalar(select(t.c.data)), "'a' 'cat' 'fat' 'sat'")
 
@@ -2182,9 +2135,7 @@ class SpecialTypesTest(fixtures.TablesTest, ComparesTables):
             "'a' 'cat' 'fat' 'mat' 'sat'",
         )
 
-    @testing.provide_metadata
-    def test_bit_reflection(self):
-        metadata = self.metadata
+    def test_bit_reflection(self, metadata, connection):
         t1 = Table(
             "t1",
             metadata,
@@ -2193,9 +2144,9 @@ class SpecialTypesTest(fixtures.TablesTest, ComparesTables):
             Column("bitvarying", postgresql.BIT(varying=True)),
             Column("bitvarying5", postgresql.BIT(5, varying=True)),
         )
-        t1.create()
+        t1.create(connection)
         m2 = MetaData()
-        t2 = Table("t1", m2, autoload_with=testing.db)
+        t2 = Table("t1", m2, autoload_with=connection)
         eq_(t2.c.bit1.type.length, 1)
         eq_(t2.c.bit1.type.varying, False)
         eq_(t2.c.bit5.type.length, 5)
index 8eed21281c92b25e08a59ca31e172ba56de0ac0f..4658b40a8d0884d8bd49fa7139290563ffab5de3 100644 (file)
@@ -72,44 +72,30 @@ class TestTypes(fixtures.TestBase, AssertsExecutionResults):
 
     __only_on__ = "sqlite"
 
-    @testing.provide_metadata
-    def test_boolean(self):
+    def test_boolean(self, connection, metadata):
         """Test that the boolean only treats 1 as True"""
 
-        meta = self.metadata
         t = Table(
             "bool_table",
-            meta,
+            metadata,
             Column("id", Integer, primary_key=True),
             Column("boo", Boolean(create_constraint=False)),
         )
-        meta.create_all(testing.db)
-        exec_sql(
-            testing.db,
+        metadata.create_all(connection)
+        for stmt in [
             "INSERT INTO bool_table (id, boo) " "VALUES (1, 'false');",
-        )
-        exec_sql(
-            testing.db,
             "INSERT INTO bool_table (id, boo) " "VALUES (2, 'true');",
-        )
-        exec_sql(
-            testing.db,
             "INSERT INTO bool_table (id, boo) " "VALUES (3, '1');",
-        )
-        exec_sql(
-            testing.db,
             "INSERT INTO bool_table (id, boo) " "VALUES (4, '0');",
-        )
-        exec_sql(
-            testing.db,
             "INSERT INTO bool_table (id, boo) " "VALUES (5, 1);",
-        )
-        exec_sql(
-            testing.db,
             "INSERT INTO bool_table (id, boo) " "VALUES (6, 0);",
-        )
+        ]:
+            connection.exec_driver_sql(stmt)
+
         eq_(
-            t.select(t.c.boo).order_by(t.c.id).execute().fetchall(),
+            connection.execute(
+                t.select().where(t.c.boo).order_by(t.c.id)
+            ).fetchall(),
             [(3, True), (5, True)],
         )
 
@@ -301,51 +287,41 @@ class JSONTest(fixtures.TestBase):
     __requires__ = ("json_type",)
     __only_on__ = "sqlite"
 
-    @testing.provide_metadata
     @testing.requires.reflects_json_type
-    def test_reflection(self):
-        Table("json_test", self.metadata, Column("foo", sqlite.JSON))
-        self.metadata.create_all()
+    def test_reflection(self, connection, metadata):
+        Table("json_test", metadata, Column("foo", sqlite.JSON))
+        metadata.create_all(connection)
 
-        reflected = Table("json_test", MetaData(), autoload_with=testing.db)
+        reflected = Table("json_test", MetaData(), autoload_with=connection)
         is_(reflected.c.foo.type._type_affinity, sqltypes.JSON)
         assert isinstance(reflected.c.foo.type, sqlite.JSON)
 
-    @testing.provide_metadata
-    def test_rudimentary_roundtrip(self):
-        sqlite_json = Table(
-            "json_test", self.metadata, Column("foo", sqlite.JSON)
-        )
+    def test_rudimentary_roundtrip(self, metadata, connection):
+        sqlite_json = Table("json_test", metadata, Column("foo", sqlite.JSON))
 
-        self.metadata.create_all()
+        metadata.create_all(connection)
 
         value = {"json": {"foo": "bar"}, "recs": ["one", "two"]}
 
-        with testing.db.begin() as conn:
-            conn.execute(sqlite_json.insert(), foo=value)
+        connection.execute(sqlite_json.insert(), foo=value)
 
-            eq_(conn.scalar(select(sqlite_json.c.foo)), value)
+        eq_(connection.scalar(select(sqlite_json.c.foo)), value)
 
-    @testing.provide_metadata
-    def test_extract_subobject(self):
-        sqlite_json = Table(
-            "json_test", self.metadata, Column("foo", sqlite.JSON)
-        )
+    def test_extract_subobject(self, connection, metadata):
+        sqlite_json = Table("json_test", metadata, Column("foo", sqlite.JSON))
 
-        self.metadata.create_all()
+        metadata.create_all(connection)
 
         value = {"json": {"foo": "bar"}}
 
-        with testing.db.begin() as conn:
-            conn.execute(sqlite_json.insert(), foo=value)
-
-            eq_(conn.scalar(select(sqlite_json.c.foo["json"])), value["json"])
+        connection.execute(sqlite_json.insert(), foo=value)
 
-    @testing.provide_metadata
-    def test_deprecated_serializer_args(self):
-        sqlite_json = Table(
-            "json_test", self.metadata, Column("foo", sqlite.JSON)
+        eq_(
+            connection.scalar(select(sqlite_json.c.foo["json"])), value["json"]
         )
+
+    def test_deprecated_serializer_args(self, metadata):
+        sqlite_json = Table("json_test", metadata, Column("foo", sqlite.JSON))
         data_element = {"foo": "bar"}
 
         js = mock.Mock(side_effect=json.dumps)
@@ -360,7 +336,7 @@ class JSONTest(fixtures.TestBase):
             engine = engines.testing_engine(
                 options=dict(_json_serializer=js, _json_deserializer=jd)
             )
-        self.metadata.create_all(engine)
+            metadata.create_all(engine)
 
         with engine.begin() as conn:
             conn.execute(sqlite_json.insert(), {"foo": data_element})
@@ -468,17 +444,7 @@ class DefaultsTest(fixtures.TestBase, AssertsCompiledSQL):
 
     __only_on__ = "sqlite"
 
-    @testing.exclude(
-        "sqlite",
-        "<",
-        (3, 3, 8),
-        "sqlite3 changesets 3353 and 3440 modified "
-        "behavior of default displayed in pragma "
-        "table_info()",
-    )
-    def test_default_reflection(self):
-
-        # (ask_for, roundtripped_as_if_different)
+    def test_default_reflection(self, connection, metadata):
 
         specs = [
             (String(3), '"foo"'),
@@ -490,18 +456,13 @@ class DefaultsTest(fixtures.TestBase, AssertsCompiledSQL):
             Column("c%i" % (i + 1), t[0], server_default=text(t[1]))
             for (i, t) in enumerate(specs)
         ]
-        db = testing.db
-        m = MetaData(db)
-        Table("t_defaults", m, *columns)
-        try:
-            m.create_all()
-            m2 = MetaData()
-            rt = Table("t_defaults", m2, autoload_with=db)
-            expected = [c[1] for c in specs]
-            for i, reflected in enumerate(rt.c):
-                eq_(str(reflected.server_default.arg), expected[i])
-        finally:
-            m.drop_all()
+        Table("t_defaults", metadata, *columns)
+        metadata.create_all(connection)
+        m2 = MetaData()
+        rt = Table("t_defaults", m2, autoload_with=connection)
+        expected = [c[1] for c in specs]
+        for i, reflected in enumerate(rt.c):
+            eq_(str(reflected.server_default.arg), expected[i])
 
     @testing.exclude(
         "sqlite",
@@ -917,7 +878,7 @@ class AttachedDBTest(fixtures.TestBase):
         eq_(insp.get_schema_names(), ["main", "test_schema"])
 
     def test_reflect_system_table(self):
-        meta = MetaData(self.conn)
+        meta = MetaData()
         alt_master = Table(
             "sqlite_master",
             meta,
@@ -1758,8 +1719,8 @@ class KeywordInDatabaseNameTest(fixtures.TestBase):
             connection.exec_driver_sql('DETACH DATABASE "default"')
 
     def test_reflect(self, connection, db_fixture):
-        meta = MetaData(bind=connection, schema="default")
-        meta.reflect()
+        meta = MetaData(schema="default")
+        meta.reflect(connection)
         assert "default.a" in meta.tables
 
 
index 5cbb4785466da784858738acd630520a2b392f33..396b48aa4af18b943090dc3b1238e83470a30be6 100644 (file)
@@ -376,7 +376,7 @@ class DDLEventTest(fixtures.TestBase):
 class DDLExecutionTest(fixtures.TestBase):
     def setup(self):
         self.engine = engines.mock_engine()
-        self.metadata = MetaData(self.engine)
+        self.metadata = MetaData()
         self.users = Table(
             "users",
             self.metadata,
@@ -391,14 +391,14 @@ class DDLExecutionTest(fixtures.TestBase):
         event.listen(users, "before_drop", DDL("xyzzy"))
         event.listen(users, "after_drop", DDL("fnord"))
 
-        users.create()
+        users.create(self.engine)
         strings = [str(x) for x in engine.mock]
         assert "mxyzptlk" in strings
         assert "klptzyxm" in strings
         assert "xyzzy" not in strings
         assert "fnord" not in strings
         del engine.mock[:]
-        users.drop()
+        users.drop(self.engine)
         strings = [str(x) for x in engine.mock]
         assert "mxyzptlk" not in strings
         assert "klptzyxm" not in strings
@@ -413,14 +413,14 @@ class DDLExecutionTest(fixtures.TestBase):
         event.listen(users, "before_drop", DDL("xyzzy"))
         event.listen(users, "after_drop", DDL("fnord"))
 
-        metadata.create_all()
+        metadata.create_all(self.engine)
         strings = [str(x) for x in engine.mock]
         assert "mxyzptlk" in strings
         assert "klptzyxm" in strings
         assert "xyzzy" not in strings
         assert "fnord" not in strings
         del engine.mock[:]
-        metadata.drop_all()
+        metadata.drop_all(self.engine)
         strings = [str(x) for x in engine.mock]
         assert "mxyzptlk" not in strings
         assert "klptzyxm" not in strings
@@ -435,14 +435,14 @@ class DDLExecutionTest(fixtures.TestBase):
         event.listen(metadata, "before_drop", DDL("xyzzy"))
         event.listen(metadata, "after_drop", DDL("fnord"))
 
-        metadata.create_all()
+        metadata.create_all(self.engine)
         strings = [str(x) for x in engine.mock]
         assert "mxyzptlk" in strings
         assert "klptzyxm" in strings
         assert "xyzzy" not in strings
         assert "fnord" not in strings
         del engine.mock[:]
-        metadata.drop_all()
+        metadata.drop_all(self.engine)
         strings = [str(x) for x in engine.mock]
         assert "mxyzptlk" not in strings
         assert "klptzyxm" not in strings
index 4ca081be2ebf815507ba85fcd53de5a8de08fcb9..28623161475eb4c0843300e243b1930d1f47cfa2 100644 (file)
@@ -158,7 +158,10 @@ class ConnectionlessDeprecationTest(fixtures.TestBase):
                 bind.begin()
             try:
                 for args in (([bind], {}), ([], {"bind": bind})):
-                    metadata = MetaData(*args[0], **args[1])
+                    with testing.expect_deprecated_20(
+                        "The MetaData.bind argument is deprecated "
+                    ):
+                        metadata = MetaData(*args[0], **args[1])
                     table = Table(
                         "test_table", metadata, Column("foo", Integer)
                     )
@@ -315,11 +318,11 @@ class ConnectionlessDeprecationTest(fixtures.TestBase):
         ):
             eq_(testing.db.execute(stmt).fetchall(), [(1,)])
 
-    @testing.provide_metadata
-    def test_implicit_execute(self):
-        table = Table("t", self.metadata, Column("a", Integer))
+    def test_implicit_execute(self, metadata):
+        table = Table("t", metadata, Column("a", Integer))
         table.create(testing.db)
 
+        metadata.bind = testing.db
         stmt = table.insert().values(a=1)
         with testing.expect_deprecated_20(
             r"The Executable.execute\(\) method is considered legacy",
@@ -1225,7 +1228,7 @@ class DeprecatedReflectionTest(fixtures.TablesTest):
             is_true(testing.db.has_table("user"))
 
     def test_engine_table_names(self):
-        metadata = self.metadata
+        metadata = self.tables_test_metadata
 
         with testing.expect_deprecated(
             r"The Engine.table_names\(\) method is deprecated"
@@ -1235,7 +1238,8 @@ class DeprecatedReflectionTest(fixtures.TablesTest):
 
     def test_reflecttable(self):
         inspector = inspect(testing.db)
-        metadata = self.metadata
+        metadata = MetaData()
+
         table = Table("user", metadata)
         with testing.expect_deprecated_20(
             r"The Inspector.reflecttable\(\) method is considered "
@@ -1632,7 +1636,7 @@ class EngineEventsTest(fixtures.TestBase):
 class DDLExecutionTest(fixtures.TestBase):
     def setup(self):
         self.engine = engines.mock_engine()
-        self.metadata = MetaData(self.engine)
+        self.metadata = MetaData()
         self.users = Table(
             "users",
             self.metadata,
@@ -1742,7 +1746,7 @@ class AutocommitTextTest(AutocommitKeywordFixture, fixtures.TestBase):
         self._test_keyword("SELECT foo FROM table", False)
 
 
-class ExplicitAutoCommitTest(fixtures.TestBase):
+class ExplicitAutoCommitTest(fixtures.TablesTest):
 
     """test the 'autocommit' flag on select() and text() objects.
 
@@ -1752,36 +1756,31 @@ class ExplicitAutoCommitTest(fixtures.TestBase):
     __only_on__ = "postgresql"
 
     @classmethod
-    def setup_class(cls):
-        global metadata, foo
-        metadata = MetaData(testing.db)
-        foo = Table(
+    def define_tables(cls, metadata):
+        Table(
             "foo",
             metadata,
             Column("id", Integer, primary_key=True),
             Column("data", String(100)),
         )
-        with testing.db.begin() as conn:
-            metadata.create_all(conn)
-            conn.exec_driver_sql(
+
+        event.listen(
+            metadata,
+            "after_create",
+            DDL(
                 "create function insert_foo(varchar) "
                 "returns integer as 'insert into foo(data) "
                 "values ($1);select 1;' language sql"
-            )
-
-    def teardown(self):
-        with testing.db.begin() as conn:
-            conn.execute(foo.delete())
-
-    @classmethod
-    def teardown_class(cls):
-        with testing.db.begin() as conn:
-            conn.exec_driver_sql("drop function insert_foo(varchar)")
-            metadata.drop_all(conn)
+            ),
+        )
+        event.listen(
+            metadata, "before_drop", DDL("drop function insert_foo(varchar)")
+        )
 
     def test_control(self):
 
         # test that not using autocommit does not commit
+        foo = self.tables.foo
 
         conn1 = testing.db.connect()
         conn2 = testing.db.connect()
@@ -1799,6 +1798,8 @@ class ExplicitAutoCommitTest(fixtures.TestBase):
         conn2.close()
 
     def test_explicit_compiled(self):
+        foo = self.tables.foo
+
         conn1 = testing.db.connect()
         conn2 = testing.db.connect()
 
@@ -1816,6 +1817,8 @@ class ExplicitAutoCommitTest(fixtures.TestBase):
         conn2.close()
 
     def test_explicit_connection(self):
+        foo = self.tables.foo
+
         conn1 = testing.db.connect()
         conn2 = testing.db.connect()
         with testing.expect_deprecated_20(
@@ -1853,6 +1856,8 @@ class ExplicitAutoCommitTest(fixtures.TestBase):
         conn2.close()
 
     def test_explicit_text(self):
+        foo = self.tables.foo
+
         conn1 = testing.db.connect()
         conn2 = testing.db.connect()
         with testing.expect_deprecated_20(
@@ -1869,6 +1874,8 @@ class ExplicitAutoCommitTest(fixtures.TestBase):
         conn2.close()
 
     def test_implicit_text(self):
+        foo = self.tables.foo
+
         conn1 = testing.db.connect()
         conn2 = testing.db.connect()
         with testing.expect_deprecated_20(
index 6239d1f18b86e3dd8d03f766db5cdb43e6b8d37d..21d4e06e06ea38358f0345e654d463319c59f999 100644 (file)
@@ -830,12 +830,11 @@ class ConvenienceExecuteTest(fixtures.TablesTest):
             self._assert_no_data()
 
 
-class CompiledCacheTest(fixtures.TablesTest):
+class CompiledCacheTest(fixtures.TestBase):
     __backend__ = True
 
-    @classmethod
-    def define_tables(cls, metadata):
-        Table(
+    def test_cache(self, connection, metadata):
+        users = Table(
             "users",
             metadata,
             Column(
@@ -844,9 +843,7 @@ class CompiledCacheTest(fixtures.TablesTest):
             Column("user_name", VARCHAR(20)),
             Column("extra_data", VARCHAR(20)),
         )
-
-    def test_cache(self, connection):
-        users = self.tables.users
+        users.create(connection)
 
         conn = connection
         cache = {}
@@ -912,8 +909,17 @@ class CompiledCacheTest(fixtures.TablesTest):
         # the statement values (only the keys).
         eq_(ref_blob(), None)
 
-    def test_keys_independent_of_ordering(self, connection):
-        users = self.tables.users
+    def test_keys_independent_of_ordering(self, connection, metadata):
+        users = Table(
+            "users",
+            metadata,
+            Column(
+                "user_id", INT, primary_key=True, test_needs_autoincrement=True
+            ),
+            Column("user_name", VARCHAR(20)),
+            Column("extra_data", VARCHAR(20)),
+        )
+        users.create(connection)
 
         connection.execute(
             users.insert(),
@@ -961,13 +967,10 @@ class CompiledCacheTest(fixtures.TablesTest):
         eq_(len(cache), 1)
 
     @testing.requires.schemas
-    @testing.provide_metadata
-    def test_schema_translate_in_key(self):
-        Table("x", self.metadata, Column("q", Integer))
-        Table(
-            "x", self.metadata, Column("q", Integer), schema=config.test_schema
-        )
-        self.metadata.create_all()
+    def test_schema_translate_in_key(self, metadata, connection):
+        Table("x", metadata, Column("q", Integer))
+        Table("x", metadata, Column("q", Integer), schema=config.test_schema)
+        metadata.create_all(connection)
 
         m = MetaData()
         t1 = Table("x", m, Column("q", Integer))
@@ -975,33 +978,30 @@ class CompiledCacheTest(fixtures.TablesTest):
         stmt = select(t1.c.q)
 
         cache = {}
-        with config.db.begin() as conn:
-            conn = conn.execution_options(compiled_cache=cache)
-            conn.execute(ins, {"q": 1})
-            eq_(conn.scalar(stmt), 1)
 
-        with config.db.begin() as conn:
-            conn = conn.execution_options(
-                compiled_cache=cache,
-                schema_translate_map={None: config.test_schema},
-            )
-            conn.execute(ins, {"q": 2})
-            eq_(conn.scalar(stmt), 2)
+        conn = connection.execution_options(compiled_cache=cache)
+        conn.execute(ins, {"q": 1})
+        eq_(conn.scalar(stmt), 1)
 
-        with config.db.begin() as conn:
-            conn = conn.execution_options(
-                compiled_cache=cache,
-                schema_translate_map={None: None},
-            )
-            # should use default schema again even though statement
-            # was compiled with test_schema in the map
-            eq_(conn.scalar(stmt), 1)
+        conn = connection.execution_options(
+            compiled_cache=cache,
+            schema_translate_map={None: config.test_schema},
+        )
+        conn.execute(ins, {"q": 2})
+        eq_(conn.scalar(stmt), 2)
 
-        with config.db.begin() as conn:
-            conn = conn.execution_options(
-                compiled_cache=cache,
-            )
-            eq_(conn.scalar(stmt), 1)
+        conn = connection.execution_options(
+            compiled_cache=cache,
+            schema_translate_map={None: None},
+        )
+        # should use default schema again even though statement
+        # was compiled with test_schema in the map
+        eq_(conn.scalar(stmt), 1)
+
+        conn = connection.execution_options(
+            compiled_cache=cache,
+        )
+        eq_(conn.scalar(stmt), 1)
 
 
 class MockStrategyTest(fixtures.TestBase):
@@ -1079,7 +1079,7 @@ class SchemaTranslateTest(fixtures.TestBase, testing.AssertsExecutionResults):
         Table("t1", metadata, Column("x", Integer), schema=config.test_schema)
         Table("t2", metadata, Column("x", Integer), schema=config.test_schema)
         Table("t3", metadata, Column("x", Integer), schema=None)
-        metadata.create_all()
+        metadata.create_all(testing.db)
 
     def test_ddl_hastable(self):
 
@@ -1772,7 +1772,7 @@ class EngineEventsTest(fixtures.TestBase):
         ]:
             event.listen(engine, "before_execute", execute)
             event.listen(engine, "before_cursor_execute", cursor_execute)
-            m = MetaData(engine)
+            m = MetaData()
             t1 = Table(
                 "t1",
                 m,
index 48b6c40d771b4923cfe6dacf0aad1fe2d3b4650a..658cdd79f02276d106b6c00a8c08c660f96d171c 100644 (file)
@@ -21,7 +21,6 @@ from sqlalchemy.testing import assert_raises_message
 from sqlalchemy.testing import AssertsCompiledSQL
 from sqlalchemy.testing import ComparesTables
 from sqlalchemy.testing import config
-from sqlalchemy.testing import engines
 from sqlalchemy.testing import eq_
 from sqlalchemy.testing import eq_regex
 from sqlalchemy.testing import expect_warnings
@@ -43,13 +42,8 @@ from sqlalchemy.util import ue
 class ReflectionTest(fixtures.TestBase, ComparesTables):
     __backend__ = True
 
-    @testing.exclude(
-        "mssql", "<", (10, 0, 0), "Date is only supported on MSSQL 2008+"
-    )
-    @testing.exclude("mysql", "<", (4, 1, 1), "early types are squirrely")
-    @testing.provide_metadata
-    def test_basic_reflection(self):
-        meta = self.metadata
+    def test_basic_reflection(self, connection, metadata):
+        meta = metadata
 
         users = Table(
             "engine_users",
@@ -85,25 +79,22 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
             Column("email_address", sa.String(20)),
             test_needs_fk=True,
         )
-        meta.create_all()
+        meta.create_all(connection)
 
         meta2 = MetaData()
         reflected_users = Table(
-            "engine_users", meta2, autoload_with=testing.db
+            "engine_users", meta2, autoload_with=connection
         )
         reflected_addresses = Table(
             "engine_email_addresses",
             meta2,
-            autoload_with=testing.db,
+            autoload_with=connection,
         )
         self.assert_tables_equal(users, reflected_users)
         self.assert_tables_equal(addresses, reflected_addresses)
 
-    @testing.provide_metadata
-    def test_autoload_with_imply_autoload(
-        self,
-    ):
-        meta = self.metadata
+    def test_autoload_with_imply_autoload(self, metadata, connection):
+        meta = metadata
         t = Table(
             "t",
             meta,
@@ -111,15 +102,14 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
             Column("x", sa.String(20)),
             Column("y", sa.Integer),
         )
-        meta.create_all()
+        meta.create_all(connection)
 
         meta2 = MetaData()
-        reflected_t = Table("t", meta2, autoload_with=testing.db)
+        reflected_t = Table("t", meta2, autoload_with=connection)
         self.assert_tables_equal(t, reflected_t)
 
-    @testing.provide_metadata
-    def test_two_foreign_keys(self):
-        meta = self.metadata
+    def test_two_foreign_keys(self, metadata, connection):
+        meta = metadata
         Table(
             "t1",
             meta,
@@ -140,18 +130,17 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
             Column("id", sa.Integer, primary_key=True),
             test_needs_fk=True,
         )
-        meta.create_all()
+        meta.create_all(connection)
         meta2 = MetaData()
         t1r, t2r, t3r = [
-            Table(x, meta2, autoload_with=testing.db)
+            Table(x, meta2, autoload_with=connection)
             for x in ("t1", "t2", "t3")
         ]
         assert t1r.c.t2id.references(t2r.c.id)
         assert t1r.c.t3id.references(t3r.c.id)
 
-    @testing.provide_metadata
-    def test_resolve_fks_false_table(self):
-        meta = self.metadata
+    def test_resolve_fks_false_table(self, connection, metadata):
+        meta = metadata
         Table(
             "t1",
             meta,
@@ -165,9 +154,9 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
             Column("id", sa.Integer, primary_key=True),
             test_needs_fk=True,
         )
-        meta.create_all()
+        meta.create_all(connection)
         meta2 = MetaData()
-        t1 = Table("t1", meta2, resolve_fks=False, autoload_with=testing.db)
+        t1 = Table("t1", meta2, resolve_fks=False, autoload_with=connection)
         in_("t1", meta2.tables)
         not_in("t2", meta2.tables)
 
@@ -176,14 +165,13 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
             lambda: list(t1.c.t2id.foreign_keys)[0].column,
         )
 
-        t2 = Table("t2", meta2, autoload_with=testing.db)
+        t2 = Table("t2", meta2, autoload_with=connection)
 
         # now it resolves
         is_true(t1.c.t2id.references(t2.c.id))
 
-    @testing.provide_metadata
-    def test_resolve_fks_false_extend_existing(self):
-        meta = self.metadata
+    def test_resolve_fks_false_extend_existing(self, connection, metadata):
+        meta = metadata
         Table(
             "t1",
             meta,
@@ -197,7 +185,7 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
             Column("id", sa.Integer, primary_key=True),
             test_needs_fk=True,
         )
-        meta.create_all()
+        meta.create_all(connection)
         meta2 = MetaData()
         Table("t1", meta2)
         in_("t1", meta2.tables)
@@ -206,7 +194,7 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
             "t1",
             meta2,
             resolve_fks=False,
-            autoload_with=testing.db,
+            autoload_with=connection,
             extend_existing=True,
         )
         not_in("t2", meta2.tables)
@@ -216,14 +204,13 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
             lambda: list(t1.c.t2id.foreign_keys)[0].column,
         )
 
-        t2 = Table("t2", meta2, autoload_with=testing.db)
+        t2 = Table("t2", meta2, autoload_with=connection)
 
         # now it resolves
         is_true(t1.c.t2id.references(t2.c.id))
 
-    @testing.provide_metadata
-    def test_resolve_fks_false_metadata(self):
-        meta = self.metadata
+    def test_resolve_fks_false_metadata(self, connection, metadata):
+        meta = metadata
         Table(
             "t1",
             meta,
@@ -237,9 +224,9 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
             Column("id", sa.Integer, primary_key=True),
             test_needs_fk=True,
         )
-        meta.create_all()
+        meta.create_all(connection)
         meta2 = MetaData()
-        meta2.reflect(testing.db, resolve_fks=False, only=["t1"])
+        meta2.reflect(connection, resolve_fks=False, only=["t1"])
         in_("t1", meta2.tables)
         not_in("t2", meta2.tables)
 
@@ -250,36 +237,35 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
             lambda: list(t1.c.t2id.foreign_keys)[0].column,
         )
 
-        meta2.reflect(testing.db, resolve_fks=False)
+        meta2.reflect(connection, resolve_fks=False)
 
         t2 = meta2.tables["t2"]
         is_true(t1.c.t2id.references(t2.c.id))
 
-    def test_nonexistent(self):
+    def test_nonexistent(self, connection):
         meta = MetaData()
         assert_raises(
             sa.exc.NoSuchTableError,
             Table,
             "nonexistent",
             meta,
-            autoload_with=testing.db,
+            autoload_with=connection,
         )
         assert "nonexistent" not in meta.tables
 
-    @testing.provide_metadata
-    def test_include_columns(self):
-        meta = self.metadata
+    def test_include_columns(self, connection, metadata):
+        meta = metadata
         foo = Table(
             "foo",
             meta,
             *[Column(n, sa.String(30)) for n in ["a", "b", "c", "d", "e", "f"]]
         )
-        meta.create_all()
+        meta.create_all(connection)
         meta2 = MetaData()
         foo = Table(
             "foo",
             meta2,
-            autoload_with=testing.db,
+            autoload_with=connection,
             include_columns=["b", "f", "e"],
         )
         # test that cols come back in original order
@@ -291,7 +277,7 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
 
         # test against a table which is already reflected
         meta3 = MetaData()
-        foo = Table("foo", meta3, autoload_with=testing.db)
+        foo = Table("foo", meta3, autoload_with=connection)
 
         foo = Table(
             "foo", meta3, include_columns=["b", "f", "e"], extend_existing=True
@@ -302,9 +288,8 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
         for c in ("a", "c", "d"):
             assert c not in foo.c
 
-    @testing.provide_metadata
-    def test_extend_existing(self):
-        meta = self.metadata
+    def test_extend_existing(self, connection, metadata):
+        meta = metadata
 
         Table(
             "t",
@@ -314,7 +299,7 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
             Column("y", Integer),
             Column("z", Integer, server_default="5"),
         )
-        meta.create_all()
+        meta.create_all(connection)
 
         m2 = MetaData()
         old_z = Column("z", String, primary_key=True)
@@ -327,7 +312,7 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
             m2,
             old_y,
             extend_existing=True,
-            autoload_with=testing.db,
+            autoload_with=connection,
         )
         eq_(set(t2.columns.keys()), set(["x", "y", "z", "q", "id"]))
 
@@ -346,7 +331,7 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
             "t",
             m3,
             extend_existing=False,
-            autoload_with=testing.db,
+            autoload_with=connection,
         )
         eq_(set(t3.columns.keys()), set(["z"]))
 
@@ -362,7 +347,7 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
             old_y,
             extend_existing=True,
             autoload_replace=False,
-            autoload_with=testing.db,
+            autoload_with=connection,
         )
         eq_(set(t4.columns.keys()), set(["x", "y", "z", "q", "id"]))
         eq_(list(t4.primary_key.columns), [t4.c.z, t4.c.id])
@@ -371,9 +356,10 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
         assert t4.c.z.type._type_affinity is String
         assert t4.c.q is old_q
 
-    @testing.provide_metadata
-    def test_extend_existing_reflect_all_dont_dupe_index(self):
-        m = self.metadata
+    def test_extend_existing_reflect_all_dont_dupe_index(
+        self, connection, metadata
+    ):
+        m = metadata
         d = Table(
             "d",
             m,
@@ -389,10 +375,10 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
             Column("id", Integer, primary_key=True),
             Column("aid", ForeignKey("d.id")),
         )
-        m.create_all()
+        m.create_all(connection)
 
         m2 = MetaData()
-        m2.reflect(testing.db, extend_existing=True)
+        m2.reflect(connection, extend_existing=True)
 
         eq_(
             len(
@@ -422,51 +408,51 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
             )
 
     @testing.emits_warning(r".*omitted columns")
-    @testing.provide_metadata
-    def test_include_columns_indexes(self):
-        m = self.metadata
+    def test_include_columns_indexes(self, connection, metadata):
+        m = metadata
 
         t1 = Table("t1", m, Column("a", sa.Integer), Column("b", sa.Integer))
         sa.Index("foobar", t1.c.a, t1.c.b)
         sa.Index("bat", t1.c.a)
-        m.create_all()
+        m.create_all(connection)
         m2 = MetaData()
-        t2 = Table("t1", m2, autoload_with=testing.db)
+        t2 = Table("t1", m2, autoload_with=connection)
         assert len(t2.indexes) == 2
 
         m2 = MetaData()
-        t2 = Table("t1", m2, autoload_with=testing.db, include_columns=["a"])
+        t2 = Table("t1", m2, autoload_with=connection, include_columns=["a"])
         assert len(t2.indexes) == 1
 
         m2 = MetaData()
         t2 = Table(
-            "t1", m2, autoload_with=testing.db, include_columns=["a", "b"]
+            "t1", m2, autoload_with=connection, include_columns=["a", "b"]
         )
         assert len(t2.indexes) == 2
 
-    @testing.provide_metadata
-    def test_autoload_replace_foreign_key_nonpresent(self):
+    def test_autoload_replace_foreign_key_nonpresent(
+        self, connection, metadata
+    ):
         """test autoload_replace=False with col plus FK
         establishes the FK not present in the DB.
 
         """
-        Table("a", self.metadata, Column("id", Integer, primary_key=True))
+        Table("a", metadata, Column("id", Integer, primary_key=True))
         Table(
             "b",
-            self.metadata,
+            metadata,
             Column("id", Integer, primary_key=True),
             Column("a_id", Integer),
         )
-        self.metadata.create_all()
+        metadata.create_all(connection)
 
         m2 = MetaData()
         b2 = Table("b", m2, Column("a_id", Integer, sa.ForeignKey("a.id")))
-        a2 = Table("a", m2, autoload_with=testing.db)
+        a2 = Table("a", m2, autoload_with=connection)
         b2 = Table(
             "b",
             m2,
             extend_existing=True,
-            autoload_with=testing.db,
+            autoload_with=connection,
             autoload_replace=False,
         )
 
@@ -474,30 +460,31 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
         assert b2.c.a_id.references(a2.c.id)
         eq_(len(b2.constraints), 2)
 
-    @testing.provide_metadata
-    def test_autoload_replace_foreign_key_ispresent(self):
+    def test_autoload_replace_foreign_key_ispresent(
+        self, connection, metadata
+    ):
         """test autoload_replace=False with col plus FK mirroring
         DB-reflected FK skips the reflected FK and installs
         the in-python one only.
 
         """
-        Table("a", self.metadata, Column("id", Integer, primary_key=True))
+        Table("a", metadata, Column("id", Integer, primary_key=True))
         Table(
             "b",
-            self.metadata,
+            metadata,
             Column("id", Integer, primary_key=True),
             Column("a_id", Integer, sa.ForeignKey("a.id")),
         )
-        self.metadata.create_all()
+        metadata.create_all(connection)
 
         m2 = MetaData()
         b2 = Table("b", m2, Column("a_id", Integer, sa.ForeignKey("a.id")))
-        a2 = Table("a", m2, autoload_with=testing.db)
+        a2 = Table("a", m2, autoload_with=connection)
         b2 = Table(
             "b",
             m2,
             extend_existing=True,
-            autoload_with=testing.db,
+            autoload_with=connection,
             autoload_replace=False,
         )
 
@@ -505,29 +492,28 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
         assert b2.c.a_id.references(a2.c.id)
         eq_(len(b2.constraints), 2)
 
-    @testing.provide_metadata
-    def test_autoload_replace_foreign_key_removed(self):
+    def test_autoload_replace_foreign_key_removed(self, connection, metadata):
         """test autoload_replace=False with col minus FK that's in the
         DB means the FK is skipped and doesn't get installed at all.
 
         """
-        Table("a", self.metadata, Column("id", Integer, primary_key=True))
+        Table("a", metadata, Column("id", Integer, primary_key=True))
         Table(
             "b",
-            self.metadata,
+            metadata,
             Column("id", Integer, primary_key=True),
             Column("a_id", Integer, sa.ForeignKey("a.id")),
         )
-        self.metadata.create_all()
+        metadata.create_all(connection)
 
         m2 = MetaData()
         b2 = Table("b", m2, Column("a_id", Integer))
-        a2 = Table("a", m2, autoload_with=testing.db)
+        a2 = Table("a", m2, autoload_with=connection)
         b2 = Table(
             "b",
             m2,
             extend_existing=True,
-            autoload_with=testing.db,
+            autoload_with=connection,
             autoload_replace=False,
         )
 
@@ -535,10 +521,9 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
         assert not b2.c.a_id.references(a2.c.id)
         eq_(len(b2.constraints), 1)
 
-    @testing.provide_metadata
-    def test_autoload_replace_primary_key(self):
-        Table("a", self.metadata, Column("id", Integer))
-        self.metadata.create_all()
+    def test_autoload_replace_primary_key(self, connection, metadata):
+        Table("a", metadata, Column("id", Integer))
+        metadata.create_all(connection)
 
         m2 = MetaData()
         a2 = Table("a", m2, Column("id", Integer, primary_key=True))
@@ -546,7 +531,7 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
         Table(
             "a",
             m2,
-            autoload_with=testing.db,
+            autoload_with=connection,
             autoload_replace=False,
             extend_existing=True,
         )
@@ -555,15 +540,14 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
     def test_autoload_replace_arg(self):
         Table("t", MetaData(), autoload_replace=False)
 
-    @testing.provide_metadata
-    def test_autoincrement_col(self):
+    def test_autoincrement_col(self, connection, metadata):
         """test that 'autoincrement' is reflected according to sqla's policy.
 
         Don't mark this test as unsupported for any backend !
 
         """
 
-        meta = self.metadata
+        meta = metadata
         Table(
             "test",
             meta,
@@ -581,41 +565,35 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
             Column("data", sa.String(50)),
             mysql_engine="InnoDB",
         )
-        meta.create_all()
+        meta.create_all(connection)
         m2 = MetaData()
-        t1a = Table("test", m2, autoload_with=testing.db)
+        t1a = Table("test", m2, autoload_with=connection)
         assert t1a._autoincrement_column is t1a.c.id
 
-        t2a = Table("test2", m2, autoload_with=testing.db)
+        t2a = Table("test2", m2, autoload_with=connection)
         assert t2a._autoincrement_column is None
 
     @skip("sqlite")
-    @testing.provide_metadata
-    def test_unknown_types(self):
+    def test_unknown_types(self, connection, metadata):
         """Test the handling of unknown types for the given dialect.
 
         sqlite is skipped because it has special rules for unknown types using
         'affinity types' - this feature is tested in that dialect's test spec.
         """
-        meta = self.metadata
+        meta = metadata
         t = Table("test", meta, Column("foo", sa.DateTime))
 
-        ischema_names = testing.db.dialect.ischema_names
-        t.create()
-        testing.db.dialect.ischema_names = {}
-        try:
-            m2 = MetaData(testing.db)
+        t.create(connection)
+
+        with mock.patch.object(connection.dialect, "ischema_names", {}):
+            m2 = MetaData()
 
             with testing.expect_warnings("Did not recognize type"):
-                t3 = Table("test", m2, autoload_with=testing.db)
+                t3 = Table("test", m2, autoload_with=connection)
                 is_(t3.c.foo.type.__class__, sa.types.NullType)
 
-        finally:
-            testing.db.dialect.ischema_names = ischema_names
-
-    @testing.provide_metadata
-    def test_basic_override(self):
-        meta = self.metadata
+    def test_basic_override(self, connection, metadata):
+        meta = metadata
         table = Table(
             "override_test",
             meta,
@@ -623,7 +601,7 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
             Column("col2", sa.String(20)),
             Column("col3", sa.Numeric),
         )
-        table.create()
+        table.create(connection)
 
         meta2 = MetaData()
         table = Table(
@@ -631,16 +609,15 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
             meta2,
             Column("col2", sa.Unicode()),
             Column("col4", sa.String(30)),
-            autoload_with=testing.db,
+            autoload_with=connection,
         )
 
         self.assert_(isinstance(table.c.col1.type, sa.Integer))
         self.assert_(isinstance(table.c.col2.type, sa.Unicode))
         self.assert_(isinstance(table.c.col4.type, sa.String))
 
-    @testing.provide_metadata
-    def test_override_upgrade_pk_flag(self):
-        meta = self.metadata
+    def test_override_upgrade_pk_flag(self, connection, metadata):
+        meta = metadata
         table = Table(
             "override_test",
             meta,
@@ -648,26 +625,25 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
             Column("col2", sa.String(20)),
             Column("col3", sa.Numeric),
         )
-        table.create()
+        table.create(connection)
 
         meta2 = MetaData()
         table = Table(
             "override_test",
             meta2,
             Column("col1", sa.Integer, primary_key=True),
-            autoload_with=testing.db,
+            autoload_with=connection,
         )
 
         eq_(list(table.primary_key), [table.c.col1])
         eq_(table.c.col1.primary_key, True)
 
-    @testing.provide_metadata
-    def test_override_pkfk(self):
+    def test_override_pkfk(self, connection, metadata):
         """test that you can override columns which contain foreign keys
         to other reflected tables, where the foreign key column is also
         a primary key column"""
 
-        meta = self.metadata
+        meta = metadata
         Table(
             "users",
             meta,
@@ -681,7 +657,7 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
             Column("street", sa.String(30)),
         )
 
-        meta.create_all()
+        meta.create_all(connection)
         meta2 = MetaData()
         a2 = Table(
             "addresses",
@@ -689,36 +665,35 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
             Column(
                 "id", sa.Integer, sa.ForeignKey("users.id"), primary_key=True
             ),
-            autoload_with=testing.db,
+            autoload_with=connection,
         )
-        u2 = Table("users", meta2, autoload_with=testing.db)
+        u2 = Table("users", meta2, autoload_with=connection)
 
         assert list(a2.primary_key) == [a2.c.id]
         assert list(u2.primary_key) == [u2.c.id]
         assert u2.join(a2).onclause.compare(u2.c.id == a2.c.id)
 
         meta3 = MetaData()
-        u3 = Table("users", meta3, autoload_with=testing.db)
+        u3 = Table("users", meta3, autoload_with=connection)
         a3 = Table(
             "addresses",
             meta3,
             Column(
                 "id", sa.Integer, sa.ForeignKey("users.id"), primary_key=True
             ),
-            autoload_with=testing.db,
+            autoload_with=connection,
         )
 
         assert list(a3.primary_key) == [a3.c.id]
         assert list(u3.primary_key) == [u3.c.id]
         assert u3.join(a3).onclause.compare(u3.c.id == a3.c.id)
 
-    @testing.provide_metadata
-    def test_override_nonexistent_fk(self):
+    def test_override_nonexistent_fk(self, connection, metadata):
         """test that you can override columns and create new foreign
         keys to other reflected tables which have no foreign keys.  this
         is common with MySQL MyISAM tables."""
 
-        meta = self.metadata
+        meta = metadata
         Table(
             "users",
             meta,
@@ -733,15 +708,15 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
             Column("user_id", sa.Integer),
         )
 
-        meta.create_all()
+        meta.create_all(connection)
         meta2 = MetaData()
         a2 = Table(
             "addresses",
             meta2,
             Column("user_id", sa.Integer, sa.ForeignKey("users.id")),
-            autoload_with=testing.db,
+            autoload_with=connection,
         )
-        u2 = Table("users", meta2, autoload_with=testing.db)
+        u2 = Table("users", meta2, autoload_with=connection)
         assert len(a2.c.user_id.foreign_keys) == 1
         assert len(a2.foreign_keys) == 1
         assert [c.parent for c in a2.foreign_keys] == [a2.c.user_id]
@@ -750,13 +725,13 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
         assert u2.join(a2).onclause.compare(u2.c.id == a2.c.user_id)
         meta3 = MetaData()
 
-        u3 = Table("users", meta3, autoload_with=testing.db)
+        u3 = Table("users", meta3, autoload_with=connection)
 
         a3 = Table(
             "addresses",
             meta3,
             Column("user_id", sa.Integer, sa.ForeignKey("users.id")),
-            autoload_with=testing.db,
+            autoload_with=connection,
         )
         assert u3.join(a3).onclause.compare(u3.c.id == a3.c.user_id)
 
@@ -766,7 +741,7 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
             "users",
             meta4,
             Column("id", sa.Integer, key="u_id", primary_key=True),
-            autoload_with=testing.db,
+            autoload_with=connection,
         )
 
         a4 = Table(
@@ -777,7 +752,7 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
             Column(
                 "user_id", sa.Integer, sa.ForeignKey("users.u_id"), key="id"
             ),
-            autoload_with=testing.db,
+            autoload_with=connection,
         )
 
         # for the thing happening here with the column collection,
@@ -789,12 +764,9 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
         assert len(a4.columns) == 3
         assert len(a4.constraints) == 2
 
-    @testing.provide_metadata
-    def test_override_composite_fk(self):
+    def test_override_composite_fk(self, connection, metadata):
         """Test double-remove of composite foreign key, when replaced."""
 
-        metadata = self.metadata
-
         Table(
             "a",
             metadata,
@@ -810,26 +782,25 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
             sa.ForeignKeyConstraint(["x", "y"], ["a.x", "a.y"]),
         )
 
-        metadata.create_all()
+        metadata.create_all(connection)
 
         meta2 = MetaData()
 
         c1 = Column("x", sa.Integer, primary_key=True)
         c2 = Column("y", sa.Integer, primary_key=True)
         f1 = sa.ForeignKeyConstraint(["x", "y"], ["a.x", "a.y"])
-        b1 = Table("b", meta2, c1, c2, f1, autoload_with=testing.db)
+        b1 = Table("b", meta2, c1, c2, f1, autoload_with=connection)
 
         assert b1.c.x is c1
         assert b1.c.y is c2
         assert f1 in b1.constraints
         assert len(b1.constraints) == 2
 
-    @testing.provide_metadata
-    def test_override_keys(self):
+    def test_override_keys(self, connection, metadata):
         """test that columns can be overridden with a 'key',
         and that ForeignKey targeting during reflection still works."""
 
-        meta = self.metadata
+        meta = metadata
         Table(
             "a",
             meta,
@@ -843,27 +814,26 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
             Column("y", sa.Integer, sa.ForeignKey("a.x")),
             test_needs_fk=True,
         )
-        meta.create_all(testing.db)
+        meta.create_all(connection)
         m2 = MetaData()
         a2 = Table(
             "a",
             m2,
             Column("x", sa.Integer, primary_key=True, key="x1"),
-            autoload_with=testing.db,
+            autoload_with=connection,
         )
-        b2 = Table("b", m2, autoload_with=testing.db)
+        b2 = Table("b", m2, autoload_with=connection)
         assert a2.join(b2).onclause.compare(a2.c.x1 == b2.c.y)
         assert b2.c.y.references(a2.c.x1)
 
-    @testing.provide_metadata
-    def test_nonreflected_fk_raises(self):
+    def test_nonreflected_fk_raises(self, connection, metadata):
         """test that a NoReferencedColumnError is raised when reflecting
         a table with an FK to another table which has not included the target
         column in its reflection.
 
         """
 
-        meta = self.metadata
+        meta = metadata
         Table(
             "a",
             meta,
@@ -877,21 +847,19 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
             Column("y", sa.Integer, sa.ForeignKey("a.x")),
             test_needs_fk=True,
         )
-        meta.create_all()
+        meta.create_all(connection)
         m2 = MetaData()
-        a2 = Table("a", m2, include_columns=["z"], autoload_with=testing.db)
-        b2 = Table("b", m2, autoload_with=testing.db)
+        a2 = Table("a", m2, include_columns=["z"], autoload_with=connection)
+        b2 = Table("b", m2, autoload_with=connection)
 
         assert_raises(sa.exc.NoReferencedColumnError, a2.join, b2)
 
-    @testing.exclude("mysql", "<", (4, 1, 1), "innodb funkiness")
-    @testing.provide_metadata
-    def test_override_existing_fk(self):
+    def test_override_existing_fk(self, connection, metadata):
         """test that you can override columns and specify new foreign
         keys to other reflected tables, on columns which *do* already
         have that foreign key, and that the FK is not duped."""
 
-        meta = self.metadata
+        meta = metadata
         Table(
             "users",
             meta,
@@ -907,15 +875,15 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
             test_needs_fk=True,
         )
 
-        meta.create_all(testing.db)
+        meta.create_all(connection)
         meta2 = MetaData()
         a2 = Table(
             "addresses",
             meta2,
             Column("user_id", sa.Integer, sa.ForeignKey("users.id")),
-            autoload_with=testing.db,
+            autoload_with=connection,
         )
-        u2 = Table("users", meta2, autoload_with=testing.db)
+        u2 = Table("users", meta2, autoload_with=connection)
         s = sa.select(a2).subquery()
 
         assert s.c.user_id is not None
@@ -932,14 +900,14 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
             "users",
             meta2,
             Column("id", sa.Integer, primary_key=True),
-            autoload_with=testing.db,
+            autoload_with=connection,
         )
         a2 = Table(
             "addresses",
             meta2,
             Column("id", sa.Integer, primary_key=True),
             Column("user_id", sa.Integer, sa.ForeignKey("users.id")),
-            autoload_with=testing.db,
+            autoload_with=connection,
         )
         s = sa.select(a2).subquery()
 
@@ -953,8 +921,7 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
         assert u2.join(a2).onclause.compare(u2.c.id == a2.c.user_id)
 
     @testing.only_on(["postgresql", "mysql"])
-    @testing.provide_metadata
-    def test_fk_options(self):
+    def test_fk_options(self, connection, metadata):
         """test that foreign key reflection includes options (on
         backends with {dialect}.get_foreign_keys() support)"""
 
@@ -989,7 +956,7 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
             )
             test_attrs = ("onupdate", "ondelete")
 
-        meta = self.metadata
+        meta = metadata
         Table(
             "users",
             meta,
@@ -1004,40 +971,38 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
             Column("user_id", sa.Integer, addresses_user_id_fkey),
             test_needs_fk=True,
         )
-        meta.create_all()
+        meta.create_all(connection)
 
         meta2 = MetaData()
-        meta2.reflect(testing.db)
+        meta2.reflect(connection)
         for fk in meta2.tables["addresses"].foreign_keys:
             ref = addresses_user_id_fkey
             for attr in test_attrs:
                 eq_(getattr(fk, attr), getattr(ref, attr))
 
-    @testing.provide_metadata
-    def test_pks_not_uniques(self):
+    def test_pks_not_uniques(self, connection, metadata):
         """test that primary key reflection not tripped up by unique
         indexes"""
 
-        with testing.db.begin() as conn:
-            conn.exec_driver_sql(
-                """
-                CREATE TABLE book (
-                    id INTEGER NOT NULL,
-                    title VARCHAR(100) NOT NULL,
-                    series INTEGER,
-                    series_id INTEGER,
-                    UNIQUE(series, series_id),
-                    PRIMARY KEY(id)
-                )"""
-            )
+        conn = connection
+        conn.exec_driver_sql(
+            """
+            CREATE TABLE book (
+                id INTEGER NOT NULL,
+                title VARCHAR(100) NOT NULL,
+                series INTEGER,
+                series_id INTEGER,
+                UNIQUE(series, series_id),
+                PRIMARY KEY(id)
+            )"""
+        )
 
-        book = Table("book", self.metadata, autoload_with=testing.db)
+        book = Table("book", metadata, autoload_with=connection)
         assert book.primary_key.contains_column(book.c.id)
         assert not book.primary_key.contains_column(book.c.series)
         eq_(len(book.primary_key), 1)
 
-    def test_fk_error(self):
-        metadata = MetaData(testing.db)
+    def test_fk_error(self, connection, metadata):
         Table(
             "slots",
             metadata,
@@ -1052,37 +1017,35 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
             "could not find table 'pkgs' with which to generate "
             "a foreign key to target column 'pkg_id'",
             metadata.create_all,
+            connection,
         )
 
-    @testing.provide_metadata
-    def test_composite_pks(self):
+    def test_composite_pks(self, connection, metadata):
         """test reflection of a composite primary key"""
 
-        with testing.db.begin() as conn:
-            conn.exec_driver_sql(
-                """
-                CREATE TABLE book (
-                    id INTEGER NOT NULL,
-                    isbn VARCHAR(50) NOT NULL,
-                    title VARCHAR(100) NOT NULL,
-                    series INTEGER NOT NULL,
-                    series_id INTEGER NOT NULL,
-                    UNIQUE(series, series_id),
-                    PRIMARY KEY(id, isbn)
-                )"""
-            )
-        book = Table("book", self.metadata, autoload_with=testing.db)
+        conn = connection
+        conn.exec_driver_sql(
+            """
+            CREATE TABLE book (
+                id INTEGER NOT NULL,
+                isbn VARCHAR(50) NOT NULL,
+                title VARCHAR(100) NOT NULL,
+                series INTEGER NOT NULL,
+                series_id INTEGER NOT NULL,
+                UNIQUE(series, series_id),
+                PRIMARY KEY(id, isbn)
+            )"""
+        )
+        book = Table("book", metadata, autoload_with=connection)
         assert book.primary_key.contains_column(book.c.id)
         assert book.primary_key.contains_column(book.c.isbn)
         assert not book.primary_key.contains_column(book.c.series)
         eq_(len(book.primary_key), 2)
 
-    @testing.exclude("mysql", "<", (4, 1, 1), "innodb funkiness")
-    @testing.provide_metadata
-    def test_composite_fk(self):
+    def test_composite_fk(self, connection, metadata):
         """test reflection of composite foreign keys"""
 
-        meta = self.metadata
+        meta = metadata
         multi = Table(
             "multi",
             meta,
@@ -1107,11 +1070,11 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
             ),
             test_needs_fk=True,
         )
-        meta.create_all()
+        meta.create_all(connection)
 
         meta2 = MetaData()
-        table = Table("multi", meta2, autoload_with=testing.db)
-        table2 = Table("multi2", meta2, autoload_with=testing.db)
+        table = Table("multi", meta2, autoload_with=connection)
+        table2 = Table("multi2", meta2, autoload_with=connection)
         self.assert_tables_equal(multi, table)
         self.assert_tables_equal(multi2, table2)
         j = sa.join(table, table2)
@@ -1126,13 +1089,12 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
 
     @testing.crashes("oracle", "FIXME: unknown, confirm not fails_on")
     @testing.requires.check_constraints
-    @testing.provide_metadata
-    def test_reserved(self):
+    def test_reserved(self, connection, metadata):
 
         # check a table that uses a SQL reserved name doesn't cause an
         # error
 
-        meta = self.metadata
+        meta = metadata
         table_a = Table(
             "select",
             meta,
@@ -1142,11 +1104,11 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
         )
         sa.Index("where", table_a.c["from"])
 
-        if meta.bind.dialect.requires_name_normalize:
+        if connection.dialect.requires_name_normalize:
             check_col = "TRUE"
         else:
             check_col = "true"
-        quoter = meta.bind.dialect.identifier_preparer.quote_identifier
+        quoter = connection.dialect.identifier_preparer.quote_identifier
 
         Table(
             "false",
@@ -1164,120 +1126,81 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
             sa.PrimaryKeyConstraint("or", "join", name="to"),
         )
         index_c = sa.Index("else", table_c.c.join)
-        meta.create_all()
-        index_c.drop()
+        meta.create_all(connection)
+        index_c.drop(connection)
         meta2 = MetaData()
-        Table("select", meta2, autoload_with=testing.db)
-        Table("false", meta2, autoload_with=testing.db)
-        Table("is", meta2, autoload_with=testing.db)
+        Table("select", meta2, autoload_with=connection)
+        Table("false", meta2, autoload_with=connection)
+        Table("is", meta2, autoload_with=connection)
 
-    @testing.provide_metadata
-    def _test_reflect_uses_bind(self, fn):
-        from sqlalchemy.pool import AssertionPool
-
-        e = engines.testing_engine(options={"poolclass": AssertionPool})
-        fn(e)
-
-    def test_reflect_uses_bind_constructor_conn_reflect(self):
-        self._test_reflect_uses_bind(lambda e: MetaData(e.connect()).reflect())
-
-    def test_reflect_uses_bind_constructor_engine_reflect(self):
-        self._test_reflect_uses_bind(lambda e: MetaData(e).reflect())
-
-    def test_reflect_uses_bind_conn_reflect(self):
-        self._test_reflect_uses_bind(lambda e: MetaData().reflect(e.connect()))
-
-    def test_reflect_uses_bind_engine_reflect(self):
-        self._test_reflect_uses_bind(lambda e: MetaData().reflect(e))
-
-    def test_reflect_uses_bind_option_engine_reflect(self):
-        self._test_reflect_uses_bind(
-            lambda e: MetaData().reflect(e.execution_options(foo="bar"))
-        )
-
-    @testing.provide_metadata
-    def test_reflect_all(self):
-        existing = inspect(testing.db).get_table_names()
+    def test_reflect_all(self, connection, metadata):
         names = ["rt_%s" % name for name in ("a", "b", "c", "d", "e")]
         nameset = set(names)
-        for name in names:
-            # be sure our starting environment is sane
-            self.assert_(name not in existing)
-        self.assert_("rt_f" not in existing)
 
-        baseline = self.metadata
+        baseline = metadata
         for name in names:
             Table(name, baseline, Column("id", sa.Integer, primary_key=True))
-        baseline.create_all()
+        baseline.create_all(connection)
 
-        m1 = MetaData(testing.db)
-        self.assert_(not m1.tables)
-        m1.reflect()
-        self.assert_(nameset.issubset(set(m1.tables.keys())))
+        m1 = MetaData()
+        is_false(m1.tables)
+        m1.reflect(connection)
+        is_true(nameset.issubset(set(m1.tables.keys())))
 
         m2 = MetaData()
-        m2.reflect(testing.db, only=["rt_a", "rt_b"])
-        self.assert_(set(m2.tables.keys()) == set(["rt_a", "rt_b"]))
+        m2.reflect(connection, only=["rt_a", "rt_b"])
+        eq_(set(m2.tables.keys()), set(["rt_a", "rt_b"]))
 
         m3 = MetaData()
-        c = testing.db.connect()
-        m3.reflect(bind=c, only=lambda name, meta: name == "rt_c")
-        self.assert_(set(m3.tables.keys()) == set(["rt_c"]))
+        m3.reflect(connection, only=lambda name, meta: name == "rt_c")
+        eq_(set(m3.tables.keys()), set(["rt_c"]))
 
-        m4 = MetaData(testing.db)
+        m4 = MetaData()
 
         assert_raises_message(
             sa.exc.InvalidRequestError,
             r"Could not reflect: requested table\(s\) not available in "
             r"Engine\(.*?\): \(rt_f\)",
             m4.reflect,
+            connection,
             only=["rt_a", "rt_f"],
         )
 
-        m5 = MetaData(testing.db)
-        m5.reflect(only=[])
-        self.assert_(not m5.tables)
+        m5 = MetaData()
+        m5.reflect(connection, only=[])
+        is_false(m5.tables)
 
-        m6 = MetaData(testing.db)
-        m6.reflect(only=lambda n, m: False)
-        self.assert_(not m6.tables)
+        m6 = MetaData()
+        m6.reflect(connection, only=lambda n, m: False)
+        is_false(m6.tables)
 
-        m7 = MetaData(testing.db)
-        m7.reflect()
-        self.assert_(nameset.issubset(set(m7.tables.keys())))
+        m7 = MetaData()
+        m7.reflect(connection)
+        is_true(nameset.issubset(set(m7.tables.keys())))
 
-        m8 = MetaData()
-        assert_raises(sa.exc.UnboundExecutionError, m8.reflect)
-
-        m8_e1 = MetaData(testing.db)
+        m8_e1 = MetaData()
         rt_c = Table("rt_c", m8_e1)
-        m8_e1.reflect(extend_existing=True)
+        m8_e1.reflect(connection, extend_existing=True)
         eq_(set(m8_e1.tables.keys()), set(names))
         eq_(rt_c.c.keys(), ["id"])
 
-        m8_e2 = MetaData(testing.db)
+        m8_e2 = MetaData()
         rt_c = Table("rt_c", m8_e2)
-        m8_e2.reflect(extend_existing=True, only=["rt_a", "rt_c"])
+        m8_e2.reflect(connection, extend_existing=True, only=["rt_a", "rt_c"])
         eq_(set(m8_e2.tables.keys()), set(["rt_a", "rt_c"]))
         eq_(rt_c.c.keys(), ["id"])
 
-        if existing:
-            print("Other tables present in database, skipping some checks.")
-        else:
-            baseline.drop_all()
-            m9 = MetaData(testing.db)
-            m9.reflect()
-            self.assert_(not m9.tables)
+        baseline.drop_all(connection)
+        m9 = MetaData()
+        m9.reflect(connection)
+        is_false(m9.tables)
 
-    @testing.provide_metadata
-    def test_reflect_all_unreflectable_table(self):
+    def test_reflect_all_unreflectable_table(self, connection, metadata):
         names = ["rt_%s" % name for name in ("a", "b", "c", "d", "e")]
 
         for name in names:
-            Table(
-                name, self.metadata, Column("id", sa.Integer, primary_key=True)
-            )
-        self.metadata.create_all()
+            Table(name, metadata, Column("id", sa.Integer, primary_key=True))
+        metadata.create_all(connection)
 
         m = MetaData()
 
@@ -1292,7 +1215,7 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
 
         with mock.patch.object(inspector, "reflect_table", patched):
             with expect_warnings("Skipping table rt_c: Can't reflect rt_c"):
-                m.reflect(bind=testing.db)
+                m.reflect(connection)
 
             assert_raises_message(
                 sa.exc.UnreflectableTableError,
@@ -1300,23 +1223,11 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
                 Table,
                 "rt_c",
                 m,
-                autoload_with=testing.db,
+                autoload_with=connection,
             )
 
-    def test_reflect_all_conn_closing(self):
-        m1 = MetaData()
-        c = testing.db.connect()
-        m1.reflect(bind=c)
-        assert not c.closed
-
-    def test_inspector_conn_closing(self):
-        c = testing.db.connect()
-        inspect(c)
-        assert not c.closed
-
-    @testing.provide_metadata
-    def test_index_reflection(self):
-        m1 = self.metadata
+    def test_index_reflection(self, connection, metadata):
+        m1 = metadata
         t1 = Table(
             "party",
             m1,
@@ -1325,9 +1236,9 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
         )
         sa.Index("idx1", t1.c.id, unique=True)
         sa.Index("idx2", t1.c.name, t1.c.id, unique=False)
-        m1.create_all()
+        m1.create_all(connection)
         m2 = MetaData()
-        t2 = Table("party", m2, autoload_with=testing.db)
+        t2 = Table("party", m2, autoload_with=connection)
 
         assert len(t2.indexes) == 3
         # Make sure indexes are in the order we expect them in
@@ -1345,18 +1256,17 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
         assert set([t2.c.name]) == set(r3.columns)
 
     @testing.requires.comment_reflection
-    @testing.provide_metadata
-    def test_comment_reflection(self):
-        m1 = self.metadata
+    def test_comment_reflection(self, connection, metadata):
+        m1 = metadata
         Table(
             "sometable",
             m1,
             Column("id", sa.Integer, comment="c1 comment"),
             comment="t1 comment",
         )
-        m1.create_all()
+        m1.create_all(connection)
         m2 = MetaData()
-        t2 = Table("sometable", m2, autoload_with=testing.db)
+        t2 = Table("sometable", m2, autoload_with=connection)
 
         eq_(t2.comment, "t1 comment")
         eq_(t2.c.id.comment, "c1 comment")
@@ -1366,18 +1276,17 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
         eq_(t3.c.id.comment, "c1 comment")
 
     @testing.requires.check_constraint_reflection
-    @testing.provide_metadata
-    def test_check_constraint_reflection(self):
-        m1 = self.metadata
+    def test_check_constraint_reflection(self, connection, metadata):
+        m1 = metadata
         Table(
             "x",
             m1,
             Column("q", Integer),
             sa.CheckConstraint("q > 10", name="ck1"),
         )
-        m1.create_all()
+        m1.create_all(connection)
         m2 = MetaData()
-        t2 = Table("x", m2, autoload_with=testing.db)
+        t2 = Table("x", m2, autoload_with=connection)
 
         ck = [
             const
@@ -1388,40 +1297,35 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
         eq_regex(ck.sqltext.text, r"[\(`]*q[\)`]* > 10")
         eq_(ck.name, "ck1")
 
-    @testing.provide_metadata
-    def test_index_reflection_cols_busted(self):
-        t = Table(
-            "x", self.metadata, Column("a", Integer), Column("b", Integer)
-        )
+    def test_index_reflection_cols_busted(self, connection, metadata):
+        t = Table("x", metadata, Column("a", Integer), Column("b", Integer))
         sa.Index("x_ix", t.c.a, t.c.b)
-        self.metadata.create_all()
+        metadata.create_all(connection)
 
         def mock_get_columns(self, connection, table_name, **kw):
             return [{"name": "b", "type": Integer, "primary_key": False}]
 
         with testing.mock.patch.object(
-            testing.db.dialect, "get_columns", mock_get_columns
+            connection.dialect, "get_columns", mock_get_columns
         ):
             m = MetaData()
             with testing.expect_warnings(
                 "index key 'a' was not located in columns"
             ):
-                t = Table("x", m, autoload_with=testing.db)
+                t = Table("x", m, autoload_with=connection)
 
         eq_(list(t.indexes)[0].columns, [t.c.b])
 
     @testing.requires.views
-    @testing.provide_metadata
-    def test_views(self):
-        metadata = self.metadata
+    def test_views(self, connection, metadata):
         users, addresses, dingalings = createTables(metadata)
         try:
-            metadata.create_all()
-            _create_views(metadata.bind, None)
+            metadata.create_all(connection)
+            _create_views(connection, None)
             m2 = MetaData()
-            users_v = Table("users_v", m2, autoload_with=testing.db)
+            users_v = Table("users_v", m2, autoload_with=connection)
             addresses_v = Table(
-                "email_addresses_v", m2, autoload_with=testing.db
+                "email_addresses_v", m2, autoload_with=connection
             )
 
             for c1, c2 in zip(users_v.c, users.c):
@@ -1432,25 +1336,23 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
                 eq_(c1.name, c2.name)
                 self.assert_types_base(c1, c2)
         finally:
-            _drop_views(metadata.bind)
+            _drop_views(connection)
 
     @testing.requires.views
-    @testing.provide_metadata
-    def test_reflect_all_with_views(self):
-        metadata = self.metadata
+    def test_reflect_all_with_views(self, connection, metadata):
         users, addresses, dingalings = createTables(metadata, None)
         try:
-            metadata.create_all()
-            _create_views(metadata.bind, None)
-            m2 = MetaData(testing.db)
+            metadata.create_all(connection)
+            _create_views(connection, None)
+            m2 = MetaData()
 
-            m2.reflect(views=False)
+            m2.reflect(connection, views=False)
             eq_(
                 set(m2.tables), set(["users", "email_addresses", "dingalings"])
             )
 
-            m2 = MetaData(testing.db)
-            m2.reflect(views=True)
+            m2 = MetaData()
+            m2.reflect(connection, views=True)
             eq_(
                 set(m2.tables),
                 set(
@@ -1464,7 +1366,7 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
                 ),
             )
         finally:
-            _drop_views(metadata.bind)
+            _drop_views(connection)
 
 
 class CreateDropTest(fixtures.TablesTest):
@@ -1472,6 +1374,13 @@ class CreateDropTest(fixtures.TablesTest):
 
     run_create_tables = None
 
+    @classmethod
+    def teardown_class(cls):
+        # TablesTest is used here without
+        # run_create_tables, so add an explicit drop of whatever is in
+        # metadata
+        cls._tables_metadata.drop_all(testing.db)
+
     @classmethod
     def define_tables(cls, metadata):
         Table(
@@ -1525,11 +1434,8 @@ class CreateDropTest(fixtures.TablesTest):
             Column("item_name", sa.VARCHAR(50)),
         )
 
-    def teardown(self):
-        self.metadata.drop_all(testing.db)
-
     def test_sorter(self):
-        tables = self.metadata.sorted_tables
+        tables = self.tables_test_metadata.sorted_tables
         table_names = [t.name for t in tables]
         ua = [n for n in table_names if n in ("users", "email_addresses")]
         oi = [n for n in table_names if n in ("orders", "items")]
@@ -1537,39 +1443,41 @@ class CreateDropTest(fixtures.TablesTest):
         eq_(ua, ["users", "email_addresses"])
         eq_(oi, ["orders", "items"])
 
-    def test_checkfirst(self):
-        insp = inspect(testing.db)
+    def test_checkfirst(self, connection):
+        insp = inspect(connection)
         users = self.tables.users
 
         is_false(insp.has_table("users"))
-        users.create(bind=testing.db)
+        users.create(connection)
         is_true(insp.has_table("users"))
-        users.create(bind=testing.db, checkfirst=True)
-        users.drop(bind=testing.db)
-        users.drop(bind=testing.db, checkfirst=True)
+        users.create(connection, checkfirst=True)
+        users.drop(connection)
+        users.drop(connection, checkfirst=True)
         is_false(insp.has_table("users"))
-        users.create(bind=testing.db, checkfirst=True)
-        users.drop(bind=testing.db)
+        users.create(connection, checkfirst=True)
+        users.drop(connection)
 
-    def test_createdrop(self):
-        insp = inspect(testing.db)
-        metadata = self.metadata
-        metadata.create_all(bind=testing.db)
+    def test_createdrop(self, connection):
+        insp = inspect(connection)
+
+        metadata = self.tables_test_metadata
+
+        metadata.create_all(connection)
         is_true(insp.has_table("items"))
         is_true(insp.has_table("email_addresses"))
-        metadata.create_all(bind=testing.db)
+        metadata.create_all(connection)
         is_true(insp.has_table("items"))
 
-        metadata.drop_all(bind=testing.db)
+        metadata.drop_all(connection)
         is_false(insp.has_table("items"))
         is_false(insp.has_table("email_addresses"))
-        metadata.drop_all(bind=testing.db)
+        metadata.drop_all(connection)
         is_false(insp.has_table("items"))
 
-    def test_tablenames(self):
-        metadata = self.metadata
-        metadata.create_all(bind=testing.db)
-        insp = inspect(testing.db)
+    def test_tablenames(self, connection):
+        metadata = self.tables_test_metadata
+        metadata.create_all(bind=connection)
+        insp = inspect(connection)
 
         # ensure all tables we created are in the list.
         is_true(set(insp.get_table_names()).issuperset(metadata.tables))
@@ -1597,12 +1505,11 @@ class SchemaManipulationTest(fixtures.TestBase):
         assert addresses.constraints == set([addresses.primary_key, fk])
 
 
-class UnicodeReflectionTest(fixtures.TestBase):
+class UnicodeReflectionTest(fixtures.TablesTest):
     __backend__ = True
 
     @classmethod
-    def setup_class(cls):
-        cls.metadata = metadata = MetaData()
+    def define_tables(cls, metadata):
 
         no_multibyte_period = set([("plain", "col_plain", "ix_plain")])
         no_has_table = [
@@ -1671,32 +1578,24 @@ class UnicodeReflectionTest(fixtures.TestBase):
             )
             schema.Index(ixname, t.c[cname])
 
-        metadata.create_all(testing.db)
         cls.names = names
 
-    @classmethod
-    def teardown_class(cls):
-        cls.metadata.drop_all(testing.db, checkfirst=False)
-
     @testing.requires.unicode_connections
-    def test_has_table(self):
-        insp = inspect(testing.db)
+    def test_has_table(self, connection):
+        insp = inspect(connection)
         for tname, cname, ixname in self.names:
             assert insp.has_table(tname), "Can't detect name %s" % tname
 
     @testing.requires.unicode_connections
-    def test_basic(self):
+    def test_basic(self, connection):
         # the 'convert_unicode' should not get in the way of the
         # reflection process.  reflect_table for oracle, postgresql
         # (others?) expect non-unicode strings in result sets/bind
         # params
 
-        bind = testing.db
         names = set([rec[0] for rec in self.names])
 
-        reflected = set(inspect(bind).get_table_names())
-
-        # Jython 2.5 on Java 5 lacks unicodedata.normalize
+        reflected = set(inspect(connection).get_table_names())
 
         if not names.issubset(reflected) and hasattr(unicodedata, "normalize"):
 
@@ -1711,14 +1610,14 @@ class UnicodeReflectionTest(fixtures.TestBase):
             # Yep.  But still ensure that bulk reflection and
             # create/drop work with either normalization.
 
-        r = MetaData(bind)
-        r.reflect()
-        r.drop_all(checkfirst=False)
-        r.create_all(checkfirst=False)
+        r = MetaData()
+        r.reflect(connection)
+        r.drop_all(connection, checkfirst=False)
+        r.create_all(connection, checkfirst=False)
 
     @testing.requires.unicode_connections
-    def test_get_names(self):
-        inspector = inspect(testing.db)
+    def test_get_names(self, connection):
+        inspector = inspect(connection)
         names = dict(
             (tname, (cname, ixname)) for tname, cname, ixname in self.names
         )
@@ -1760,8 +1659,7 @@ class SchemaTest(fixtures.TestBase):
     @testing.requires.cross_schema_fk_reflection
     @testing.requires.implicit_default_schema
     @testing.provide_metadata
-    def test_blank_schema_arg(self):
-        metadata = self.metadata
+    def test_blank_schema_arg(self, connection, metadata):
 
         Table(
             "some_table",
@@ -1778,37 +1676,27 @@ class SchemaTest(fixtures.TestBase):
             schema=None,
             test_needs_fk=True,
         )
-        metadata.create_all()
-        with testing.db.connect() as conn:
-            meta2 = MetaData(conn, schema=testing.config.test_schema)
-            meta2.reflect()
+        metadata.create_all(connection)
+        meta2 = MetaData(schema=testing.config.test_schema)
+        meta2.reflect(connection)
 
-            eq_(
-                set(meta2.tables),
-                set(
-                    [
-                        "some_other_table",
-                        "%s.some_table" % testing.config.test_schema,
-                    ]
-                ),
-            )
+        eq_(
+            set(meta2.tables),
+            set(
+                [
+                    "some_other_table",
+                    "%s.some_table" % testing.config.test_schema,
+                ]
+            ),
+        )
 
     @testing.requires.schemas
-    def test_explicit_default_schema(self):
-        engine = testing.db
-        engine.connect().close()
-
-        if testing.against("sqlite"):
-            # Works for CREATE TABLE main.foo, SELECT FROM main.foo, etc.,
-            # but fails on:
-            #   FOREIGN KEY(col2) REFERENCES main.table1 (col1)
-            schema = "main"
-        else:
-            schema = engine.dialect.default_schema_name
+    def test_explicit_default_schema(self, connection, metadata):
+
+        schema = connection.dialect.default_schema_name
 
         assert bool(schema)
 
-        metadata = MetaData()
         Table(
             "table1",
             metadata,
@@ -1826,54 +1714,41 @@ class SchemaTest(fixtures.TestBase):
             test_needs_fk=True,
             schema=schema,
         )
-        try:
-            metadata.create_all(engine)
-            metadata.create_all(engine, checkfirst=True)
-            assert len(metadata.tables) == 2
-            metadata.clear()
-
-            Table("table1", metadata, autoload_with=engine, schema=schema)
-            Table("table2", metadata, autoload_with=engine, schema=schema)
-            assert len(metadata.tables) == 2
-        finally:
-            metadata.drop_all(engine)
+        metadata.create_all(connection)
+        metadata.create_all(connection, checkfirst=True)
+        eq_(len(metadata.tables), 2)
+
+        m1 = MetaData()
+        Table("table1", m1, autoload_with=connection, schema=schema)
+        Table("table2", m1, autoload_with=connection, schema=schema)
+        eq_(len(m1.tables), 2)
 
     @testing.requires.schemas
-    @testing.provide_metadata
-    def test_schema_translation(self):
+    def test_schema_translation(self, connection, metadata):
         Table(
             "foob",
-            self.metadata,
+            metadata,
             Column("q", Integer),
             schema=config.test_schema,
         )
-        self.metadata.create_all()
+        metadata.create_all(connection)
 
         m = MetaData()
         map_ = {"foob": config.test_schema}
-        with config.db.connect().execution_options(
-            schema_translate_map=map_
-        ) as conn:
-            t = Table("foob", m, schema="foob", autoload_with=conn)
-            eq_(t.schema, "foob")
-            eq_(t.c.keys(), ["q"])
+
+        c2 = connection.execution_options(schema_translate_map=map_)
+        t = Table("foob", m, schema="foob", autoload_with=c2)
+        eq_(t.schema, "foob")
+        eq_(t.c.keys(), ["q"])
 
     @testing.requires.schemas
     @testing.fails_on("sybase", "FIXME: unknown")
-    def test_explicit_default_schema_metadata(self):
-        engine = testing.db
-
-        if testing.against("sqlite"):
-            # Works for CREATE TABLE main.foo, SELECT FROM main.foo, etc.,
-            # but fails on:
-            #   FOREIGN KEY(col2) REFERENCES main.table1 (col1)
-            schema = "main"
-        else:
-            schema = engine.dialect.default_schema_name
+    def test_explicit_default_schema_metadata(self, connection, metadata):
+        schema = connection.dialect.default_schema_name
 
-        assert bool(schema)
+        is_true(schema)
 
-        metadata = MetaData(schema=schema)
+        metadata.schema = schema
         Table(
             "table1",
             metadata,
@@ -1887,26 +1762,21 @@ class SchemaTest(fixtures.TestBase):
             Column("col2", sa.Integer, sa.ForeignKey("table1.col1")),
             test_needs_fk=True,
         )
-        try:
-            metadata.create_all(engine)
-            metadata.create_all(engine, checkfirst=True)
-            assert len(metadata.tables) == 2
-            metadata.clear()
-
-            Table("table1", metadata, autoload_with=engine)
-            Table("table2", metadata, autoload_with=engine)
-            assert len(metadata.tables) == 2
-        finally:
-            metadata.drop_all(engine)
+        metadata.create_all(connection)
+        metadata.create_all(connection, checkfirst=True)
+
+        m1 = MetaData(schema=schema)
+
+        Table("table1", m1, autoload_with=connection)
+        Table("table2", m1, autoload_with=connection)
+        eq_(len(m1.tables), 2)
 
     @testing.requires.schemas
-    @testing.provide_metadata
-    def test_metadata_reflect_schema(self):
-        metadata = self.metadata
+    def test_metadata_reflect_schema(self, connection, metadata):
         createTables(metadata, testing.config.test_schema)
-        metadata.create_all()
-        m2 = MetaData(schema=testing.config.test_schema, bind=testing.db)
-        m2.reflect()
+        metadata.create_all(connection)
+        m2 = MetaData(schema=testing.config.test_schema)
+        m2.reflect(connection)
         eq_(
             set(m2.tables),
             set(
@@ -1921,24 +1791,23 @@ class SchemaTest(fixtures.TestBase):
     @testing.requires.schemas
     @testing.requires.cross_schema_fk_reflection
     @testing.requires.implicit_default_schema
-    @testing.provide_metadata
-    def test_reflect_all_schemas_default_overlap(self):
-        Table("t", self.metadata, Column("id", Integer, primary_key=True))
+    def test_reflect_all_schemas_default_overlap(self, connection, metadata):
+        Table("t", metadata, Column("id", Integer, primary_key=True))
 
         Table(
             "t",
-            self.metadata,
+            metadata,
             Column("id1", sa.ForeignKey("t.id")),
             schema=testing.config.test_schema,
         )
 
-        self.metadata.create_all()
+        metadata.create_all(connection)
         m2 = MetaData()
-        m2.reflect(testing.db, schema=testing.config.test_schema)
+        m2.reflect(connection, schema=testing.config.test_schema)
 
         m3 = MetaData()
-        m3.reflect(testing.db)
-        m3.reflect(testing.db, schema=testing.config.test_schema)
+        m3.reflect(connection)
+        m3.reflect(connection, schema=testing.config.test_schema)
 
         eq_(
             set((t.name, t.schema) for t in m2.tables.values()),
@@ -2015,30 +1884,28 @@ def createIndexes(con, schema=None):
 
 
 @testing.requires.views
-def _create_views(con, schema=None):
-    with testing.db.begin() as conn:
-        for table_name in ("users", "email_addresses"):
-            fullname = table_name
-            if schema:
-                fullname = "%s.%s" % (schema, table_name)
-            view_name = fullname + "_v"
-            query = "CREATE VIEW %s AS SELECT * FROM %s" % (
-                view_name,
-                fullname,
-            )
-            conn.execute(sa.sql.text(query))
+def _create_views(conn, schema=None):
+    for table_name in ("users", "email_addresses"):
+        fullname = table_name
+        if schema:
+            fullname = "%s.%s" % (schema, table_name)
+        view_name = fullname + "_v"
+        query = "CREATE VIEW %s AS SELECT * FROM %s" % (
+            view_name,
+            fullname,
+        )
+        conn.execute(sa.sql.text(query))
 
 
 @testing.requires.views
-def _drop_views(con, schema=None):
-    with testing.db.begin() as conn:
-        for table_name in ("email_addresses", "users"):
-            fullname = table_name
-            if schema:
-                fullname = "%s.%s" % (schema, table_name)
-            view_name = fullname + "_v"
-            query = "DROP VIEW %s" % view_name
-            conn.execute(sa.sql.text(query))
+def _drop_views(conn, schema=None):
+    for table_name in ("email_addresses", "users"):
+        fullname = table_name
+        if schema:
+            fullname = "%s.%s" % (schema, table_name)
+        view_name = fullname + "_v"
+        query = "DROP VIEW %s" % view_name
+        conn.execute(sa.sql.text(query))
 
 
 class ReverseCasingReflectTest(fixtures.TestBase, AssertsCompiledSQL):
@@ -2064,9 +1931,9 @@ class ReverseCasingReflectTest(fixtures.TestBase, AssertsCompiledSQL):
             conn.exec_driver_sql("drop table weird_casing")
 
     @testing.requires.denormalized_names
-    def test_direct_quoting(self):
+    def test_direct_quoting(self, connection):
         m = MetaData()
-        t = Table("weird_casing", m, autoload_with=testing.db)
+        t = Table("weird_casing", m, autoload_with=connection)
         self.assert_compile(
             t.select(),
             "SELECT weird_casing.col1, "
@@ -2097,13 +1964,13 @@ class CaseSensitiveTest(fixtures.TablesTest):
         )
 
     @testing.fails_if(testing.requires._has_mysql_on_windows)
-    def test_table_names(self):
-        x = inspect(testing.db).get_table_names()
+    def test_table_names(self, connection):
+        x = inspect(connection).get_table_names()
         assert set(["SomeTable", "SomeOtherTable"]).issubset(x)
 
-    def test_reflect_exact_name(self):
+    def test_reflect_exact_name(self, connection):
         m = MetaData()
-        t1 = Table("SomeTable", m, autoload_with=testing.db)
+        t1 = Table("SomeTable", m, autoload_with=connection)
         eq_(t1.name, "SomeTable")
         assert t1.c.x is not None
 
@@ -2111,47 +1978,43 @@ class CaseSensitiveTest(fixtures.TablesTest):
         lambda: testing.against(("mysql", "<", (5, 5)))
         and not testing.requires._has_mysql_fully_case_sensitive()
     )
-    def test_reflect_via_fk(self):
+    def test_reflect_via_fk(self, connection):
         m = MetaData()
-        t2 = Table("SomeOtherTable", m, autoload_with=testing.db)
+        t2 = Table("SomeOtherTable", m, autoload_with=connection)
         eq_(t2.name, "SomeOtherTable")
         assert "SomeTable" in m.tables
 
     @testing.fails_if(testing.requires._has_mysql_fully_case_sensitive)
     @testing.fails_on_everything_except("sqlite", "mysql", "mssql")
-    def test_reflect_case_insensitive(self):
+    def test_reflect_case_insensitive(self, connection):
         m = MetaData()
-        t2 = Table("sOmEtAbLe", m, autoload_with=testing.db)
+        t2 = Table("sOmEtAbLe", m, autoload_with=connection)
         eq_(t2.name, "sOmEtAbLe")
 
 
-class ColumnEventsTest(fixtures.RemovesEvents, fixtures.TestBase):
+class ColumnEventsTest(fixtures.RemovesEvents, fixtures.TablesTest):
     __backend__ = True
 
     @classmethod
-    def setup_class(cls):
-        cls.metadata = MetaData()
-        cls.to_reflect = Table(
+    def define_tables(cls, metadata):
+        to_reflect = Table(
             "to_reflect",
-            cls.metadata,
+            metadata,
             Column("x", sa.Integer, primary_key=True, autoincrement=False),
             Column("y", sa.Integer),
             test_needs_fk=True,
         )
-        cls.related = Table(
+        Table(
             "related",
-            cls.metadata,
+            metadata,
             Column("q", sa.Integer, sa.ForeignKey("to_reflect.x")),
             test_needs_fk=True,
         )
-        sa.Index("some_index", cls.to_reflect.c.y)
-        cls.metadata.create_all(testing.db)
+        sa.Index("some_index", to_reflect.c.y)
 
-    @classmethod
-    def teardown_class(cls):
-        cls.metadata.drop_all(testing.db)
-
-    def _do_test(self, col, update, assert_, tablename="to_reflect"):
+    def _do_test(
+        self, connection, col, update, assert_, tablename="to_reflect"
+    ):
         # load the actual Table class, not the test
         # wrapper
         from sqlalchemy.schema import Table
@@ -2165,31 +2028,31 @@ class ColumnEventsTest(fixtures.RemovesEvents, fixtures.TestBase):
         t = Table(
             tablename,
             m,
-            autoload_with=testing.db,
+            autoload_with=connection,
             listeners=[("column_reflect", column_reflect)],
         )
         assert_(t)
 
         m = MetaData()
         self.event_listen(Table, "column_reflect", column_reflect)
-        t2 = Table(tablename, m, autoload_with=testing.db)
+        t2 = Table(tablename, m, autoload_with=connection)
         assert_(t2)
 
-    def test_override_key(self):
+    def test_override_key(self, connection):
         def assertions(table):
             eq_(table.c.YXZ.name, "x")
             eq_(set(table.primary_key), set([table.c.YXZ]))
 
-        self._do_test("x", {"key": "YXZ"}, assertions)
+        self._do_test(connection, "x", {"key": "YXZ"}, assertions)
 
-    def test_override_index(self):
+    def test_override_index(self, connection):
         def assertions(table):
             idx = list(table.indexes)[0]
             eq_(idx.columns, [table.c.YXZ])
 
-        self._do_test("y", {"key": "YXZ"}, assertions)
+        self._do_test(connection, "y", {"key": "YXZ"}, assertions)
 
-    def test_override_key_fk(self):
+    def test_override_key_fk(self, connection):
         m = MetaData()
 
         def column_reflect(insp, table, column_info):
@@ -2202,48 +2065,51 @@ class ColumnEventsTest(fixtures.RemovesEvents, fixtures.TestBase):
         to_reflect = Table(
             "to_reflect",
             m,
-            autoload_with=testing.db,
+            autoload_with=connection,
             listeners=[("column_reflect", column_reflect)],
         )
         related = Table(
             "related",
             m,
-            autoload_with=testing.db,
+            autoload_with=connection,
             listeners=[("column_reflect", column_reflect)],
         )
 
         assert related.c.qyz.references(to_reflect.c.xyz)
 
-    def test_override_type(self):
+    def test_override_type(self, connection):
         def assert_(table):
             assert isinstance(table.c.x.type, sa.String)
 
-        self._do_test("x", {"type": sa.String}, assert_)
+        self._do_test(connection, "x", {"type": sa.String}, assert_)
 
-    def test_override_info(self):
+    def test_override_info(self, connection):
         self._do_test(
+            connection,
             "x",
             {"info": {"a": "b"}},
             lambda table: eq_(table.c.x.info, {"a": "b"}),
         )
 
-    def test_override_server_default_fetchedvalue(self):
+    def test_override_server_default_fetchedvalue(self, connection):
         my_default = FetchedValue()
         self._do_test(
+            connection,
             "x",
             {"default": my_default},
             lambda table: eq_(table.c.x.server_default, my_default),
         )
 
-    def test_override_server_default_default_clause(self):
+    def test_override_server_default_default_clause(self, connection):
         my_default = DefaultClause("1")
         self._do_test(
+            connection,
             "x",
             {"default": my_default},
             lambda table: eq_(table.c.x.server_default, my_default),
         )
 
-    def test_override_server_default_plain_text(self):
+    def test_override_server_default_plain_text(self, connection):
         my_default = "1"
 
         def assert_text_of_one(table):
@@ -2254,9 +2120,11 @@ class ColumnEventsTest(fixtures.RemovesEvents, fixtures.TestBase):
             )
             eq_(str(table.c.x.server_default.arg), "1")
 
-        self._do_test("x", {"default": my_default}, assert_text_of_one)
+        self._do_test(
+            connection, "x", {"default": my_default}, assert_text_of_one
+        )
 
-    def test_override_server_default_textclause(self):
+    def test_override_server_default_textclause(self, connection):
         my_default = sa.text("1")
 
         def assert_text_of_one(table):
@@ -2267,9 +2135,11 @@ class ColumnEventsTest(fixtures.RemovesEvents, fixtures.TestBase):
             )
             eq_(str(table.c.x.server_default.arg), "1")
 
-        self._do_test("x", {"default": my_default}, assert_text_of_one)
+        self._do_test(
+            connection, "x", {"default": my_default}, assert_text_of_one
+        )
 
-    def test_listen_metadata_obj(self):
+    def test_listen_metadata_obj(self, connection):
         m1 = MetaData()
 
         m2 = MetaData()
@@ -2280,13 +2150,13 @@ class ColumnEventsTest(fixtures.RemovesEvents, fixtures.TestBase):
         def go(insp, table, info):
             canary.append(info["name"])
 
-        Table("related", m1, autoload_with=testing.db)
+        Table("related", m1, autoload_with=connection)
 
-        Table("related", m2, autoload_with=testing.db)
+        Table("related", m2, autoload_with=connection)
 
         eq_(canary, ["q", "x", "y"])
 
-    def test_listen_metadata_cls(self):
+    def test_listen_metadata_cls(self, connection):
         m1 = MetaData()
 
         m2 = MetaData()
@@ -2298,9 +2168,9 @@ class ColumnEventsTest(fixtures.RemovesEvents, fixtures.TestBase):
 
         self.event_listen(MetaData, "column_reflect", go)
 
-        Table("related", m1, autoload_with=testing.db)
+        Table("related", m1, autoload_with=connection)
 
-        Table("related", m2, autoload_with=testing.db)
+        Table("related", m2, autoload_with=connection)
 
         eq_(canary, ["q", "x", "y", "q", "x", "y"])
 
index 504025d6f81477eb3620703a4597c7e06e598a67..d7fcbf9e8e578135321bc88bc4819630043563e8 100644 (file)
@@ -1,6 +1,5 @@
 from sqlalchemy import ForeignKey
 from sqlalchemy import Integer
-from sqlalchemy import MetaData
 from sqlalchemy import String
 from sqlalchemy import testing
 from sqlalchemy.ext.declarative import DeferredReflection
@@ -26,7 +25,7 @@ class DeclarativeReflectionBase(fixtures.TablesTest):
     def setup(self):
         global Base, registry
 
-        registry = decl.registry(metadata=MetaData(bind=testing.db))
+        registry = decl.registry()
         Base = registry.generate_base()
 
     def teardown(self):
@@ -102,7 +101,7 @@ class DeferredReflectionTest(DeferredReflectBase):
         u1 = User(
             name="u1", addresses=[Address(email="one"), Address(email="two")]
         )
-        sess = create_session()
+        sess = create_session(testing.db)
         sess.add(u1)
         sess.flush()
         sess.expunge_all()
@@ -192,7 +191,7 @@ class DeferredReflectionTest(DeferredReflectBase):
                 return {"primary_key": cls.__table__.c.id}
 
         DeferredReflection.prepare(testing.db)
-        sess = Session()
+        sess = Session(testing.db)
         sess.add_all(
             [User(name="G"), User(name="Q"), User(name="A"), User(name="C")]
         )
@@ -256,7 +255,7 @@ class DeferredSecondaryReflectionTest(DeferredReflectBase):
 
         u1 = User(name="u1", items=[Item(name="i1"), Item(name="i2")])
 
-        sess = Session()
+        sess = Session(testing.db)
         sess.add(u1)
         sess.commit()
 
index df27c8d270f6593edfdeb7b20b06d7ed6d287e27..026b3cd03e400517c2503e9fcf4c75fc3a24cb60 100644 (file)
@@ -201,7 +201,7 @@ class _CollectionOperations(fixtures.TestBase):
     def setup(self):
         collection_class = self.collection_class
 
-        metadata = MetaData(testing.db)
+        metadata = MetaData()
 
         parents_table = Table(
             "Parent",
@@ -254,14 +254,14 @@ class _CollectionOperations(fixtures.TestBase):
         )
         mapper(Child, children_table)
 
-        metadata.create_all()
+        metadata.create_all(testing.db)
 
         self.metadata = metadata
-        self.session = create_session()
+        self.session = create_session(testing.db)
         self.Parent, self.Child = Parent, Child
 
     def teardown(self):
-        self.metadata.drop_all()
+        self.metadata.drop_all(testing.db)
 
     def roundtrip(self, obj):
         if obj not in self.session:
@@ -886,7 +886,7 @@ class CustomObjectTest(_CollectionOperations):
 
 class ProxyFactoryTest(ListTest):
     def setup(self):
-        metadata = MetaData(testing.db)
+        metadata = MetaData()
 
         parents_table = Table(
             "Parent",
@@ -940,10 +940,10 @@ class ProxyFactoryTest(ListTest):
         )
         mapper(Child, children_table)
 
-        metadata.create_all()
+        metadata.create_all(testing.db)
 
         self.metadata = metadata
-        self.session = create_session()
+        self.session = create_session(testing.db)
         self.Parent, self.Child = Parent, Child
 
     def test_sequence_ops(self):
@@ -1003,8 +1003,8 @@ class ScalarTest(fixtures.TestBase):
         )
         mapper(Child, children_table)
 
-        metadata.create_all()
-        session = create_session()
+        metadata.create_all(testing.db)
+        session = create_session(testing.db)
 
         def roundtrip(obj):
             if obj not in session:
@@ -1158,7 +1158,7 @@ class ScalarTest(fixtures.TestBase):
 
 class LazyLoadTest(fixtures.TestBase):
     def setup(self):
-        metadata = MetaData(testing.db)
+        metadata = MetaData()
 
         parents_table = Table(
             "Parent",
@@ -1190,15 +1190,15 @@ class LazyLoadTest(fixtures.TestBase):
                 self.name = name
 
         mapper(Child, children_table)
-        metadata.create_all()
+        metadata.create_all(testing.db)
 
         self.metadata = metadata
-        self.session = create_session()
+        self.session = create_session(testing.db)
         self.Parent, self.Child = Parent, Child
         self.table = parents_table
 
     def teardown(self):
-        self.metadata.drop_all()
+        self.metadata.drop_all(testing.db)
 
     def roundtrip(self, obj):
         self.session.add(obj)
index da0e7c1338ba1c1b39c3b292993fa0b6c47ebfab..bddb42b03d858c19a39d218ceabe1ee310398985 100644 (file)
@@ -30,7 +30,7 @@ class AutomapTest(fixtures.MappedTest):
         FixtureTest.define_tables(metadata)
 
     def test_relationship_o2m_default(self):
-        Base = automap_base(metadata=self.metadata)
+        Base = automap_base(metadata=self.tables_test_metadata)
         Base.prepare()
 
         User = Base.classes.users
@@ -41,7 +41,7 @@ class AutomapTest(fixtures.MappedTest):
         assert a1.users is u1
 
     def test_relationship_explicit_override_o2m(self):
-        Base = automap_base(metadata=self.metadata)
+        Base = automap_base(metadata=self.tables_test_metadata)
         prop = relationship("addresses", collection_class=set)
 
         class User(Base):
@@ -58,7 +58,7 @@ class AutomapTest(fixtures.MappedTest):
         assert a1.user is u1
 
     def test_exception_prepare_not_called(self):
-        Base = automap_base(metadata=self.metadata)
+        Base = automap_base(metadata=self.tables_test_metadata)
 
         class User(Base):
             __tablename__ = "users"
@@ -75,7 +75,7 @@ class AutomapTest(fixtures.MappedTest):
         )
 
     def test_relationship_explicit_override_m2o(self):
-        Base = automap_base(metadata=self.metadata)
+        Base = automap_base(metadata=self.tables_test_metadata)
 
         prop = relationship("users")
 
@@ -93,7 +93,7 @@ class AutomapTest(fixtures.MappedTest):
         assert a1.users is u1
 
     def test_relationship_self_referential(self):
-        Base = automap_base(metadata=self.metadata)
+        Base = automap_base(metadata=self.tables_test_metadata)
         Base.prepare()
 
         Node = Base.classes.nodes
@@ -110,7 +110,7 @@ class AutomapTest(fixtures.MappedTest):
         This test verifies that prepare can accept an optional schema
         argument and pass it to reflect.
         """
-        Base = automap_base(metadata=self.metadata)
+        Base = automap_base(metadata=self.tables_test_metadata)
         engine_mock = Mock()
         with patch.object(Base.metadata, "reflect") as reflect_mock:
             Base.prepare(autoload_with=engine_mock, schema="some_schema")
@@ -128,7 +128,7 @@ class AutomapTest(fixtures.MappedTest):
         This test verifies that prepare passes a default None if no schema is
         provided.
         """
-        Base = automap_base(metadata=self.metadata)
+        Base = automap_base(metadata=self.tables_test_metadata)
         engine_mock = Mock()
         with patch.object(Base.metadata, "reflect") as reflect_mock:
             Base.prepare(autoload_with=engine_mock)
@@ -140,7 +140,7 @@ class AutomapTest(fixtures.MappedTest):
             )
 
     def test_naming_schemes(self):
-        Base = automap_base(metadata=self.metadata)
+        Base = automap_base(metadata=self.tables_test_metadata)
 
         def classname_for_table(base, tablename, table):
             return str("cls_" + tablename)
@@ -170,7 +170,7 @@ class AutomapTest(fixtures.MappedTest):
         assert a1.scalar_cls_users is u1
 
     def test_relationship_m2m(self):
-        Base = automap_base(metadata=self.metadata)
+        Base = automap_base(metadata=self.tables_test_metadata)
 
         Base.prepare()
 
@@ -182,7 +182,7 @@ class AutomapTest(fixtures.MappedTest):
         assert o1 in i1.orders_collection
 
     def test_relationship_explicit_override_forwards_m2m(self):
-        Base = automap_base(metadata=self.metadata)
+        Base = automap_base(metadata=self.tables_test_metadata)
 
         class Order(Base):
             __tablename__ = "orders"
@@ -205,7 +205,7 @@ class AutomapTest(fixtures.MappedTest):
         assert o1 in i1.order_collection
 
     def test_relationship_pass_params(self):
-        Base = automap_base(metadata=self.metadata)
+        Base = automap_base(metadata=self.tables_test_metadata)
 
         mock = Mock()
 
@@ -269,7 +269,7 @@ class CascadeTest(fixtures.MappedTest):
         )
 
     def test_o2m_relationship_cascade(self):
-        Base = automap_base(metadata=self.metadata)
+        Base = automap_base(metadata=self.tables_test_metadata)
         Base.prepare()
 
         configure_mappers()
index b209de36d70320ec6f7858d9c07645c88bb6ea39..b6976299b367f80bd94dfe3d928d5eae94042192 100644 (file)
@@ -16,7 +16,7 @@ class AutomapTest(fixtures.MappedTest):
         FixtureTest.define_tables(metadata)
 
     def test_reflect_true(self):
-        Base = automap_base(metadata=self.metadata)
+        Base = automap_base(metadata=self.tables_test_metadata)
         engine_mock = mock.Mock()
         with mock.patch.object(Base.metadata, "reflect") as reflect_mock:
             with testing.expect_deprecated(
index e46c65ff02f4eed5047678d5e26409d2e8b8acae..038bdd83e1b499b6dee45d98bded965612a57cd6 100644 (file)
@@ -47,7 +47,7 @@ class ShardTest(object):
 
         db1, db2, db3, db4 = self._dbs = self._init_dbs()
 
-        meta = self.metadata = MetaData()
+        meta = self.tables_test_metadata = MetaData()
         ids = Table("ids", meta, Column("nextid", Integer, nullable=False))
 
         def id_generator(ctx):
@@ -786,7 +786,7 @@ class MultipleDialectShardTest(ShardTest, fixtures.TestBase):
             os.remove("shard%d_%s.db" % (i, provision.FOLLOWER_IDENT))
 
         with self.postgresql_engine.begin() as conn:
-            self.metadata.drop_all(conn)
+            self.tables_test_metadata.drop_all(conn)
             for i in [2, 4]:
                 conn.exec_driver_sql("DROP SCHEMA shard%s CASCADE" % (i,))
 
@@ -898,7 +898,7 @@ class LazyLoadIdentityKeyTest(fixtures.DeclarativeMappedTest):
         )
 
         for db in (db1, db2):
-            self.metadata.create_all(db)
+            self.tables_test_metadata.create_all(db)
 
         self.dbs = [db1, db2]
 
index a1a6c69188d59550b5412a901499689d769f73f0..f23d6cb576307e117a72c3c94c16e63ee894d4bc 100644 (file)
@@ -4,11 +4,11 @@ from sqlalchemy import MetaData
 from sqlalchemy import String
 from sqlalchemy import testing
 from sqlalchemy.ext.orderinglist import ordering_list
-from sqlalchemy.orm import create_session
 from sqlalchemy.orm import mapper
 from sqlalchemy.orm import relationship
 from sqlalchemy.testing import eq_
 from sqlalchemy.testing import fixtures
+from sqlalchemy.testing.fixtures import create_session
 from sqlalchemy.testing.schema import Column
 from sqlalchemy.testing.schema import Table
 from sqlalchemy.testing.util import picklers
@@ -64,7 +64,7 @@ class OrderingListTest(fixtures.TestBase):
         global metadata, slides_table, bullets_table, Slide, Bullet
         slides_table, bullets_table = None, None
         Slide, Bullet = None, None
-        metadata = MetaData(testing.db)
+        metadata = MetaData()
 
     def _setup(self, test_collection_class):
         """Build a relationship situation using the given
@@ -120,10 +120,10 @@ class OrderingListTest(fixtures.TestBase):
         )
         mapper(Bullet, bullets_table)
 
-        metadata.create_all()
+        metadata.create_all(testing.db)
 
     def teardown(self):
-        metadata.drop_all()
+        metadata.drop_all(testing.db)
 
     def test_append_no_reorder(self):
         self._setup(
index fd00717f42fe0db95177bba6f651f01b0f35123b..a1bbce53d97a5985d018448799bbd9cea423e78f 100644 (file)
@@ -19,7 +19,6 @@ from sqlalchemy.orm import close_all_sessions
 from sqlalchemy.orm import column_property
 from sqlalchemy.orm import composite
 from sqlalchemy.orm import configure_mappers
-from sqlalchemy.orm import create_session
 from sqlalchemy.orm import decl_base
 from sqlalchemy.orm import declarative_base
 from sqlalchemy.orm import declared_attr
@@ -44,6 +43,7 @@ from sqlalchemy.testing import expect_warnings
 from sqlalchemy.testing import fixtures
 from sqlalchemy.testing import is_
 from sqlalchemy.testing import mock
+from sqlalchemy.testing.fixtures import create_session
 from sqlalchemy.testing.schema import Column
 from sqlalchemy.testing.schema import Table
 from sqlalchemy.util import with_metaclass
@@ -2100,7 +2100,7 @@ class DeclarativeTest(DeclarativeTestBase):
         m = MyObj(id="someid", data="somedata")
         sess.add(m)
         sess.flush()
-        eq_(t1.select().execute().fetchall(), [("someid", "somedata")])
+        eq_(sess.execute(t1.select()).fetchall(), [("someid", "somedata")])
 
     def test_synonym_for(self):
         class User(Base, fixtures.ComparableEntity):
index bc36ee9624fd68b60544f0c0ffd2b84df3227af0..ca2fb83ca9e2e1f4b42156210625f7c321929904 100644 (file)
@@ -12,7 +12,6 @@ from sqlalchemy.orm import clear_mappers
 from sqlalchemy.orm import close_all_sessions
 from sqlalchemy.orm import column_property
 from sqlalchemy.orm import configure_mappers
-from sqlalchemy.orm import create_session
 from sqlalchemy.orm import declarative_base
 from sqlalchemy.orm import declared_attr
 from sqlalchemy.orm import deferred
@@ -29,6 +28,7 @@ from sqlalchemy.testing import expect_warnings
 from sqlalchemy.testing import fixtures
 from sqlalchemy.testing import is_
 from sqlalchemy.testing import mock
+from sqlalchemy.testing.fixtures import create_session
 from sqlalchemy.testing.schema import Column
 from sqlalchemy.testing.schema import Table
 from sqlalchemy.testing.util import gc_collect
@@ -42,13 +42,14 @@ class DeclarativeTestBase(fixtures.TestBase, testing.AssertsExecutionResults):
     def setup(self):
         global Base, mapper_registry
 
-        mapper_registry = registry(metadata=MetaData(bind=testing.db))
+        mapper_registry = registry(metadata=MetaData())
         Base = mapper_registry.generate_base()
 
     def teardown(self):
         close_all_sessions()
         clear_mappers()
-        Base.metadata.drop_all()
+        with testing.db.begin() as conn:
+            Base.metadata.drop_all(conn)
 
 
 class DeclarativeMixinTest(DeclarativeTestBase):
@@ -459,7 +460,7 @@ class DeclarativeMixinTest(DeclarativeTestBase):
         )
         # do a brief round trip on this
         Base.metadata.create_all(testing.db)
-        session = Session()
+        session = create_session()
         o1, o2 = Other(), Other()
         session.add_all(
             [Engineer(target=o1), Manager(target=o2), Manager(target=o1)]
index 4253b4bee6cc73f1ea41f6cb8daf1b958372e79a..7a70810a158a253ecd44e48b39c92a6d4935f124 100644 (file)
@@ -575,6 +575,10 @@ class GeometryFixtureBase(fixtures.DeclarativeMappedTest):
             if "subclasses" in value:
                 self._fixture_from_geometry(value["subclasses"], klass)
 
-        if is_base and self.metadata.tables and self.run_create_tables:
-            self.tables.update(self.metadata.tables)
-            self.metadata.create_all(config.db)
+        if (
+            is_base
+            and self.tables_test_metadata.tables
+            and self.run_create_tables
+        ):
+            self.tables.update(self.tables_test_metadata.tables)
+            self.tables_test_metadata.create_all(config.db)
index 64f85b3351e72ec3b3b76a1993559562013abfa9..09ceaf82728911740cdaf317d463fdea17341e4d 100644 (file)
@@ -55,9 +55,7 @@ class BindIntegrationTest(_fixtures.FixtureTest):
             },
         )
 
-        sess = Session(
-            binds={User: self.metadata.bind, Address: self.metadata.bind}
-        )
+        sess = Session(binds={User: testing.db, Address: testing.db})
 
         u1 = User(id=1, name="ed")
         sess.add(u1)
@@ -114,8 +112,8 @@ class BindIntegrationTest(_fixtures.FixtureTest):
 
         Session = sessionmaker(
             binds={
-                users_unbound: self.metadata.bind,
-                addresses_unbound: self.metadata.bind,
+                users_unbound: testing.db,
+                addresses_unbound: testing.db,
             }
         )
         sess = Session()
@@ -495,7 +493,7 @@ class SessionBindTest(fixtures.MappedTest):
     def test_session_bind(self):
         Foo = self.classes.Foo
 
-        engine = self.metadata.bind
+        engine = testing.db
 
         for bind in (engine, engine.connect()):
             try:
index c6a1226d4bb4f7aa00d41f0e46d0b2dbc9fa4589..2404bf1289ae475bf30220e558d1497f31a89531 100644 (file)
@@ -5,7 +5,6 @@ from sqlalchemy import Integer
 from sqlalchemy import MetaData
 from sqlalchemy import String
 from sqlalchemy import Table
-from sqlalchemy import testing
 from sqlalchemy import Unicode
 from sqlalchemy.orm import backref
 from sqlalchemy.orm import clear_mappers
@@ -24,7 +23,7 @@ class CompileTest(fixtures.ORMTest):
         clear_mappers()
 
     def test_with_polymorphic(self):
-        metadata = MetaData(testing.db)
+        metadata = MetaData()
 
         order = Table(
             "orders",
@@ -122,7 +121,7 @@ class CompileTest(fixtures.ORMTest):
     def test_conflicting_backref_one(self):
         """test that conflicting backrefs raises an exception"""
 
-        metadata = MetaData(testing.db)
+        metadata = MetaData()
 
         order = Table(
             "orders",
@@ -190,9 +189,7 @@ class CompileTest(fixtures.ORMTest):
             sa_exc.ArgumentError, "Error creating backref", configure_mappers
         )
 
-    @testing.provide_metadata
-    def test_misc_one(self, connection):
-        metadata = self.metadata
+    def test_misc_one(self, connection, metadata):
         node_table = Table(
             "node",
             metadata,
index c81de142c790f3ebfbda9ff574c16d4000f6baab..f439b6b08e5ac0546a1c083b60d815b6d9f2bc44 100644 (file)
@@ -938,8 +938,8 @@ class GetterStateTest(_fixtures.FixtureTest):
             },
         )
 
-        metadata.create_all()
-        sess = Session(autoflush=False)
+        metadata.create_all(testing.db)
+        sess = Session(testing.db, autoflush=False)
         data = {"im": "unhashable"}
         a1 = Article(id=1, data=data)
         c1 = Category(id=1, data=data)
index edbb4b0cd0ea7fe991fb3b495bd527f0344a5df0..35e0b0042269bc44db623ac8a3a82986ab23c932 100644 (file)
@@ -1118,9 +1118,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
         assert hasattr(Foo, "type")
         assert Foo.type.property.columns[0] is t.c.type
 
-    @testing.provide_metadata
-    def test_prop_filters_defaults(self):
-        metadata = self.metadata
+    def test_prop_filters_defaults(self, metadata, connection):
         t = Table(
             "t",
             metadata,
@@ -1132,13 +1130,14 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
             ),
             Column("x", Integer(), nullable=False, server_default="0"),
         )
-        t.create()
+
+        t.create(connection)
 
         class A(object):
             pass
 
         self.mapper(A, t, include_properties=["id"])
-        s = Session()
+        s = Session(connection)
         s.add(A())
         s.commit()
 
index 9e528dc0d42035a1450c1e1f149914559ff8de68..cee87c80510864019f6105b9b5d099f2219e2d19 100644 (file)
@@ -490,14 +490,14 @@ class RowTupleTest(QueryTest):
 
 
 class BindSensitiveStringifyTest(fixtures.TestBase):
-    def _fixture(self, bind_to=None):
+    def _fixture(self):
         # building a totally separate metadata /mapping here
         # because we need to control if the MetaData is bound or not
 
         class User(object):
             pass
 
-        m = MetaData(bind=bind_to)
+        m = MetaData()
         user_table = Table(
             "users",
             m,
@@ -516,15 +516,13 @@ class BindSensitiveStringifyTest(fixtures.TestBase):
 
         return base.Engine(mock.Mock(), MyDialect(), mock.Mock())
 
-    def _test(
-        self, bound_metadata, bound_session, session_present, expect_bound
-    ):
-        if bound_metadata or bound_session:
+    def _test(self, bound_session, session_present, expect_bound):
+        if bound_session:
             eng = self._dialect_fixture()
         else:
             eng = None
 
-        User = self._fixture(bind_to=eng if bound_metadata else None)
+        User = self._fixture()
 
         s = Session(eng if bound_session else None)
         q = s.query(User).filter(User.id == 7)
@@ -540,20 +538,14 @@ class BindSensitiveStringifyTest(fixtures.TestBase):
             "FROM users WHERE users.id = :id_1",
         )
 
-    def test_query_unbound_metadata_bound_session(self):
-        self._test(False, True, True, True)
-
-    def test_query_bound_metadata_unbound_session(self):
-        self._test(True, False, True, True)
-
-    def test_query_unbound_metadata_no_session(self):
-        self._test(False, False, False, False)
+    def test_query_bound_session(self):
+        self._test(True, True, True)
 
-    def test_query_unbound_metadata_unbound_session(self):
-        self._test(False, False, True, False)
+    def test_query_no_session(self):
+        self._test(False, False, False)
 
-    def test_query_bound_metadata_bound_session(self):
-        self._test(True, True, True, True)
+    def test_query_unbound_session(self):
+        self._test(False, True, False)
 
 
 class GetTest(QueryTest):
index 22315e176b4eef5b7ebabc64eb64f23f878a0625..996488fb1ef15283b0035fed308977f0ea6697bc 100644 (file)
@@ -718,7 +718,7 @@ class OverlappingFksSiblingTest(fixtures.TestBase):
             __mapper_args__ = {"polymorphic_identity": "bsub2"}
 
         configure_mappers()
-        self.metadata.create_all()
+        self.metadata.create_all(testing.db)
 
         return A, AMember, B, BSub1, BSub2
 
index a9e962cdedaf4d6fd1af74d21a2392be34f65a3b..c5cf275253e6dabf8a6112addf904cd4ee6ee388 100644 (file)
@@ -392,7 +392,7 @@ class SessionStateTest(_fixtures.FixtureTest):
     def test_autoflush(self):
         User, users = self.classes.User, self.tables.users
 
-        bind = self.metadata.bind
+        bind = testing.db
         mapper(User, users)
         conn1 = bind.connect()
         conn2 = bind.connect()
index 019409ba3d1bd591aa0c1494dfd4fc91766837fa..8c1fa54243bfa0c5567b7f039feb6a67e38838d9 100644 (file)
@@ -59,7 +59,7 @@ class ConstraintGenTest(fixtures.TestBase, AssertsExecutionResults):
         )
         self.assert_sql_execution(
             testing.db,
-            lambda: metadata.create_all(checkfirst=False),
+            lambda: metadata.create_all(testing.db, checkfirst=False),
             CompiledSQL(
                 "CREATE TABLE employees ("
                 "id INTEGER NOT NULL, "
@@ -292,7 +292,7 @@ class ConstraintGenTest(fixtures.TestBase, AssertsExecutionResults):
         assertions.append(AllOf(*fk_assertions))
 
         with self.sql_execution_asserter() as asserter:
-            metadata.create_all(checkfirst=False)
+            metadata.create_all(testing.db, checkfirst=False)
         asserter.assert_(*assertions)
 
         assertions = [
@@ -302,7 +302,7 @@ class ConstraintGenTest(fixtures.TestBase, AssertsExecutionResults):
         ]
 
         with self.sql_execution_asserter() as asserter:
-            metadata.drop_all(checkfirst=False),
+            metadata.drop_all(testing.db, checkfirst=False),
         asserter.assert_(*assertions)
 
     def _assert_cyclic_constraint_no_alter(
@@ -356,7 +356,7 @@ class ConstraintGenTest(fixtures.TestBase, AssertsExecutionResults):
         assertions = [AllOf(*table_assertions)]
 
         with self.sql_execution_asserter() as asserter:
-            metadata.create_all(checkfirst=False)
+            metadata.create_all(testing.db, checkfirst=False)
         asserter.assert_(*assertions)
 
         assertions = [
@@ -366,15 +366,15 @@ class ConstraintGenTest(fixtures.TestBase, AssertsExecutionResults):
         if sqlite_warning:
             with expect_warnings("Can't sort tables for DROP; "):
                 with self.sql_execution_asserter() as asserter:
-                    metadata.drop_all(checkfirst=False),
+                    metadata.drop_all(testing.db, checkfirst=False),
         else:
             with self.sql_execution_asserter() as asserter:
-                metadata.drop_all(checkfirst=False),
+                metadata.drop_all(testing.db, checkfirst=False),
         asserter.assert_(*assertions)
 
     @testing.force_drop_names("a", "b")
     def test_cycle_unnamed_fks(self):
-        metadata = MetaData(testing.db)
+        metadata = MetaData()
 
         Table(
             "a",
@@ -417,7 +417,7 @@ class ConstraintGenTest(fixtures.TestBase, AssertsExecutionResults):
             ),
         ]
         with self.sql_execution_asserter() as asserter:
-            metadata.create_all(checkfirst=False)
+            metadata.create_all(testing.db, checkfirst=False)
 
         if testing.db.dialect.supports_alter:
             asserter.assert_(*assertions)
@@ -431,6 +431,7 @@ class ConstraintGenTest(fixtures.TestBase, AssertsExecutionResults):
                 "cycle have names so that they can be dropped using "
                 "DROP CONSTRAINT.",
                 metadata.drop_all,
+                testing.db,
                 checkfirst=False,
             )
         else:
@@ -439,7 +440,7 @@ class ConstraintGenTest(fixtures.TestBase, AssertsExecutionResults):
                 "foreign key dependency exists between tables"
             ):
                 with self.sql_execution_asserter() as asserter:
-                    metadata.drop_all(checkfirst=False)
+                    metadata.drop_all(testing.db, checkfirst=False)
 
             asserter.assert_(
                 AllOf(CompiledSQL("DROP TABLE b"), CompiledSQL("DROP TABLE a"))
@@ -447,7 +448,7 @@ class ConstraintGenTest(fixtures.TestBase, AssertsExecutionResults):
 
     @testing.force_drop_names("a", "b")
     def test_cycle_named_fks(self):
-        metadata = MetaData(testing.db)
+        metadata = MetaData()
 
         Table(
             "a",
@@ -491,13 +492,13 @@ class ConstraintGenTest(fixtures.TestBase, AssertsExecutionResults):
             ),
         ]
         with self.sql_execution_asserter() as asserter:
-            metadata.create_all(checkfirst=False)
+            metadata.create_all(testing.db, checkfirst=False)
 
         if testing.db.dialect.supports_alter:
             asserter.assert_(*assertions)
 
             with self.sql_execution_asserter() as asserter:
-                metadata.drop_all(checkfirst=False)
+                metadata.drop_all(testing.db, checkfirst=False)
 
             asserter.assert_(
                 CompiledSQL("ALTER TABLE b DROP CONSTRAINT aidfk"),
@@ -507,7 +508,7 @@ class ConstraintGenTest(fixtures.TestBase, AssertsExecutionResults):
             )
         else:
             with self.sql_execution_asserter() as asserter:
-                metadata.drop_all(checkfirst=False)
+                metadata.drop_all(testing.db, checkfirst=False)
 
             asserter.assert_(
                 AllOf(CompiledSQL("DROP TABLE b"), CompiledSQL("DROP TABLE a"))
@@ -536,7 +537,7 @@ class ConstraintGenTest(fixtures.TestBase, AssertsExecutionResults):
 
         self.assert_sql_execution(
             testing.db,
-            lambda: metadata.create_all(checkfirst=False),
+            lambda: metadata.create_all(testing.db, checkfirst=False),
             AllOf(
                 CompiledSQL(
                     "CREATE TABLE foo ("
@@ -579,7 +580,7 @@ class ConstraintGenTest(fixtures.TestBase, AssertsExecutionResults):
 
         self.assert_sql_execution(
             testing.db,
-            lambda: metadata.create_all(checkfirst=False),
+            lambda: metadata.create_all(testing.db, checkfirst=False),
             AllOf(
                 CompiledSQL(
                     "CREATE TABLE foo ("
@@ -628,7 +629,7 @@ class ConstraintGenTest(fixtures.TestBase, AssertsExecutionResults):
 
         self.assert_sql_execution(
             testing.db,
-            lambda: metadata.create_all(checkfirst=False),
+            lambda: metadata.create_all(testing.db, checkfirst=False),
             RegexSQL("^CREATE TABLE"),
             AllOf(
                 CompiledSQL(
@@ -665,7 +666,7 @@ class ConstraintGenTest(fixtures.TestBase, AssertsExecutionResults):
 
         self.assert_sql_execution(
             testing.db,
-            lambda: metadata.create_all(checkfirst=False),
+            lambda: metadata.create_all(testing.db, checkfirst=False),
             RegexSQL("^CREATE TABLE"),
             AllOf(
                 CompiledSQL(
index 6d26f79758c9a1116eccf0192e138cedbb06a609..9af8ef6da85f3df058309d44420f4f57d9863375 100644 (file)
@@ -27,7 +27,6 @@ from sqlalchemy import union_all
 from sqlalchemy import VARCHAR
 from sqlalchemy.engine import default
 from sqlalchemy.testing import assert_raises_message
-from sqlalchemy.testing import engines
 from sqlalchemy.testing import eq_
 from sqlalchemy.testing import fixtures
 from sqlalchemy.testing import is_
@@ -35,22 +34,13 @@ from sqlalchemy.testing.schema import Column
 from sqlalchemy.testing.schema import Table
 
 
-# ongoing - these are old tests.  those which are of general use
-# to test a dialect are being slowly migrated to
-# sqlalhcemy.testing.suite
-
-users = users2 = addresses = metadata = None
-
-
-class QueryTest(fixtures.TestBase):
+class QueryTest(fixtures.TablesTest):
     __backend__ = True
 
     @classmethod
-    def setup_class(cls):
-        global users, users2, addresses, metadata
-        metadata = MetaData(testing.db)
-        users = Table(
-            "query_users",
+    def define_tables(cls, metadata):
+        Table(
+            "users",
             metadata,
             Column(
                 "user_id", INT, primary_key=True, test_needs_autoincrement=True
@@ -58,8 +48,8 @@ class QueryTest(fixtures.TestBase):
             Column("user_name", VARCHAR(20)),
             test_needs_acid=True,
         )
-        addresses = Table(
-            "query_addresses",
+        Table(
+            "addresses",
             metadata,
             Column(
                 "address_id",
@@ -67,12 +57,12 @@ class QueryTest(fixtures.TestBase):
                 primary_key=True,
                 test_needs_autoincrement=True,
             ),
-            Column("user_id", Integer, ForeignKey("query_users.user_id")),
+            Column("user_id", Integer, ForeignKey("users.user_id")),
             Column("address", String(30)),
             test_needs_acid=True,
         )
 
-        users2 = Table(
+        Table(
             "u2",
             metadata,
             Column("user_id", INT, primary_key=True),
@@ -80,19 +70,6 @@ class QueryTest(fixtures.TestBase):
             test_needs_acid=True,
         )
 
-        metadata.create_all()
-
-    @engines.close_first
-    def teardown(self):
-        with testing.db.begin() as conn:
-            conn.execute(addresses.delete())
-            conn.execute(users.delete())
-            conn.execute(users2.delete())
-
-    @classmethod
-    def teardown_class(cls):
-        metadata.drop_all()
-
     @testing.fails_on(
         "firebird", "kinterbasdb doesn't send full type information"
     )
@@ -105,6 +82,8 @@ class QueryTest(fixtures.TestBase):
 
         """
 
+        users = self.tables.users
+
         connection.execute(
             users.insert(),
             {"user_id": 7, "user_name": "jack"},
@@ -133,6 +112,7 @@ class QueryTest(fixtures.TestBase):
 
     @testing.requires.order_by_label_with_expression
     def test_order_by_label_compound(self, connection):
+        users = self.tables.users
         connection.execute(
             users.insert(),
             {"user_id": 7, "user_name": "jack"},
@@ -174,6 +154,7 @@ class QueryTest(fixtures.TestBase):
         assert row.y == False  # noqa
 
     def test_select_tuple(self, connection):
+        users = self.tables.users
         connection.execute(
             users.insert(),
             {"user_id": 1, "user_name": "apples"},
@@ -187,6 +168,7 @@ class QueryTest(fixtures.TestBase):
         )
 
     def test_like_ops(self, connection):
+        users = self.tables.users
         connection.execute(
             users.insert(),
             {"user_id": 1, "user_name": "apples"},
@@ -238,6 +220,7 @@ class QueryTest(fixtures.TestBase):
             eq_(connection.scalar(expr), result)
 
     def test_ilike(self, connection):
+        users = self.tables.users
         connection.execute(
             users.insert(),
             {"user_id": 1, "user_name": "one"},
@@ -279,11 +262,13 @@ class QueryTest(fixtures.TestBase):
             )
 
     def test_compiled_execute(self, connection):
+        users = self.tables.users
         connection.execute(users.insert(), user_id=7, user_name="jack")
         s = select(users).where(users.c.user_id == bindparam("id")).compile()
         eq_(connection.execute(s, id=7).first()._mapping["user_id"], 7)
 
     def test_compiled_insert_execute(self, connection):
+        users = self.tables.users
         connection.execute(
             users.insert().compile(), user_id=7, user_name="jack"
         )
@@ -296,6 +281,7 @@ class QueryTest(fixtures.TestBase):
         This should be run for DB-APIs with both positional and named
         paramstyles.
         """
+        users = self.tables.users
 
         connection.execute(users.insert(), user_id=7, user_name="jack")
         connection.execute(users.insert(), user_id=8, user_name="fred")
@@ -369,6 +355,8 @@ class QueryTest(fixtures.TestBase):
         Tests simple, compound, aliased and DESC clauses.
         """
 
+        users = self.tables.users
+
         connection.execute(users.insert(), user_id=1, user_name="c")
         connection.execute(users.insert(), user_id=2, user_name="b")
         connection.execute(users.insert(), user_id=3, user_name="a")
@@ -469,6 +457,8 @@ class QueryTest(fixtures.TestBase):
         Tests simple, compound, aliased and DESC clauses.
         """
 
+        users = self.tables.users
+
         connection.execute(users.insert(), user_id=1)
         connection.execute(users.insert(), user_id=2, user_name="b")
         connection.execute(users.insert(), user_id=3, user_name="a")
@@ -563,6 +553,7 @@ class QueryTest(fixtures.TestBase):
 
     def test_in_filtering(self, connection):
         """test the behavior of the in_() function."""
+        users = self.tables.users
 
         connection.execute(users.insert(), user_id=7, user_name="jack")
         connection.execute(users.insert(), user_id=8, user_name="fred")
@@ -587,6 +578,7 @@ class QueryTest(fixtures.TestBase):
         assert len(r) == 0
 
     def test_expanding_in(self, connection):
+        users = self.tables.users
         connection.execute(
             users.insert(),
             [
@@ -626,6 +618,7 @@ class QueryTest(fixtures.TestBase):
 
     @testing.requires.no_quoting_special_bind_names
     def test_expanding_in_special_chars(self, connection):
+        users = self.tables.users
         connection.execute(
             users.insert(),
             [
@@ -663,6 +656,8 @@ class QueryTest(fixtures.TestBase):
         )
 
     def test_expanding_in_multiple(self, connection):
+        users = self.tables.users
+
         connection.execute(
             users.insert(),
             [
@@ -687,6 +682,8 @@ class QueryTest(fixtures.TestBase):
         )
 
     def test_expanding_in_repeated(self, connection):
+        users = self.tables.users
+
         connection.execute(
             users.insert(),
             [
@@ -727,6 +724,8 @@ class QueryTest(fixtures.TestBase):
 
     @testing.requires.tuple_in
     def test_expanding_in_composite(self, connection):
+        users = self.tables.users
+
         connection.execute(
             users.insert(),
             [
@@ -768,7 +767,7 @@ class QueryTest(fixtures.TestBase):
                 return value[3:]
 
         users = Table(
-            "query_users",
+            "users",
             MetaData(),
             Column("user_id", Integer, primary_key=True),
             Column("user_name", NameWithProcess()),
@@ -812,6 +811,8 @@ class QueryTest(fixtures.TestBase):
 
         """
 
+        users = self.tables.users
+
         connection.execute(users.insert(), user_id=7, user_name="jack")
         connection.execute(users.insert(), user_id=8, user_name="fred")
         connection.execute(users.insert(), user_id=9, user_name=None)
@@ -827,6 +828,8 @@ class QueryTest(fixtures.TestBase):
     def test_literal_in(self, connection):
         """similar to test_bind_in but use a bind with a value."""
 
+        users = self.tables.users
+
         connection.execute(users.insert(), user_id=7, user_name="jack")
         connection.execute(users.insert(), user_id=8, user_name="fred")
         connection.execute(users.insert(), user_id=9, user_name=None)
@@ -842,6 +845,7 @@ class QueryTest(fixtures.TestBase):
         that a proper boolean value is generated.
 
         """
+        users = self.tables.users
 
         connection.execute(
             users.insert(),
@@ -932,63 +936,60 @@ class RequiredBindTest(fixtures.TablesTest):
         is_(bindparam("foo", callable_=c, required=False).required, False)
 
 
-class LimitTest(fixtures.TestBase):
+class LimitTest(fixtures.TablesTest):
     __backend__ = True
 
     @classmethod
-    def setup_class(cls):
-        global users, addresses, metadata
-        metadata = MetaData(testing.db)
-        users = Table(
-            "query_users",
+    def define_tables(cls, metadata):
+        Table(
+            "users",
             metadata,
             Column("user_id", INT, primary_key=True),
             Column("user_name", VARCHAR(20)),
         )
-        addresses = Table(
-            "query_addresses",
+        Table(
+            "addresses",
             metadata,
             Column("address_id", Integer, primary_key=True),
-            Column("user_id", Integer, ForeignKey("query_users.user_id")),
+            Column("user_id", Integer, ForeignKey("users.user_id")),
             Column("address", String(30)),
         )
-        metadata.create_all()
-
-        with testing.db.begin() as conn:
-            conn.execute(users.insert(), user_id=1, user_name="john")
-            conn.execute(
-                addresses.insert(), address_id=1, user_id=1, address="addr1"
-            )
-            conn.execute(users.insert(), user_id=2, user_name="jack")
-            conn.execute(
-                addresses.insert(), address_id=2, user_id=2, address="addr1"
-            )
-            conn.execute(users.insert(), user_id=3, user_name="ed")
-            conn.execute(
-                addresses.insert(), address_id=3, user_id=3, address="addr2"
-            )
-            conn.execute(users.insert(), user_id=4, user_name="wendy")
-            conn.execute(
-                addresses.insert(), address_id=4, user_id=4, address="addr3"
-            )
-            conn.execute(users.insert(), user_id=5, user_name="laura")
-            conn.execute(
-                addresses.insert(), address_id=5, user_id=5, address="addr4"
-            )
-            conn.execute(users.insert(), user_id=6, user_name="ralph")
-            conn.execute(
-                addresses.insert(), address_id=6, user_id=6, address="addr5"
-            )
-            conn.execute(users.insert(), user_id=7, user_name="fido")
-            conn.execute(
-                addresses.insert(), address_id=7, user_id=7, address="addr5"
-            )
 
     @classmethod
-    def teardown_class(cls):
-        metadata.drop_all()
+    def insert_data(cls, connection):
+        users, addresses = cls.tables("users", "addresses")
+        conn = connection
+        conn.execute(users.insert(), user_id=1, user_name="john")
+        conn.execute(
+            addresses.insert(), address_id=1, user_id=1, address="addr1"
+        )
+        conn.execute(users.insert(), user_id=2, user_name="jack")
+        conn.execute(
+            addresses.insert(), address_id=2, user_id=2, address="addr1"
+        )
+        conn.execute(users.insert(), user_id=3, user_name="ed")
+        conn.execute(
+            addresses.insert(), address_id=3, user_id=3, address="addr2"
+        )
+        conn.execute(users.insert(), user_id=4, user_name="wendy")
+        conn.execute(
+            addresses.insert(), address_id=4, user_id=4, address="addr3"
+        )
+        conn.execute(users.insert(), user_id=5, user_name="laura")
+        conn.execute(
+            addresses.insert(), address_id=5, user_id=5, address="addr4"
+        )
+        conn.execute(users.insert(), user_id=6, user_name="ralph")
+        conn.execute(
+            addresses.insert(), address_id=6, user_id=6, address="addr5"
+        )
+        conn.execute(users.insert(), user_id=7, user_name="fido")
+        conn.execute(
+            addresses.insert(), address_id=7, user_id=7, address="addr5"
+        )
 
     def test_select_limit(self, connection):
+        users, addresses = self.tables("users", "addresses")
         r = connection.execute(
             users.select(limit=3, order_by=[users.c.user_id])
         ).fetchall()
@@ -998,6 +999,8 @@ class LimitTest(fixtures.TestBase):
     def test_select_limit_offset(self, connection):
         """Test the interaction between limit and offset"""
 
+        users, addresses = self.tables("users", "addresses")
+
         r = connection.execute(
             users.select(limit=3, offset=2, order_by=[users.c.user_id])
         ).fetchall()
@@ -1010,6 +1013,8 @@ class LimitTest(fixtures.TestBase):
     def test_select_distinct_limit(self, connection):
         """Test the interaction between limit and distinct"""
 
+        users, addresses = self.tables("users", "addresses")
+
         r = sorted(
             [
                 x[0]
@@ -1025,6 +1030,8 @@ class LimitTest(fixtures.TestBase):
     def test_select_distinct_offset(self, connection):
         """Test the interaction between distinct and offset"""
 
+        users, addresses = self.tables("users", "addresses")
+
         r = sorted(
             [
                 x[0]
@@ -1043,6 +1050,8 @@ class LimitTest(fixtures.TestBase):
     def test_select_distinct_limit_offset(self, connection):
         """Test the interaction between limit and limit/offset"""
 
+        users, addresses = self.tables("users", "addresses")
+
         r = connection.execute(
             select(addresses.c.address)
             .order_by(addresses.c.address)
@@ -1054,18 +1063,18 @@ class LimitTest(fixtures.TestBase):
         self.assert_(r[0] != r[1] and r[1] != r[2], repr(r))
 
 
-class CompoundTest(fixtures.TestBase):
+class CompoundTest(fixtures.TablesTest):
 
     """test compound statements like UNION, INTERSECT, particularly their
     ability to nest on different databases."""
 
     __backend__ = True
 
+    run_inserts = "each"
+
     @classmethod
-    def setup_class(cls):
-        global metadata, t1, t2, t3
-        metadata = MetaData(testing.db)
-        t1 = Table(
+    def define_tables(cls, metadata):
+        Table(
             "t1",
             metadata,
             Column(
@@ -1078,7 +1087,7 @@ class CompoundTest(fixtures.TestBase):
             Column("col3", String(40)),
             Column("col4", String(30)),
         )
-        t2 = Table(
+        Table(
             "t2",
             metadata,
             Column(
@@ -1091,7 +1100,7 @@ class CompoundTest(fixtures.TestBase):
             Column("col3", String(40)),
             Column("col4", String(30)),
         )
-        t3 = Table(
+        Table(
             "t3",
             metadata,
             Column(
@@ -1104,47 +1113,42 @@ class CompoundTest(fixtures.TestBase):
             Column("col3", String(40)),
             Column("col4", String(30)),
         )
-        metadata.create_all()
-
-        with testing.db.begin() as conn:
-            conn.execute(
-                t1.insert(),
-                [
-                    dict(col2="t1col2r1", col3="aaa", col4="aaa"),
-                    dict(col2="t1col2r2", col3="bbb", col4="bbb"),
-                    dict(col2="t1col2r3", col3="ccc", col4="ccc"),
-                ],
-            )
-            conn.execute(
-                t2.insert(),
-                [
-                    dict(col2="t2col2r1", col3="aaa", col4="bbb"),
-                    dict(col2="t2col2r2", col3="bbb", col4="ccc"),
-                    dict(col2="t2col2r3", col3="ccc", col4="aaa"),
-                ],
-            )
-            conn.execute(
-                t3.insert(),
-                [
-                    dict(col2="t3col2r1", col3="aaa", col4="ccc"),
-                    dict(col2="t3col2r2", col3="bbb", col4="aaa"),
-                    dict(col2="t3col2r3", col3="ccc", col4="bbb"),
-                ],
-            )
-
-    @engines.close_first
-    def teardown(self):
-        pass
 
     @classmethod
-    def teardown_class(cls):
-        metadata.drop_all()
+    def insert_data(cls, connection):
+        t1, t2, t3 = cls.tables("t1", "t2", "t3")
+        conn = connection
+        conn.execute(
+            t1.insert(),
+            [
+                dict(col2="t1col2r1", col3="aaa", col4="aaa"),
+                dict(col2="t1col2r2", col3="bbb", col4="bbb"),
+                dict(col2="t1col2r3", col3="ccc", col4="ccc"),
+            ],
+        )
+        conn.execute(
+            t2.insert(),
+            [
+                dict(col2="t2col2r1", col3="aaa", col4="bbb"),
+                dict(col2="t2col2r2", col3="bbb", col4="ccc"),
+                dict(col2="t2col2r3", col3="ccc", col4="aaa"),
+            ],
+        )
+        conn.execute(
+            t3.insert(),
+            [
+                dict(col2="t3col2r1", col3="aaa", col4="ccc"),
+                dict(col2="t3col2r2", col3="bbb", col4="aaa"),
+                dict(col2="t3col2r3", col3="ccc", col4="bbb"),
+            ],
+        )
 
     def _fetchall_sorted(self, executed):
         return sorted([tuple(row) for row in executed.fetchall()])
 
     @testing.requires.subqueries
     def test_union(self, connection):
+        t1, t2, t3 = self.tables("t1", "t2", "t3")
         (s1, s2) = (
             select(t1.c.col3.label("col3"), t1.c.col4.label("col4")).where(
                 t1.c.col2.in_(["t1col2r1", "t1col2r2"]),
@@ -1171,6 +1175,8 @@ class CompoundTest(fixtures.TestBase):
 
     @testing.fails_on("firebird", "doesn't like ORDER BY with UNIONs")
     def test_union_ordered(self, connection):
+        t1, t2, t3 = self.tables("t1", "t2", "t3")
+
         (s1, s2) = (
             select(t1.c.col3.label("col3"), t1.c.col4.label("col4")).where(
                 t1.c.col2.in_(["t1col2r1", "t1col2r2"]),
@@ -1192,6 +1198,8 @@ class CompoundTest(fixtures.TestBase):
     @testing.fails_on("firebird", "doesn't like ORDER BY with UNIONs")
     @testing.requires.subqueries
     def test_union_ordered_alias(self, connection):
+        t1, t2, t3 = self.tables("t1", "t2", "t3")
+
         (s1, s2) = (
             select(t1.c.col3.label("col3"), t1.c.col4.label("col4")).where(
                 t1.c.col2.in_(["t1col2r1", "t1col2r2"]),
@@ -1220,6 +1228,8 @@ class CompoundTest(fixtures.TestBase):
     )
     @testing.fails_on("sqlite", "FIXME: unknown")
     def test_union_all(self, connection):
+        t1, t2, t3 = self.tables("t1", "t2", "t3")
+
         e = union_all(
             select(t1.c.col3),
             union(select(t1.c.col3), select(t1.c.col3)),
@@ -1241,6 +1251,8 @@ class CompoundTest(fixtures.TestBase):
 
         """
 
+        t1, t2, t3 = self.tables("t1", "t2", "t3")
+
         u = union(select(t1.c.col3), select(t1.c.col3)).alias()
 
         e = union_all(select(t1.c.col3), select(u.c.col3))
@@ -1256,6 +1268,8 @@ class CompoundTest(fixtures.TestBase):
 
     @testing.requires.intersect
     def test_intersect(self, connection):
+        t1, t2, t3 = self.tables("t1", "t2", "t3")
+
         i = intersect(
             select(t2.c.col3, t2.c.col4),
             select(t2.c.col3, t2.c.col4).where(t2.c.col4 == t3.c.col3),
@@ -1274,6 +1288,8 @@ class CompoundTest(fixtures.TestBase):
     @testing.requires.except_
     @testing.fails_on("sqlite", "Can't handle this style of nesting")
     def test_except_style1(self, connection):
+        t1, t2, t3 = self.tables("t1", "t2", "t3")
+
         e = except_(
             union(
                 select(t1.c.col3, t1.c.col4),
@@ -1300,6 +1316,8 @@ class CompoundTest(fixtures.TestBase):
         # same as style1, but add alias().select() to the except_().
         # sqlite can handle it now.
 
+        t1, t2, t3 = self.tables("t1", "t2", "t3")
+
         e = except_(
             union(
                 select(t1.c.col3, t1.c.col4),
@@ -1333,6 +1351,8 @@ class CompoundTest(fixtures.TestBase):
     @testing.requires.except_
     def test_except_style3(self, connection):
         # aaa, bbb, ccc - (aaa, bbb, ccc - (ccc)) = ccc
+        t1, t2, t3 = self.tables("t1", "t2", "t3")
+
         e = except_(
             select(t1.c.col3),  # aaa, bbb, ccc
             except_(
@@ -1346,6 +1366,8 @@ class CompoundTest(fixtures.TestBase):
     @testing.requires.except_
     def test_except_style4(self, connection):
         # aaa, bbb, ccc - (aaa, bbb, ccc - (ccc)) = ccc
+        t1, t2, t3 = self.tables("t1", "t2", "t3")
+
         e = except_(
             select(t1.c.col3),  # aaa, bbb, ccc
             except_(
@@ -1365,6 +1387,8 @@ class CompoundTest(fixtures.TestBase):
         "sqlite can't handle leading parenthesis",
     )
     def test_intersect_unions(self, connection):
+        t1, t2, t3 = self.tables("t1", "t2", "t3")
+
         u = intersect(
             union(select(t1.c.col3, t1.c.col4), select(t3.c.col3, t3.c.col4)),
             union(select(t2.c.col3, t2.c.col4), select(t3.c.col3, t3.c.col4))
@@ -1378,6 +1402,8 @@ class CompoundTest(fixtures.TestBase):
 
     @testing.requires.intersect
     def test_intersect_unions_2(self, connection):
+        t1, t2, t3 = self.tables("t1", "t2", "t3")
+
         u = intersect(
             union(select(t1.c.col3, t1.c.col4), select(t3.c.col3, t3.c.col4))
             .alias()
@@ -1393,6 +1419,8 @@ class CompoundTest(fixtures.TestBase):
 
     @testing.requires.intersect
     def test_intersect_unions_3(self, connection):
+        t1, t2, t3 = self.tables("t1", "t2", "t3")
+
         u = intersect(
             select(t2.c.col3, t2.c.col4),
             union(
@@ -1410,6 +1438,8 @@ class CompoundTest(fixtures.TestBase):
 
     @testing.requires.intersect
     def test_composite_alias(self, connection):
+        t1, t2, t3 = self.tables("t1", "t2", "t3")
+
         ua = intersect(
             select(t2.c.col3, t2.c.col4),
             union(
@@ -1426,10 +1456,7 @@ class CompoundTest(fixtures.TestBase):
         eq_(found, wanted)
 
 
-t1 = t2 = t3 = None
-
-
-class JoinTest(fixtures.TestBase):
+class JoinTest(fixtures.TablesTest):
 
     """Tests join execution.
 
@@ -1443,56 +1470,48 @@ class JoinTest(fixtures.TestBase):
     __backend__ = True
 
     @classmethod
-    def setup_class(cls):
-        global metadata
-        global t1, t2, t3
-
-        metadata = MetaData(testing.db)
-        t1 = Table(
+    def define_tables(cls, metadata):
+        Table(
             "t1",
             metadata,
             Column("t1_id", Integer, primary_key=True),
             Column("name", String(32)),
         )
-        t2 = Table(
+        Table(
             "t2",
             metadata,
             Column("t2_id", Integer, primary_key=True),
             Column("t1_id", Integer, ForeignKey("t1.t1_id")),
             Column("name", String(32)),
         )
-        t3 = Table(
+        Table(
             "t3",
             metadata,
             Column("t3_id", Integer, primary_key=True),
             Column("t2_id", Integer, ForeignKey("t2.t2_id")),
             Column("name", String(32)),
         )
-        metadata.drop_all()
-        metadata.create_all()
-
-        with testing.db.begin() as conn:
-            # t1.10 -> t2.20 -> t3.30
-            # t1.11 -> t2.21
-            # t1.12
-            conn.execute(
-                t1.insert(),
-                {"t1_id": 10, "name": "t1 #10"},
-                {"t1_id": 11, "name": "t1 #11"},
-                {"t1_id": 12, "name": "t1 #12"},
-            )
-            conn.execute(
-                t2.insert(),
-                {"t2_id": 20, "t1_id": 10, "name": "t2 #20"},
-                {"t2_id": 21, "t1_id": 11, "name": "t2 #21"},
-            )
-            conn.execute(
-                t3.insert(), {"t3_id": 30, "t2_id": 20, "name": "t3 #30"}
-            )
 
     @classmethod
-    def teardown_class(cls):
-        metadata.drop_all()
+    def insert_data(cls, connection):
+        conn = connection
+        # t1.10 -> t2.20 -> t3.30
+        # t1.11 -> t2.21
+        # t1.12
+        t1, t2, t3 = cls.tables("t1", "t2", "t3")
+
+        conn.execute(
+            t1.insert(),
+            {"t1_id": 10, "name": "t1 #10"},
+            {"t1_id": 11, "name": "t1 #11"},
+            {"t1_id": 12, "name": "t1 #12"},
+        )
+        conn.execute(
+            t2.insert(),
+            {"t2_id": 20, "t1_id": 10, "name": "t2 #20"},
+            {"t2_id": 21, "t1_id": 11, "name": "t2 #21"},
+        )
+        conn.execute(t3.insert(), {"t3_id": 30, "t2_id": 20, "name": "t3 #30"})
 
     def assertRows(self, statement, expected):
         """Execute a statement and assert that rows returned equal expected."""
@@ -1504,6 +1523,7 @@ class JoinTest(fixtures.TestBase):
 
     def test_join_x1(self):
         """Joins t1->t2."""
+        t1, t2, t3 = self.tables("t1", "t2", "t3")
 
         for criteria in (t1.c.t1_id == t2.c.t1_id, t2.c.t1_id == t1.c.t1_id):
             expr = select(t1.c.t1_id, t2.c.t2_id).select_from(
@@ -1513,6 +1533,7 @@ class JoinTest(fixtures.TestBase):
 
     def test_join_x2(self):
         """Joins t1->t2->t3."""
+        t1, t2, t3 = self.tables("t1", "t2", "t3")
 
         for criteria in (t1.c.t1_id == t2.c.t1_id, t2.c.t1_id == t1.c.t1_id):
             expr = select(t1.c.t1_id, t2.c.t2_id).select_from(
@@ -1522,6 +1543,7 @@ class JoinTest(fixtures.TestBase):
 
     def test_outerjoin_x1(self):
         """Outer joins t1->t2."""
+        t1, t2, t3 = self.tables("t1", "t2", "t3")
 
         for criteria in (t2.c.t2_id == t3.c.t2_id, t3.c.t2_id == t2.c.t2_id):
             expr = select(t1.c.t1_id, t2.c.t2_id).select_from(
@@ -1531,6 +1553,7 @@ class JoinTest(fixtures.TestBase):
 
     def test_outerjoin_x2(self):
         """Outer joins t1->t2,t3."""
+        t1, t2, t3 = self.tables("t1", "t2", "t3")
 
         for criteria in (t2.c.t2_id == t3.c.t2_id, t3.c.t2_id == t2.c.t2_id):
             expr = select(t1.c.t1_id, t2.c.t2_id, t3.c.t3_id).select_from(
@@ -1544,6 +1567,7 @@ class JoinTest(fixtures.TestBase):
 
     def test_outerjoin_where_x2_t1(self):
         """Outer joins t1->t2,t3, where on t1."""
+        t1, t2, t3 = self.tables("t1", "t2", "t3")
 
         for criteria in (t2.c.t2_id == t3.c.t2_id, t3.c.t2_id == t2.c.t2_id):
             expr = (
@@ -1574,6 +1598,7 @@ class JoinTest(fixtures.TestBase):
 
     def test_outerjoin_where_x2_t2(self):
         """Outer joins t1->t2,t3, where on t2."""
+        t1, t2, t3 = self.tables("t1", "t2", "t3")
 
         for criteria in (t2.c.t2_id == t3.c.t2_id, t3.c.t2_id == t2.c.t2_id):
             expr = (
@@ -1604,6 +1629,7 @@ class JoinTest(fixtures.TestBase):
 
     def test_outerjoin_where_x2_t3(self):
         """Outer joins t1->t2,t3, where on t3."""
+        t1, t2, t3 = self.tables("t1", "t2", "t3")
 
         for criteria in (t2.c.t2_id == t3.c.t2_id, t3.c.t2_id == t2.c.t2_id):
             expr = (
@@ -1635,6 +1661,8 @@ class JoinTest(fixtures.TestBase):
     def test_outerjoin_where_x2_t1t3(self):
         """Outer joins t1->t2,t3, where on t1 and t3."""
 
+        t1, t2, t3 = self.tables("t1", "t2", "t3")
+
         for criteria in (t2.c.t2_id == t3.c.t2_id, t3.c.t2_id == t2.c.t2_id):
             expr = (
                 select(t1.c.t1_id, t2.c.t2_id, t3.c.t3_id)
@@ -1664,6 +1692,8 @@ class JoinTest(fixtures.TestBase):
     def test_outerjoin_where_x2_t1t2(self):
         """Outer joins t1->t2,t3, where on t1 and t2."""
 
+        t1, t2, t3 = self.tables("t1", "t2", "t3")
+
         for criteria in (t2.c.t2_id == t3.c.t2_id, t3.c.t2_id == t2.c.t2_id):
             expr = (
                 select(t1.c.t1_id, t2.c.t2_id, t3.c.t3_id)
@@ -1693,6 +1723,7 @@ class JoinTest(fixtures.TestBase):
 
     def test_outerjoin_where_x2_t1t2t3(self):
         """Outer joins t1->t2,t3, where on t1, t2 and t3."""
+        t1, t2, t3 = self.tables("t1", "t2", "t3")
 
         for criteria in (t2.c.t2_id == t3.c.t2_id, t3.c.t2_id == t2.c.t2_id):
             expr = (
@@ -1729,6 +1760,7 @@ class JoinTest(fixtures.TestBase):
 
     def test_mixed(self):
         """Joins t1->t2, outer t2->t3."""
+        t1, t2, t3 = self.tables("t1", "t2", "t3")
 
         for criteria in (t2.c.t2_id == t3.c.t2_id, t3.c.t2_id == t2.c.t2_id):
             expr = select(t1.c.t1_id, t2.c.t2_id, t3.c.t3_id).select_from(
@@ -1739,6 +1771,7 @@ class JoinTest(fixtures.TestBase):
 
     def test_mixed_where(self):
         """Joins t1->t2, outer t2->t3, plus a where on each table in turn."""
+        t1, t2, t3 = self.tables("t1", "t2", "t3")
 
         for criteria in (t2.c.t2_id == t3.c.t2_id, t3.c.t2_id == t2.c.t2_id):
             expr = (
@@ -1800,17 +1833,12 @@ class JoinTest(fixtures.TestBase):
             self.assertRows(expr, [(10, 20, 30)])
 
 
-metadata = flds = None
-
-
-class OperatorTest(fixtures.TestBase):
+class OperatorTest(fixtures.TablesTest):
     __backend__ = True
 
     @classmethod
-    def setup_class(cls):
-        global metadata, flds
-        metadata = MetaData(testing.db)
-        flds = Table(
+    def define_tables(cls, metadata):
+        Table(
             "flds",
             metadata,
             Column(
@@ -1822,20 +1850,19 @@ class OperatorTest(fixtures.TestBase):
             Column("intcol", Integer),
             Column("strcol", String(50)),
         )
-        metadata.create_all()
-
-        with testing.db.begin() as conn:
-            conn.execute(
-                flds.insert(),
-                [dict(intcol=5, strcol="foo"), dict(intcol=13, strcol="bar")],
-            )
 
     @classmethod
-    def teardown_class(cls):
-        metadata.drop_all()
+    def insert_data(cls, connection):
+        flds = cls.tables.flds
+        connection.execute(
+            flds.insert(),
+            [dict(intcol=5, strcol="foo"), dict(intcol=13, strcol="bar")],
+        )
 
     # TODO: seems like more tests warranted for this setup.
     def test_modulo(self, connection):
+        flds = self.tables.flds
+
         eq_(
             connection.execute(
                 select(flds.c.intcol % 3).order_by(flds.c.idcol)
@@ -1845,6 +1872,8 @@ class OperatorTest(fixtures.TestBase):
 
     @testing.requires.window_functions
     def test_over(self, connection):
+        flds = self.tables.flds
+
         eq_(
             connection.execute(
                 select(
index a78d6c16b5a5bd6ab20e2338ff8d5a42eef2e4b9..c743918c8f7ad95c37abdc31fcacce0aa25d4fb0 100644 (file)
@@ -187,31 +187,6 @@ class QuoteExecTest(fixtures.TablesTest):
 class QuoteTest(fixtures.TestBase, AssertsCompiledSQL):
     __dialect__ = "default"
 
-    @classmethod
-    def setup_class(cls):
-        # TODO: figure out which databases/which identifiers allow special
-        # characters to be used, such as: spaces, quote characters,
-        # punctuation characters, set up tests for those as well.
-
-        global table1, table2
-        metadata = MetaData(testing.db)
-
-        table1 = Table(
-            "WorstCase1",
-            metadata,
-            Column("lowercase", Integer, primary_key=True),
-            Column("UPPERCASE", Integer),
-            Column("MixedCase", Integer),
-            Column("ASC", Integer, key="a123"),
-        )
-        table2 = Table(
-            "WorstCase2",
-            metadata,
-            Column("desc", Integer, primary_key=True, key="d123"),
-            Column("Union", Integer, key="u123"),
-            Column("MixedCase", Integer),
-        )
-
     @testing.crashes("oracle", "FIXME: unknown, verify not fails_on")
     @testing.requires.subqueries
     def test_labels(self):
@@ -234,6 +209,23 @@ class QuoteTest(fixtures.TestBase, AssertsCompiledSQL):
         where the "UPPERCASE" column of "LaLa" doesn't exist.
         """
 
+        metadata = MetaData()
+        table1 = Table(
+            "WorstCase1",
+            metadata,
+            Column("lowercase", Integer, primary_key=True),
+            Column("UPPERCASE", Integer),
+            Column("MixedCase", Integer),
+            Column("ASC", Integer, key="a123"),
+        )
+        Table(
+            "WorstCase2",
+            metadata,
+            Column("desc", Integer, primary_key=True, key="d123"),
+            Column("Union", Integer, key="u123"),
+            Column("MixedCase", Integer),
+        )
+
         self.assert_compile(
             table1.select(distinct=True).alias("LaLa").select(),
             "SELECT "
index 1809e0cca0752fae9aef6d90f8c41dd33785e12c..b5ba28df73abd4dfbd4dbf5b744f0af5fa455efa 100644 (file)
@@ -520,7 +520,7 @@ class SequenceAsServerDefaultTest(
 
     def test_drop_ordering(self):
         with self.sql_execution_asserter(testing.db) as asserter:
-            self.metadata.drop_all(checkfirst=False)
+            self.tables_test_metadata.drop_all(checkfirst=False)
 
         asserter.assert_(
             AllOf(
index 919c4b4f9794d760c63c4a5e38fcb4e5fe0bff12..77aefc190f42be40083c42510608e5a8d67ed98d 100644 (file)
@@ -3505,34 +3505,26 @@ class PickleTest(fixtures.TestBase):
             assert p1.compare_values(p1.copy_value(obj), obj)
 
 
-meta = None
-
-
 class CallableTest(fixtures.TestBase):
-    @classmethod
-    def setup_class(cls):
-        global meta
-        meta = MetaData(testing.db)
-
-    @classmethod
-    def teardown_class(cls):
-        meta.drop_all()
-
-    def test_callable_as_arg(self):
+    @testing.provide_metadata
+    def test_callable_as_arg(self, connection):
         ucode = util.partial(Unicode)
 
-        thing_table = Table("thing", meta, Column("name", ucode(20)))
+        thing_table = Table("thing", self.metadata, Column("name", ucode(20)))
         assert isinstance(thing_table.c.name.type, Unicode)
-        thing_table.create()
+        thing_table.create(connection)
 
-    def test_callable_as_kwarg(self):
+    @testing.provide_metadata
+    def test_callable_as_kwarg(self, connection):
         ucode = util.partial(Unicode)
 
         thang_table = Table(
-            "thang", meta, Column("name", type_=ucode(20), primary_key=True)
+            "thang",
+            self.metadata,
+            Column("name", type_=ucode(20), primary_key=True),
         )
         assert isinstance(thang_table.c.name.type, Unicode)
-        thang_table.create()
+        thang_table.create(connection)
 
 
 class LiteralTest(fixtures.TestBase):