Repair mssql dep tests; have __only_on__ imply __backend__
author     Mike Bayer <mike_mp@zzzcomputing.com>
           Sat, 19 Dec 2020 15:59:42 +0000 (10:59 -0500)
committer  Mike Bayer <mike_mp@zzzcomputing.com>
           Sat, 19 Dec 2020 17:36:57 +0000 (12:36 -0500)
CI missed a few SQL Server tests because we run mssql-backendonly
in the gerrit job.  Since there was a test that was "only on" mssql
but wasn't marked as a backend test, it never got run there and then
failed in master, where we run mssql fully.

Any suite that has an __only_on__ is inherently specific to a
backend, so when present it should imply __backend__, ensuring the
suite definitely runs whenever that backend is available.
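
As an illustration, a minimal sketch of a test class affected by this
change (class and test names are hypothetical); with the want_class()
adjustment below, declaring __only_on__ is now enough for the class to
be collected when running in backend-only mode:

    # hypothetical example; previously skipped in backend-only runs
    # because __only_on__ was set without __backend__
    from sqlalchemy.testing import fixtures

    class SomeMSSQLOnlyTest(fixtures.TestBase):
        __only_on__ = "mssql"   # now implies backend collection
        __backend__ = True      # still set explicitly in this commit

        def test_round_trip(self, connection):
            ...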

This in turn meant fixing a few sqlite_file tests that weren't
cleaning up or sharing well, as they suddenly became backend tests
under sqlite_file.  For now, a sqlite_file cleanup step has been
added to the test class teardown.
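
For context, the cleanup added to the sqlite provisioner (full diff
below) boils down to: ask the connection which attached databases are
file-backed via PRAGMA database_list, dispose the engine, then remove
those files.  A standalone sketch of the same idea using the plain
sqlite3 module (file names here are made up):

    import os
    import sqlite3

    conn = sqlite3.connect("main.db")
    conn.execute("ATTACH 'attached.db' AS other")

    # PRAGMA database_list yields (seq, name, file); "file" is empty
    # for in-memory / temporary databases, so only real files remain.
    files = [
        row[2] for row in conn.execute("PRAGMA database_list") if row[2]
    ]

    conn.close()
    for path in files:
        if os.path.exists(path):
            os.remove(path)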

Change-Id: I9de1ceabd6596547a65c59059a55b7e5156103fd

lib/sqlalchemy/dialects/sqlite/provision.py
lib/sqlalchemy/testing/plugin/plugin_base.py
test/dialect/mssql/test_deprecations.py
test/dialect/test_sqlite.py
test/requirements.py

lib/sqlalchemy/dialects/sqlite/provision.py
index ce20ed991234eb7a3e96ed3583c35271b9027c50..f26c21e223351123b47b2731e02c305c8efa08e9 100644 (file)
@@ -7,6 +7,7 @@ from ...testing.provision import follower_url_from_main
 from ...testing.provision import log
 from ...testing.provision import post_configure_engine
 from ...testing.provision import run_reap_dbs
+from ...testing.provision import stop_test_class
 from ...testing.provision import temp_table_keyword_args
 
 
@@ -56,6 +57,25 @@ def _sqlite_drop_db(cfg, eng, ident):
             os.remove(path)
 
 
+@stop_test_class.for_db("sqlite")
+def stop_test_class(config, db, cls):
+    with db.connect() as conn:
+        files = [
+            row.file
+            for row in conn.exec_driver_sql("PRAGMA database_list")
+            if row.file
+        ]
+
+    if files:
+        db.dispose()
+
+        # some sqlite file tests are not cleaning up well yet, so do this
+        # just to make things simple for now
+        for file in files:
+            if file:
+                os.remove(file)
+
+
 @temp_table_keyword_args.for_db("sqlite")
 def _sqlite_temp_table_keyword_args(cfg, eng):
     return {"prefixes": ["TEMPORARY"]}
lib/sqlalchemy/testing/plugin/plugin_base.py
index b390158527e0e0c3fd3241422bf7edefc57d5ea6..5e41f2cdfc5b29ae9581a85c93aed461f1a67538 100644 (file)
@@ -553,6 +553,7 @@ def want_class(name, cls):
         config.options.backend_only
         and not getattr(cls, "__backend__", False)
         and not getattr(cls, "__sparse_backend__", False)
+        and not getattr(cls, "__only_on__", False)
     ):
         return False
     else:
test/dialect/mssql/test_deprecations.py
index 8c2b80a56bd190537a3f59e396fcd2bcdad317d4..c869182c5ab5de3a81d911bdc469a4de5bfa76f9 100644 (file)
@@ -158,6 +158,7 @@ class LegacySchemaAliasingTest(fixtures.TestBase, AssertsCompiledSQL):
 class LegacySchemaAliasingBackendTest(
     testing.AssertsExecutionResults, fixtures.TestBase
 ):
+    __backend__ = True
     __only_on__ = "mssql"
 
     @testing.provide_metadata
@@ -176,7 +177,7 @@ class LegacySchemaAliasingBackendTest(
             schema=testing.config.test_schema,
         )
 
-        with eng.connect() as conn:
+        with eng.begin() as conn:
             tbl.create(conn)
             conn.execute(tbl.insert(), {"id": 1})
             eq_(conn.scalar(tbl.select()), 1)
@@ -197,7 +198,7 @@ class LegacySchemaAliasingBackendTest(
                 options=dict(legacy_schema_aliasing=True)
             )
 
-        with eng.connect() as conn:
+        with eng.begin() as conn:
 
             tbl.create(conn)
             conn.execute(tbl.insert(), {"id": 1})
@@ -236,7 +237,7 @@ class LegacySchemaAliasingBackendTest(
             schema=testing.config.test_schema,
         )
 
-        with eng.connect() as conn:
+        with eng.begin() as conn:
             tbl.create(conn)
             conn.execute(tbl.insert(), {"id": 1})
             eq_(conn.scalar(tbl.select()), 1)
test/dialect/test_sqlite.py
index 16969467ed022f98fe7cab75f914667ab82914f6..8eed21281c92b25e08a59ca31e172ba56de0ac0f 100644 (file)
@@ -72,48 +72,46 @@ class TestTypes(fixtures.TestBase, AssertsExecutionResults):
 
     __only_on__ = "sqlite"
 
+    @testing.provide_metadata
     def test_boolean(self):
         """Test that the boolean only treats 1 as True"""
 
-        meta = MetaData(testing.db)
+        meta = self.metadata
         t = Table(
             "bool_table",
             meta,
             Column("id", Integer, primary_key=True),
             Column("boo", Boolean(create_constraint=False)),
         )
-        try:
-            meta.create_all()
-            exec_sql(
-                testing.db,
-                "INSERT INTO bool_table (id, boo) " "VALUES (1, 'false');",
-            )
-            exec_sql(
-                testing.db,
-                "INSERT INTO bool_table (id, boo) " "VALUES (2, 'true');",
-            )
-            exec_sql(
-                testing.db,
-                "INSERT INTO bool_table (id, boo) " "VALUES (3, '1');",
-            )
-            exec_sql(
-                testing.db,
-                "INSERT INTO bool_table (id, boo) " "VALUES (4, '0');",
-            )
-            exec_sql(
-                testing.db,
-                "INSERT INTO bool_table (id, boo) " "VALUES (5, 1);",
-            )
-            exec_sql(
-                testing.db,
-                "INSERT INTO bool_table (id, boo) " "VALUES (6, 0);",
-            )
-            eq_(
-                t.select(t.c.boo).order_by(t.c.id).execute().fetchall(),
-                [(3, True), (5, True)],
-            )
-        finally:
-            meta.drop_all()
+        meta.create_all(testing.db)
+        exec_sql(
+            testing.db,
+            "INSERT INTO bool_table (id, boo) " "VALUES (1, 'false');",
+        )
+        exec_sql(
+            testing.db,
+            "INSERT INTO bool_table (id, boo) " "VALUES (2, 'true');",
+        )
+        exec_sql(
+            testing.db,
+            "INSERT INTO bool_table (id, boo) " "VALUES (3, '1');",
+        )
+        exec_sql(
+            testing.db,
+            "INSERT INTO bool_table (id, boo) " "VALUES (4, '0');",
+        )
+        exec_sql(
+            testing.db,
+            "INSERT INTO bool_table (id, boo) " "VALUES (5, 1);",
+        )
+        exec_sql(
+            testing.db,
+            "INSERT INTO bool_table (id, boo) " "VALUES (6, 0);",
+        )
+        eq_(
+            t.select(t.c.boo).order_by(t.c.id).execute().fetchall(),
+            [(3, True), (5, True)],
+        )
 
     def test_string_dates_passed_raise(self, connection):
         assert_raises(
@@ -137,6 +135,7 @@ class TestTypes(fixtures.TestBase, AssertsExecutionResults):
                 ).scalar(),
             )
 
+    @testing.provide_metadata
     def test_native_datetime(self):
         dbapi = testing.db.dialect.dbapi
         connect_args = {
@@ -147,35 +146,31 @@ class TestTypes(fixtures.TestBase, AssertsExecutionResults):
         )
         t = Table(
             "datetest",
-            MetaData(),
+            self.metadata,
             Column("id", Integer, primary_key=True),
             Column("d1", Date),
             Column("d2", sqltypes.TIMESTAMP),
         )
         t.create(engine)
-        try:
-            with engine.begin() as conn:
-                conn.execute(
-                    t.insert(),
-                    {
-                        "d1": datetime.date(2010, 5, 10),
-                        "d2": datetime.datetime(2010, 5, 10, 12, 15, 25),
-                    },
-                )
-                row = conn.execute(t.select()).first()
-                eq_(
-                    row,
-                    (
-                        1,
-                        datetime.date(2010, 5, 10),
-                        datetime.datetime(2010, 5, 10, 12, 15, 25),
-                    ),
-                )
-                r = conn.execute(func.current_date()).scalar()
-                assert isinstance(r, util.string_types)
-        finally:
-            t.drop(engine)
-            engine.dispose()
+        with engine.begin() as conn:
+            conn.execute(
+                t.insert(),
+                {
+                    "d1": datetime.date(2010, 5, 10),
+                    "d2": datetime.datetime(2010, 5, 10, 12, 15, 25),
+                },
+            )
+            row = conn.execute(t.select()).first()
+            eq_(
+                row,
+                (
+                    1,
+                    datetime.date(2010, 5, 10),
+                    datetime.datetime(2010, 5, 10, 12, 15, 25),
+                ),
+            )
+            r = conn.execute(func.current_date()).scalar()
+            assert isinstance(r, util.string_types)
 
     @testing.provide_metadata
     def test_custom_datetime(self, connection):
@@ -1748,30 +1743,24 @@ class ReflectHeadlessFKsTest(fixtures.TestBase):
 class KeywordInDatabaseNameTest(fixtures.TestBase):
     __only_on__ = "sqlite"
 
-    @classmethod
-    def setup_class(cls):
-        with testing.db.begin() as conn:
-            conn.exec_driver_sql(
-                'ATTACH %r AS "default"' % conn.engine.url.database
-            )
-            conn.exec_driver_sql(
-                'CREATE TABLE "default".a (id INTEGER PRIMARY KEY)'
-            )
-
-    @classmethod
-    def teardown_class(cls):
-        with testing.db.begin() as conn:
-            try:
-                conn.exec_driver_sql('drop table "default".a')
-            except Exception:
-                pass
-            conn.exec_driver_sql('DETACH DATABASE "default"')
+    @testing.fixture
+    def db_fixture(self, connection):
+        connection.exec_driver_sql(
+            'ATTACH %r AS "default"' % connection.engine.url.database
+        )
+        connection.exec_driver_sql(
+            'CREATE TABLE "default".a (id INTEGER PRIMARY KEY)'
+        )
+        try:
+            yield
+        finally:
+            connection.exec_driver_sql('drop table "default".a')
+            connection.exec_driver_sql('DETACH DATABASE "default"')
 
-    def test_reflect(self):
-        with testing.db.begin() as conn:
-            meta = MetaData(bind=conn, schema="default")
-            meta.reflect()
-            assert "default.a" in meta.tables
+    def test_reflect(self, connection, db_fixture):
+        meta = MetaData(bind=connection, schema="default")
+        meta.reflect()
+        assert "default.a" in meta.tables
 
 
 class ConstraintReflectionTest(fixtures.TestBase):
@@ -1831,10 +1820,6 @@ class ConstraintReflectionTest(fixtures.TestBase):
             conn.exec_driver_sql(
                 "CREATE TABLE f (x INTEGER, CONSTRAINT foo_fx UNIQUE(x))"
             )
-            conn.exec_driver_sql(
-                "CREATE TEMPORARY TABLE g "
-                "(x INTEGER, CONSTRAINT foo_gx UNIQUE(x))"
-            )
             conn.exec_driver_sql(
                 # intentional broken casing
                 "CREATE TABLE h (x INTEGER, COnstraINT foo_hx unIQUE(x))"
@@ -1864,15 +1849,6 @@ class ConstraintReflectionTest(fixtures.TestBase):
                 UniqueConstraint("x"),
             )
 
-            Table(
-                "n",
-                meta,
-                Column("id", Integer, primary_key=True),
-                Column("x", String(30)),
-                UniqueConstraint("x"),
-                prefixes=["TEMPORARY"],
-            )
-
             Table(
                 "p",
                 meta,
@@ -1953,7 +1929,6 @@ class ConstraintReflectionTest(fixtures.TestBase):
                 "j",
                 "i",
                 "h",
-                "g",
                 "f",
                 "e",
                 "e1",
@@ -1965,10 +1940,31 @@ class ConstraintReflectionTest(fixtures.TestBase):
                 "a1",
                 "a2",
             ]:
-                try:
-                    conn.exec_driver_sql("drop table %s" % name)
-                except Exception:
-                    pass
+                conn.exec_driver_sql("drop table %s" % name)
+
+    @testing.fixture
+    def temp_table_fixture(self, connection):
+
+        connection.exec_driver_sql(
+            "CREATE TEMPORARY TABLE g "
+            "(x INTEGER, CONSTRAINT foo_gx UNIQUE(x))"
+        )
+
+        n = Table(
+            "n",
+            MetaData(),
+            Column("id", Integer, primary_key=True),
+            Column("x", String(30)),
+            UniqueConstraint("x"),
+            prefixes=["TEMPORARY"],
+        )
+
+        n.create(connection)
+        try:
+            yield
+        finally:
+            connection.exec_driver_sql("DROP TABLE g")
+            n.drop(connection)
 
     def test_legacy_quoted_identifiers_unit(self):
         dialect = sqlite.dialect()
@@ -2316,8 +2312,11 @@ class ConstraintReflectionTest(fixtures.TestBase):
             [{"column_names": ["x"], "name": "foo_hx"}],
         )
 
-    def test_unique_constraint_named_broken_temp(self):
-        inspector = inspect(testing.db)
+    def test_unique_constraint_named_broken_temp(
+        self, connection, temp_table_fixture
+    ):
+
+        inspector = inspect(connection)
         eq_(
             inspector.get_unique_constraints("g"),
             [{"column_names": ["x"], "name": "foo_gx"}],
@@ -2352,8 +2351,10 @@ class ConstraintReflectionTest(fixtures.TestBase):
             [{"column_names": ["x"], "name": None}],
         )
 
-    def test_unique_constraint_unnamed_normal_temporary(self):
-        inspector = inspect(testing.db)
+    def test_unique_constraint_unnamed_normal_temporary(
+        self, connection, temp_table_fixture
+    ):
+        inspector = inspect(connection)
         eq_(
             inspector.get_unique_constraints("n"),
             [{"column_names": ["x"], "name": None}],
test/requirements.py
index 61e5ace3a810439fe2d9be680f6a5c5ace105de4..7cb09c309c6fbdc2dbdd9f4d48d19dfd297642cb 100644 (file)
@@ -1424,10 +1424,13 @@ class DefaultRequirements(SuiteRequirements):
 
     @property
     def ad_hoc_engines(self):
-        return exclusions.skip_if(
-            ["oracle"],
-            "works, but Oracle just gets tired with "
-            "this much connection activity",
+        return (
+            exclusions.skip_if(
+                ["oracle"],
+                "works, but Oracle just gets tired with "
+                "this much connection activity",
+            )
+            + skip_if(self._sqlite_file_db)
         )
 
     @property