stop using MyISAM; more oracle struggles
author Mike Bayer <mike_mp@zzzcomputing.com>
Wed, 26 Nov 2025 06:36:57 +0000 (01:36 -0500)
committer Mike Bayer <mike_mp@zzzcomputing.com>
Thu, 27 Nov 2025 16:31:56 +0000 (11:31 -0500)
getting some failures on mariadb 12 and likely 11 which appear to
be related to our explicit use of MyISAM, which is not used in
modern mysql/mariadb.  see if we can just remove this whole
thing and rely on the default engines for mariadb/mysql.

this change also removes the "ignore errors" part of the
run_deletes step for the TablesTest fixture, which was resulting
in compound failures, and apparently a lot of tests were relying
on it skipping nonexistent tables.  rather than checking for that,
we should just improve the tests and probably increase the use of
pytest-style fixtures overall.

this change also identifies and fixes the issue that memusage_w_backend
tests were running for all backends with a tag like
py314_mysql_backendonly; the memusage tests should basically
never be run as part of the whole suite, since they are entirely
unreliable within a full-scale test run.

dialect suite tests are also further broken out into those where
every driver should be exercised (i.e. __backend__, for tests that
round-trip datatypes to and from the database as well as
identity/autoincrement kinds of tests) vs. those where only
one driver per backend is needed (i.e. __sparse_driver_backend__,
for tests like reflection, DDL, CTEs, etc.).
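
as a sketch of how the two flags are meant to be used (both class
names below are illustrative, not actual suite classes):

    from sqlalchemy.testing import fixtures

    class DatatypeRoundTripTest(fixtures.TablesTest):
        # values going out to / coming back from the database can be
        # driver-sensitive, so every driver should run this
        __backend__ = True

    class SchemaReflectionTest(fixtures.TablesTest):
        # reflection / DDL behavior is a backend-level concern, so one
        # driver per backend is enough
        __sparse_driver_backend__ = True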

we are also trying to get a --low-connections option that actually
works.  changed this so that the testing reaper aggressively disposes
the "global" engines (one per backend / driver) after test classes
are done and before any testing_engine() call.  this definitely
works; however, some monitoring with PG shows the number of connections
still has brief bursts for some reason.  it should be much more
effective than before, though, as oracle 23/26 really does not handle
more than a few connections.
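
the disposal step itself just disposes every engine tracked under the
reaper's "global" scope without dropping them from tracking, so they
can reconnect lazily; roughly (condensed from the engines.py change
below):

    def _dispose_testing_engines(self, scope):
        # close all pooled connections of each engine in this scope;
        # unlike _drop_testing_engines(), the registry is not cleared
        for rec in list(self.testing_engines[scope]):
            if hasattr(rec, "sync_engine"):
                await_only(rec.dispose())  # async engines
            else:
                rec.dispose()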

this change reverts oracle to oracle18c for now in setup.cfg;
further work will be needed to determine if oracle23c can be
run with this test suite.

Change-Id: Id87d0ea15155c452615a7edeb9d434c8e55151e7
(cherry picked from commit 88028954576a8d7e772160d74b14da62c0b4fd43)

19 files changed:
lib/sqlalchemy/dialects/mysql/provision.py
lib/sqlalchemy/dialects/oracle/provision.py
lib/sqlalchemy/testing/engines.py
lib/sqlalchemy/testing/fixtures/sql.py
lib/sqlalchemy/testing/plugin/plugin_base.py
lib/sqlalchemy/testing/provision.py
lib/sqlalchemy/testing/requirements.py
lib/sqlalchemy/testing/schema.py
lib/sqlalchemy/testing/suite/test_cte.py
lib/sqlalchemy/testing/suite/test_ddl.py
lib/sqlalchemy/testing/suite/test_reflection.py
lib/sqlalchemy/testing/suite/test_select.py
lib/sqlalchemy/testing/suite/test_sequence.py
lib/sqlalchemy/testing/suite/test_update_delete.py
noxfile.py
setup.cfg
test/orm/inheritance/test_basic.py
test/orm/inheritance/test_relationship.py
test/orm/test_versioning.py

index 242adbf82fba5f2bc9aaf566344334514ec98c10..4559dfc77af6cc6cd4c165c6dacd0076373afacb 100644 (file)
--- a/lib/sqlalchemy/dialects/mysql/provision.py
+++ b/lib/sqlalchemy/dialects/mysql/provision.py
@@ -12,6 +12,7 @@ from ... import exc
 from ...testing.provision import allow_stale_update_impl
 from ...testing.provision import configure_follower
 from ...testing.provision import create_db
+from ...testing.provision import delete_from_all_tables
 from ...testing.provision import drop_db
 from ...testing.provision import generate_driver_url
 from ...testing.provision import temp_table_keyword_args
@@ -117,6 +118,15 @@ def _upsert(
     return stmt
 
 
+@delete_from_all_tables.for_db("mysql", "mariadb")
+def _delete_from_all_tables(cfg, connection, metadata):
+    connection.exec_driver_sql("SET foreign_key_checks = 0")
+    try:
+        delete_from_all_tables.call_original(cfg, connection, metadata)
+    finally:
+        connection.exec_driver_sql("SET foreign_key_checks = 1")
+
+
 @allow_stale_update_impl.for_db("mariadb")
 def _allow_stale_update_impl(cfg):
     @contextlib.contextmanager
index 3587de9d011db55cc0eb13dcdfaab25ad7c87494..76d13c53bf4ef8483d133f2217453b067dfeed58 100644 (file)
--- a/lib/sqlalchemy/dialects/oracle/provision.py
+++ b/lib/sqlalchemy/dialects/oracle/provision.py
@@ -106,10 +106,11 @@ def _ora_stop_test_class_outside_fixtures(config, db, cls):
     # clear statement cache on all connections that were used
     # https://github.com/oracle/python-cx_Oracle/issues/519
 
+    all_dbapis = {cfg.db.dialect.dbapi for cfg in config.Config.all_configs()}
     for cx_oracle_conn in _all_conns:
         try:
             sc = cx_oracle_conn.stmtcachesize
-        except db.dialect.dbapi.InterfaceError:
+        except tuple(dbapi.InterfaceError for dbapi in all_dbapis):
             # connection closed
             pass
         else:
index 51beed98b192ea0829238db97df5de2a1a9a60ff..f771fe07b1dc014d3438d3bde2f1c4dc7cd39868 100644 (file)
--- a/lib/sqlalchemy/testing/engines.py
+++ b/lib/sqlalchemy/testing/engines.py
@@ -115,8 +115,17 @@ class ConnectionKiller:
                 await_only(rec.dispose())
             else:
                 rec.dispose()
+
         eng.clear()
 
+    def _dispose_testing_engines(self, scope):
+        eng = self.testing_engines[scope]
+        for rec in list(eng):
+            if hasattr(rec, "sync_engine"):
+                await_only(rec.dispose())
+            else:
+                rec.dispose()
+
     def after_test(self):
         self._drop_testing_engines("function")
 
@@ -155,6 +164,10 @@ class ConnectionKiller:
                 assert (
                     False
                 ), "%d connection recs not cleared after test suite" % (ln)
+        if config.options and config.options.low_connections:
+            # for suites running with --low-connections, dispose the "global"
+            # engines to disconnect everything before making a testing engine
+            self._dispose_testing_engines("global")
 
     def final_cleanup(self):
         self.checkin_all()
@@ -309,6 +322,7 @@ def testing_engine(
     share_pool=False,
     _sqlite_savepoint=False,
 ):
+
     if asyncio:
         assert not _sqlite_savepoint
         from sqlalchemy.ext.asyncio import (
@@ -361,6 +375,10 @@ def testing_engine(
             engine.pool._transfer_from(config.db.pool)
     elif share_pool:
         engine.pool = config.db.pool
+    elif config.options and config.options.low_connections:
+        # for suites running with --low-connections, dispose the "global"
+        # engines to disconnect everything before making a testing engine
+        testing_reaper._dispose_testing_engines("global")
 
     if scope == "global":
         if asyncio:
index 44cf21c24fe08ee4ecaee85bab554523ad347331..1321b099dfbdf2a036c3888b8dfd39a9500d90c2 100644 (file)
--- a/lib/sqlalchemy/testing/fixtures/sql.py
+++ b/lib/sqlalchemy/testing/fixtures/sql.py
@@ -10,12 +10,12 @@ from __future__ import annotations
 import itertools
 import random
 import re
-import sys
 
 import sqlalchemy as sa
 from .base import TestBase
 from .. import config
 from .. import mock
+from .. import provision
 from ..assertions import eq_
 from ..assertions import ne_
 from ..util import adict
@@ -133,37 +133,16 @@ class TablesTest(TestBase):
         elif self.run_create_tables == "each":
             drop_all_tables_from_metadata(self._tables_metadata, self.bind)
 
-        savepoints = getattr(config.requirements, "savepoints", False)
-        if savepoints:
-            savepoints = savepoints.enabled
-
         # no need to run deletes if tables are recreated on setup
         if (
             self.run_define_tables != "each"
-            and self.run_create_tables != "each"
+            and self.run_create_tables == "once"
             and self.run_deletes == "each"
         ):
             with self.bind.begin() as conn:
-                for table in reversed(
-                    [
-                        t
-                        for (t, fks) in sort_tables_and_constraints(
-                            self._tables_metadata.tables.values()
-                        )
-                        if t is not None
-                    ]
-                ):
-                    try:
-                        if savepoints:
-                            with conn.begin_nested():
-                                conn.execute(table.delete())
-                        else:
-                            conn.execute(table.delete())
-                    except sa.exc.DBAPIError as ex:
-                        print(
-                            ("Error emptying table %s: %r" % (table, ex)),
-                            file=sys.stderr,
-                        )
+                provision.delete_from_all_tables(
+                    config, conn, self._tables_metadata
+                )
 
     @classmethod
     def _teardown_once_metadata_bind(cls):
index 4f35b1029431d26772955f8f4ee3a95b19ddc441..fb534d5e1a19921666fb92bfd79fc86617b3dcdf 100644 (file)
--- a/lib/sqlalchemy/testing/plugin/plugin_base.py
+++ b/lib/sqlalchemy/testing/plugin/plugin_base.py
@@ -588,8 +588,8 @@ def stop_test_class(cls):
 
 
 def stop_test_class_outside_fixtures(cls):
-    engines.testing_reaper.stop_test_class_outside_fixtures()
     provision.stop_test_class_outside_fixtures(config, config.db, cls)
+    engines.testing_reaper.stop_test_class_outside_fixtures()
     try:
         if not options.low_connections:
             assertions.global_cleanup_assertions()
index 5d60eea32e37a2e7d20b1c7a698c36201c16afd1..aba439decbbb788789edd3db823fefa1ecb6c288 100644 (file)
--- a/lib/sqlalchemy/testing/provision.py
+++ b/lib/sqlalchemy/testing/provision.py
@@ -18,6 +18,7 @@ from . import util
 from .. import exc
 from .. import inspect
 from ..engine import url as sa_url
+from ..schema import sort_tables_and_constraints
 from ..sql import ddl
 from ..sql import schema
 from ..util import decorator
@@ -51,6 +52,9 @@ class register:
 
         return decorate
 
+    def call_original(self, cfg, *arg, **kw):
+        return self.fns["*"](cfg, *arg, **kw)
+
     def __call__(self, cfg, *arg, **kw):
         if isinstance(cfg, str):
             url = sa_url.make_url(cfg)
@@ -522,3 +526,36 @@ def allow_stale_updates(fn, *arg, **kw):
     """
     with allow_stale_update_impl(config._current):
         return fn(*arg, **kw)
+
+
+@register.init
+def delete_from_all_tables(cfg, connection, metadata):
+    """an absolutely foolproof delete from all tables routine.
+
+    dialects should override this to add special instructions like
+    disable constraints etc.
+
+    """
+    savepoints = getattr(cfg.requirements, "savepoints", False)
+    if savepoints:
+        savepoints = savepoints.enabled
+
+    inspector = inspect(connection)
+
+    for table in reversed(
+        [
+            t
+            for (t, fks) in sort_tables_and_constraints(
+                metadata.tables.values()
+            )
+            if t is not None
+            # remember that inspector.get_table_names() is cached,
+            # so this emits SQL once per unique schema name
+            and t.name in inspector.get_table_names(schema=t.schema)
+        ]
+    ):
+        if savepoints:
+            with connection.begin_nested():
+                connection.execute(table.delete())
+        else:
+            connection.execute(table.delete())
index 0a96ffcce3ef0afe05d641189aa2710173018ca3..14a0b94815f15ae44e29905143e7103d60541f6e 100644 (file)
--- a/lib/sqlalchemy/testing/requirements.py
+++ b/lib/sqlalchemy/testing/requirements.py
@@ -1513,14 +1513,12 @@ class SuiteRequirements(Requirements):
     def ad_hoc_engines(self):
         """Test environment must allow ad-hoc engine/connection creation.
 
-        DBs that scale poorly for many connections, even when closed, i.e.
-        Oracle, may use the "--low-connections" option which flags this
-        requirement as not present.
+        This is now a no-op since we reconfigured ``options.low_connections``
+        to cause the ``testing_engine()`` to close off other open connections
+        when it's invoked.
 
         """
-        return exclusions.skip_if(
-            lambda config: config.options.low_connections
-        )
+        return exclusions.open()
 
     @property
     def no_windows(self):
index 0dd7de2029de644a400dd4c521c0f8b680c1647a..516af2f367ba47317944cb5b832f3bfefbba2ac3 100644 (file)
--- a/lib/sqlalchemy/testing/schema.py
+++ b/lib/sqlalchemy/testing/schema.py
@@ -26,37 +26,11 @@ table_options = {}
 def Table(*args, **kw) -> schema.Table:
     """A schema.Table wrapper/hook for dialect-specific tweaks."""
 
-    test_opts = {k: kw.pop(k) for k in list(kw) if k.startswith("test_")}
+    # pop out local options; these are not used at the moment
+    _ = {k: kw.pop(k) for k in list(kw) if k.startswith("test_")}
 
     kw.update(table_options)
 
-    if exclusions.against(config._current, "mysql"):
-        if (
-            "mysql_engine" not in kw
-            and "mysql_type" not in kw
-            and "autoload_with" not in kw
-        ):
-            if "test_needs_fk" in test_opts or "test_needs_acid" in test_opts:
-                kw["mysql_engine"] = "InnoDB"
-            else:
-                # there are in fact test fixtures that rely upon MyISAM,
-                # due to MySQL / MariaDB having poor FK behavior under innodb,
-                # such as a self-referential table can't be deleted from at
-                # once without attending to per-row dependencies.  We'd need to
-                # add special steps to some fixtures if we want to not
-                # explicitly state MyISAM here
-                kw["mysql_engine"] = "MyISAM"
-    elif exclusions.against(config._current, "mariadb"):
-        if (
-            "mariadb_engine" not in kw
-            and "mariadb_type" not in kw
-            and "autoload_with" not in kw
-        ):
-            if "test_needs_fk" in test_opts or "test_needs_acid" in test_opts:
-                kw["mariadb_engine"] = "InnoDB"
-            else:
-                kw["mariadb_engine"] = "MyISAM"
-
     return schema.Table(*args, **kw)
 
 
index e6e852bee5de00feed7dbd9c3a6e04d71fccd2d7..ca9a2e50e5598d6c6f8be2b900895edba554f611 100644 (file)
--- a/lib/sqlalchemy/testing/suite/test_cte.py
+++ b/lib/sqlalchemy/testing/suite/test_cte.py
@@ -20,7 +20,7 @@ from ... import values
 
 
 class CTETest(fixtures.TablesTest):
-    __backend__ = True
+    __sparse_driver_backend__ = True
     __requires__ = ("ctes",)
 
     run_inserts = "each"
index c7e7d817d8e4babdcc97b8c6552848616b70e6a5..1c621bada1d0f601a41db4d471929c83fd165043 100644 (file)
--- a/lib/sqlalchemy/testing/suite/test_ddl.py
+++ b/lib/sqlalchemy/testing/suite/test_ddl.py
@@ -29,7 +29,7 @@ from ... import UniqueConstraint
 
 
 class TableDDLTest(fixtures.TestBase):
-    __backend__ = True
+    __sparse_driver_backend__ = True
 
     def _simple_fixture(self, schema=None):
         return Table(
@@ -202,7 +202,7 @@ class LongNameBlowoutTest(fixtures.TestBase):
 
     """
 
-    __backend__ = True
+    __sparse_driver_backend__ = True
 
     def fk(self, metadata, connection):
         convention = {
index 8ba588d2b5af5c4ba61e9c4d44ebb23bae18ec1a..fd3765770f03d8e573942de91c176524aaca2088 100644 (file)
--- a/lib/sqlalchemy/testing/suite/test_reflection.py
+++ b/lib/sqlalchemy/testing/suite/test_reflection.py
@@ -73,7 +73,9 @@ class OneConnectionTablesTest(fixtures.TablesTest):
 
 
 class HasTableTest(OneConnectionTablesTest):
-    __backend__ = True
+    __sparse_driver_backend__ = True
+
+    run_deletes = None
 
     @classmethod
     def define_tables(cls, metadata):
@@ -223,7 +225,7 @@ class HasTableTest(OneConnectionTablesTest):
 
 
 class HasIndexTest(fixtures.TablesTest):
-    __backend__ = True
+    __sparse_driver_backend__ = True
     __requires__ = ("index_reflection",)
 
     @classmethod
@@ -301,7 +303,7 @@ class HasIndexTest(fixtures.TablesTest):
 
 class BizarroCharacterTest(fixtures.TestBase):
 
-    __backend__ = True
+    __sparse_driver_backend__ = True
 
     def column_names():
         return testing.combinations(
@@ -404,7 +406,7 @@ class BizarroCharacterTest(fixtures.TestBase):
 
 class TempTableElementsTest(fixtures.TestBase):
 
-    __backend__ = True
+    __sparse_driver_backend__ = True
 
     __requires__ = ("temp_table_reflection",)
 
@@ -442,7 +444,7 @@ class TempTableElementsTest(fixtures.TestBase):
 
 class QuotedNameArgumentTest(fixtures.TablesTest):
     run_create_tables = "once"
-    __backend__ = True
+    __sparse_driver_backend__ = True
 
     @classmethod
     def define_tables(cls, metadata):
@@ -624,7 +626,7 @@ def _multi_combination(fn):
 class ComponentReflectionTest(ComparesTables, OneConnectionTablesTest):
     run_inserts = run_deletes = None
 
-    __backend__ = True
+    __sparse_driver_backend__ = True
 
     @classmethod
     def define_tables(cls, metadata):
@@ -2656,7 +2658,7 @@ class ComponentReflectionTest(ComparesTables, OneConnectionTablesTest):
 
 class TableNoColumnsTest(fixtures.TestBase):
     __requires__ = ("reflect_tables_no_columns",)
-    __backend__ = True
+    __sparse_driver_backend__ = True
 
     @testing.fixture
     def table_no_columns(self, connection, metadata):
@@ -2708,7 +2710,7 @@ class TableNoColumnsTest(fixtures.TestBase):
 
 
 class ComponentReflectionTestExtra(ComparesIndexes, fixtures.TestBase):
-    __backend__ = True
+    __sparse_driver_backend__ = True
 
     @testing.fixture(params=[True, False])
     def use_schema_fixture(self, request):
@@ -3211,7 +3213,7 @@ class ComponentReflectionTestExtra(ComparesIndexes, fixtures.TestBase):
 
 class NormalizedNameTest(fixtures.TablesTest):
     __requires__ = ("denormalized_names",)
-    __backend__ = True
+    __sparse_driver_backend__ = True
 
     @classmethod
     def define_tables(cls, metadata):
@@ -3352,7 +3354,7 @@ class ComputedReflectionTest(fixtures.ComputedReflectionFixtureTest):
 class IdentityReflectionTest(fixtures.TablesTest):
     run_inserts = run_deletes = None
 
-    __backend__ = True
+    __sparse_driver_backend__ = True
     __requires__ = ("identity_columns", "table_reflection")
 
     @classmethod
@@ -3491,7 +3493,7 @@ class IdentityReflectionTest(fixtures.TablesTest):
 
 
 class CompositeKeyReflectionTest(fixtures.TablesTest):
-    __backend__ = True
+    __sparse_driver_backend__ = True
 
     @classmethod
     def define_tables(cls, metadata):
index 84a18e3e7d55eaa658cfa131bfca3838a1a8195a..11af26b17ffe81b9f2193ddbbcdc64c0ef79e97a 100644 (file)
--- a/lib/sqlalchemy/testing/suite/test_select.py
+++ b/lib/sqlalchemy/testing/suite/test_select.py
@@ -47,7 +47,7 @@ from ...exc import ProgrammingError
 
 
 class CollateTest(fixtures.TablesTest):
-    __backend__ = True
+    __sparse_driver_backend__ = True
 
     @classmethod
     def define_tables(cls, metadata):
@@ -93,7 +93,7 @@ class OrderByLabelTest(fixtures.TablesTest):
 
     """
 
-    __backend__ = True
+    __sparse_driver_backend__ = True
 
     @classmethod
     def define_tables(cls, metadata):
@@ -164,7 +164,7 @@ class OrderByLabelTest(fixtures.TablesTest):
 class ValuesExpressionTest(fixtures.TestBase):
     __requires__ = ("table_value_constructor",)
 
-    __backend__ = True
+    __sparse_driver_backend__ = True
 
     def test_tuples(self, connection):
         value_expr = values(
@@ -637,7 +637,7 @@ class FetchLimitOffsetTest(fixtures.TablesTest):
 class SameNamedSchemaTableTest(fixtures.TablesTest):
     """tests for #7471"""
 
-    __backend__ = True
+    __sparse_driver_backend__ = True
 
     __requires__ = ("schemas",)
 
@@ -734,7 +734,7 @@ class SameNamedSchemaTableTest(fixtures.TablesTest):
 
 
 class JoinTest(fixtures.TablesTest):
-    __backend__ = True
+    __sparse_driver_backend__ = True
 
     def _assert_result(self, select, result, params=()):
         with config.db.connect() as conn:
@@ -834,7 +834,7 @@ class JoinTest(fixtures.TablesTest):
 
 
 class CompoundSelectTest(fixtures.TablesTest):
-    __backend__ = True
+    __sparse_driver_backend__ = True
 
     @classmethod
     def define_tables(cls, metadata):
@@ -1495,7 +1495,7 @@ class ExpandingBoundInTest(fixtures.TablesTest):
 
 
 class LikeFunctionsTest(fixtures.TablesTest):
-    __backend__ = True
+    __sparse_driver_backend__ = True
 
     run_inserts = "once"
     run_deletes = None
@@ -1640,7 +1640,7 @@ class LikeFunctionsTest(fixtures.TablesTest):
 
 
 class ComputedColumnTest(fixtures.TablesTest):
-    __backend__ = True
+    __sparse_driver_backend__ = True
     __requires__ = ("computed_columns",)
 
     @classmethod
@@ -1788,7 +1788,7 @@ class IdentityAutoincrementTest(fixtures.TablesTest):
 
 
 class ExistsTest(fixtures.TablesTest):
-    __backend__ = True
+    __sparse_driver_backend__ = True
 
     @classmethod
     def define_tables(cls, metadata):
@@ -1835,7 +1835,7 @@ class ExistsTest(fixtures.TablesTest):
 
 
 class DistinctOnTest(AssertsCompiledSQL, fixtures.TablesTest):
-    __backend__ = True
+    __sparse_driver_backend__ = True
 
     @testing.fails_if(testing.requires.supports_distinct_on)
     def test_distinct_on(self):
@@ -1847,7 +1847,7 @@ class DistinctOnTest(AssertsCompiledSQL, fixtures.TablesTest):
 
 
 class IsOrIsNotDistinctFromTest(fixtures.TablesTest):
-    __backend__ = True
+    __sparse_driver_backend__ = True
     __requires__ = ("supports_is_distinct_from",)
 
     @classmethod
@@ -1902,7 +1902,7 @@ class IsOrIsNotDistinctFromTest(fixtures.TablesTest):
 class WindowFunctionTest(fixtures.TablesTest):
     __requires__ = ("window_functions",)
 
-    __backend__ = True
+    __sparse_driver_backend__ = True
 
     @classmethod
     def define_tables(cls, metadata):
index f0e6575370b76c6e2c5a5c8372c023f4aad65514..780084304368d63fa20c55b88d22e4dc406c05d8 100644 (file)
--- a/lib/sqlalchemy/testing/suite/test_sequence.py
+++ b/lib/sqlalchemy/testing/suite/test_sequence.py
@@ -24,7 +24,7 @@ from ... import testing
 
 class SequenceTest(fixtures.TablesTest):
     __requires__ = ("sequences",)
-    __backend__ = True
+    __sparse_driver_backend__ = True
 
     run_create_tables = "each"
 
@@ -163,7 +163,7 @@ class SequenceTest(fixtures.TablesTest):
 
 class SequenceCompilerTest(testing.AssertsCompiledSQL, fixtures.TestBase):
     __requires__ = ("sequences",)
-    __backend__ = True
+    __sparse_driver_backend__ = True
 
     def test_literal_binds_inline_compile(self, connection):
         table = Table(
@@ -192,7 +192,7 @@ class HasSequenceTest(fixtures.TablesTest):
     run_deletes = None
 
     __requires__ = ("sequences",)
-    __backend__ = True
+    __sparse_driver_backend__ = True
 
     @classmethod
     def define_tables(cls, metadata):
@@ -308,7 +308,7 @@ class HasSequenceTest(fixtures.TablesTest):
 
 class HasSequenceTestEmpty(fixtures.TestBase):
     __requires__ = ("sequences",)
-    __backend__ = True
+    __sparse_driver_backend__ = True
 
     def test_get_sequence_names_no_sequence(self, connection):
         eq_(
index 85a8d393391029c52b10cbefb5eea1dd41f39f65..50f0ca9fca4d630624929631d306abf541e225cb 100644 (file)
--- a/lib/sqlalchemy/testing/suite/test_update_delete.py
+++ b/lib/sqlalchemy/testing/suite/test_update_delete.py
@@ -18,7 +18,7 @@ from ... import testing
 class SimpleUpdateDeleteTest(fixtures.TablesTest):
     run_deletes = "each"
     __requires__ = ("sane_rowcount",)
-    __backend__ = True
+    __sparse_driver_backend__ = True
 
     @classmethod
     def define_tables(cls, metadata):
index 2e05c0bef852abb8ce31c181dd2fe55f51de3eb4..77deb64f7d69853e6c2055f5f4cde178e956811c 100644 (file)
--- a/noxfile.py
+++ b/noxfile.py
@@ -3,6 +3,7 @@
 from __future__ import annotations
 
 import os
+from pathlib import Path
 import sys
 from typing import Dict
 from typing import List
@@ -250,8 +251,9 @@ def _tests(
     elif backendonly:
         # with "-m backendonly", only tests with the backend pytest mark
         # (or pytestplugin equivalent, like __backend__) will be selected
-        # by pytest
-        includes_excludes["m"].append("backend")
+        # by pytest.
+        # memory intensive is deselected to prevent these from running
+        includes_excludes["m"].extend(["backend", "not memory_intensive"])
     else:
         includes_excludes["m"].append("not memory_intensive")
 
@@ -305,10 +307,13 @@ def _tests(
         coverage=coverage,
     )
 
+    if database in ["oracle", "mssql"]:
+        cmd.extend(["--low-connections"])
+
     if database in ["oracle", "mssql", "sqlite_file"]:
         # use equals sign so that we avoid
         # https://github.com/pytest-dev/pytest/issues/13913
-        cmd.extend(["--write-idents=db_idents.txt", "--low-connections"])
+        cmd.extend(["--write-idents=db_idents.txt"])
 
     cmd.extend(posargs)
 
@@ -361,6 +366,10 @@ def test_mypy(session: nox.Session) -> None:
 def test_pep8(session: nox.Session) -> None:
     """Run linting and formatting checks."""
 
+    for pattern in ["*.so", "*.pyd", "*.dylib"]:
+        for filepath in Path("lib/sqlalchemy").rglob(pattern):
+            filepath.unlink()
+
     session.install("-e", ".")
 
     session.install(*nox.project.dependency_groups(pyproject, "lint"))
index afeec927f203f95f0750506746fc59e88cb8bab8..8604d7e0df90d6536309c303b3492d409168c44e 100644 (file)
--- a/setup.cfg
+++ b/setup.cfg
@@ -151,8 +151,8 @@ mssql = mssql+pyodbc://scott:tiger^5HHH@mssql2022:1433/test?driver=ODBC+Driver+1
 mssql_async = mssql+aioodbc://scott:tiger^5HHH@mssql2022:1433/test?driver=ODBC+Driver+18+for+SQL+Server&TrustServerCertificate=yes&Encrypt=Optional
 pymssql = mssql+pymssql://scott:tiger^5HHH@mssql2022:1433/test
 docker_mssql = mssql+pyodbc://scott:tiger^5HHH@127.0.0.1:1433/test?driver=ODBC+Driver+18+for+SQL+Server&TrustServerCertificate=yes&Encrypt=Optional
-oracle = oracle+cx_oracle://scott:tiger@oracle23c/freepdb1
-cxoracle = oracle+cx_oracle://scott:tiger@oracle23c/freepdb1
-oracledb = oracle+oracledb://scott:tiger@oracle23c/freepdb1
-oracledb_async = oracle+oracledb_async://scott:tiger@oracle23c/freepdb1
+oracle = oracle+cx_oracle://scott:tiger@oracle18c/xe
+cxoracle = oracle+cx_oracle://scott:tiger@oracle18c/xe
+oracledb = oracle+oracledb://scott:tiger@oracle18c/xe
+oracledb_async = oracle+oracledb_async://scott:tiger@oracle18c/xe
 docker_oracle = oracle+cx_oracle://scott:tiger@127.0.0.1:1521/?service_name=FREEPDB1
index 20d9ee069cf5d2c54572e848b1e87225bf0ca01b..4315a0e5c3705c0e3480cfbf415aaf7ad8350b54 100644 (file)
--- a/test/orm/inheritance/test_basic.py
+++ b/test/orm/inheritance/test_basic.py
@@ -40,6 +40,7 @@ from sqlalchemy.testing import expect_warnings
 from sqlalchemy.testing import fixtures
 from sqlalchemy.testing import is_
 from sqlalchemy.testing import mock
+from sqlalchemy.testing import provision
 from sqlalchemy.testing.assertions import assert_warns_message
 from sqlalchemy.testing.assertsql import AllOf
 from sqlalchemy.testing.assertsql import CompiledSQL
@@ -2103,6 +2104,7 @@ class VersioningTest(fixtures.MappedTest):
         )
 
     @testing.requires.sane_rowcount
+    @provision.allow_stale_updates
     def test_save_update(self):
         subtable, base, stuff = (
             self.tables.subtable,
@@ -2556,98 +2558,6 @@ class OverrideColKeyTest(fixtures.MappedTest):
         # PK col
         assert s2.id == s2.base_id != 15
 
-    def test_subclass_renames_superclass_col_single_inh(self, decl_base):
-        """tested as part of #8705.
-
-        The step where we configure columns mapped to specific keys must
-        take place even if the given column is already in _columntoproperty,
-        as would be the case if the superclass maps that column already.
-
-        """
-
-        class A(decl_base):
-            __tablename__ = "a"
-
-            id = Column(Integer, primary_key=True)
-            a_data = Column(String)
-
-        class B(A):
-            b_data = column_property(A.__table__.c.a_data)
-
-        is_(A.a_data.property.columns[0], A.__table__.c.a_data)
-        is_(B.a_data.property.columns[0], A.__table__.c.a_data)
-        is_(B.b_data.property.columns[0], A.__table__.c.a_data)
-
-    def test_subsubclass_groups_super_cols(self, decl_base):
-        """tested for #9220, which is a regression caused by #8705."""
-
-        class BaseClass(decl_base):
-            __tablename__ = "basetable"
-
-            id = Column(Integer, primary_key=True)
-            name = Column(String(50))
-            type = Column(String(20))
-
-            __mapper_args__ = {
-                "polymorphic_on": type,
-                "polymorphic_identity": "base",
-            }
-
-        class SubClass(BaseClass):
-            __tablename__ = "subtable"
-
-            id = column_property(
-                Column(Integer, primary_key=True), BaseClass.id
-            )
-            base_id = Column(Integer, ForeignKey("basetable.id"))
-            subdata1 = Column(String(50))
-
-            __mapper_args__ = {"polymorphic_identity": "sub"}
-
-        class SubSubClass(SubClass):
-            __tablename__ = "subsubtable"
-
-            id = column_property(
-                Column(Integer, ForeignKey("subtable.id"), primary_key=True),
-                SubClass.id,
-                BaseClass.id,
-            )
-            subdata2 = Column(String(50))
-
-            __mapper_args__ = {"polymorphic_identity": "subsub"}
-
-        is_(SubSubClass.id.property.columns[0], SubSubClass.__table__.c.id)
-        is_(
-            SubSubClass.id.property.columns[1]._deannotate(),
-            SubClass.__table__.c.id,
-        )
-        is_(
-            SubSubClass.id.property.columns[2]._deannotate(),
-            BaseClass.__table__.c.id,
-        )
-
-    def test_column_setup_sanity_check(self, decl_base):
-        class A(decl_base):
-            __tablename__ = "a"
-
-            id = Column(Integer, primary_key=True)
-            a_data = Column(String)
-
-        class B(A):
-            __tablename__ = "b"
-            id = Column(Integer, ForeignKey("a.id"), primary_key=True)
-            b_data = Column(String)
-
-        is_(A.id.property.parent, inspect(A))
-        # overlapping cols get a new prop on the subclass, with cols merged
-        is_(B.id.property.parent, inspect(B))
-        eq_(B.id.property.columns, [B.__table__.c.id, A.__table__.c.id])
-
-        # totally independent cols remain w/ parent on the originating
-        # mapper
-        is_(B.a_data.property.parent, inspect(A))
-        is_(B.b_data.property.parent, inspect(B))
-
     def test_override_implicit(self):
         # this is originally [ticket:1111].
         # the pattern here is now disallowed by [ticket:1892]
@@ -2794,6 +2704,102 @@ class OverrideColKeyTest(fixtures.MappedTest):
         assert sess.get(Sub, s1.base_id).data == "this is base"
 
 
+class OverrideColKeyTestDeclarative(fixtures.TestBase):
+    """test overriding of column attributes."""
+
+    def test_subclass_renames_superclass_col_single_inh(self, decl_base):
+        """tested as part of #8705.
+
+        The step where we configure columns mapped to specific keys must
+        take place even if the given column is already in _columntoproperty,
+        as would be the case if the superclass maps that column already.
+
+        """
+
+        class A(decl_base):
+            __tablename__ = "a"
+
+            id = Column(Integer, primary_key=True)
+            a_data = Column(String)
+
+        class B(A):
+            b_data = column_property(A.__table__.c.a_data)
+
+        is_(A.a_data.property.columns[0], A.__table__.c.a_data)
+        is_(B.a_data.property.columns[0], A.__table__.c.a_data)
+        is_(B.b_data.property.columns[0], A.__table__.c.a_data)
+
+    def test_subsubclass_groups_super_cols(self, decl_base):
+        """tested for #9220, which is a regression caused by #8705."""
+
+        class BaseClass(decl_base):
+            __tablename__ = "basetable"
+
+            id = Column(Integer, primary_key=True)
+            name = Column(String(50))
+            type = Column(String(20))
+
+            __mapper_args__ = {
+                "polymorphic_on": type,
+                "polymorphic_identity": "base",
+            }
+
+        class SubClass(BaseClass):
+            __tablename__ = "subtable"
+
+            id = column_property(
+                Column(Integer, primary_key=True), BaseClass.id
+            )
+            base_id = Column(Integer, ForeignKey("basetable.id"))
+            subdata1 = Column(String(50))
+
+            __mapper_args__ = {"polymorphic_identity": "sub"}
+
+        class SubSubClass(SubClass):
+            __tablename__ = "subsubtable"
+
+            id = column_property(
+                Column(Integer, ForeignKey("subtable.id"), primary_key=True),
+                SubClass.id,
+                BaseClass.id,
+            )
+            subdata2 = Column(String(50))
+
+            __mapper_args__ = {"polymorphic_identity": "subsub"}
+
+        is_(SubSubClass.id.property.columns[0], SubSubClass.__table__.c.id)
+        is_(
+            SubSubClass.id.property.columns[1]._deannotate(),
+            SubClass.__table__.c.id,
+        )
+        is_(
+            SubSubClass.id.property.columns[2]._deannotate(),
+            BaseClass.__table__.c.id,
+        )
+
+    def test_column_setup_sanity_check(self, decl_base):
+        class A(decl_base):
+            __tablename__ = "a"
+
+            id = Column(Integer, primary_key=True)
+            a_data = Column(String)
+
+        class B(A):
+            __tablename__ = "b"
+            id = Column(Integer, ForeignKey("a.id"), primary_key=True)
+            b_data = Column(String)
+
+        is_(A.id.property.parent, inspect(A))
+        # overlapping cols get a new prop on the subclass, with cols merged
+        is_(B.id.property.parent, inspect(B))
+        eq_(B.id.property.columns, [B.__table__.c.id, A.__table__.c.id])
+
+        # totally independent cols remain w/ parent on the originating
+        # mapper
+        is_(B.a_data.property.parent, inspect(A))
+        is_(B.b_data.property.parent, inspect(B))
+
+
 class OptimizedLoadTest(fixtures.MappedTest):
     """tests for the "optimized load" routine."""
 
index be42dc60904a7fcca0f2cc394e67042f90634c91..cc1e0b084a513d16952ac914cabd96e5f27a6eff 100644 (file)
--- a/test/orm/inheritance/test_relationship.py
+++ b/test/orm/inheritance/test_relationship.py
@@ -431,7 +431,9 @@ class SelfReferentialJ2JSelfTest(fixtures.MappedTest):
                 primary_key=True,
             ),
             Column(
-                "reports_to_id", Integer, ForeignKey("engineers.person_id")
+                "reports_to_id",
+                Integer,
+                ForeignKey("engineers.person_id"),
             ),
         )
 
index 39b4b2fcb43c1df399fb0511c3ae85a502b8443b..9b19ec1abe59921d3bae7afb85dd7aed848b6ed2 100644 (file)
--- a/test/orm/test_versioning.py
+++ b/test/orm/test_versioning.py
@@ -1107,6 +1107,7 @@ class AlternateGeneratorTest(fixtures.MappedTest):
             session.commit()
 
     @testing.requires.sane_rowcount
+    @provision.allow_stale_updates
     def test_child_row_switch_two(self):
         P = self.classes.P
 
@@ -1675,6 +1676,7 @@ class ServerVersioningTest(fixtures.MappedTest):
             self.assert_sql_execution(testing.db, sess.flush, *statements)
 
     @testing.requires.independent_connections
+    @provision.allow_stale_updates
     def test_concurrent_mod_err_expire_on_commit(self):
         sess = self._fixture()