from ...testing.provision import allow_stale_update_impl
from ...testing.provision import configure_follower
from ...testing.provision import create_db
+from ...testing.provision import delete_from_all_tables
from ...testing.provision import drop_db
from ...testing.provision import generate_driver_url
from ...testing.provision import temp_table_keyword_args
return stmt
+@delete_from_all_tables.for_db("mysql", "mariadb")
+def _delete_from_all_tables(cfg, connection, metadata):
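+    # temporarily disable FK enforcement so tables can be emptied in any
+    # order, including self-referential tables under InnoDB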
+ connection.exec_driver_sql("SET foreign_key_checks = 0")
+ try:
+ delete_from_all_tables.call_original(cfg, connection, metadata)
+ finally:
+ connection.exec_driver_sql("SET foreign_key_checks = 1")
+
+
@allow_stale_update_impl.for_db("mariadb")
def _allow_stale_update_impl(cfg):
@contextlib.contextmanager
# clear statement cache on all connections that were used
# https://github.com/oracle/python-cx_Oracle/issues/519
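+    # the InterfaceError class lives on each DBAPI module; gather the DBAPI
+    # modules of all configured databases so any of them can be caught below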
+ all_dbapis = {cfg.db.dialect.dbapi for cfg in config.Config.all_configs()}
for cx_oracle_conn in _all_conns:
try:
sc = cx_oracle_conn.stmtcachesize
- except db.dialect.dbapi.InterfaceError:
+ except tuple(dbapi.InterfaceError for dbapi in all_dbapis):
# connection closed
pass
else:
await_(rec.dispose())
else:
rec.dispose()
+
eng.clear()
+ def _dispose_testing_engines(self, scope):
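+        # dispose of connections held by the engines in this scope, but keep
+        # the engines themselves tracked (unlike _drop_testing_engines(),
+        # this does not clear the collection)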
+ eng = self.testing_engines[scope]
+ for rec in list(eng):
+ if hasattr(rec, "sync_engine"):
+ await_(rec.dispose())
+ else:
+ rec.dispose()
+
def after_test(self):
self._drop_testing_engines("function")
assert (
False
), "%d connection recs not cleared after test suite" % (ln)
+ if config.options and config.options.low_connections:
+ # for suites running with --low-connections, dispose the "global"
+ # engines to disconnect everything before making a testing engine
+ self._dispose_testing_engines("global")
def final_cleanup(self):
self.checkin_all()
share_pool=False,
_sqlite_savepoint=False,
):
+
if asyncio:
assert not _sqlite_savepoint
from sqlalchemy.ext.asyncio import (
engine.pool._transfer_from(config.db.pool)
elif share_pool:
engine.pool = config.db.pool
+ elif config.options and config.options.low_connections:
+ # for suites running with --low-connections, dispose the "global"
+ # engines to disconnect everything before making a testing engine
+ testing_reaper._dispose_testing_engines("global")
if scope == "global":
if asyncio:
import itertools
import random
import re
-import sys
import sqlalchemy as sa
from .base import TestBase
from .. import config
from .. import mock
+from .. import provision
from ..assertions import eq_
from ..assertions import expect_deprecated
from ..assertions import ne_
elif self.run_create_tables == "each":
drop_all_tables_from_metadata(self._tables_metadata, self.bind)
- savepoints = getattr(config.requirements, "savepoints", False)
- if savepoints:
- savepoints = savepoints.enabled
-
# no need to run deletes if tables are recreated on setup
if (
self.run_define_tables != "each"
- and self.run_create_tables != "each"
+ and self.run_create_tables == "once"
and self.run_deletes == "each"
):
with self.bind.begin() as conn:
- for table in reversed(
- [
- t
- for (t, fks) in sort_tables_and_constraints(
- self._tables_metadata.tables.values()
- )
- if t is not None
- ]
- ):
- try:
- if savepoints:
- with conn.begin_nested():
- conn.execute(table.delete())
- else:
- conn.execute(table.delete())
- except sa.exc.DBAPIError as ex:
- print(
- ("Error emptying table %s: %r" % (table, ex)),
- file=sys.stderr,
- )
+ provision.delete_from_all_tables(
+ config, conn, self._tables_metadata
+ )
@classmethod
def _teardown_once_metadata_bind(cls):
def stop_test_class_outside_fixtures(cls):
- engines.testing_reaper.stop_test_class_outside_fixtures()
provision.stop_test_class_outside_fixtures(config, config.db, cls)
+ engines.testing_reaper.stop_test_class_outside_fixtures()
try:
if not options.low_connections:
assertions.global_cleanup_assertions()
from .. import exc
from .. import inspect
from ..engine import url as sa_url
+from ..schema import sort_tables_and_constraints
from ..sql import ddl
from ..sql import schema
from ..util import decorator
return decorate
+ def call_original(self, cfg, *arg, **kw):
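+        # invoke the default implementation registered via @register.init,
+        # which is stored under the "*" key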
+ return self.fns["*"](cfg, *arg, **kw)
+
def __call__(self, cfg, *arg, **kw):
if isinstance(cfg, str):
url = sa_url.make_url(cfg)
"""
with allow_stale_update_impl(config._current):
return fn(*arg, **kw)
+
+
+@register.init
+def delete_from_all_tables(cfg, connection, metadata):
+ """an absolutely foolproof delete from all tables routine.
+
+ dialects should override this to add special instructions like
+ disable constraints etc.
+
+ """
+ savepoints = getattr(cfg.requirements, "savepoints", False)
+ if savepoints:
+ savepoints = savepoints.enabled
+
+ inspector = inspect(connection)
+
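+    # iterate tables in reverse dependency order so rows in dependent
+    # (child) tables are deleted before the parent rows they reference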
+ for table in reversed(
+ [
+ t
+ for (t, fks) in sort_tables_and_constraints(
+ metadata.tables.values()
+ )
+ if t is not None
+ # remember that inspector.get_table_names() is cached,
+ # so this emits SQL once per unique schema name
+ and t.name in inspector.get_table_names(schema=t.schema)
+ ]
+ ):
+ if savepoints:
+ with connection.begin_nested():
+ connection.execute(table.delete())
+ else:
+ connection.execute(table.delete())
def ad_hoc_engines(self):
"""Test environment must allow ad-hoc engine/connection creation.
- DBs that scale poorly for many connections, even when closed, i.e.
- Oracle, may use the "--low-connections" option which flags this
- requirement as not present.
+        This is now a no-op, since we reconfigured ``options.low_connections``
+        to cause ``testing_engine()`` to close off other open connections
+        when it's invoked.
"""
- return exclusions.skip_if(
- lambda config: config.options.low_connections
- )
+ return exclusions.open()
@property
def no_windows(self):
def Table(*args, **kw) -> schema.Table:
"""A schema.Table wrapper/hook for dialect-specific tweaks."""
- test_opts = {k: kw.pop(k) for k in list(kw) if k.startswith("test_")}
+    # pop out and discard per-table test_* options; these are not used at
+    # the moment
+ _ = {k: kw.pop(k) for k in list(kw) if k.startswith("test_")}
kw.update(table_options)
- if exclusions.against(config._current, "mysql"):
- if (
- "mysql_engine" not in kw
- and "mysql_type" not in kw
- and "autoload_with" not in kw
- ):
- if "test_needs_fk" in test_opts or "test_needs_acid" in test_opts:
- kw["mysql_engine"] = "InnoDB"
- else:
- # there are in fact test fixtures that rely upon MyISAM,
- # due to MySQL / MariaDB having poor FK behavior under innodb,
- # such as a self-referential table can't be deleted from at
- # once without attending to per-row dependencies. We'd need to
- # add special steps to some fixtures if we want to not
- # explicitly state MyISAM here
- kw["mysql_engine"] = "MyISAM"
- elif exclusions.against(config._current, "mariadb"):
- if (
- "mariadb_engine" not in kw
- and "mariadb_type" not in kw
- and "autoload_with" not in kw
- ):
- if "test_needs_fk" in test_opts or "test_needs_acid" in test_opts:
- kw["mariadb_engine"] = "InnoDB"
- else:
- kw["mariadb_engine"] = "MyISAM"
-
return schema.Table(*args, **kw)
class CTETest(fixtures.TablesTest):
- __backend__ = True
+ __sparse_driver_backend__ = True
__requires__ = ("ctes",)
run_inserts = "each"
class TableDDLTest(fixtures.TestBase):
- __backend__ = True
+ __sparse_driver_backend__ = True
def _simple_fixture(self, schema=None):
return Table(
"""
- __backend__ = True
+ __sparse_driver_backend__ = True
def fk(self, metadata, connection):
convention = {
class HasTableTest(OneConnectionTablesTest):
- __backend__ = True
+ __sparse_driver_backend__ = True
+
+ run_deletes = None
@classmethod
def define_tables(cls, metadata):
class HasIndexTest(fixtures.TablesTest):
- __backend__ = True
+ __sparse_driver_backend__ = True
__requires__ = ("index_reflection",)
@classmethod
class BizarroCharacterTest(fixtures.TestBase):
- __backend__ = True
+ __sparse_driver_backend__ = True
def column_names():
return testing.combinations(
class TempTableElementsTest(fixtures.TestBase):
- __backend__ = True
+ __sparse_driver_backend__ = True
__requires__ = ("temp_table_reflection",)
class QuotedNameArgumentTest(fixtures.TablesTest):
run_create_tables = "once"
- __backend__ = True
+ __sparse_driver_backend__ = True
@classmethod
def define_tables(cls, metadata):
class ComponentReflectionTest(ComparesTables, OneConnectionTablesTest):
run_inserts = run_deletes = None
- __backend__ = True
+ __sparse_driver_backend__ = True
@classmethod
def define_tables(cls, metadata):
class TableNoColumnsTest(fixtures.TestBase):
__requires__ = ("reflect_tables_no_columns",)
- __backend__ = True
+ __sparse_driver_backend__ = True
@testing.fixture
def table_no_columns(self, connection, metadata):
class ComponentReflectionTestExtra(ComparesIndexes, fixtures.TestBase):
- __backend__ = True
+ __sparse_driver_backend__ = True
@testing.fixture(params=[True, False])
def use_schema_fixture(self, request):
class NormalizedNameTest(fixtures.TablesTest):
__requires__ = ("denormalized_names",)
- __backend__ = True
+ __sparse_driver_backend__ = True
@classmethod
def define_tables(cls, metadata):
class IdentityReflectionTest(fixtures.TablesTest):
run_inserts = run_deletes = None
- __backend__ = True
+ __sparse_driver_backend__ = True
__requires__ = ("identity_columns", "table_reflection")
@classmethod
class CompositeKeyReflectionTest(fixtures.TablesTest):
- __backend__ = True
+ __sparse_driver_backend__ = True
@classmethod
def define_tables(cls, metadata):
class CollateTest(fixtures.TablesTest):
- __backend__ = True
+ __sparse_driver_backend__ = True
@classmethod
def define_tables(cls, metadata):
"""
- __backend__ = True
+ __sparse_driver_backend__ = True
@classmethod
def define_tables(cls, metadata):
class ValuesExpressionTest(fixtures.TestBase):
__requires__ = ("table_value_constructor",)
- __backend__ = True
+ __sparse_driver_backend__ = True
def test_tuples(self, connection):
value_expr = values(
class SameNamedSchemaTableTest(fixtures.TablesTest):
"""tests for #7471"""
- __backend__ = True
+ __sparse_driver_backend__ = True
__requires__ = ("schemas",)
class JoinTest(fixtures.TablesTest):
- __backend__ = True
+ __sparse_driver_backend__ = True
def _assert_result(self, select, result, params=None):
with config.db.connect() as conn:
class CompoundSelectTest(fixtures.TablesTest):
- __backend__ = True
+ __sparse_driver_backend__ = True
@classmethod
def define_tables(cls, metadata):
class LikeFunctionsTest(fixtures.TablesTest):
- __backend__ = True
+ __sparse_driver_backend__ = True
run_inserts = "once"
run_deletes = None
class ComputedColumnTest(fixtures.TablesTest):
- __backend__ = True
+ __sparse_driver_backend__ = True
__requires__ = ("computed_columns",)
@classmethod
class ExistsTest(fixtures.TablesTest):
- __backend__ = True
+ __sparse_driver_backend__ = True
@classmethod
def define_tables(cls, metadata):
class DistinctOnTest(AssertsCompiledSQL, fixtures.TablesTest):
- __backend__ = True
+ __sparse_driver_backend__ = True
@testing.fails_if(testing.requires.supports_distinct_on)
def test_distinct_on(self):
class IsOrIsNotDistinctFromTest(fixtures.TablesTest):
- __backend__ = True
+ __sparse_driver_backend__ = True
__requires__ = ("supports_is_distinct_from",)
@classmethod
class WindowFunctionTest(fixtures.TablesTest):
__requires__ = ("window_functions",)
- __backend__ = True
+ __sparse_driver_backend__ = True
@classmethod
def define_tables(cls, metadata):
class SequenceTest(fixtures.TablesTest):
__requires__ = ("sequences",)
- __backend__ = True
+ __sparse_driver_backend__ = True
run_create_tables = "each"
class SequenceCompilerTest(testing.AssertsCompiledSQL, fixtures.TestBase):
__requires__ = ("sequences",)
- __backend__ = True
+ __sparse_driver_backend__ = True
def test_literal_binds_inline_compile(self, connection):
table = Table(
run_deletes = None
__requires__ = ("sequences",)
- __backend__ = True
+ __sparse_driver_backend__ = True
@classmethod
def define_tables(cls, metadata):
class HasSequenceTestEmpty(fixtures.TestBase):
__requires__ = ("sequences",)
- __backend__ = True
+ __sparse_driver_backend__ = True
def test_get_sequence_names_no_sequence(self, connection):
eq_(
class TableViaSelectTest(fixtures.TablesTest):
- __backend__ = True
+ __sparse_driver_backend__ = True
@classmethod
def temp_table_name(cls):
class SimpleUpdateDeleteTest(fixtures.TablesTest):
run_deletes = "each"
__requires__ = ("sane_rowcount",)
- __backend__ = True
+ __sparse_driver_backend__ = True
@classmethod
def define_tables(cls, metadata):
elif backendonly:
# with "-m backendonly", only tests with the backend pytest mark
# (or pytestplugin equivalent, like __backend__) will be selected
- # by pytest
- includes_excludes["m"].append("backend")
+ # by pytest.
+        # memory_intensive is also deselected to keep those tests from running
+ includes_excludes["m"].extend(["backend", "not memory_intensive"])
else:
includes_excludes["m"].append("not memory_intensive")
coverage=coverage,
)
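+    # Oracle and SQL Server scale poorly with many connections; run their
+    # suites with --low-connections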
+ if database in ["oracle", "mssql"]:
+ cmd.extend(["--low-connections"])
+
if database in ["oracle", "mssql", "sqlite_file"]:
# use equals sign so that we avoid
# https://github.com/pytest-dev/pytest/issues/13913
- cmd.extend(["--write-idents=db_idents.txt", "--low-connections"])
+ cmd.extend(["--write-idents=db_idents.txt"])
cmd.extend(posargs)
mssql_async = mssql+aioodbc://scott:tiger^5HHH@mssql2022:1433/test?driver=ODBC+Driver+18+for+SQL+Server&TrustServerCertificate=yes&Encrypt=Optional
pymssql = mssql+pymssql://scott:tiger^5HHH@mssql2022:1433/test
docker_mssql = mssql+pyodbc://scott:tiger^5HHH@127.0.0.1:1433/test?driver=ODBC+Driver+18+for+SQL+Server&TrustServerCertificate=yes&Encrypt=Optional
-oracle = oracle+cx_oracle://scott:tiger@oracle23c/freepdb1
-cxoracle = oracle+cx_oracle://scott:tiger@oracle23c/freepdb1
-oracledb = oracle+oracledb://scott:tiger@oracle23c/freepdb1
-oracledb_async = oracle+oracledb_async://scott:tiger@oracle23c/freepdb1
+oracle = oracle+cx_oracle://scott:tiger@oracle18c/xe
+cxoracle = oracle+cx_oracle://scott:tiger@oracle18c/xe
+oracledb = oracle+oracledb://scott:tiger@oracle18c/xe
+oracledb_async = oracle+oracledb_async://scott:tiger@oracle18c/xe
docker_oracle = oracle+cx_oracle://scott:tiger@127.0.0.1:1521/?service_name=FREEPDB1
from sqlalchemy.testing import fixtures
from sqlalchemy.testing import is_
from sqlalchemy.testing import mock
+from sqlalchemy.testing import provision
from sqlalchemy.testing.assertions import assert_warns_message
from sqlalchemy.testing.assertsql import AllOf
from sqlalchemy.testing.assertsql import CompiledSQL
)
@testing.requires.sane_rowcount
+ @provision.allow_stale_updates
def test_save_update(self):
subtable, base, stuff = (
self.tables.subtable,
# PK col
assert s2.id == s2.base_id != 15
- def test_subclass_renames_superclass_col_single_inh(self, decl_base):
- """tested as part of #8705.
-
- The step where we configure columns mapped to specific keys must
- take place even if the given column is already in _columntoproperty,
- as would be the case if the superclass maps that column already.
-
- """
-
- class A(decl_base):
- __tablename__ = "a"
-
- id = Column(Integer, primary_key=True)
- a_data = Column(String)
-
- class B(A):
- b_data = column_property(A.__table__.c.a_data)
-
- is_(A.a_data.property.columns[0], A.__table__.c.a_data)
- is_(B.a_data.property.columns[0], A.__table__.c.a_data)
- is_(B.b_data.property.columns[0], A.__table__.c.a_data)
-
- def test_subsubclass_groups_super_cols(self, decl_base):
- """tested for #9220, which is a regression caused by #8705."""
-
- class BaseClass(decl_base):
- __tablename__ = "basetable"
-
- id = Column(Integer, primary_key=True)
- name = Column(String(50))
- type = Column(String(20))
-
- __mapper_args__ = {
- "polymorphic_on": type,
- "polymorphic_identity": "base",
- }
-
- class SubClass(BaseClass):
- __tablename__ = "subtable"
-
- id = column_property(
- Column(Integer, primary_key=True), BaseClass.id
- )
- base_id = Column(Integer, ForeignKey("basetable.id"))
- subdata1 = Column(String(50))
-
- __mapper_args__ = {"polymorphic_identity": "sub"}
-
- class SubSubClass(SubClass):
- __tablename__ = "subsubtable"
-
- id = column_property(
- Column(Integer, ForeignKey("subtable.id"), primary_key=True),
- SubClass.id,
- BaseClass.id,
- )
- subdata2 = Column(String(50))
-
- __mapper_args__ = {"polymorphic_identity": "subsub"}
-
- is_(SubSubClass.id.property.columns[0], SubSubClass.__table__.c.id)
- is_(
- SubSubClass.id.property.columns[1]._deannotate(),
- SubClass.__table__.c.id,
- )
- is_(
- SubSubClass.id.property.columns[2]._deannotate(),
- BaseClass.__table__.c.id,
- )
-
- def test_column_setup_sanity_check(self, decl_base):
- class A(decl_base):
- __tablename__ = "a"
-
- id = Column(Integer, primary_key=True)
- a_data = Column(String)
-
- class B(A):
- __tablename__ = "b"
- id = Column(Integer, ForeignKey("a.id"), primary_key=True)
- b_data = Column(String)
-
- is_(A.id.property.parent, inspect(A))
- # overlapping cols get a new prop on the subclass, with cols merged
- is_(B.id.property.parent, inspect(B))
- eq_(B.id.property.columns, [B.__table__.c.id, A.__table__.c.id])
-
- # totally independent cols remain w/ parent on the originating
- # mapper
- is_(B.a_data.property.parent, inspect(A))
- is_(B.b_data.property.parent, inspect(B))
-
def test_override_implicit(self):
# this is originally [ticket:1111].
# the pattern here is now disallowed by [ticket:1892]
assert sess.get(Sub, s1.base_id).data == "this is base"
+class OverrideColKeyTestDeclarative(fixtures.TestBase):
+ """test overriding of column attributes."""
+
+ def test_subclass_renames_superclass_col_single_inh(self, decl_base):
+ """tested as part of #8705.
+
+ The step where we configure columns mapped to specific keys must
+ take place even if the given column is already in _columntoproperty,
+ as would be the case if the superclass maps that column already.
+
+ """
+
+ class A(decl_base):
+ __tablename__ = "a"
+
+ id = Column(Integer, primary_key=True)
+ a_data = Column(String)
+
+ class B(A):
+ b_data = column_property(A.__table__.c.a_data)
+
+ is_(A.a_data.property.columns[0], A.__table__.c.a_data)
+ is_(B.a_data.property.columns[0], A.__table__.c.a_data)
+ is_(B.b_data.property.columns[0], A.__table__.c.a_data)
+
+ def test_subsubclass_groups_super_cols(self, decl_base):
+ """tested for #9220, which is a regression caused by #8705."""
+
+ class BaseClass(decl_base):
+ __tablename__ = "basetable"
+
+ id = Column(Integer, primary_key=True)
+ name = Column(String(50))
+ type = Column(String(20))
+
+ __mapper_args__ = {
+ "polymorphic_on": type,
+ "polymorphic_identity": "base",
+ }
+
+ class SubClass(BaseClass):
+ __tablename__ = "subtable"
+
+ id = column_property(
+ Column(Integer, primary_key=True), BaseClass.id
+ )
+ base_id = Column(Integer, ForeignKey("basetable.id"))
+ subdata1 = Column(String(50))
+
+ __mapper_args__ = {"polymorphic_identity": "sub"}
+
+ class SubSubClass(SubClass):
+ __tablename__ = "subsubtable"
+
+ id = column_property(
+ Column(Integer, ForeignKey("subtable.id"), primary_key=True),
+ SubClass.id,
+ BaseClass.id,
+ )
+ subdata2 = Column(String(50))
+
+ __mapper_args__ = {"polymorphic_identity": "subsub"}
+
+ is_(SubSubClass.id.property.columns[0], SubSubClass.__table__.c.id)
+ is_(
+ SubSubClass.id.property.columns[1]._deannotate(),
+ SubClass.__table__.c.id,
+ )
+ is_(
+ SubSubClass.id.property.columns[2]._deannotate(),
+ BaseClass.__table__.c.id,
+ )
+
+ def test_column_setup_sanity_check(self, decl_base):
+ class A(decl_base):
+ __tablename__ = "a"
+
+ id = Column(Integer, primary_key=True)
+ a_data = Column(String)
+
+ class B(A):
+ __tablename__ = "b"
+ id = Column(Integer, ForeignKey("a.id"), primary_key=True)
+ b_data = Column(String)
+
+ is_(A.id.property.parent, inspect(A))
+ # overlapping cols get a new prop on the subclass, with cols merged
+ is_(B.id.property.parent, inspect(B))
+ eq_(B.id.property.columns, [B.__table__.c.id, A.__table__.c.id])
+
+ # totally independent cols remain w/ parent on the originating
+ # mapper
+ is_(B.a_data.property.parent, inspect(A))
+ is_(B.b_data.property.parent, inspect(B))
+
+
class OptimizedLoadTest(fixtures.MappedTest):
"""tests for the "optimized load" routine."""
primary_key=True,
),
Column(
- "reports_to_id", Integer, ForeignKey("engineers.person_id")
+ "reports_to_id",
+ Integer,
+ ForeignKey("engineers.person_id"),
),
)
session.commit()
@testing.requires.sane_rowcount
+ @provision.allow_stale_updates
def test_child_row_switch_two(self):
P = self.classes.P
self.assert_sql_execution(testing.db, sess.flush, *statements)
@testing.requires.independent_connections
+ @provision.allow_stale_updates
def test_concurrent_mod_err_expire_on_commit(self):
sess = self._fixture()