import time
from ... import exc
+from ... import inspect
from ... import text
from ...testing.provision import create_db
+from ...testing.provision import drop_all_schema_objects_post_tables
+from ...testing.provision import drop_all_schema_objects_pre_tables
from ...testing.provision import drop_db
from ...testing.provision import log
from ...testing.provision import set_default_schema_on_connection
cursor.execute("SET SESSION search_path='%s'" % schema_name)
cursor.close()
dbapi_connection.autocommit = existing_autocommit
+
+
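+# prepared (two-phase) transactions hold locks that would block DROP
+# TABLE, so they are rolled back before any schema teardown begins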
+@drop_all_schema_objects_pre_tables.for_db("postgresql")
+def drop_all_schema_objects_pre_tables(cfg, eng):
+ with eng.connect().execution_options(isolation_level="AUTOCOMMIT") as conn:
+        for xid in conn.execute(
+            text("select gid from pg_prepared_xacts")
+        ).scalars():
+            conn.execute(text("ROLLBACK PREPARED '%s'" % xid))
+
+
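+# ENUM types can only be dropped once no table column depends on them,
+# hence the post-tables phase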
+@drop_all_schema_objects_post_tables.for_db("postgresql")
+def drop_all_schema_objects_post_tables(cfg, eng):
+ from sqlalchemy.dialects import postgresql
+
+ inspector = inspect(eng)
+ with eng.begin() as conn:
+ for enum in inspector.get_enums("*"):
+ conn.execute(
+ postgresql.DropEnumType(
+ postgresql.ENUM(name=enum["name"], schema=enum["schema"])
+ )
+ )
def is_true(a, msg=None):
- is_(a, True, msg=msg)
+ is_(bool(a), True, msg=msg)
def is_false(a, msg=None):
- is_(a, False, msg=msg)
+ is_(bool(a), False, msg=msg)
def is_(a, b, msg=None):
if isinstance(engine.pool, pool.QueuePool):
engine.pool._timeout = 0
- engine.pool._max_overflow = 0
+ engine.pool._max_overflow = 5
if use_reaper:
testing_reaper.add_engine(engine)
from . import config
from . import schema
from .engines import drop_all_tables
+from .engines import testing_engine
from .entities import BasicEntity
from .entities import ComparableEntity
from .entities import ComparableMixin # noqa
from ..orm.decl_api import DeclarativeMeta
from ..schema import sort_tables_and_constraints
-
# whether or not we use unittest changes things dramatically,
# as far as how pytest collection works.
trans.rollback()
conn.close()
- # propose a replacement for @testing.provide_metadata.
- # the problem with this is that TablesTest below has a ".metadata"
- # attribute already which is accessed directly as part of the
- # @testing.provide_metadata pattern. Might need to call this _metadata
- # for it to be useful.
- # @config.fixture()
- # def metadata(self):
- # """Provide bound MetaData for a single test, dropping afterwards."""
- #
- # from . import engines
- # metadata = schema.MetaData(config.db)
- # try:
- # yield metadata
- # finally:
- # engines.drop_all_tables(metadata, config.db)
+ @config.fixture()
+ def future_connection(self):
+
+ eng = testing_engine(future=True)
+ conn = eng.connect()
+ trans = conn.begin()
+ try:
+ yield conn
+ finally:
+ if trans.is_active:
+ trans.rollback()
+ conn.close()
+
+ @config.fixture()
+ def metadata(self):
+        """Provide MetaData for a single test, dropping its tables afterwards."""
+
+ from . import engines
+ from ..sql import schema
+
+ metadata = schema.MetaData()
+ try:
+ yield metadata
+ finally:
+ engines.drop_all_tables(metadata, config.db)
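+
+    # usage sketch (hypothetical test, illustration only): the fixture
+    # yields a plain MetaData and drops whatever was created on it:
+    #
+    #     def test_something(self, metadata, connection):
+    #         t = Table("t", metadata, Column("x", Integer))
+    #         t.create(connection)  # dropped by the fixture teardown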
class FutureEngineMixin(object):
run_dispose_bind = None
bind = None
- metadata = None
+ _tables_metadata = None
tables = None
other = None
sequences = None
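+    # renamed from ".metadata": the class-level tables MetaData must not
+    # collide with the per-test "metadata" fixture introduced above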
+ @property
+ def tables_test_metadata(self):
+ return self._tables_metadata
+
@classmethod
def setup_class(cls):
cls._init_class()
cls.sequences = adict()
cls.bind = cls.setup_bind()
- cls.metadata = sa.MetaData()
- cls.metadata.bind = cls.bind
+ cls._tables_metadata = sa.MetaData()
+ cls._tables_metadata.bind = cls.bind
@classmethod
def _setup_once_inserts(cls):
@classmethod
def _setup_once_tables(cls):
if cls.run_define_tables == "once":
- cls.define_tables(cls.metadata)
+ cls.define_tables(cls._tables_metadata)
if cls.run_create_tables == "once":
- cls.metadata.create_all(cls.bind)
- cls.tables.update(cls.metadata.tables)
- cls.sequences.update(cls.metadata._sequences)
+ cls._tables_metadata.create_all(cls.bind)
+ cls.tables.update(cls._tables_metadata.tables)
+ cls.sequences.update(cls._tables_metadata._sequences)
def _setup_each_tables(self):
if self.run_define_tables == "each":
- self.define_tables(self.metadata)
+ self.define_tables(self._tables_metadata)
if self.run_create_tables == "each":
- self.metadata.create_all(self.bind)
- self.tables.update(self.metadata.tables)
- self.sequences.update(self.metadata._sequences)
+ self._tables_metadata.create_all(self.bind)
+ self.tables.update(self._tables_metadata.tables)
+ self.sequences.update(self._tables_metadata._sequences)
elif self.run_create_tables == "each":
- self.metadata.create_all(self.bind)
+ self._tables_metadata.create_all(self.bind)
def _setup_each_inserts(self):
if self.run_inserts == "each":
if self.run_define_tables == "each":
self.tables.clear()
if self.run_create_tables == "each":
- drop_all_tables(self.metadata, self.bind)
- self.metadata.clear()
+ drop_all_tables(self._tables_metadata, self.bind)
+ self._tables_metadata.clear()
elif self.run_create_tables == "each":
- drop_all_tables(self.metadata, self.bind)
+ drop_all_tables(self._tables_metadata, self.bind)
# no need to run deletes if tables are recreated on setup
if (
[
t
for (t, fks) in sort_tables_and_constraints(
- self.metadata.tables.values()
+ self._tables_metadata.tables.values()
)
if t is not None
]
@classmethod
def _teardown_once_metadata_bind(cls):
if cls.run_create_tables:
- drop_all_tables(cls.metadata, cls.bind)
+ drop_all_tables(cls._tables_metadata, cls.bind)
if cls.run_dispose_bind == "once":
cls.dispose_bind(cls.bind)
- cls.metadata.bind = None
+ cls._tables_metadata.bind = None
if cls.run_setup_bind is not None:
cls.bind = None
headers[table] = data[0]
rows[table] = data[1:]
for table, fks in sort_tables_and_constraints(
- cls.metadata.tables.values()
+ cls._tables_metadata.tables.values()
):
if table is None:
continue
__table_cls__ = schema.Table
_DeclBase = declarative_base(
- metadata=cls.metadata,
+ metadata=cls._tables_metadata,
metaclass=FindFixtureDeclarative,
cls=DeclarativeBasic,
)
# classes
super(DeclarativeMappedTest, cls)._with_register_classes(fn)
- if cls.metadata.tables and cls.run_create_tables:
- cls.metadata.create_all(config.db)
+ if cls._tables_metadata.tables and cls.run_create_tables:
+ cls._tables_metadata.create_all(config.db)
class ComputedReflectionFixtureTest(TablesTest):
class Foo(object):
- def __init__(self, moredata):
+ def __init__(self, moredata, stuff="im stuff"):
self.data = "im data"
- self.stuff = "im stuff"
+ self.stuff = stuff
self.moredata = moredata
__hash__ = object.__hash__
@post
def _prep_testing_database(options, file_config):
- from sqlalchemy.testing import config, util
- from sqlalchemy.testing.exclusions import against
- from sqlalchemy import schema, inspect
+ from sqlalchemy.testing import config
if options.dropfirst:
- for cfg in config.Config.all_configs():
- e = cfg.db
-
- # TODO: this has to be part of provision.py in postgresql
- if against(cfg, "postgresql"):
- with e.connect().execution_options(
- isolation_level="AUTOCOMMIT"
- ) as conn:
- for xid in conn.execute(
- "select gid from pg_prepared_xacts"
- ).scalars():
- conn.execute("ROLLBACK PREPARED '%s'" % xid)
-
- inspector = inspect(e)
- try:
- view_names = inspector.get_view_names()
- except NotImplementedError:
- pass
- else:
- for vname in view_names:
- e.execute(
- schema._DropView(
- schema.Table(vname, schema.MetaData())
- )
- )
+ from sqlalchemy.testing import provision
- if config.requirements.schemas.enabled_for_config(cfg):
- try:
- view_names = inspector.get_view_names(schema="test_schema")
- except NotImplementedError:
- pass
- else:
- for vname in view_names:
- e.execute(
- schema._DropView(
- schema.Table(
- vname,
- schema.MetaData(),
- schema="test_schema",
- )
- )
- )
-
- util.drop_all_tables(e, inspector)
-
- if config.requirements.schemas.enabled_for_config(cfg):
- util.drop_all_tables(e, inspector, schema=cfg.test_schema)
-
- # TODO: this has to be part of provision.py in postgresql
- if against(cfg, "postgresql"):
- from sqlalchemy.dialects import postgresql
-
- for enum in inspector.get_enums("*"):
- e.execute(
- postgresql.DropEnumType(
- postgresql.ENUM(
- name=enum["name"], schema=enum["schema"]
- )
- )
- )
-
- # TODO: need to do a get_sequences and drop them also after tables
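+    # the per-dialect teardown formerly inlined here (prepared xacts,
+    # ENUM types, sequences) now lives in testing/provision.py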
+ for cfg in config.Config.all_configs():
+ provision.drop_all_schema_objects(cfg, cfg.db)
@post
from . import config
from . import engines
+from . import util
from .. import exc
+from .. import inspect
from ..engine import url as sa_url
+from ..sql import ddl
+from ..sql import schema
from ..util import compat
+
log = logging.getLogger(__name__)
FOLLOWER_IDENT = None
cfg.db.dispose()
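+
+# no-op defaults; dialects override these hooks via
+# @drop_all_schema_objects_pre_tables.for_db("<name>") and friends, as
+# the postgresql provision module does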
+@register.init
+def drop_all_schema_objects_pre_tables(cfg, eng):
+ pass
+
+
+@register.init
+def drop_all_schema_objects_post_tables(cfg, eng):
+ pass
+
+
+def drop_all_schema_objects(cfg, eng):
+
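+    # teardown order: dialect pre-table hooks, then views (which depend
+    # on tables), then the tables themselves, then dialect post-table
+    # hooks (e.g. PG ENUM types), then sequences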
+ drop_all_schema_objects_pre_tables(cfg, eng)
+
+ inspector = inspect(eng)
+ try:
+ view_names = inspector.get_view_names()
+ except NotImplementedError:
+ pass
+ else:
+ with eng.begin() as conn:
+ for vname in view_names:
+ conn.execute(
+ ddl._DropView(schema.Table(vname, schema.MetaData()))
+ )
+
+ if config.requirements.schemas.enabled_for_config(cfg):
+ try:
+ view_names = inspector.get_view_names(schema="test_schema")
+ except NotImplementedError:
+ pass
+ else:
+ with eng.begin() as conn:
+ for vname in view_names:
+ conn.execute(
+ ddl._DropView(
+ schema.Table(
+ vname,
+ schema.MetaData(),
+ schema="test_schema",
+ )
+ )
+ )
+
+ util.drop_all_tables(eng, inspector)
+
+ if config.requirements.schemas.enabled_for_config(cfg):
+ util.drop_all_tables(eng, inspector, schema=cfg.test_schema)
+
+ drop_all_schema_objects_post_tables(cfg, eng)
+
+ if config.requirements.sequences.enabled_for_config(cfg):
+ with eng.begin() as conn:
+ for seq in inspector.get_sequence_names():
+ conn.execute(ddl.DropSequence(schema.Sequence(seq)))
+
+
@register.init
def create_db(cfg, eng, ident):
"""Dynamically create a database for testing.
]
for name in names:
query = "CREATE VIEW %s AS SELECT * FROM %s" % (
- testing.db.dialect.identifier_preparer.quote(
+ config.db.dialect.identifier_preparer.quote(
"view %s" % name
),
- testing.db.dialect.identifier_preparer.quote(name),
+ config.db.dialect.identifier_preparer.quote(name),
)
event.listen(metadata, "after_create", DDL(query))
"before_drop",
DDL(
"DROP VIEW %s"
- % testing.db.dialect.identifier_preparer.quote(
+ % config.db.dialect.identifier_preparer.quote(
"view %s" % name
)
),
@quote_fixtures
def test_get_table_options(self, name):
- insp = inspect(testing.db)
+ insp = inspect(config.db)
insp.get_table_options(name)
@quote_fixtures
@testing.requires.view_column_reflection
def test_get_view_definition(self, name):
- insp = inspect(testing.db)
+ insp = inspect(config.db)
assert insp.get_view_definition("view %s" % name)
@quote_fixtures
def test_get_columns(self, name):
- insp = inspect(testing.db)
+ insp = inspect(config.db)
assert insp.get_columns(name)
@quote_fixtures
def test_get_pk_constraint(self, name):
- insp = inspect(testing.db)
+ insp = inspect(config.db)
assert insp.get_pk_constraint(name)
@quote_fixtures
def test_get_foreign_keys(self, name):
- insp = inspect(testing.db)
+ insp = inspect(config.db)
assert insp.get_foreign_keys(name)
@quote_fixtures
def test_get_indexes(self, name):
- insp = inspect(testing.db)
+ insp = inspect(config.db)
assert insp.get_indexes(name)
@quote_fixtures
@testing.requires.unique_constraint_reflection
def test_get_unique_constraints(self, name):
- insp = inspect(testing.db)
+ insp = inspect(config.db)
assert insp.get_unique_constraints(name)
@quote_fixtures
@testing.requires.comment_reflection
def test_get_table_comment(self, name):
- insp = inspect(testing.db)
+ insp = inspect(config.db)
assert insp.get_table_comment(name)
@quote_fixtures
@testing.requires.check_constraint_reflection
def test_get_check_constraints(self, name):
- insp = inspect(testing.db)
+ insp = inspect(config.db)
assert insp.get_check_constraints(name)
@testing.requires.schema_reflection
def test_get_schema_names(self):
- insp = inspect(testing.db)
+ insp = inspect(self.bind)
self.assert_(testing.config.test_schema in insp.get_schema_names())
@testing.requires.schema_reflection
def test_get_default_schema_name(self):
- insp = inspect(testing.db)
- eq_(insp.default_schema_name, testing.db.dialect.default_schema_name)
-
- @testing.provide_metadata
- def _test_get_table_names(
- self, schema=None, table_type="table", order_by=None
+ insp = inspect(self.bind)
+ eq_(insp.default_schema_name, self.bind.dialect.default_schema_name)
+
+ @testing.combinations(
+ (None, True, False, False),
+ (None, True, False, True, testing.requires.schemas),
+ ("foreign_key", True, False, False),
+ (None, False, True, False),
+ (None, False, True, True, testing.requires.schemas),
+ (None, True, True, False),
+ (None, True, True, True, testing.requires.schemas),
+ argnames="order_by,include_plain,include_views,use_schema",
+ )
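+    # each tuple supplies (order_by, include_plain, include_views,
+    # use_schema); a trailing requirements rule limits that combination
+    # to backends that support schemas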
+ def test_get_table_names(
+ self, connection, order_by, include_plain, include_views, use_schema
):
+
+ if use_schema:
+ schema = config.test_schema
+ else:
+ schema = None
+
_ignore_tables = [
"comment_test",
"noncol_idx_test_pk",
"remote_table",
"remote_table_2",
]
- meta = self.metadata
- insp = inspect(meta.bind)
+ insp = inspect(connection)
- if table_type == "view":
+ if include_views:
table_names = insp.get_view_names(schema)
table_names.sort()
answer = ["email_addresses_v", "users_v"]
eq_(sorted(table_names), answer)
- else:
+
+ if include_plain:
if order_by:
tables = [
rec[0]
temp_table_names = insp.get_temp_view_names()
eq_(sorted(temp_table_names), ["user_tmp_v"])
- @testing.requires.table_reflection
- def test_get_table_names(self):
- self._test_get_table_names()
-
- @testing.requires.table_reflection
- @testing.requires.foreign_key_constraint_reflection
- def test_get_table_names_fks(self):
- self._test_get_table_names(order_by="foreign_key")
-
@testing.requires.comment_reflection
def test_get_comments(self):
self._test_get_comments()
self._test_get_comments(testing.config.test_schema)
def _test_get_comments(self, schema=None):
- insp = inspect(testing.db)
+ insp = inspect(self.bind)
eq_(
insp.get_table_comment("comment_test", schema=schema),
],
)
- @testing.requires.table_reflection
- @testing.requires.schemas
- def test_get_table_names_with_schema(self):
- self._test_get_table_names(testing.config.test_schema)
-
- @testing.requires.view_column_reflection
- def test_get_view_names(self):
- self._test_get_table_names(table_type="view")
-
- @testing.requires.view_column_reflection
- @testing.requires.schemas
- def test_get_view_names_with_schema(self):
- self._test_get_table_names(
- testing.config.test_schema, table_type="view"
- )
-
- @testing.requires.table_reflection
- @testing.requires.view_column_reflection
- def test_get_tables_and_views(self):
- self._test_get_table_names()
- self._test_get_table_names(table_type="view")
+ @testing.combinations(
+ (False, False),
+ (False, True, testing.requires.schemas),
+ (True, False),
+        (True, True, testing.requires.schemas),
+ argnames="use_views,use_schema",
+ )
+ def test_get_columns(self, connection, use_views, use_schema):
+
+ if use_schema:
+ schema = config.test_schema
+ else:
+ schema = None
- def _test_get_columns(self, schema=None, table_type="table"):
- meta = MetaData(testing.db)
users, addresses = (self.tables.users, self.tables.email_addresses)
- table_names = ["users", "email_addresses"]
- if table_type == "view":
+ if use_views:
table_names = ["users_v", "email_addresses_v"]
- insp = inspect(meta.bind)
+ else:
+ table_names = ["users", "email_addresses"]
+
+ insp = inspect(connection)
for table_name, table in zip(table_names, (users, addresses)):
schema_name = schema
cols = insp.get_columns(table_name, schema=schema_name)
if not col.primary_key:
assert cols[i]["default"] is None
- @testing.requires.table_reflection
- def test_get_columns(self):
- self._test_get_columns()
-
- @testing.provide_metadata
- def _type_round_trip(self, *types):
- t = Table(
- "t",
- self.metadata,
- *[Column("t%d" % i, type_) for i, type_ in enumerate(types)]
- )
- t.create()
-
- return [
- c["type"] for c in inspect(self.metadata.bind).get_columns("t")
- ]
-
- @testing.requires.table_reflection
- def test_numeric_reflection(self):
- for typ in self._type_round_trip(sql_types.Numeric(18, 5)):
- assert isinstance(typ, sql_types.Numeric)
- eq_(typ.precision, 18)
- eq_(typ.scale, 5)
-
- @testing.requires.table_reflection
- def test_varchar_reflection(self):
- typ = self._type_round_trip(sql_types.String(52))[0]
- assert isinstance(typ, sql_types.String)
- eq_(typ.length, 52)
-
- @testing.requires.table_reflection
- @testing.provide_metadata
- def test_nullable_reflection(self):
- t = Table(
- "t",
- self.metadata,
- Column("a", Integer, nullable=True),
- Column("b", Integer, nullable=False),
- )
- t.create()
- eq_(
- dict(
- (col["name"], col["nullable"])
- for col in inspect(self.metadata.bind).get_columns("t")
- ),
- {"a": True, "b": False},
- )
-
- @testing.requires.table_reflection
- @testing.requires.schemas
- def test_get_columns_with_schema(self):
- self._test_get_columns(schema=testing.config.test_schema)
-
@testing.requires.temp_table_reflection
def test_get_temp_table_columns(self):
table_name = get_temp_table_name(
- config, config.db, "user_tmp_%s" % config.ident
+ config, self.bind, "user_tmp_%s" % config.ident
)
- meta = MetaData(self.bind)
user_tmp = self.tables[table_name]
- insp = inspect(meta.bind)
+ insp = inspect(self.bind)
cols = insp.get_columns(table_name)
self.assert_(len(cols) > 0, len(cols))
cols = insp.get_columns("user_tmp_v")
eq_([col["name"] for col in cols], ["id", "name", "foo"])
- @testing.requires.view_column_reflection
- def test_get_view_columns(self):
- self._test_get_columns(table_type="view")
-
- @testing.requires.view_column_reflection
- @testing.requires.schemas
- def test_get_view_columns_with_schema(self):
- self._test_get_columns(
- schema=testing.config.test_schema, table_type="view"
- )
+ @testing.combinations(
+ (False,), (True, testing.requires.schemas), argnames="use_schema"
+ )
+ @testing.requires.primary_key_constraint_reflection
+ def test_get_pk_constraint(self, connection, use_schema):
+ if use_schema:
+ schema = testing.config.test_schema
+ else:
+ schema = None
- @testing.provide_metadata
- def _test_get_pk_constraint(self, schema=None):
- meta = self.metadata
users, addresses = self.tables.users, self.tables.email_addresses
- insp = inspect(meta.bind)
+ insp = inspect(connection)
users_cons = insp.get_pk_constraint(users.name, schema=schema)
users_pkeys = users_cons["constrained_columns"]
with testing.requires.reflects_pk_names.fail_if():
eq_(addr_cons["name"], "email_ad_pk")
- @testing.requires.primary_key_constraint_reflection
- def test_get_pk_constraint(self):
- self._test_get_pk_constraint()
-
- @testing.requires.table_reflection
- @testing.requires.primary_key_constraint_reflection
- @testing.requires.schemas
- def test_get_pk_constraint_with_schema(self):
- self._test_get_pk_constraint(schema=testing.config.test_schema)
+ @testing.combinations(
+ (False,), (True, testing.requires.schemas), argnames="use_schema"
+ )
+ @testing.requires.foreign_key_constraint_reflection
+ def test_get_foreign_keys(self, connection, use_schema):
+ if use_schema:
+ schema = config.test_schema
+ else:
+ schema = None
- @testing.provide_metadata
- def _test_get_foreign_keys(self, schema=None):
- meta = self.metadata
users, addresses = (self.tables.users, self.tables.email_addresses)
- insp = inspect(meta.bind)
+ insp = inspect(connection)
expected_schema = schema
# users
eq_(fkey1["referred_columns"], ["user_id"])
eq_(fkey1["constrained_columns"], ["remote_user_id"])
- @testing.requires.foreign_key_constraint_reflection
- def test_get_foreign_keys(self):
- self._test_get_foreign_keys()
-
- @testing.requires.foreign_key_constraint_reflection
- @testing.requires.schemas
- def test_get_foreign_keys_with_schema(self):
- self._test_get_foreign_keys(schema=testing.config.test_schema)
-
@testing.requires.cross_schema_fk_reflection
@testing.requires.schemas
def test_get_inter_schema_foreign_keys(self):
local_table, remote_table, remote_table_2 = self.tables(
- "%s.local_table" % testing.db.dialect.default_schema_name,
+ "%s.local_table" % self.bind.dialect.default_schema_name,
"%s.remote_table" % testing.config.test_schema,
"%s.remote_table_2" % testing.config.test_schema,
)
- insp = inspect(config.db)
+ insp = inspect(self.bind)
local_fkeys = insp.get_foreign_keys(local_table.name)
eq_(len(local_fkeys), 1)
assert fkey2["referred_schema"] in (
None,
- testing.db.dialect.default_schema_name,
+ self.bind.dialect.default_schema_name,
)
eq_(fkey2["referred_table"], local_table.name)
eq_(fkey2["referred_columns"], ["id"])
eq_(fkey2["constrained_columns"], ["local_id"])
- @testing.requires.foreign_key_constraint_option_reflection_ondelete
- def test_get_foreign_key_options_ondelete(self):
- self._test_get_foreign_key_options(ondelete="CASCADE")
-
- @testing.requires.foreign_key_constraint_option_reflection_onupdate
- def test_get_foreign_key_options_onupdate(self):
- self._test_get_foreign_key_options(onupdate="SET NULL")
-
- @testing.requires.foreign_key_constraint_option_reflection_onupdate
- def test_get_foreign_key_options_onupdate_noaction(self):
- self._test_get_foreign_key_options(onupdate="NO ACTION", expected={})
-
- @testing.requires.fk_constraint_option_reflection_ondelete_noaction
- def test_get_foreign_key_options_ondelete_noaction(self):
- self._test_get_foreign_key_options(ondelete="NO ACTION", expected={})
-
- @testing.requires.fk_constraint_option_reflection_onupdate_restrict
- def test_get_foreign_key_options_onupdate_restrict(self):
- self._test_get_foreign_key_options(onupdate="RESTRICT")
-
- @testing.requires.fk_constraint_option_reflection_ondelete_restrict
- def test_get_foreign_key_options_ondelete_restrict(self):
- self._test_get_foreign_key_options(ondelete="RESTRICT")
-
- @testing.provide_metadata
- def _test_get_foreign_key_options(self, expected=None, **options):
- meta = self.metadata
-
- if expected is None:
- expected = options
-
- Table(
- "x",
- meta,
- Column("id", Integer, primary_key=True),
- test_needs_fk=True,
- )
-
- Table(
- "table",
- meta,
- Column("id", Integer, primary_key=True),
- Column("x_id", Integer, sa.ForeignKey("x.id", name="xid")),
- Column("test", String(10)),
- test_needs_fk=True,
- )
-
- Table(
- "user",
- meta,
- Column("id", Integer, primary_key=True),
- Column("name", String(50), nullable=False),
- Column("tid", Integer),
- sa.ForeignKeyConstraint(
- ["tid"], ["table.id"], name="myfk", **options
- ),
- test_needs_fk=True,
- )
-
- meta.create_all()
-
- insp = inspect(meta.bind)
-
- # test 'options' is always present for a backend
- # that can reflect these, since alembic looks for this
- opts = insp.get_foreign_keys("table")[0]["options"]
-
- eq_(dict((k, opts[k]) for k in opts if opts[k]), {})
-
- opts = insp.get_foreign_keys("user")[0]["options"]
- eq_(opts, expected)
- # eq_(dict((k, opts[k]) for k in opts if opts[k]), expected)
-
def _assert_insp_indexes(self, indexes, expected_indexes):
index_names = [d["name"] for d in indexes]
for e_index in expected_indexes:
for key in e_index:
eq_(e_index[key], index[key])
- @testing.provide_metadata
- def _test_get_indexes(self, schema=None):
- meta = self.metadata
+ @testing.combinations(
+ (False,), (True, testing.requires.schemas), argnames="use_schema"
+ )
+ def test_get_indexes(self, connection, use_schema):
+
+ if use_schema:
+ schema = config.test_schema
+ else:
+ schema = None
# The database may decide to create indexes for foreign keys, etc.
# so there may be more indexes than expected.
- insp = inspect(meta.bind)
+        insp = inspect(connection)
indexes = insp.get_indexes("users", schema=schema)
expected_indexes = [
{
]
self._assert_insp_indexes(indexes, expected_indexes)
+ @testing.combinations(
+ ("noncol_idx_test_nopk", "noncol_idx_nopk"),
+ ("noncol_idx_test_pk", "noncol_idx_pk"),
+ argnames="tname,ixname",
+ )
@testing.requires.index_reflection
- def test_get_indexes(self):
- self._test_get_indexes()
-
- @testing.requires.index_reflection
- @testing.requires.schemas
- def test_get_indexes_with_schema(self):
- self._test_get_indexes(schema=testing.config.test_schema)
-
- @testing.provide_metadata
- def _test_get_noncol_index(self, tname, ixname):
- meta = self.metadata
- insp = inspect(meta.bind)
+ @testing.requires.indexes_with_ascdesc
+ def test_get_noncol_index(self, connection, tname, ixname):
+ insp = inspect(connection)
indexes = insp.get_indexes(tname)
# reflecting an index that has "x DESC" in it as the column.
expected_indexes = [{"unique": False, "name": ixname}]
self._assert_insp_indexes(indexes, expected_indexes)
- t = Table(tname, meta, autoload_with=meta.bind)
+ t = Table(tname, MetaData(), autoload_with=connection)
eq_(len(t.indexes), 1)
is_(list(t.indexes)[0].table, t)
eq_(list(t.indexes)[0].name, ixname)
- @testing.requires.index_reflection
- @testing.requires.indexes_with_ascdesc
- def test_get_noncol_index_no_pk(self):
- self._test_get_noncol_index("noncol_idx_test_nopk", "noncol_idx_nopk")
-
- @testing.requires.index_reflection
- @testing.requires.indexes_with_ascdesc
- def test_get_noncol_index_pk(self):
- self._test_get_noncol_index("noncol_idx_test_pk", "noncol_idx_pk")
-
- @testing.requires.indexes_with_expressions
- @testing.provide_metadata
- def test_reflect_expression_based_indexes(self):
- t = Table(
- "t",
- self.metadata,
- Column("x", String(30)),
- Column("y", String(30)),
- )
-
- Index("t_idx", func.lower(t.c.x), func.lower(t.c.y))
-
- Index("t_idx_2", t.c.x)
-
- self.metadata.create_all(testing.db)
-
- insp = inspect(testing.db)
-
- expected = [
- {"name": "t_idx_2", "column_names": ["x"], "unique": False}
- ]
- if testing.requires.index_reflects_included_columns.enabled:
- expected[0]["include_columns"] = []
-
- with expect_warnings(
- "Skipped unsupported reflection of expression-based index t_idx"
- ):
- eq_(
- insp.get_indexes("t"),
- expected,
- )
-
- @testing.requires.index_reflects_included_columns
- @testing.provide_metadata
- def test_reflect_covering_index(self):
- t = Table(
- "t",
- self.metadata,
- Column("x", String(30)),
- Column("y", String(30)),
- )
- idx = Index("t_idx", t.c.x)
- idx.dialect_options[testing.db.name]["include"] = ["y"]
-
- self.metadata.create_all(testing.db)
-
- insp = inspect(testing.db)
-
- eq_(
- insp.get_indexes("t"),
- [
- {
- "name": "t_idx",
- "column_names": ["x"],
- "include_columns": ["y"],
- "unique": False,
- }
- ],
- )
-
- @testing.requires.unique_constraint_reflection
- def test_get_unique_constraints(self):
- self._test_get_unique_constraints()
-
@testing.requires.temp_table_reflection
@testing.requires.unique_constraint_reflection
def test_get_temp_table_unique_constraints(self):
expected,
)
+ @testing.combinations(
+ (True, testing.requires.schemas), (False,), argnames="use_schema"
+ )
@testing.requires.unique_constraint_reflection
- @testing.requires.schemas
- def test_get_unique_constraints_with_schema(self):
- self._test_get_unique_constraints(schema=testing.config.test_schema)
-
- @testing.provide_metadata
- def _test_get_unique_constraints(self, schema=None):
+ def test_get_unique_constraints(self, metadata, connection, use_schema):
# SQLite dialect needs to parse the names of the constraints
# separately from what it gets from PRAGMA index_list(), and
# then matches them up. so same set of column_names in two
# constraints will confuse it. Perhaps we should no longer
# bother with index_list() here since we have the whole
# CREATE TABLE?
+
+ if use_schema:
+ schema = config.test_schema
+ else:
+ schema = None
uniques = sorted(
[
{"name": "unique_a", "column_names": ["a"]},
],
key=operator.itemgetter("name"),
)
- orig_meta = self.metadata
table = Table(
"testtbl",
- orig_meta,
+ metadata,
Column("a", sa.String(20)),
Column("b", sa.String(30)),
Column("c", sa.Integer),
table.append_constraint(
sa.UniqueConstraint(*uc["column_names"], name=uc["name"])
)
- orig_meta.create_all()
+ table.create(connection)
- inspector = inspect(orig_meta.bind)
+ inspector = inspect(connection)
reflected = sorted(
inspector.get_unique_constraints("testtbl", schema=schema),
key=operator.itemgetter("name"),
reflected = Table(
"testtbl",
reflected_metadata,
- autoload_with=orig_meta.bind,
+ autoload_with=connection,
schema=schema,
)
eq_(names_that_duplicate_index, idx_names)
eq_(uq_names, set())
- @testing.requires.check_constraint_reflection
- def test_get_check_constraints(self):
- self._test_get_check_constraints()
+    @testing.requires.view_reflection
+    @testing.combinations(
+ (False,), (True, testing.requires.schemas), argnames="use_schema"
+ )
+ def test_get_view_definition(self, connection, use_schema):
+ if use_schema:
+ schema = config.test_schema
+ else:
+ schema = None
+ view_name1 = "users_v"
+ view_name2 = "email_addresses_v"
+ insp = inspect(connection)
+ v1 = insp.get_view_definition(view_name1, schema=schema)
+ self.assert_(v1)
+ v2 = insp.get_view_definition(view_name2, schema=schema)
+ self.assert_(v2)
+
+ # why is this here if it's PG specific ?
+ @testing.combinations(
+ ("users", False),
+ ("users", True, testing.requires.schemas),
+ argnames="table_name,use_schema",
+ )
+ @testing.only_on("postgresql", "PG specific feature")
+ def test_get_table_oid(self, connection, table_name, use_schema):
+ if use_schema:
+ schema = config.test_schema
+ else:
+ schema = None
+ insp = inspect(connection)
+ oid = insp.get_table_oid(table_name, schema)
+ self.assert_(isinstance(oid, int))
+
+ @testing.requires.table_reflection
+ def test_autoincrement_col(self):
+ """test that 'autoincrement' is reflected according to sqla's policy.
+
+ Don't mark this test as unsupported for any backend !
+
+ (technically it fails with MySQL InnoDB since "id" comes before "id2")
+
+ A backend is better off not returning "autoincrement" at all,
+ instead of potentially returning "False" for an auto-incrementing
+ primary key column.
+
+ """
+
+ insp = inspect(self.bind)
+
+ for tname, cname in [
+ ("users", "user_id"),
+ ("email_addresses", "address_id"),
+ ("dingalings", "dingaling_id"),
+ ]:
+ cols = insp.get_columns(tname)
+ id_ = {c["name"]: c for c in cols}[cname]
+ assert id_.get("autoincrement", True)
+
+
+class ComponentReflectionTestExtra(fixtures.TestBase):
+ __backend__ = True
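+
+    # tests that build their own tables through the metadata/connection
+    # fixtures, split out of ComponentReflectionTest which depends on
+    # the class-level define_tables() setup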
+
+ @testing.combinations(
+ (True, testing.requires.schemas), (False,), argnames="use_schema"
+ )
@testing.requires.check_constraint_reflection
- @testing.requires.schemas
- def test_get_check_constraints_schema(self):
- self._test_get_check_constraints(schema=testing.config.test_schema)
+ def test_get_check_constraints(self, metadata, connection, use_schema):
+ if use_schema:
+ schema = config.test_schema
+ else:
+ schema = None
- @testing.provide_metadata
- def _test_get_check_constraints(self, schema=None):
- orig_meta = self.metadata
Table(
"sa_cc",
- orig_meta,
+ metadata,
Column("a", Integer()),
sa.CheckConstraint("a > 1 AND a < 5", name="cc1"),
sa.CheckConstraint("a = 1 OR (a > 2 AND a < 5)", name="cc2"),
schema=schema,
)
- orig_meta.create_all()
+ metadata.create_all(connection)
- inspector = inspect(orig_meta.bind)
+ inspector = inspect(connection)
reflected = sorted(
inspector.get_check_constraints("sa_cc", schema=schema),
key=operator.itemgetter("name"),
],
)
- @testing.provide_metadata
- def _test_get_view_definition(self, schema=None):
- meta = self.metadata
- view_name1 = "users_v"
- view_name2 = "email_addresses_v"
- insp = inspect(meta.bind)
- v1 = insp.get_view_definition(view_name1, schema=schema)
- self.assert_(v1)
- v2 = insp.get_view_definition(view_name2, schema=schema)
- self.assert_(v2)
+ @testing.requires.indexes_with_expressions
+ def test_reflect_expression_based_indexes(self, metadata, connection):
+ t = Table(
+ "t",
+ metadata,
+ Column("x", String(30)),
+ Column("y", String(30)),
+ )
- @testing.requires.view_reflection
- def test_get_view_definition(self):
- self._test_get_view_definition()
+ Index("t_idx", func.lower(t.c.x), func.lower(t.c.y))
- @testing.requires.view_reflection
- @testing.requires.schemas
- def test_get_view_definition_with_schema(self):
- self._test_get_view_definition(schema=testing.config.test_schema)
+ Index("t_idx_2", t.c.x)
- @testing.only_on("postgresql", "PG specific feature")
- @testing.provide_metadata
- def _test_get_table_oid(self, table_name, schema=None):
- meta = self.metadata
- insp = inspect(meta.bind)
- oid = insp.get_table_oid(table_name, schema)
- self.assert_(isinstance(oid, int))
+ metadata.create_all(connection)
- def test_get_table_oid(self):
- self._test_get_table_oid("users")
+ insp = inspect(connection)
- @testing.requires.schemas
- def test_get_table_oid_with_schema(self):
- self._test_get_table_oid("users", schema=testing.config.test_schema)
+ expected = [
+ {"name": "t_idx_2", "column_names": ["x"], "unique": False}
+ ]
+ if testing.requires.index_reflects_included_columns.enabled:
+ expected[0]["include_columns"] = []
+
+ with expect_warnings(
+ "Skipped unsupported reflection of expression-based index t_idx"
+ ):
+ eq_(
+ insp.get_indexes("t"),
+ expected,
+ )
+
+ @testing.requires.index_reflects_included_columns
+ def test_reflect_covering_index(self, metadata, connection):
+ t = Table(
+ "t",
+ metadata,
+ Column("x", String(30)),
+ Column("y", String(30)),
+ )
+ idx = Index("t_idx", t.c.x)
+ idx.dialect_options[connection.engine.name]["include"] = ["y"]
+
+ metadata.create_all(connection)
+
+ insp = inspect(connection)
+
+ eq_(
+ insp.get_indexes("t"),
+ [
+ {
+ "name": "t_idx",
+ "column_names": ["x"],
+ "include_columns": ["y"],
+ "unique": False,
+ }
+ ],
+ )
+
+ def _type_round_trip(self, connection, metadata, *types):
+ t = Table(
+ "t",
+ metadata,
+ *[Column("t%d" % i, type_) for i, type_ in enumerate(types)]
+ )
+ t.create(connection)
+
+ return [c["type"] for c in inspect(connection).get_columns("t")]
@testing.requires.table_reflection
- @testing.provide_metadata
- def test_autoincrement_col(self):
- """test that 'autoincrement' is reflected according to sqla's policy.
+ def test_numeric_reflection(self, connection, metadata):
+ for typ in self._type_round_trip(
+ connection, metadata, sql_types.Numeric(18, 5)
+ ):
+ assert isinstance(typ, sql_types.Numeric)
+ eq_(typ.precision, 18)
+ eq_(typ.scale, 5)
- Don't mark this test as unsupported for any backend !
+ @testing.requires.table_reflection
+ def test_varchar_reflection(self, connection, metadata):
+ typ = self._type_round_trip(
+ connection, metadata, sql_types.String(52)
+ )[0]
+ assert isinstance(typ, sql_types.String)
+ eq_(typ.length, 52)
- (technically it fails with MySQL InnoDB since "id" comes before "id2")
+ @testing.requires.table_reflection
+ def test_nullable_reflection(self, connection, metadata):
+ t = Table(
+ "t",
+ metadata,
+ Column("a", Integer, nullable=True),
+ Column("b", Integer, nullable=False),
+ )
+ t.create(connection)
+ eq_(
+ dict(
+ (col["name"], col["nullable"])
+ for col in inspect(connection).get_columns("t")
+ ),
+ {"a": True, "b": False},
+ )
- A backend is better off not returning "autoincrement" at all,
- instead of potentially returning "False" for an auto-incrementing
- primary key column.
+ @testing.combinations(
+ (
+ None,
+ "CASCADE",
+ None,
+ testing.requires.foreign_key_constraint_option_reflection_ondelete,
+ ),
+ (
+ None,
+ None,
+ "SET NULL",
+ testing.requires.foreign_key_constraint_option_reflection_onupdate,
+ ),
+ (
+ {},
+ None,
+ "NO ACTION",
+ testing.requires.foreign_key_constraint_option_reflection_onupdate,
+ ),
+ (
+ {},
+ "NO ACTION",
+ None,
+ testing.requires.fk_constraint_option_reflection_ondelete_noaction,
+ ),
+ (
+ None,
+ None,
+ "RESTRICT",
+ testing.requires.fk_constraint_option_reflection_onupdate_restrict,
+ ),
+ (
+ None,
+ "RESTRICT",
+ None,
+ testing.requires.fk_constraint_option_reflection_ondelete_restrict,
+ ),
+ argnames="expected,ondelete,onupdate",
+ )
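+    # expected=None means the reflected options should match exactly
+    # what was passed; expected={} covers the NO ACTION cases, where
+    # backends report no options at all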
+ def test_get_foreign_key_options(
+ self, connection, metadata, expected, ondelete, onupdate
+ ):
+ options = {}
+ if ondelete:
+ options["ondelete"] = ondelete
+ if onupdate:
+ options["onupdate"] = onupdate
- """
+ if expected is None:
+ expected = options
- meta = self.metadata
- insp = inspect(meta.bind)
+ Table(
+ "x",
+ metadata,
+ Column("id", Integer, primary_key=True),
+ test_needs_fk=True,
+ )
- for tname, cname in [
- ("users", "user_id"),
- ("email_addresses", "address_id"),
- ("dingalings", "dingaling_id"),
- ]:
- cols = insp.get_columns(tname)
- id_ = {c["name"]: c for c in cols}[cname]
- assert id_.get("autoincrement", True)
+ Table(
+ "table",
+ metadata,
+ Column("id", Integer, primary_key=True),
+ Column("x_id", Integer, sa.ForeignKey("x.id", name="xid")),
+ Column("test", String(10)),
+ test_needs_fk=True,
+ )
+
+ Table(
+ "user",
+ metadata,
+ Column("id", Integer, primary_key=True),
+ Column("name", String(50), nullable=False),
+ Column("tid", Integer),
+ sa.ForeignKeyConstraint(
+ ["tid"], ["table.id"], name="myfk", **options
+ ),
+ test_needs_fk=True,
+ )
+
+ metadata.create_all(connection)
+
+ insp = inspect(connection)
+
+ # test 'options' is always present for a backend
+ # that can reflect these, since alembic looks for this
+ opts = insp.get_foreign_keys("table")[0]["options"]
+
+ eq_(dict((k, opts[k]) for k in opts if opts[k]), {})
+
+ opts = insp.get_foreign_keys("user")[0]["options"]
+ eq_(opts, expected)
+ # eq_(dict((k, opts[k]) for k in opts if opts[k]), expected)
class NormalizedNameTest(fixtures.TablesTest):
m2 = MetaData()
t2_ref = Table(
- quoted_name("t2", quote=True), m2, autoload_with=testing.db
+ quoted_name("t2", quote=True), m2, autoload_with=config.db
)
t1_ref = m2.tables["t1"]
assert t2_ref.c.t1id.references(t1_ref.c.id)
m3 = MetaData()
m3.reflect(
- testing.db, only=lambda name, m: name.lower() in ("t1", "t2")
+ config.db, only=lambda name, m: name.lower() in ("t1", "t2")
)
assert m3.tables["t2"].c.t1id.references(m3.tables["t1"].c.id)
def test_get_table_names(self):
tablenames = [
t
- for t in inspect(testing.db).get_table_names()
+ for t in inspect(config.db).get_table_names()
if t.lower() in ("t1", "t2")
]
)
@testing.requires.primary_key_constraint_reflection
- @testing.provide_metadata
def test_pk_column_order(self):
# test for issue #5661
- meta = self.metadata
- insp = inspect(meta.bind)
+ insp = inspect(self.bind)
primary_key = insp.get_pk_constraint(self.tables.tb1.name)
eq_(primary_key.get("constrained_columns"), ["name", "id", "attr"])
@testing.requires.foreign_key_constraint_reflection
- @testing.provide_metadata
def test_fk_column_order(self):
# test for issue #5661
- meta = self.metadata
- insp = inspect(meta.bind)
+ insp = inspect(self.bind)
foreign_keys = insp.get_foreign_keys(self.tables.tb2.name)
eq_(len(foreign_keys), 1)
fkey1 = foreign_keys[0]
__all__ = (
"ComponentReflectionTest",
+ "ComponentReflectionTestExtra",
"QuotedNameArgumentTest",
"HasTableTest",
"HasIndexTest",
class _LiteralRoundTripFixture(object):
supports_whereclause = True
- @testing.provide_metadata
- def _literal_round_trip(self, type_, input_, output, filter_=None):
+ @testing.fixture
+ def literal_round_trip(self, metadata, connection):
"""test literal rendering """
# for literal, we test the literal render in an INSERT
# into a typed column. we can then SELECT it back as its
# official type; ideally we'd be able to use CAST here
# but MySQL in particular can't CAST fully
- t = Table("t", self.metadata, Column("x", type_))
- t.create()
- with testing.db.begin() as conn:
+ def run(type_, input_, output, filter_=None):
+ t = Table("t", metadata, Column("x", type_))
+ t.create(connection)
+
for value in input_:
ins = (
t.insert()
compile_kwargs=dict(literal_binds=True),
)
)
- conn.execute(ins)
+ connection.execute(ins)
if self.supports_whereclause:
stmt = t.select().where(t.c.x == literal(value))
dialect=testing.db.dialect,
compile_kwargs=dict(literal_binds=True),
)
- for row in conn.execute(stmt):
+ for row in connection.execute(stmt):
value = row[0]
if filter_ is not None:
value = filter_(value)
assert value in output
+ return run
+
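+# the fixture returns a callable; tests request it by name and invoke
+# it with the type plus input/output values, e.g.
+#
+#     def test_literal(self, literal_round_trip):
+#         literal_round_trip(String(40), ["some text"], ["some text"])
+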
class _UnicodeFixture(_LiteralRoundTripFixture, fixtures.TestBase):
__requires__ = ("unicode_data",)
row = connection.execute(select(unicode_table.c.unicode_data)).first()
eq_(row, (u(""),))
- def test_literal(self):
- self._literal_round_trip(self.datatype, [self.data], [self.data])
+ def test_literal(self, literal_round_trip):
+ literal_round_trip(self.datatype, [self.data], [self.data])
- def test_literal_non_ascii(self):
- self._literal_round_trip(
+ def test_literal_non_ascii(self, literal_round_trip):
+ literal_round_trip(
self.datatype, [util.u("réve🐍 illé")], [util.u("réve🐍 illé")]
)
row = connection.execute(select(text_table.c.text_data)).first()
eq_(row, (None,))
- def test_literal(self):
- self._literal_round_trip(Text, ["some text"], ["some text"])
+ def test_literal(self, literal_round_trip):
+ literal_round_trip(Text, ["some text"], ["some text"])
- def test_literal_non_ascii(self):
- self._literal_round_trip(
+ def test_literal_non_ascii(self, literal_round_trip):
+ literal_round_trip(
Text, [util.u("réve🐍 illé")], [util.u("réve🐍 illé")]
)
- def test_literal_quoting(self):
+ def test_literal_quoting(self, literal_round_trip):
data = """some 'text' hey "hi there" that's text"""
- self._literal_round_trip(Text, [data], [data])
+ literal_round_trip(Text, [data], [data])
- def test_literal_backslashes(self):
+ def test_literal_backslashes(self, literal_round_trip):
data = r"backslash one \ backslash two \\ end"
- self._literal_round_trip(Text, [data], [data])
+ literal_round_trip(Text, [data], [data])
- def test_literal_percentsigns(self):
+ def test_literal_percentsigns(self, literal_round_trip):
data = r"percent % signs %% percent"
- self._literal_round_trip(Text, [data], [data])
+ literal_round_trip(Text, [data], [data])
class StringTest(_LiteralRoundTripFixture, fixtures.TestBase):
foo.create(config.db)
foo.drop(config.db)
- def test_literal(self):
+ def test_literal(self, literal_round_trip):
# note that in Python 3, this invokes the Unicode
# datatype for the literal part because all strings are unicode
- self._literal_round_trip(String(40), ["some text"], ["some text"])
+ literal_round_trip(String(40), ["some text"], ["some text"])
- def test_literal_non_ascii(self):
- self._literal_round_trip(
+ def test_literal_non_ascii(self, literal_round_trip):
+ literal_round_trip(
String(40), [util.u("réve🐍 illé")], [util.u("réve🐍 illé")]
)
- def test_literal_quoting(self):
+ def test_literal_quoting(self, literal_round_trip):
data = """some 'text' hey "hi there" that's text"""
- self._literal_round_trip(String(40), [data], [data])
+ literal_round_trip(String(40), [data], [data])
- def test_literal_backslashes(self):
+ def test_literal_backslashes(self, literal_round_trip):
data = r"backslash one \ backslash two \\ end"
- self._literal_round_trip(String(40), [data], [data])
+ literal_round_trip(String(40), [data], [data])
class _DateFixture(_LiteralRoundTripFixture, fixtures.TestBase):
eq_(row, (None,))
@testing.requires.datetime_literals
- def test_literal(self):
+ def test_literal(self, literal_round_trip):
compare = self.compare or self.data
- self._literal_round_trip(self.datatype, [self.data], [compare])
+ literal_round_trip(self.datatype, [self.data], [compare])
@testing.requires.standalone_null_binds_whereclause
def test_null_bound_comparison(self):
class IntegerTest(_LiteralRoundTripFixture, fixtures.TestBase):
__backend__ = True
- def test_literal(self):
- self._literal_round_trip(Integer, [5], [5])
+ def test_literal(self, literal_round_trip):
+ literal_round_trip(Integer, [5], [5])
- def test_huge_int(self, connection):
- self._round_trip(BigInteger, 1376537018368127, connection)
+ def test_huge_int(self, integer_round_trip):
+ integer_round_trip(BigInteger, 1376537018368127)
- @testing.provide_metadata
- def _round_trip(self, datatype, data, connection):
- metadata = self.metadata
- int_table = Table(
- "integer_table",
- metadata,
- Column(
- "id", Integer, primary_key=True, test_needs_autoincrement=True
- ),
- Column("integer_data", datatype),
- )
+ @testing.fixture
+ def integer_round_trip(self, metadata, connection):
+ def run(datatype, data):
+ int_table = Table(
+ "integer_table",
+ metadata,
+ Column(
+ "id",
+ Integer,
+ primary_key=True,
+ test_needs_autoincrement=True,
+ ),
+ Column("integer_data", datatype),
+ )
- metadata.create_all(config.db)
+            metadata.create_all(connection)
- connection.execute(int_table.insert(), {"integer_data": data})
+ connection.execute(int_table.insert(), {"integer_data": data})
- row = connection.execute(select(int_table.c.integer_data)).first()
+ row = connection.execute(select(int_table.c.integer_data)).first()
- eq_(row, (data,))
+ eq_(row, (data,))
- if util.py3k:
- assert isinstance(row[0], int)
- else:
- assert isinstance(row[0], (long, int)) # noqa
+ if util.py3k:
+ assert isinstance(row[0], int)
+ else:
+ assert isinstance(row[0], (long, int)) # noqa
+
+ return run
class CastTypeDecoratorTest(_LiteralRoundTripFixture, fixtures.TestBase):
return StringAsInt()
- @testing.provide_metadata
- def test_special_type(self, connection, string_as_int):
+ def test_special_type(self, metadata, connection, string_as_int):
type_ = string_as_int
- metadata = self.metadata
t = Table("t", metadata, Column("x", type_))
t.create(connection)
class NumericTest(_LiteralRoundTripFixture, fixtures.TestBase):
__backend__ = True
- @testing.emits_warning(r".*does \*not\* support Decimal objects natively")
- @testing.provide_metadata
- def _do_test(self, type_, input_, output, filter_=None, check_scale=False):
- metadata = self.metadata
- t = Table("t", metadata, Column("x", type_))
- t.create()
- with config.db.begin() as conn:
- conn.execute(t.insert(), [{"x": x} for x in input_])
-
- result = {row[0] for row in conn.execute(t.select())}
- output = set(output)
- if filter_:
- result = set(filter_(x) for x in result)
- output = set(filter_(x) for x in output)
- eq_(result, output)
- if check_scale:
- eq_([str(x) for x in result], [str(x) for x in output])
+ @testing.fixture
+ def do_numeric_test(self, metadata):
+ @testing.emits_warning(
+ r".*does \*not\* support Decimal objects natively"
+ )
+ def run(type_, input_, output, filter_=None, check_scale=False):
+ t = Table("t", metadata, Column("x", type_))
+            t.create(config.db)
+ with config.db.begin() as conn:
+ conn.execute(t.insert(), [{"x": x} for x in input_])
+
+ result = {row[0] for row in conn.execute(t.select())}
+ output = set(output)
+ if filter_:
+ result = set(filter_(x) for x in result)
+ output = set(filter_(x) for x in output)
+ eq_(result, output)
+ if check_scale:
+ eq_([str(x) for x in result], [str(x) for x in output])
+
+ return run
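+
+    # emits_warning() wraps the inner run() rather than the fixture
+    # itself, since the Decimal warning is raised only when a test
+    # actually invokes run()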
@testing.emits_warning(r".*does \*not\* support Decimal objects natively")
- def test_render_literal_numeric(self):
- self._literal_round_trip(
+ def test_render_literal_numeric(self, literal_round_trip):
+ literal_round_trip(
Numeric(precision=8, scale=4),
[15.7563, decimal.Decimal("15.7563")],
[decimal.Decimal("15.7563")],
)
@testing.emits_warning(r".*does \*not\* support Decimal objects natively")
- def test_render_literal_numeric_asfloat(self):
- self._literal_round_trip(
+ def test_render_literal_numeric_asfloat(self, literal_round_trip):
+ literal_round_trip(
Numeric(precision=8, scale=4, asdecimal=False),
[15.7563, decimal.Decimal("15.7563")],
[15.7563],
)
- def test_render_literal_float(self):
- self._literal_round_trip(
+ def test_render_literal_float(self, literal_round_trip):
+ literal_round_trip(
Float(4),
[15.7563, decimal.Decimal("15.7563")],
[15.7563],
)
@testing.requires.precision_generic_float_type
- def test_float_custom_scale(self):
- self._do_test(
+ def test_float_custom_scale(self, do_numeric_test):
+ do_numeric_test(
Float(None, decimal_return_scale=7, asdecimal=True),
[15.7563827, decimal.Decimal("15.7563827")],
[decimal.Decimal("15.7563827")],
check_scale=True,
)
- def test_numeric_as_decimal(self):
- self._do_test(
+ def test_numeric_as_decimal(self, do_numeric_test):
+ do_numeric_test(
Numeric(precision=8, scale=4),
[15.7563, decimal.Decimal("15.7563")],
[decimal.Decimal("15.7563")],
)
- def test_numeric_as_float(self):
- self._do_test(
+ def test_numeric_as_float(self, do_numeric_test):
+ do_numeric_test(
Numeric(precision=8, scale=4, asdecimal=False),
[15.7563, decimal.Decimal("15.7563")],
[15.7563],
)
@testing.requires.fetch_null_from_numeric
- def test_numeric_null_as_decimal(self):
- self._do_test(Numeric(precision=8, scale=4), [None], [None])
+ def test_numeric_null_as_decimal(self, do_numeric_test):
+ do_numeric_test(Numeric(precision=8, scale=4), [None], [None])
@testing.requires.fetch_null_from_numeric
- def test_numeric_null_as_float(self):
- self._do_test(
+ def test_numeric_null_as_float(self, do_numeric_test):
+ do_numeric_test(
Numeric(precision=8, scale=4, asdecimal=False), [None], [None]
)
@testing.requires.floats_to_four_decimals
- def test_float_as_decimal(self):
- self._do_test(
+ def test_float_as_decimal(self, do_numeric_test):
+ do_numeric_test(
Float(precision=8, asdecimal=True),
[15.7563, decimal.Decimal("15.7563"), None],
[decimal.Decimal("15.7563"), None],
filter_=lambda n: n is not None and round(n, 4) or None,
)
- def test_float_as_float(self):
- self._do_test(
+ def test_float_as_float(self, do_numeric_test):
+ do_numeric_test(
Float(precision=8),
[15.7563, decimal.Decimal("15.7563")],
[15.7563],
eq_(val, expr)
@testing.requires.precision_numerics_general
- def test_precision_decimal(self):
+ def test_precision_decimal(self, do_numeric_test):
numbers = set(
[
decimal.Decimal("54.234246451650"),
]
)
- self._do_test(Numeric(precision=18, scale=12), numbers, numbers)
+ do_numeric_test(Numeric(precision=18, scale=12), numbers, numbers)
@testing.requires.precision_numerics_enotation_large
- def test_enotation_decimal(self):
+ def test_enotation_decimal(self, do_numeric_test):
"""test exceedingly small decimals.
Decimal reports values with E notation when the exponent
decimal.Decimal("696E-12"),
]
)
- self._do_test(Numeric(precision=18, scale=14), numbers, numbers)
+ do_numeric_test(Numeric(precision=18, scale=14), numbers, numbers)
@testing.requires.precision_numerics_enotation_large
- def test_enotation_decimal_large(self):
+ def test_enotation_decimal_large(self, do_numeric_test):
"""test exceedingly large decimals."""
numbers = set(
decimal.Decimal("00000000000000.1E+12"),
]
)
- self._do_test(Numeric(precision=25, scale=2), numbers, numbers)
+ do_numeric_test(Numeric(precision=25, scale=2), numbers, numbers)
@testing.requires.precision_numerics_many_significant_digits
- def test_many_significant_digits(self):
+ def test_many_significant_digits(self, do_numeric_test):
numbers = set(
[
decimal.Decimal("31943874831932418390.01"),
decimal.Decimal("87673.594069654243"),
]
)
- self._do_test(Numeric(precision=38, scale=12), numbers, numbers)
+ do_numeric_test(Numeric(precision=38, scale=12), numbers, numbers)
@testing.requires.precision_numerics_retains_significant_digits
- def test_numeric_no_decimal(self):
+ def test_numeric_no_decimal(self, do_numeric_test):
numbers = set([decimal.Decimal("1.000")])
- self._do_test(
+ do_numeric_test(
Numeric(precision=5, scale=3), numbers, numbers, check_scale=True
)
Column("unconstrained_value", Boolean(create_constraint=False)),
)
- def test_render_literal_bool(self):
- self._literal_round_trip(Boolean(), [True, False], [True, False])
+ def test_render_literal_bool(self, literal_round_trip):
+ literal_round_trip(Boolean(), [True, False], [True, False])
def test_round_trip(self, connection):
boolean_table = self.tables.boolean_table
# in py3k this can be moved top level.
from . import engines
- metadata = schema.MetaData(config.db)
+ metadata = schema.MetaData()
self = args[0]
prev_meta = getattr(self, "metadata", None)
self.metadata = metadata
#
# bound metadata
#
- r"The MetaData.bind argument is deprecated",
r"The ``bind`` argument for schema methods that invoke SQL ",
r"The Function.bind argument",
r"The select.bind argument",
go()
def test_session(self):
- metadata = MetaData(self.engine)
+ metadata = MetaData()
table1 = Table(
"mytable",
Column("col3", Integer, ForeignKey("mytable.col1")),
)
- metadata.create_all()
+ metadata.create_all(self.engine)
m1 = mapper(
A,
@profile_memory()
def go():
- with Session() as sess:
+ with Session(self.engine) as sess:
a1 = A(col2="a1")
a2 = A(col2="a2")
a3 = A(col2="a3")
go()
- metadata.drop_all()
+ metadata.drop_all(self.engine)
del m1, m2
assert_no_mappers()
@testing.crashes("mysql+cymysql", "blocking")
def test_unicode_warnings(self):
- metadata = MetaData(self.engine)
+ metadata = MetaData()
table1 = Table(
"mytable",
metadata,
),
Column("col2", Unicode(30)),
)
- metadata.create_all()
+ metadata.create_all(self.engine)
i = [1]
# the times here is cranked way up so that we can see
try:
go()
finally:
- metadata.drop_all()
+ metadata.drop_all(self.engine)
def test_warnings_util(self):
counter = itertools.count()
go()
def test_mapper_reset(self):
- metadata = MetaData(self.engine)
+ metadata = MetaData()
table1 = Table(
"mytable",
)
mapper(B, table2)
- sess = create_session()
+ sess = create_session(self.engine)
a1 = A(col2="a1")
a2 = A(col2="a2")
a3 = A(col2="a3")
sess.close()
clear_mappers()
- metadata.create_all()
+ metadata.create_all(self.engine)
try:
go()
finally:
- metadata.drop_all()
+ metadata.drop_all(self.engine)
assert_no_mappers()
def test_alias_pathing(self):
- metadata = MetaData(self.engine)
+ metadata = MetaData()
a = Table(
"a",
mapper(ASub, asub, inherits=A, polymorphic_identity="asub")
mapper(B, b, properties={"as_": relationship(A)})
- metadata.create_all()
- sess = Session()
+ metadata.create_all(self.engine)
+ sess = Session(self.engine)
a1 = ASub(data="a1")
a2 = ASub(data="a2")
a3 = ASub(data="a3")
# "dip" again
@profile_memory(maxtimes=120)
def go():
- sess = Session()
+ sess = Session(self.engine)
sess.query(B).options(subqueryload(B.as_.of_type(ASub))).all()
sess.close()
del sess
try:
go()
finally:
- metadata.drop_all()
+ metadata.drop_all(self.engine)
clear_mappers()
def test_path_registry(self):
clear_mappers()
def test_with_inheritance(self):
- metadata = MetaData(self.engine)
+ metadata = MetaData()
table1 = Table(
"mytable",
)
mapper(B, table2, inherits=A, polymorphic_identity="b")
- sess = create_session()
+ sess = create_session(self.engine)
a1 = A()
a2 = A()
b1 = B(col3="b1")
del B
del A
- metadata.create_all()
+ metadata.create_all(self.engine)
try:
go()
finally:
- metadata.drop_all()
+ metadata.drop_all(self.engine)
assert_no_mappers()
def test_with_manytomany(self):
- metadata = MetaData(self.engine)
+ metadata = MetaData()
table1 = Table(
"mytable",
)
mapper(B, table2)
- sess = create_session()
+ sess = create_session(self.engine)
a1 = A(col2="a1")
a2 = A(col2="a2")
b1 = B(col2="b1")
del B
del A
- metadata.create_all()
+ metadata.create_all(self.engine)
try:
go()
finally:
- metadata.drop_all()
+ metadata.drop_all(self.engine)
assert_no_mappers()
@testing.uses_deprecated()
@testing.crashes("mysql+cymysql", "blocking")
def test_join_cache_deprecated_coercion(self):
- metadata = MetaData(self.engine)
+ metadata = MetaData()
table1 = Table(
"table1",
metadata,
mapper(
Foo, table1, properties={"bars": relationship(mapper(Bar, table2))}
)
- metadata.create_all()
- session = sessionmaker()
+ metadata.create_all(self.engine)
+ session = sessionmaker(self.engine)
@profile_memory()
def go():
try:
go()
finally:
- metadata.drop_all()
+ metadata.drop_all(self.engine)
@testing.crashes("mysql+cymysql", "blocking")
def test_join_cache(self):
- metadata = MetaData(self.engine)
+ metadata = MetaData()
table1 = Table(
"table1",
metadata,
mapper(
Foo, table1, properties={"bars": relationship(mapper(Bar, table2))}
)
- metadata.create_all()
- session = sessionmaker()
+ metadata.create_all(self.engine)
+ session = sessionmaker(self.engine)
@profile_memory()
def go():
try:
go()
finally:
- metadata.drop_all()
+ metadata.drop_all(self.engine)
class CycleTest(_fixtures.FixtureTest):
from sqlalchemy import Column
from sqlalchemy import create_engine
from sqlalchemy import Integer
-from sqlalchemy import MetaData
from sqlalchemy import String
from sqlalchemy import Table
from sqlalchemy import testing
NUM_FIELDS = 10
NUM_RECORDS = 1000
-t = t2 = metadata = None
-
-class ResultSetTest(fixtures.TestBase, AssertsExecutionResults):
+class ResultSetTest(fixtures.TablesTest, AssertsExecutionResults):
__backend__ = True
@classmethod
- def setup_class(cls):
- global t, t2, metadata
- metadata = MetaData(testing.db)
- t = Table(
+ def define_tables(cls, metadata):
+ Table(
"table1",
metadata,
*[
for fnum in range(NUM_FIELDS)
]
)
- t2 = Table(
+ Table(
"table2",
metadata,
*[
]
)
- def setup(self):
- with testing.db.begin() as conn:
- metadata.create_all(conn)
- conn.execute(
- t.insert(),
- [
- dict(
- ("field%d" % fnum, u("value%d" % fnum))
- for fnum in range(NUM_FIELDS)
- )
- for r_num in range(NUM_RECORDS)
- ],
- )
- conn.execute(
- t2.insert(),
- [
- dict(
- ("field%d" % fnum, u("value%d" % fnum))
- for fnum in range(NUM_FIELDS)
- )
- for r_num in range(NUM_RECORDS)
- ],
- )
+ @classmethod
+ def insert_data(cls, connection):
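+ # table create/drop is handled by the TablesTest fixtures;
+ # this hook only inserts the profiling data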
+ conn = connection
+ t, t2 = cls.tables("table1", "table2")
+ conn.execute(
+ t.insert(),
+ [
+ dict(
+ ("field%d" % fnum, u("value%d" % fnum))
+ for fnum in range(NUM_FIELDS)
+ )
+ for r_num in range(NUM_RECORDS)
+ ],
+ )
+ conn.execute(
+ t2.insert(),
+ [
+ dict(
+ ("field%d" % fnum, u("value%d" % fnum))
+ for fnum in range(NUM_FIELDS)
+ )
+ for r_num in range(NUM_RECORDS)
+ ],
+ )
# warm up type caches
- with testing.db.connect() as conn:
- conn.execute(t.select()).fetchall()
- conn.execute(t2.select()).fetchall()
- conn.exec_driver_sql(
- "SELECT %s FROM table1"
- % (", ".join("field%d" % fnum for fnum in range(NUM_FIELDS)))
- ).fetchall()
- conn.exec_driver_sql(
- "SELECT %s FROM table2"
- % (", ".join("field%d" % fnum for fnum in range(NUM_FIELDS)))
- ).fetchall()
-
- def teardown(self):
- metadata.drop_all()
+ conn.execute(t.select()).fetchall()
+ conn.execute(t2.select()).fetchall()
+ conn.exec_driver_sql(
+ "SELECT %s FROM table1"
+ % (", ".join("field%d" % fnum for fnum in range(NUM_FIELDS)))
+ ).fetchall()
+ conn.exec_driver_sql(
+ "SELECT %s FROM table2"
+ % (", ".join("field%d" % fnum for fnum in range(NUM_FIELDS)))
+ ).fetchall()
@profiling.function_call_count(variance=0.15)
def test_string(self):
+ t, t2 = self.tables("table1", "table2")
with testing.db.connect().execution_options(
compiled_cache=None
) as conn:
@profiling.function_call_count(variance=0.15)
def test_unicode(self):
+ t, t2 = self.tables("table1", "table2")
+
with testing.db.connect().execution_options(
compiled_cache=None
) as conn:
@profiling.function_call_count()
def test_fetch_by_key_legacy(self):
+ t, t2 = self.tables("table1", "table2")
with testing.db.connect().execution_options(
compiled_cache=None
) as conn:
@profiling.function_call_count()
def test_fetch_by_key_mappings(self):
+ t, t2 = self.tables("table1", "table2")
with testing.db.connect().execution_options(
compiled_cache=None
) as conn:
def test_one_or_none(self, one_or_first, rows_present):
# TODO: this is not testing the ORM level "scalar_mapping"
# mode which has a different performance profile
+ t, t2 = self.tables("table1", "table2")
+
with testing.db.connect().execution_options(
compiled_cache=None
) as conn:
result.close()
- def test_contains_doesnt_compile(self):
+ def test_contains_doesnt_compile(self, connection):
+ t, t2 = self.tables("table1", "table2")
+
- row = t.select().execute().first()
+ row = connection.execute(t.select()).first()
c1 = Column("some column", Integer) + Column(
"some other column", Integer
__backend__ = True
__requires__ = ("pyodbc_fast_executemany",)
- @testing.provide_metadata
- def test_flag_on(self):
+ def test_flag_on(self, metadata):
t = Table(
"t",
- self.metadata,
+ metadata,
Column("id", Integer, primary_key=True),
Column("data", String(50)),
)
- t.create()
+ t.create(testing.db)
eng = engines.testing_engine(options={"fast_executemany": True})
__only_on__ = "mssql"
__backend__ = True
- @testing.provide_metadata
- def test_isolation_level(self):
- Table("test", self.metadata, Column("id", Integer)).create(
- checkfirst=True
+ def test_isolation_level(self, metadata):
+ Table("test", metadata, Column("id", Integer)).create(
+ testing.db, checkfirst=True
)
with testing.db.connect() as c:
__only_on__ = "mssql"
__backend__ = True
- @testing.provide_metadata
- def test_basic_reflection(self):
- meta = self.metadata
+ def test_basic_reflection(self, metadata, connection):
+ meta = metadata
users = Table(
"engine_users",
),
Column("email_address", types.String(20)),
)
- meta.create_all()
+ meta.create_all(connection)
meta2 = MetaData()
reflected_users = Table(
- "engine_users", meta2, autoload_with=testing.db
+ "engine_users", meta2, autoload_with=connection
)
reflected_addresses = Table(
"engine_email_addresses",
meta2,
- autoload_with=testing.db,
+ autoload_with=connection,
)
self.assert_tables_equal(users, reflected_users)
self.assert_tables_equal(addresses, reflected_addresses)
- @testing.provide_metadata
- def _test_specific_type(self, type_obj, ddl):
- metadata = self.metadata
+ @testing.combinations(
+ (mssql.XML, "XML"),
+ (mssql.IMAGE, "IMAGE"),
+ (mssql.MONEY, "MONEY"),
+ (mssql.NUMERIC(10, 2), "NUMERIC(10, 2)"),
+ (mssql.FLOAT, "FLOAT(53)"),
+ (mssql.REAL, "REAL"),
+ # FLOAT(5) comes back as REAL
+ (mssql.FLOAT(5), "REAL"),
+ argnames="type_obj,ddl",
+ )
+ def test_assorted_types(self, metadata, connection, type_obj, ddl):
table = Table("type_test", metadata, Column("col1", type_obj))
- table.create()
+ table.create(connection)
m2 = MetaData()
- table2 = Table("type_test", m2, autoload_with=testing.db)
+ table2 = Table("type_test", m2, autoload_with=connection)
self.assert_compile(
schema.CreateTable(table2),
"CREATE TABLE type_test (col1 %s NULL)" % ddl,
)
- def test_xml_type(self):
- self._test_specific_type(mssql.XML, "XML")
-
- def test_image_type(self):
- self._test_specific_type(mssql.IMAGE, "IMAGE")
-
- def test_money_type(self):
- self._test_specific_type(mssql.MONEY, "MONEY")
-
- def test_numeric_prec_scale(self):
- self._test_specific_type(mssql.NUMERIC(10, 2), "NUMERIC(10, 2)")
-
- def test_float(self):
- self._test_specific_type(mssql.FLOAT, "FLOAT(53)")
-
- def test_real(self):
- self._test_specific_type(mssql.REAL, "REAL")
-
- def test_float_as_real(self):
- # FLOAT(5) comes back as REAL
- self._test_specific_type(mssql.FLOAT(5), "REAL")
-
- @testing.provide_metadata
- def test_identity(self):
- metadata = self.metadata
+ def test_identity(self, metadata, connection):
table = Table(
"identity_test",
metadata,
with testing.expect_deprecated(
"The dialect options 'mssql_identity_start' and"
):
- table.create()
+ table.create(connection)
meta2 = MetaData()
- table2 = Table("identity_test", meta2, autoload_with=testing.db)
+ table2 = Table("identity_test", meta2, autoload_with=connection)
eq_(table2.c["col1"].dialect_options["mssql"]["identity_start"], None)
eq_(
table2.c["col1"].dialect_options["mssql"]["identity_increment"],
eq_(table2.c["col1"].identity.start, 2)
eq_(table2.c["col1"].identity.increment, 3)
- @testing.provide_metadata
def test_skip_types(self, connection):
connection.exec_driver_sql(
"create table foo (id integer primary key, data xml)"
],
)
- @testing.provide_metadata
- def test_cross_schema_fk_pk_name_overlaps(self):
+ def test_cross_schema_fk_pk_name_overlaps(self, metadata, connection):
# test for issue #4228
- metadata = self.metadata
Table(
"subject",
schema=testing.config.test_schema_2,
)
- metadata.create_all()
+ metadata.create_all(connection)
- insp = inspect(testing.db)
+ insp = inspect(connection)
eq_(
insp.get_foreign_keys("referrer", testing.config.test_schema),
[
],
)
- @testing.provide_metadata
- def test_table_name_that_is_greater_than_16_chars(self):
- metadata = self.metadata
+ def test_table_name_that_is_greater_than_16_chars(
+ self, metadata, connection
+ ):
Table(
"ABCDEFGHIJKLMNOPQRSTUVWXYZ",
metadata,
Column("foo", Integer),
Index("foo_idx", "foo"),
)
- metadata.create_all()
+ metadata.create_all(connection)
t = Table(
- "ABCDEFGHIJKLMNOPQRSTUVWXYZ", MetaData(), autoload_with=testing.db
+ "ABCDEFGHIJKLMNOPQRSTUVWXYZ", MetaData(), autoload_with=connection
)
eq_(t.name, "ABCDEFGHIJKLMNOPQRSTUVWXYZ")
- @testing.provide_metadata
@testing.combinations(
("local_temp", "#tmp", True),
("global_temp", "##tmp", True),
id_="iaa",
argnames="table_name, exists",
)
- def test_temporary_table(self, connection, table_name, exists):
- metadata = self.metadata
+ def test_temporary_table(self, metadata, connection, table_name, exists):
if exists:
tt = Table(
table_name,
- self.metadata,
+ metadata,
Column("id", Integer, primary_key=True),
Column("txt", mssql.NVARCHAR(50)),
Column("dt2", mssql.DATETIME2),
[(2, "bar", datetime.datetime(2020, 2, 2, 2, 2, 2))],
)
- @testing.provide_metadata
@testing.combinations(
("local_temp", "#tmp", True),
("global_temp", "##tmp", True),
id_="iaa",
argnames="table_name, exists",
)
- def test_has_table_temporary(self, connection, table_name, exists):
+ def test_has_table_temporary(
+ self, metadata, connection, table_name, exists
+ ):
if exists:
tt = Table(
table_name,
- self.metadata,
+ metadata,
Column("id", Integer),
)
tt.create(connection)
found_it = testing.db.dialect.has_table(connection, table_name)
eq_(found_it, exists)
- @testing.provide_metadata
- def test_db_qualified_items(self):
- metadata = self.metadata
+ def test_db_qualified_items(self, metadata, connection):
Table("foo", metadata, Column("id", Integer, primary_key=True))
Table(
"bar",
Column("id", Integer, primary_key=True),
Column("foo_id", Integer, ForeignKey("foo.id", name="fkfoo")),
)
- metadata.create_all()
+ metadata.create_all(connection)
- with testing.db.connect() as c:
- dbname = c.exec_driver_sql("select db_name()").scalar()
- owner = c.exec_driver_sql("SELECT user_name()").scalar()
+ dbname = connection.exec_driver_sql("select db_name()").scalar()
+ owner = connection.exec_driver_sql("SELECT user_name()").scalar()
referred_schema = "%(dbname)s.%(owner)s" % {
"dbname": dbname,
"owner": owner,
}
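+ # e.g. "test.dbo", assuming the usual database/owner names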
- inspector = inspect(testing.db)
+ inspector = inspect(connection)
bar_via_db = inspector.get_foreign_keys("bar", schema=referred_schema)
eq_(
bar_via_db,
],
)
- assert inspect(testing.db).has_table("bar", schema=referred_schema)
+ assert inspect(connection).has_table("bar", schema=referred_schema)
m2 = MetaData()
Table(
"bar",
m2,
schema=referred_schema,
- autoload_with=testing.db,
+ autoload_with=connection,
)
eq_(m2.tables["%s.foo" % referred_schema].schema, referred_schema)
- @testing.provide_metadata
- def test_indexes_cols(self):
- metadata = self.metadata
+ def test_indexes_cols(self, metadata, connection):
t1 = Table("t", metadata, Column("x", Integer), Column("y", Integer))
Index("foo", t1.c.x, t1.c.y)
- metadata.create_all()
+ metadata.create_all(connection)
m2 = MetaData()
- t2 = Table("t", m2, autoload_with=testing.db)
+ t2 = Table("t", m2, autoload_with=connection)
eq_(set(list(t2.indexes)[0].columns), set([t2.c["x"], t2.c.y]))
- @testing.provide_metadata
- def test_indexes_cols_with_commas(self):
- metadata = self.metadata
+ def test_indexes_cols_with_commas(self, metadata, connection):
t1 = Table(
"t",
Column("y", Integer),
)
Index("foo", t1.c.x, t1.c.y)
- metadata.create_all()
+ metadata.create_all(connection)
m2 = MetaData()
- t2 = Table("t", m2, autoload_with=testing.db)
+ t2 = Table("t", m2, autoload_with=connection)
eq_(set(list(t2.indexes)[0].columns), set([t2.c["x, col"], t2.c.y]))
- @testing.provide_metadata
- def test_indexes_cols_with_spaces(self):
- metadata = self.metadata
+ def test_indexes_cols_with_spaces(self, metadata, connection):
t1 = Table(
"t",
Column("y", Integer),
)
Index("foo", t1.c.x, t1.c.y)
- metadata.create_all()
+ metadata.create_all(connection)
m2 = MetaData()
- t2 = Table("t", m2, autoload_with=testing.db)
+ t2 = Table("t", m2, autoload_with=connection)
eq_(set(list(t2.indexes)[0].columns), set([t2.c["x col"], t2.c.y]))
- @testing.provide_metadata
- def test_indexes_with_filtered(self, connection):
- metadata = self.metadata
+ def test_indexes_with_filtered(self, metadata, connection):
t1 = Table(
"t",
CreateIndex(idx), "CREATE INDEX idx_x ON t (x) WHERE ([x]='test')"
)
- @testing.provide_metadata
- def test_max_ident_in_varchar_not_present(self):
+ def test_max_ident_in_varchar_not_present(self, metadata, connection):
"""test [ticket:3504].
Here we are testing not just that the "max" token comes back
pattern however is likely in common use.
"""
- metadata = self.metadata
Table(
"t",
Column("t4", types.LargeBinary("max")),
Column("t5", types.VARBINARY("max")),
)
- metadata.create_all()
- for col in inspect(testing.db).get_columns("t"):
+ metadata.create_all(connection)
+ for col in inspect(connection).get_columns("t"):
is_(col["type"].length, None)
- in_("max", str(col["type"].compile(dialect=testing.db.dialect)))
+ in_("max", str(col["type"].compile(dialect=connection.dialect)))
class InfoCoerceUnicodeTest(fixtures.TestBase, AssertsCompiledSQL):
)
-class ReflectHugeViewTest(fixtures.TestBase):
+class ReflectHugeViewTest(fixtures.TablesTest):
__only_on__ = "mssql"
__backend__ = True
# crashes on freetds 0.91, not worth it
__skip_if__ = (lambda: testing.requires.mssql_freetds.enabled,)
- def setup(self):
- self.col_num = 150
+ @classmethod
+ def define_tables(cls, metadata):
+ col_num = 150
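+ # 150 long-named columns push the view definition past the
+ # 4000-character mark asserted below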
- self.metadata = MetaData(testing.db)
t = Table(
"base_table",
- self.metadata,
+ metadata,
*[
Column("long_named_column_number_%d" % i, Integer)
- for i in range(self.col_num)
+ for i in range(col_num)
]
)
- self.view_str = (
+ cls.view_str = (
view_str
) = "CREATE VIEW huge_named_view AS SELECT %s FROM base_table" % (
- ",".join(
- "long_named_column_number_%d" % i for i in range(self.col_num)
- )
+ ",".join("long_named_column_number_%d" % i for i in range(col_num))
)
assert len(view_str) > 4000
event.listen(t, "after_create", DDL(view_str))
event.listen(t, "before_drop", DDL("DROP VIEW huge_named_view"))
- self.metadata.create_all()
-
- def teardown(self):
- self.metadata.drop_all()
-
def test_inspect_view_definition(self):
inspector = inspect(testing.db)
view_def = inspector.get_view_definition("huge_named_view")
):
Table("t%s" % i, metadata, col)
- def test_reflect_identity(self):
- insp = inspect(testing.db)
+ def test_reflect_identity(self, connection):
+ insp = inspect(connection)
cols = []
- for t in self.metadata.tables.keys():
+ for t in self.tables_test_metadata.tables.keys():
cols.extend(insp.get_columns(t))
for col in cols:
is_true("dialect_options" not in col)
__backend__ = True
- @testing.provide_metadata
- def test_decimal_notation(self, connection):
- metadata = self.metadata
+ def test_decimal_notation(self, metadata, connection):
numeric_table = Table(
"numeric_table",
metadata,
)
eq_(value, returned)
- @testing.provide_metadata
- def test_float(self, connection):
- metadata = self.metadata
+ def test_float(self, metadata, connection):
float_table = Table(
"float_table",
)
eq_(value, returned)
- # todo this should suppress warnings, but it does not
@emits_warning_on("mssql+mxodbc", r".*does not have any indexes.*")
- @testing.provide_metadata
- def test_dates(self):
+ def test_dates(self, metadata, connection):
"Exercise type specification for date types."
columns = [
(mssql.MSDateTime2, [1], {}, "DATETIME2(1)", [">=", (10,)]),
]
- metadata = self.metadata
-
table_args = ["test_mssql_dates", metadata]
for index, spec in enumerate(columns):
type_, args, kw, res, requires = spec[0:5]
or not requires
):
c = Column("c%s" % index, type_(*args, **kw), nullable=None)
- testing.db.dialect.type_descriptor(c.type)
+ connection.dialect.type_descriptor(c.type)
table_args.append(c)
dates_table = Table(*table_args)
- gen = testing.db.dialect.ddl_compiler(
- testing.db.dialect, schema.CreateTable(dates_table)
+ gen = connection.dialect.ddl_compiler(
+ connection.dialect, schema.CreateTable(dates_table)
)
for col in dates_table.c:
index = int(col.name[1:])
"%s %s" % (col.name, columns[index][3]),
)
self.assert_(repr(col))
- dates_table.create(checkfirst=True)
+ dates_table.create(connection)
reflected_dates = Table(
- "test_mssql_dates", MetaData(), autoload_with=testing.db
+ "test_mssql_dates", MetaData(), autoload_with=connection
)
for col in reflected_dates.c:
self.assert_types_base(col, dates_table.c[col.key])
)
@emits_warning_on("mssql+mxodbc", r".*does not have any indexes.*")
- @testing.provide_metadata
@testing.combinations(
("legacy_large_types", False),
("sql2012_large_types", True, lambda: testing.only_on("mssql >= 11")),
id_="ia",
+ argnames="deprecate_large_types",
)
- def test_binary_reflection(self, deprecate_large_types):
+ def test_binary_reflection(self, metadata, deprecate_large_types):
"Exercise type specification for binary types."
columns = [
),
]
- metadata = self.metadata
- metadata.bind = engines.testing_engine(
+ engine = engines.testing_engine(
options={"deprecate_large_types": deprecate_large_types}
)
- table_args = ["test_mssql_binary", metadata]
- for index, spec in enumerate(columns):
- type_, args, kw, res = spec
- table_args.append(
- Column("c%s" % index, type_(*args, **kw), nullable=None)
+ with engine.begin() as conn:
+ table_args = ["test_mssql_binary", metadata]
+ for index, spec in enumerate(columns):
+ type_, args, kw, res = spec
+ table_args.append(
+ Column("c%s" % index, type_(*args, **kw), nullable=None)
+ )
+ binary_table = Table(*table_args)
+ metadata.create_all(conn)
+ reflected_binary = Table(
+ "test_mssql_binary", MetaData(), autoload_with=conn
)
- binary_table = Table(*table_args)
- metadata.create_all()
- reflected_binary = Table(
- "test_mssql_binary", MetaData(), autoload_with=testing.db
- )
- for col, spec in zip(reflected_binary.c, columns):
- eq_(
- col.type.compile(dialect=mssql.dialect()),
- spec[3],
- "column %s %s != %s"
- % (
- col.key,
+ for col, spec in zip(reflected_binary.c, columns):
+ eq_(
col.type.compile(dialect=mssql.dialect()),
spec[3],
- ),
- )
- c1 = testing.db.dialect.type_descriptor(col.type).__class__
- c2 = testing.db.dialect.type_descriptor(
- binary_table.c[col.name].type
- ).__class__
- assert issubclass(
- c1, c2
- ), "column %s: %r is not a subclass of %r" % (col.key, c1, c2)
- if binary_table.c[col.name].type.length:
- testing.eq_(
- col.type.length, binary_table.c[col.name].type.length
+ "column %s %s != %s"
+ % (
+ col.key,
+ col.type.compile(dialect=conn.dialect),
+ spec[3],
+ ),
)
+ c1 = conn.dialect.type_descriptor(col.type).__class__
+ c2 = conn.dialect.type_descriptor(
+ binary_table.c[col.name].type
+ ).__class__
+ assert issubclass(
+ c1, c2
+ ), "column %s: %r is not a subclass of %r" % (col.key, c1, c2)
+ if binary_table.c[col.name].type.length:
+ testing.eq_(
+ col.type.length, binary_table.c[col.name].type.length
+ )
- @testing.provide_metadata
- def test_autoincrement(self):
- metadata = self.metadata
+ def test_autoincrement(self, metadata, connection):
Table(
"ai_1",
metadata,
Column("o1", String(1), DefaultClause("x"), primary_key=True),
Column("o2", String(1), DefaultClause("x"), primary_key=True),
)
- metadata.create_all()
+ metadata.create_all(connection)
table_names = [
"ai_1",
mr = MetaData()
for name in table_names:
- tbl = Table(name, mr, autoload_with=testing.db)
+ tbl = Table(name, mr, autoload_with=connection)
tbl = metadata.tables[name]
# test that the flag itself reflects appropriately
]
for counter, engine in enumerate(eng):
- with engine.begin() as conn:
- conn.execute(tbl.insert())
- if "int_y" in tbl.c:
- eq_(
- conn.execute(select(tbl.c.int_y)).scalar(),
- counter + 1,
- )
- assert (
- list(conn.execute(tbl.select()).first()).count(
- counter + 1
- )
- == 1
- )
- else:
- assert 1 not in list(
- conn.execute(tbl.select()).first()
+ connection.execute(tbl.insert())
+ if "int_y" in tbl.c:
+ eq_(
+ connection.execute(select(tbl.c.int_y)).scalar(),
+ counter + 1,
+ )
+ assert (
+ list(connection.execute(tbl.select()).first()).count(
+ counter + 1
)
- conn.execute(tbl.delete())
+ == 1
+ )
+ else:
+ assert 1 not in list(
+ connection.execute(tbl.select()).first()
+ )
+ connection.execute(tbl.delete())
class StringTest(fixtures.TestBase, AssertsCompiledSQL):
)
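+# defined at module level (moved out of the old test_custom_pickle)
+# so the class-level @testing.combinations below can reference it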
+class MyPickleType(types.TypeDecorator):
+ impl = PickleType
+
+ def process_bind_param(self, value, dialect):
+ if value:
+ value.stuff = "BIND" + value.stuff
+ return value
+
+ def process_result_value(self, value, dialect):
+ if value:
+ value.stuff = value.stuff + "RESULT"
+ return value
+
+
class BinaryTest(fixtures.TestBase):
__only_on__ = "mssql"
__requires__ = ("non_broken_binary",)
__backend__ = True
- def test_character_binary(self):
- self._test_round_trip(mssql.MSVarBinary(800), b("some normal data"))
-
- @testing.provide_metadata
- def _test_round_trip(
- self, type_, data, deprecate_large_types=True, expected=None
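+ # tuple layout matches argnames below:
+ # (type_, data, expected, deprecate_large_types, slice_, zeropad)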
+ @testing.combinations(
+ (
+ mssql.MSVarBinary(800),
+ b("some normal data"),
+ None,
+ True,
+ None,
+ False,
+ ),
+ (
+ mssql.VARBINARY("max"),
+ "binary_data_one.dat",
+ None,
+ False,
+ None,
+ False,
+ ),
+ (
+ mssql.VARBINARY("max"),
+ "binary_data_one.dat",
+ None,
+ True,
+ None,
+ False,
+ ),
+ (
+ sqltypes.LargeBinary,
+ "binary_data_one.dat",
+ None,
+ False,
+ None,
+ False,
+ ),
+ (sqltypes.LargeBinary, "binary_data_one.dat", None, True, None, False),
+ (mssql.MSImage, "binary_data_one.dat", None, True, None, False),
+ (PickleType, pickleable.Foo("im foo 1"), None, True, None, False),
+ (
+ MyPickleType,
+ pickleable.Foo("im foo 1"),
+ pickleable.Foo("im foo 1", stuff="BINDim stuffRESULT"),
+ True,
+ None,
+ False,
+ ),
+ (types.BINARY(100), "binary_data_one.dat", None, True, 100, False),
+ (types.VARBINARY(100), "binary_data_one.dat", None, True, 100, False),
+ (mssql.VARBINARY(100), "binary_data_one.dat", None, True, 100, False),
+ (types.BINARY(100), "binary_data_two.dat", None, True, 99, True),
+ (types.VARBINARY(100), "binary_data_two.dat", None, True, 99, False),
+ (mssql.VARBINARY(100), "binary_data_two.dat", None, True, 99, False),
+ argnames="type_, data, expected, deprecate_large_types, "
+ "slice_, zeropad",
+ )
+ def test_round_trip(
+ self,
+ metadata,
+ type_,
+ data,
+ expected,
+ deprecate_large_types,
+ slice_,
+ zeropad,
):
if (
testing.db.dialect.deprecate_large_types
binary_table = Table(
"binary_table",
- self.metadata,
+ metadata,
Column("id", Integer, primary_key=True),
Column("data", type_),
)
binary_table.create(engine)
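+ # the combinations name the .dat payloads by filename; load the
+ # actual bytes only once the test runs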
+ if data in ("binary_data_one.dat", "binary_data_two.dat"):
+ data = self._load_stream(data)
+
+ if slice_ is not None:
+ data = data[0:slice_]
+
if expected is None:
- expected = data
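+ # the zeropad combinations use a 100-byte BINARY with a 99-byte
+ # value, which the fixed-width type pads back out to 100 bytes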
+ if zeropad:
+ expected = data + b"\x00"
+ else:
+ expected = data
with engine.begin() as conn:
- conn.execute(binary_table.insert(), data=data)
+ conn.execute(binary_table.insert(), {"data": data})
None,
)
- def test_plain_pickle(self):
- self._test_round_trip(PickleType, pickleable.Foo("im foo 1"))
-
- def test_custom_pickle(self):
- class MyPickleType(types.TypeDecorator):
- impl = PickleType
-
- def process_bind_param(self, value, dialect):
- if value:
- value.stuff = "BIND" + value.stuff
- return value
-
- def process_result_value(self, value, dialect):
- if value:
- value.stuff = value.stuff + "RESULT"
- return value
-
- data = pickleable.Foo("im foo 1")
- expected = pickleable.Foo("im foo 1")
- expected.stuff = "BINDim stuffRESULT"
-
- self._test_round_trip(MyPickleType, data, expected=expected)
-
- def test_image(self):
- stream1 = self._load_stream("binary_data_one.dat")
- self._test_round_trip(mssql.MSImage, stream1)
-
- def test_large_binary(self):
- stream1 = self._load_stream("binary_data_one.dat")
- self._test_round_trip(sqltypes.LargeBinary, stream1)
-
- def test_large_legacy_types(self):
- stream1 = self._load_stream("binary_data_one.dat")
- self._test_round_trip(
- sqltypes.LargeBinary, stream1, deprecate_large_types=False
- )
-
- def test_mssql_varbinary_max(self):
- stream1 = self._load_stream("binary_data_one.dat")
- self._test_round_trip(mssql.VARBINARY("max"), stream1)
-
- def test_mssql_legacy_varbinary_max(self):
- stream1 = self._load_stream("binary_data_one.dat")
- self._test_round_trip(
- mssql.VARBINARY("max"), stream1, deprecate_large_types=False
- )
-
- def test_binary_slice(self):
- self._test_var_slice(types.BINARY)
-
- def test_binary_slice_zeropadding(self):
- self._test_var_slice_zeropadding(types.BINARY, True)
-
- def test_varbinary_slice(self):
- self._test_var_slice(types.VARBINARY)
-
- def test_varbinary_slice_zeropadding(self):
- self._test_var_slice_zeropadding(types.VARBINARY, False)
-
- def test_mssql_varbinary_slice(self):
- self._test_var_slice(mssql.VARBINARY)
-
- def test_mssql_varbinary_slice_zeropadding(self):
- self._test_var_slice_zeropadding(mssql.VARBINARY, False)
-
- def _test_var_slice(self, type_):
- stream1 = self._load_stream("binary_data_one.dat")
-
- data = stream1[0:100]
-
- self._test_round_trip(type_(100), data)
-
- def _test_var_slice_zeropadding(
- self, type_, pad, deprecate_large_types=True
- ):
- stream2 = self._load_stream("binary_data_two.dat")
-
- data = stream2[0:99]
-
- # the type we used here is 100 bytes
- # so we will get 100 bytes zero-padded
-
- if pad:
- paddedstream = stream2[0:99] + b"\x00"
- else:
- paddedstream = stream2[0:99]
-
- self._test_round_trip(type_(100), data, expected=paddedstream)
-
def _load_stream(self, name, len_=3000):
fp = open(
os.path.join(os.path.dirname(__file__), "..", "..", name), "rb"
__only_on__ = "mysql", "mariadb"
__backend__ = True
- @testing.provide_metadata
- def _run_test(self, specs, attributes):
+ def _run_test(self, metadata, connection, specs, attributes):
columns = [Column("c%i" % (i + 1), t[0]) for i, t in enumerate(specs)]
# Early 5.0 releases seem to report more "general" for columns
# in a view, e.g. char -> varchar, tinyblob -> mediumblob
use_views = testing.db.dialect.server_version_info > (5, 0, 10)
- m = self.metadata
+ m = metadata
Table("mysql_types", m, *columns)
if use_views:
event.listen(
m, "before_drop", DDL("DROP VIEW IF EXISTS mysql_types_v")
)
- m.create_all()
+ m.create_all(connection)
m2 = MetaData()
- tables = [Table("mysql_types", m2, autoload_with=testing.db)]
+ tables = [Table("mysql_types", m2, autoload_with=connection)]
if use_views:
- tables.append(Table("mysql_types_v", m2, autoload_with=testing.db))
+ tables.append(Table("mysql_types_v", m2, autoload_with=connection))
for table in tables:
for i, (reflected_col, spec) in enumerate(zip(table.c, specs)):
),
)
- def test_time_types(self):
+ def test_time_types(self, metadata, connection):
specs = []
if testing.requires.mysql_fsp.enabled:
)
# note 'timezone' should always be None on both
- self._run_test(specs, ["fsp", "timezone"])
+ self._run_test(metadata, connection, specs, ["fsp", "timezone"])
- def test_year_types(self):
+ def test_year_types(self, metadata, connection):
specs = [
(mysql.YEAR(), mysql.YEAR(display_width=4)),
(mysql.YEAR(display_width=4), mysql.YEAR(display_width=4)),
]
if testing.against("mysql>=8.0.19"):
- self._run_test(specs, [])
+ self._run_test(metadata, connection, specs, [])
else:
- self._run_test(specs, ["display_width"])
+ self._run_test(metadata, connection, specs, ["display_width"])
- def test_string_types(self):
+ def test_string_types(
+ self,
+ metadata,
+ connection,
+ ):
specs = [
(String(1), mysql.MSString(1)),
(String(3), mysql.MSString(3)),
(mysql.MSNChar(2), mysql.MSChar(2)),
(mysql.MSNVarChar(22), mysql.MSString(22)),
]
- self._run_test(specs, ["length"])
+ self._run_test(metadata, connection, specs, ["length"])
- def test_integer_types(self):
+ def test_integer_types(self, metadata, connection):
specs = []
for type_ in [
mysql.TINYINT,
# on display_width. need to test this more accurately though
# for the cases where it does
if testing.against("mysql >= 8.0.19"):
- self._run_test(specs, ["unsigned", "zerofill"])
+ self._run_test(
+ metadata, connection, specs, ["unsigned", "zerofill"]
+ )
else:
- self._run_test(specs, ["display_width", "unsigned", "zerofill"])
+ self._run_test(
+ metadata,
+ connection,
+ specs,
+ ["display_width", "unsigned", "zerofill"],
+ )
- def test_binary_types(self):
+ def test_binary_types(
+ self,
+ metadata,
+ connection,
+ ):
specs = [
(LargeBinary(3), mysql.TINYBLOB()),
(LargeBinary(), mysql.BLOB()),
(mysql.MSMediumBlob(), mysql.MSMediumBlob()),
(mysql.MSLongBlob(), mysql.MSLongBlob()),
]
- self._run_test(specs, [])
+ self._run_test(metadata, connection, specs, [])
- def test_legacy_enum_types(self):
+ def test_legacy_enum_types(
+ self,
+ metadata,
+ connection,
+ ):
specs = [(mysql.ENUM("", "fleem"), mysql.ENUM("", "fleem"))]
- self._run_test(specs, ["enums"])
+ self._run_test(metadata, connection, specs, ["enums"])
class ReflectionTest(fixtures.TestBase, AssertsCompiledSQL):
str(reflected.c.c6.server_default.arg).upper(),
)
- @testing.provide_metadata
- def test_reflection_with_table_options(self, connection):
+ def test_reflection_with_table_options(self, metadata, connection):
comment = r"""Comment types type speedily ' " \ '' Fun!"""
if testing.against("mariadb"):
kwargs = dict(
def_table = Table(
"mysql_def",
- self.metadata,
+ metadata,
Column("c1", Integer()),
comment=comment,
**kwargs
# This is explicitly ignored when reflecting schema.
# assert reflected.kwargs['mysql_auto_increment'] == '5'
- @testing.provide_metadata
- def test_reflection_on_include_columns(self):
+ def test_reflection_on_include_columns(self, metadata, connection):
"""Test reflection of include_columns to be sure they respect case."""
- meta = self.metadata
+ meta = metadata
case_table = Table(
"mysql_case",
meta,
Column("C3", String(10)),
)
- case_table.create(testing.db)
+ case_table.create(connection)
reflected = Table(
"mysql_case",
MetaData(),
- autoload_with=testing.db,
+ autoload_with=connection,
include_columns=["c1", "C2"],
)
for t in case_table, reflected:
reflected2 = Table(
"mysql_case",
MetaData(),
- autoload_with=testing.db,
+ autoload_with=connection,
include_columns=["c1", "c2"],
)
assert "c1" in reflected2.c.keys()
for c in ["c2", "C2", "C3"]:
assert c not in reflected2.c.keys()
- @testing.provide_metadata
- def test_autoincrement(self):
- meta = self.metadata
+ def test_autoincrement(self, metadata, connection):
+ meta = metadata
Table(
"ai_1",
meta,
Column("o2", String(1), DefaultClause("x"), primary_key=True),
mysql_engine="MyISAM",
)
- meta.create_all(testing.db)
+ meta.create_all(connection)
table_names = [
"ai_1",
"ai_8",
]
mr = MetaData()
- mr.reflect(testing.db, only=table_names)
-
- with testing.db.begin() as conn:
- for tbl in [mr.tables[name] for name in table_names]:
- for c in tbl.c:
- if c.name.startswith("int_y"):
- assert c.autoincrement
- elif c.name.startswith("int_n"):
- assert not c.autoincrement
- conn.execute(tbl.insert())
- if "int_y" in tbl.c:
- assert conn.scalar(select(tbl.c.int_y)) == 1
- assert (
- list(conn.execute(tbl.select()).first()).count(1) == 1
- )
- else:
- assert 1 not in list(conn.execute(tbl.select()).first())
+ mr.reflect(connection, only=table_names)
+
+ for tbl in [mr.tables[name] for name in table_names]:
+ for c in tbl.c:
+ if c.name.startswith("int_y"):
+ assert c.autoincrement
+ elif c.name.startswith("int_n"):
+ assert not c.autoincrement
+ connection.execute(tbl.insert())
+ if "int_y" in tbl.c:
+ assert connection.scalar(select(tbl.c.int_y)) == 1
+ assert (
+ list(connection.execute(tbl.select()).first()).count(1)
+ == 1
+ )
+ else:
+ assert 1 not in list(connection.execute(tbl.select()).first())
- @testing.provide_metadata
- def test_view_reflection(self, connection):
- Table(
- "x", self.metadata, Column("a", Integer), Column("b", String(50))
- )
- self.metadata.create_all(connection)
+ def test_view_reflection(self, metadata, connection):
+ Table("x", metadata, Column("a", Integer), Column("b", String(50)))
+ metadata.create_all(connection)
conn = connection
conn.exec_driver_sql("CREATE VIEW v1 AS SELECT * FROM x")
"CREATE DEFINER=CURRENT_USER VIEW v4 AS SELECT * FROM x"
)
- @event.listens_for(self.metadata, "before_drop")
+ @event.listens_for(metadata, "before_drop")
def cleanup(*arg, **kw):
with testing.db.begin() as conn:
for v in ["v1", "v2", "v3", "v4"]:
[("a", mysql.INTEGER), ("b", mysql.VARCHAR)],
)
- @testing.provide_metadata
- def test_skip_not_describable(self, connection):
- @event.listens_for(self.metadata, "before_drop")
+ def test_skip_not_describable(self, metadata, connection):
+ @event.listens_for(metadata, "before_drop")
def cleanup(*arg, **kw):
with testing.db.begin() as conn:
conn.exec_driver_sql("DROP TABLE IF EXISTS test_t1")
view_names = dialect.get_view_names(connection, "information_schema")
self.assert_("TABLES" in view_names)
- @testing.provide_metadata
- def test_nullable_reflection(self):
+ def test_nullable_reflection(self, metadata, connection):
"""test reflection of NULL/NOT NULL, in particular with TIMESTAMP
defaults where MySQL is inconsistent in how it reports CREATE TABLE.
"""
- meta = self.metadata
+ meta = metadata
# this is ideally one table, but older MySQL versions choke
# on the multiple TIMESTAMP columns
- with testing.db.connect() as c:
- row = c.exec_driver_sql(
- "show variables like '%%explicit_defaults_for_timestamp%%'"
- ).first()
+ row = connection.exec_driver_sql(
+ "show variables like '%%explicit_defaults_for_timestamp%%'"
+ ).first()
explicit_defaults_for_timestamp = row[1].lower() in ("on", "1", "true")
reflected = []
):
Table("nn_t%d" % idx, meta) # to allow DROP
- with testing.db.begin() as c:
- c.exec_driver_sql(
- """
- CREATE TABLE nn_t%d (
- %s
- )
- """
- % (idx, ", \n".join(cols))
- )
+ connection.exec_driver_sql(
+ """
+ CREATE TABLE nn_t%d (
+ %s
+ )
+ """
+ % (idx, ", \n".join(cols))
+ )
reflected.extend(
{
"nullable": d["nullable"],
"default": d["default"],
}
- for d in inspect(testing.db).get_columns("nn_t%d" % idx)
+ for d in inspect(connection).get_columns("nn_t%d" % idx)
)
- if testing.db.dialect._is_mariadb_102:
+ if connection.dialect._is_mariadb_102:
current_timestamp = "current_timestamp()"
else:
current_timestamp = "CURRENT_TIMESTAMP"
],
)
- @testing.provide_metadata
- def test_reflection_with_unique_constraint(self):
- insp = inspect(testing.db)
+ def test_reflection_with_unique_constraint(self, metadata, connection):
+ insp = inspect(connection)
- meta = self.metadata
+ meta = metadata
uc_table = Table(
"mysql_uc",
meta,
UniqueConstraint("a", name="uc_a"),
)
- uc_table.create()
+ uc_table.create(connection)
# MySQL converts unique constraints into unique indexes.
# separately we get both
self.assert_(indexes["uc_a"].unique)
self.assert_("uc_a" not in constraints)
- @testing.provide_metadata
- def test_reflect_fulltext(self):
+ def test_reflect_fulltext(self, metadata, connection):
mt = Table(
"mytable",
- self.metadata,
+ metadata,
Column("id", Integer, primary_key=True),
Column("textdata", String(50)),
mariadb_engine="InnoDB",
mysql_prefix="FULLTEXT",
mariadb_prefix="FULLTEXT",
)
- self.metadata.create_all(testing.db)
+ metadata.create_all(connection)
mt = Table("mytable", MetaData(), autoload_with=testing.db)
idx = list(mt.indexes)[0]
)
@testing.requires.mysql_ngram_fulltext
- @testing.provide_metadata
- def test_reflect_fulltext_comment(self):
+ def test_reflect_fulltext_comment(
+ self,
+ metadata,
+ connection,
+ ):
mt = Table(
"mytable",
- self.metadata,
+ metadata,
Column("id", Integer, primary_key=True),
Column("textdata", String(50)),
mysql_engine="InnoDB",
mysql_with_parser="ngram",
)
- self.metadata.create_all(testing.db)
+ metadata.create_all(connection)
- mt = Table("mytable", MetaData(), autoload_with=testing.db)
+ mt = Table("mytable", MetaData(), autoload_with=connection)
idx = list(mt.indexes)[0]
eq_(idx.name, "textdata_ix")
eq_(idx.dialect_options["mysql"]["prefix"], "FULLTEXT")
"(textdata) WITH PARSER ngram",
)
- @testing.provide_metadata
- def test_non_column_index(self):
- m1 = self.metadata
+ def test_non_column_index(self, metadata, connection):
+ m1 = metadata
t1 = Table(
"add_ix", m1, Column("x", String(50)), mysql_engine="InnoDB"
)
Index("foo_idx", t1.c.x.desc())
- m1.create_all()
+ m1.create_all(connection)
- insp = inspect(testing.db)
+ insp = inspect(connection)
eq_(
insp.get_indexes("add_ix"),
[{"name": "foo_idx", "column_names": ["x"], "unique": False}],
],
)
- @testing.provide_metadata
- def test_case_sensitive_column_constraint_reflection(self):
+ def test_case_sensitive_column_constraint_reflection(
+ self, metadata, connection
+ ):
# test for issue #4344 which works around
# MySQL 8.0 bug https://bugs.mysql.com/bug.php?id=88718
- m1 = self.metadata
+ m1 = metadata
Table(
"Track",
),
mysql_engine="InnoDB",
)
- m1.create_all()
+ m1.create_all(connection)
- if testing.db.dialect._casing in (1, 2):
+ if connection.dialect._casing in (1, 2):
# the original test for the 88718 fix here in [ticket:4344]
# actually set referred_table='track', with the wrong casing!
# this test was never run. with [ticket:4751], I've gone through
# lower case is also an 8.0 regression.
eq_(
- inspect(testing.db).get_foreign_keys("PlaylistTrack"),
+ inspect(connection).get_foreign_keys("PlaylistTrack"),
[
{
"name": "FK_PlaylistTTrackId",
else:
eq_(
sorted(
- inspect(testing.db).get_foreign_keys("PlaylistTrack"),
+ inspect(connection).get_foreign_keys("PlaylistTrack"),
key=lambda elem: elem["name"],
),
[
)
@testing.requires.mysql_fully_case_sensitive
- @testing.provide_metadata
- def test_case_sensitive_reflection_dual_case_references(self):
+ def test_case_sensitive_reflection_dual_case_references(
+ self, metadata, connection
+ ):
# this tests that within the fix we do for MySQL bug
# 88718, we don't do case-insensitive logic if the backend
# is case sensitive
- m = self.metadata
+ m = metadata
Table(
"t1",
m,
Column("cap_t1id", ForeignKey("T1.Some_Id", name="cap_t1id_fk")),
mysql_engine="InnoDB",
)
- m.create_all(testing.db)
+ m.create_all(connection)
eq_(
dict(
(rec["name"], rec)
- for rec in inspect(testing.db).get_foreign_keys("t2")
+ for rec in inspect(connection).get_foreign_keys("t2")
),
{
"cap_t1id_fk": {
# fixed in mysql-connector as of 2.0.1,
# see http://bugs.mysql.com/bug.php?id=73266
- @testing.provide_metadata
- def test_precision_float_roundtrip(self, connection):
+ def test_precision_float_roundtrip(self, metadata, connection):
t = Table(
"t",
- self.metadata,
+ metadata,
Column(
"scale_value",
mysql.DOUBLE(precision=15, scale=12, asdecimal=True),
eq_(result, decimal.Decimal("45.768392065789"))
@testing.only_if("mysql")
- @testing.provide_metadata
- def test_charset_collate_table(self, connection):
+ def test_charset_collate_table(self, metadata, connection):
t = Table(
"foo",
- self.metadata,
+ metadata,
Column("id", Integer),
Column("data", UnicodeText),
mysql_default_charset="utf8",
impl = TIMESTAMP
@testing.combinations(
- (TIMESTAMP,), (MyTime(),), (String().with_variant(TIMESTAMP, "mysql"),)
+ (TIMESTAMP,),
+ (MyTime(),),
+ (String().with_variant(TIMESTAMP, "mysql"),),
+ argnames="type_",
)
@testing.requires.mysql_zero_date
- @testing.provide_metadata
- def test_timestamp_nullable(self, type_):
+ def test_timestamp_nullable(self, metadata, connection, type_):
ts_table = Table(
"mysql_timestamp",
- self.metadata,
+ metadata,
Column("t1", type_),
Column("t2", type_, nullable=False),
mysql_engine="InnoDB",
)
- self.metadata.create_all()
+ metadata.create_all(connection)
# TIMESTAMP without NULL inserts current time when passed
# NULL. when not passed, generates 0000-00-00 quite
else:
return dt
- with testing.db.begin() as conn:
- now = conn.exec_driver_sql("select now()").scalar()
- conn.execute(ts_table.insert(), {"t1": now, "t2": None})
- conn.execute(ts_table.insert(), {"t1": None, "t2": None})
- conn.execute(ts_table.insert(), {"t2": None})
+ now = connection.exec_driver_sql("select now()").scalar()
+ connection.execute(ts_table.insert(), {"t1": now, "t2": None})
+ connection.execute(ts_table.insert(), {"t1": None, "t2": None})
+ connection.execute(ts_table.insert(), {"t2": None})
- new_now = conn.exec_driver_sql("select now()").scalar()
+ new_now = connection.exec_driver_sql("select now()").scalar()
- eq_(
- [
- tuple([normalize(dt) for dt in row])
- for row in conn.execute(ts_table.select())
- ],
- [(now, now), (None, now), (None, now)],
- )
+ eq_(
+ [
+ tuple([normalize(dt) for dt in row])
+ for row in connection.execute(ts_table.select())
+ ],
+ [(now, now), (None, now), (None, now)],
+ )
- @testing.provide_metadata
- def test_time_roundtrip(self, connection):
- t = Table("mysql_time", self.metadata, Column("t1", mysql.TIME()))
+ def test_time_roundtrip(self, metadata, connection):
+ t = Table("mysql_time", metadata, Column("t1", mysql.TIME()))
t.create(connection)
datetime.time(8, 37, 35),
)
- @testing.provide_metadata
- def test_year(self, connection):
+ def test_year(self, metadata, connection):
"""Exercise YEAR."""
year_table = Table(
"mysql_year",
- self.metadata,
+ metadata,
Column("y1", mysql.MSYear),
Column("y2", mysql.MSYear),
Column("y3", mysql.MSYear),
__only_on__ = "mysql", "mariadb"
__backend__ = True
- @testing.provide_metadata
@testing.requires.reflects_json_type
- def test_reflection(self, connection):
+ def test_reflection(self, metadata, connection):
- Table("mysql_json", self.metadata, Column("foo", mysql.JSON))
- self.metadata.create_all(connection)
+ Table("mysql_json", metadata, Column("foo", mysql.JSON))
+ metadata.create_all(connection)
reflected = Table("mysql_json", MetaData(), autoload_with=connection)
is_(reflected.c.foo.type._type_affinity, sqltypes.JSON)
assert isinstance(reflected.c.foo.type, mysql.JSON)
- @testing.provide_metadata
- def test_rudimental_round_trip(self, connection):
+ def test_rudimental_round_trip(self, metadata, connection):
# note that test_suite has many more JSON round trip tests
# using the backend-agnostic JSON type
- mysql_json = Table(
- "mysql_json", self.metadata, Column("foo", mysql.JSON)
- )
- self.metadata.create_all(connection)
+ mysql_json = Table("mysql_json", metadata, Column("foo", mysql.JSON))
+ metadata.create_all(connection)
value = {"json": {"foo": "bar"}, "recs": ["one", "two"]}
def get_enum_string_values(some_enum):
return [str(v.value) for v in some_enum.__members__.values()]
- @testing.provide_metadata
- def test_enum(self, connection):
+ def test_enum(self, metadata, connection):
"""Exercise the ENUM type."""
e1 = mysql.ENUM("a", "b")
enum_table = Table(
"mysql_enum",
- self.metadata,
+ metadata,
Column("e1", e1),
Column("e2", e2, nullable=False),
Column(
assert_raises(
exc.DBAPIError,
- enum_table.insert().execute,
- e1=None,
- e2=None,
- e3=None,
- e4=None,
+ connection.execute,
+ enum_table.insert(),
+ dict(
+ e1=None,
+ e2=None,
+ e3=None,
+ e4=None,
+ ),
)
assert enum_table.c.e2generic.type.validate_strings
eq_(res, expected)
- def _set_fixture_one(self):
+ def _set_fixture_one(self, metadata):
e1 = mysql.SET("a", "b")
e2 = mysql.SET("a", "b")
e4 = mysql.SET("'a'", "b")
set_table = Table(
"mysql_set",
- self.metadata,
+ metadata,
Column("e1", e1),
Column("e2", e2, nullable=False),
Column("e3", mysql.SET("a", "b")),
)
return set_table
- def test_set_colspec(self):
- self.metadata = MetaData()
- set_table = self._set_fixture_one()
+ def test_set_colspec(self, metadata):
+ set_table = self._set_fixture_one(metadata)
eq_(colspec(set_table.c.e1), "e1 SET('a','b')")
eq_(colspec(set_table.c.e2), "e2 SET('a','b') NOT NULL")
eq_(colspec(set_table.c.e3), "e3 SET('a','b')")
eq_(colspec(set_table.c.e4), "e4 SET('''a''','b')")
eq_(colspec(set_table.c.e5), "e5 SET('a','b')")
- @testing.provide_metadata
- def test_no_null(self, connection):
- set_table = self._set_fixture_one()
+ def test_no_null(self, metadata, connection):
+ set_table = self._set_fixture_one(metadata)
set_table.create(connection)
assert_raises(
exc.DBAPIError,
)
@testing.requires.mysql_non_strict
- @testing.provide_metadata
- def test_empty_set_no_empty_string(self, connection):
+ def test_empty_set_no_empty_string(self, metadata, connection):
t = Table(
"t",
- self.metadata,
+ metadata,
Column("id", Integer),
Column("data", mysql.SET("a", "b")),
)
"",
)
- @testing.provide_metadata
- def test_empty_set_empty_string(self, connection):
+ def test_empty_set_empty_string(self, metadata, connection):
t = Table(
"t",
- self.metadata,
+ metadata,
Column("id", Integer),
Column("data", mysql.SET("a", "b", "", retrieve_as_bitwise=True)),
)
],
)
- @testing.provide_metadata
- def test_string_roundtrip(self, connection):
- set_table = self._set_fixture_one()
+ def test_string_roundtrip(self, metadata, connection):
+ set_table = self._set_fixture_one(metadata)
set_table.create(connection)
connection.execute(
set_table.insert(),
eq_(res, expected)
- @testing.provide_metadata
- def test_unicode_roundtrip(self, connection):
+ def test_unicode_roundtrip(self, metadata, connection):
set_table = Table(
"t",
- self.metadata,
+ metadata,
Column("id", Integer, primary_key=True),
Column("data", mysql.SET(u("réveillé"), u("drôle"), u("S’il"))),
)
eq_(row, (1, set([u("réveillé"), u("drôle")])))
- @testing.provide_metadata
- def test_int_roundtrip(self, connection):
- set_table = self._set_fixture_one()
+ def test_int_roundtrip(self, metadata, connection):
+ set_table = self._set_fixture_one(metadata)
set_table.create(connection)
connection.execute(
set_table.insert(), dict(e1=1, e2=2, e3=3, e4=3, e5=0)
),
)
- @testing.provide_metadata
- def test_set_roundtrip_plus_reflection(self, connection):
+ def test_set_roundtrip_plus_reflection(self, metadata, connection):
set_table = Table(
"mysql_set",
- self.metadata,
+ metadata,
Column("s1", mysql.SET("dq", "sq")),
Column("s2", mysql.SET("a")),
Column("s3", mysql.SET("5", "7", "9")),
eq_(list(rows), [({"5"},), ({"7", "5"},)])
- @testing.provide_metadata
- def test_unicode_enum(self, connection):
- metadata = self.metadata
+ def test_unicode_enum(self, metadata, connection):
t1 = Table(
"table",
metadata,
"'y', 'z')))",
)
- @testing.provide_metadata
- def test_enum_parse(self, connection):
+ def test_enum_parse(self, metadata, connection):
enum_table = Table(
"mysql_enum",
- self.metadata,
+ metadata,
Column("e1", mysql.ENUM("a")),
Column("e2", mysql.ENUM("")),
Column("e3", mysql.ENUM("a")),
eq_(t.c.e6.type.enums, ["", "a"])
eq_(t.c.e7.type.enums, ["", "'a'", "b'b", "'"])
- @testing.provide_metadata
- def test_set_parse(self, connection):
+ def test_set_parse(self, metadata, connection):
set_table = Table(
"mysql_set",
- self.metadata,
+ metadata,
Column("e1", mysql.SET("a")),
Column("e2", mysql.SET("", retrieve_as_bitwise=True)),
Column("e3", mysql.SET("a")),
eq_(t.c.e7.type.values, ("", "'a'", "b'b", "'"))
@testing.requires.mysql_non_strict
- @testing.provide_metadata
- def test_broken_enum_returns_blanks(self, connection):
+ def test_broken_enum_returns_blanks(self, metadata, connection):
t = Table(
"enum_missing",
- self.metadata,
+ metadata,
Column("id", Integer, primary_key=True),
Column("e1", sqltypes.Enum("one", "two", "three")),
Column("e2", mysql.ENUM("one", "two", "three")),
__only_on__ = "oracle"
__backend__ = True
- @testing.provide_metadata
- def test_table_round_trip(self, connection):
+ def test_table_round_trip(self, metadata, connection):
oracle.RESERVED_WORDS.discard("UNION")
- metadata = self.metadata
table = Table(
"t1",
metadata,
# is set
Column("union", Integer, quote=True),
)
- metadata.create_all()
+ metadata.create_all(connection)
connection.execute(
table.insert(), {"option": 1, "plain": 1, "union": 1}
4,
)
- @testing.provide_metadata
- def test_numeric_bind_in_crud(self, connection):
- t = Table("asfd", self.metadata, Column("100K", Integer))
+ def test_numeric_bind_in_crud(self, metadata, connection):
+ t = Table("asfd", metadata, Column("100K", Integer))
t.create(connection)
connection.execute(t.insert(), {"100K": 10})
eq_(connection.scalar(t.select()), 10)
- @testing.provide_metadata
- def test_expanding_quote_roundtrip(self, connection):
- t = Table("asfd", self.metadata, Column("foo", Integer))
+ def test_expanding_quote_roundtrip(self, metadata, connection):
+ t = Table("asfd", metadata, Column("foo", Integer))
t.create(connection)
connection.execute(
finally:
seq.drop(connection)
- @testing.provide_metadata
- def test_limit_offset_for_update(self, connection):
- metadata = self.metadata
+ def test_limit_offset_for_update(self, metadata, connection):
# oracle can't actually do the ROWNUM thing with FOR UPDATE
# very well.
__only_on__ = "oracle"
__backend__ = True
- @testing.provide_metadata
- def test_quoted_column_non_unicode(self, connection):
- metadata = self.metadata
+ def test_quoted_column_non_unicode(self, metadata, connection):
table = Table(
"atable",
metadata,
Column("_underscorecolumn", Unicode(255), primary_key=True),
)
- metadata.create_all()
+ metadata.create_all(connection)
connection.execute(table.insert(), {"_underscorecolumn": u("’é")})
result = connection.execute(
).scalar()
eq_(result, u("’é"))
- @testing.provide_metadata
- def test_quoted_column_unicode(self, connection):
- metadata = self.metadata
+ def test_quoted_column_unicode(self, metadata, connection):
table = Table(
"atable",
metadata,
Column(u("méil"), Unicode(255), primary_key=True),
)
- metadata.create_all()
+ metadata.create_all(connection)
connection.execute(table.insert(), {u("méil"): u("’é")})
result = connection.execute(
if stmt.strip():
conn.exec_driver_sql(stmt)
- @testing.provide_metadata
- def test_create_same_names_explicit_schema(self):
+ def test_create_same_names_explicit_schema(self, metadata, connection):
schema = testing.db.dialect.default_schema_name
- meta = self.metadata
+ meta = metadata
parent = Table(
"parent",
meta,
Column("pid", Integer, ForeignKey("%s.parent.pid" % schema)),
schema=schema,
)
- with testing.db.begin() as conn:
- meta.create_all(conn)
- conn.execute(parent.insert(), {"pid": 1})
- conn.execute(child.insert(), {"cid": 1, "pid": 1})
- eq_(conn.execute(child.select()).fetchall(), [(1, 1)])
+ meta.create_all(connection)
+ connection.execute(parent.insert(), {"pid": 1})
+ connection.execute(child.insert(), {"cid": 1, "pid": 1})
+ eq_(connection.execute(child.select()).fetchall(), [(1, 1)])
def test_reflect_alt_table_owner_local_synonym(self):
meta = MetaData()
% {"test_schema": testing.config.test_schema},
)
- @testing.provide_metadata
- def test_create_same_names_implicit_schema(self, connection):
- meta = self.metadata
+ def test_create_same_names_implicit_schema(self, metadata, connection):
+ meta = metadata
parent = Table(
"parent", meta, Column("pid", Integer, primary_key=True)
)
# check table comment (#5146)
eq_(parent.comment, "my table comment")
- @testing.provide_metadata
- def test_reflect_table_comment(self):
+ def test_reflect_table_comment(self, metadata, connection):
local_parent = Table(
"parent",
- self.metadata,
+ metadata,
Column("q", Integer),
comment="my local comment",
)
- local_parent.create(testing.db)
+ local_parent.create(connection)
- insp = inspect(testing.db)
+ insp = inspect(connection)
eq_(
insp.get_table_comment(
"parent", schema=testing.config.test_schema
)
eq_(
insp.get_table_comment(
- "parent", schema=testing.db.dialect.default_schema_name
+ "parent", schema=connection.dialect.default_schema_name
),
{"text": "my local comment"},
)
def define_tables(cls, metadata):
Table("foo", metadata, Column("id", Integer, primary_key=True))
- def test_oracle_has_no_on_update_cascade(self):
+ def test_oracle_has_no_on_update_cascade(self, connection):
bar = Table(
"bar",
- self.metadata,
+ self.tables_test_metadata,
Column("id", Integer, primary_key=True),
Column(
"foo_id", Integer, ForeignKey("foo.id", onupdate="CASCADE")
),
)
- assert_raises(exc.SAWarning, bar.create)
+ assert_raises(exc.SAWarning, bar.create, connection)
bat = Table(
"bat",
- self.metadata,
+ self.tables_test_metadata,
Column("id", Integer, primary_key=True),
Column("foo_id", Integer),
ForeignKeyConstraint(["foo_id"], ["foo.id"], onupdate="CASCADE"),
)
- assert_raises(exc.SAWarning, bat.create)
+ assert_raises(exc.SAWarning, bat.create, connection)
- def test_reflect_check_include_all(self):
- insp = inspect(testing.db)
+ def test_reflect_check_include_all(self, connection):
+ insp = inspect(connection)
eq_(insp.get_check_constraints("foo"), [])
eq_(
[
with testing.db.begin() as conn:
conn.exec_driver_sql("drop table admin_docindex")
- def test_reflect_all(self):
- m = MetaData(testing.db)
- m.reflect()
+ def test_reflect_all(self, connection):
+ m = MetaData()
+ m.reflect(connection)
eq_(set(t.name for t in m.tables.values()), set(["admin_docindex"]))
__only_on__ = "oracle"
__backend__ = True
- @testing.provide_metadata
@testing.fails_if(all_tables_compression_missing)
- def test_reflect_basic_compression(self):
- metadata = self.metadata
+ def test_reflect_basic_compression(self, metadata, connection):
tbl = Table(
"test_compress",
Column("data", Integer, primary_key=True),
oracle_compress=True,
)
- metadata.create_all()
+ metadata.create_all(connection)
m2 = MetaData()
- tbl = Table("test_compress", m2, autoload_with=testing.db)
+ tbl = Table("test_compress", m2, autoload_with=connection)
# Don't hardcode the exact value, but it must be non-empty
assert tbl.dialect_options["oracle"]["compress"]
- @testing.provide_metadata
@testing.fails_if(all_tables_compress_for_missing)
- def test_reflect_oltp_compression(self):
- metadata = self.metadata
-
+ def test_reflect_oltp_compression(self, metadata, connection):
tbl = Table(
"test_compress",
metadata,
Column("data", Integer, primary_key=True),
oracle_compress="OLTP",
)
- metadata.create_all()
+ metadata.create_all(connection)
m2 = MetaData()
- tbl = Table("test_compress", m2, autoload_with=testing.db)
+ tbl = Table("test_compress", m2, autoload_with=connection)
assert tbl.dialect_options["oracle"]["compress"] == "OLTP"
__only_on__ = "oracle"
__backend__ = True
- @testing.provide_metadata
- def test_no_pk(self):
- metadata = self.metadata
-
+ def test_no_pk(self, metadata, connection):
Table(
"sometable",
metadata,
Index("pk_idx_1", "id_a", "id_b", unique=True),
Index("pk_idx_2", "id_b", "id_a", unique=True),
)
- metadata.create_all()
+ metadata.create_all(connection)
- insp = inspect(testing.db)
+ insp = inspect(connection)
eq_(
insp.get_indexes("sometable"),
[
],
)
- @testing.combinations((True,), (False,))
- @testing.provide_metadata
- def test_include_indexes_resembling_pk(self, explicit_pk):
- metadata = self.metadata
+ @testing.combinations((True,), (False,), argnames="explicit_pk")
+ def test_include_indexes_resembling_pk(
+ self, metadata, connection, explicit_pk
+ ):
t = Table(
"sometable",
"id_a", "id_b", "group", name="some_primary_key"
)
)
- metadata.create_all()
+ metadata.create_all(connection)
- insp = inspect(testing.db)
+ insp = inspect(connection)
eq_(
insp.get_indexes("sometable"),
[
],
)
- @testing.provide_metadata
- def test_reflect_fn_index(self, connection):
+ def test_reflect_fn_index(self, metadata, connection):
"""test reflection of a functional index.
it appears this emitted a warning at some point but does not right now.
"""
- metadata = self.metadata
s_table = Table(
"sometable",
metadata,
],
)
- @testing.provide_metadata
- def test_basic(self):
- metadata = self.metadata
+ def test_basic(self, metadata, connection):
s_table = Table(
"sometable",
oracle_compress=1,
)
- metadata.create_all()
+ metadata.create_all(connection)
- mirror = MetaData(testing.db)
- mirror.reflect()
+ mirror = MetaData()
+ mirror.reflect(connection)
- metadata.drop_all()
- mirror.create_all()
+ metadata.drop_all(connection)
+ mirror.create_all(connection)
- inspect = MetaData(testing.db)
- inspect.reflect()
+ inspect = MetaData()
+ inspect.reflect(connection)
def obj_definition(obj):
return (
)
# find what the primary k constraint name should be
- primaryconsname = testing.db.scalar(
+ primaryconsname = connection.scalar(
text(
"""SELECT constraint_name
FROM all_constraints
__only_on__ = "oracle"
__backend__ = True
- @testing.provide_metadata
- def _run_test(self, specs, attributes):
+ def _run_test(self, metadata, connection, specs, attributes):
columns = [Column("c%i" % (i + 1), t[0]) for i, t in enumerate(specs)]
- m = self.metadata
+ m = metadata
Table("oracle_types", m, *columns)
- m.create_all()
+ m.create_all(connection)
m2 = MetaData()
- table = Table("oracle_types", m2, autoload_with=testing.db)
+ table = Table("oracle_types", m2, autoload_with=connection)
for i, (reflected_col, spec) in enumerate(zip(table.c, specs)):
expected_spec = spec[1]
reflected_type = reflected_col.type
),
)
- def test_integer_types(self):
+ def test_integer_types(self, metadata, connection):
specs = [(Integer, INTEGER()), (Numeric, INTEGER())]
- self._run_test(specs, [])
+ self._run_test(metadata, connection, specs, [])
- def test_number_types(self):
+ def test_number_types(
+ self,
+ metadata,
+ connection,
+ ):
specs = [(Numeric(5, 2), NUMBER(5, 2)), (NUMBER, NUMBER())]
- self._run_test(specs, ["precision", "scale"])
+ self._run_test(metadata, connection, specs, ["precision", "scale"])
- def test_float_types(self):
+ def test_float_types(
+ self,
+ metadata,
+ connection,
+ ):
specs = [
(DOUBLE_PRECISION(), FLOAT()),
# when binary_precision is supported
# when binary_precision is supported
# (FLOAT(5), oracle.FLOAT(binary_precision=126),),
]
- self._run_test(specs, ["precision"])
+ self._run_test(metadata, connection, specs, ["precision"])
class IdentityReflectionTest(fixtures.TablesTest):
__dialect__ = oracle.OracleDialect()
__backend__ = True
- @testing.combinations((CHAR,), (NCHAR,))
- @testing.provide_metadata
- def test_fixed_char(self, char_type):
- m = self.metadata
+ @testing.combinations((CHAR,), (NCHAR,), argnames="char_type")
+ def test_fixed_char(self, metadata, connection, char_type):
+ m = metadata
t = Table(
"t1",
m,
else:
v1, v2, v3 = "value 1", "value 2", "value 3"
- with testing.db.begin() as conn:
- t.create(conn)
- conn.execute(
- t.insert(),
- dict(id=1, data=v1),
- dict(id=2, data=v2),
- dict(id=3, data=v3),
- )
+ t.create(connection)
+ connection.execute(
+ t.insert(),
+ dict(id=1, data=v1),
+ dict(id=2, data=v2),
+ dict(id=3, data=v3),
+ )
- eq_(
- conn.execute(t.select().where(t.c.data == v2)).fetchall(),
- [(2, "value 2 ")],
- )
+ eq_(
+ connection.execute(t.select().where(t.c.data == v2)).fetchall(),
+ [(2, "value 2 ")],
+ )
- m2 = MetaData()
- t2 = Table("t1", m2, autoload_with=conn)
- is_(type(t2.c.data.type), char_type)
- eq_(
- conn.execute(t2.select().where(t2.c.data == v2)).fetchall(),
- [(2, "value 2 ")],
- )
+ m2 = MetaData()
+ t2 = Table("t1", m2, autoload_with=connection)
+ is_(type(t2.c.data.type), char_type)
+ eq_(
+ connection.execute(t2.select().where(t2.c.data == v2)).fetchall(),
+ [(2, "value 2 ")],
+ )
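# --- editorial note (not part of the diff) -----------------------------------
# The trailing space in the expected rows above is deliberate: Oracle
# blank-pads CHAR/NCHAR values, so the stored "value 2" round-trips padded
# to the fixed column length.
# ------------------------------------------------------------------------------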
@testing.requires.returning
- @testing.provide_metadata
- def test_int_not_float(self, connection):
- m = self.metadata
+ def test_int_not_float(self, metadata, connection):
+ m = metadata
t1 = Table("t1", m, Column("foo", Integer))
t1.create(connection)
r = connection.execute(t1.insert().values(foo=5).returning(t1.c.foo))
assert isinstance(x, int)
@testing.requires.returning
- @testing.provide_metadata
- def test_int_not_float_no_coerce_decimal(self):
+ def test_int_not_float_no_coerce_decimal(self, metadata):
engine = testing_engine(options=dict(coerce_to_decimal=False))
- m = self.metadata
+ m = metadata
t1 = Table("t1", m, Column("foo", Integer))
with engine.begin() as conn:
- t1.create()
+ t1.create(conn)
r = conn.execute(t1.insert().values(foo=5).returning(t1.c.foo))
x = r.scalar()
assert x == 5
assert x == 5
assert isinstance(x, int)
- @testing.provide_metadata
- def test_rowid(self):
- metadata = self.metadata
+ def test_rowid(self, metadata, connection):
t = Table("t1", metadata, Column("x", Integer))
- with testing.db.begin() as conn:
- t.create(conn)
- conn.execute(t.insert(), {"x": 5})
- s1 = select(t).subquery()
- s2 = select(column("rowid")).select_from(s1)
- rowid = conn.scalar(s2)
-
- # the ROWID type is not really needed here,
- # as cx_oracle just treats it as a string,
- # but we want to make sure the ROWID works...
- rowid_col = column("rowid", oracle.ROWID)
- s3 = select(t.c.x, rowid_col).where(
- rowid_col == cast(rowid, oracle.ROWID)
- )
- eq_(conn.execute(s3).fetchall(), [(5, rowid)])
+ t.create(connection)
+ connection.execute(t.insert(), {"x": 5})
+ s1 = select(t).subquery()
+ s2 = select(column("rowid")).select_from(s1)
+ rowid = connection.scalar(s2)
+
+ # the ROWID type is not really needed here,
+ # as cx_oracle just treats it as a string,
+ # but we want to make sure the ROWID works...
+ rowid_col = column("rowid", oracle.ROWID)
+ s3 = select(t.c.x, rowid_col).where(
+ rowid_col == cast(rowid, oracle.ROWID)
+ )
+ eq_(connection.execute(s3).fetchall(), [(5, rowid)])
- @testing.provide_metadata
- def test_interval(self, connection):
- metadata = self.metadata
+ def test_interval(self, metadata, connection):
interval_table = Table(
"intervaltable",
metadata,
row = connection.execute(interval_table.select()).first()
eq_(row["day_interval"], datetime.timedelta(days=35, seconds=5743))
- @testing.provide_metadata
- def test_numerics(self):
- m = self.metadata
+ def test_numerics(self, metadata, connection):
+ m = metadata
t1 = Table(
"t1",
m,
Column("numbercol2", oracle.NUMBER(9, 3)),
Column("numbercol3", oracle.NUMBER),
)
- with testing.db.begin() as conn:
- t1.create(conn)
- conn.execute(
- t1.insert(),
- dict(
- intcol=1,
- numericcol=5.2,
- floatcol1=6.5,
- floatcol2=8.5,
- doubleprec=9.5,
- numbercol1=12,
- numbercol2=14.85,
- numbercol3=15.76,
- ),
- )
+ t1.create(connection)
+ connection.execute(
+ t1.insert(),
+ dict(
+ intcol=1,
+ numericcol=5.2,
+ floatcol1=6.5,
+ floatcol2=8.5,
+ doubleprec=9.5,
+ numbercol1=12,
+ numbercol2=14.85,
+ numbercol3=15.76,
+ ),
+ )
m2 = MetaData()
- t2 = Table("t1", m2, autoload_with=testing.db)
+ t2 = Table("t1", m2, autoload_with=connection)
- with testing.db.connect() as conn:
- for row in (
- conn.execute(t1.select()).first(),
- conn.execute(t2.select()).first(),
+ for row in (
+ connection.execute(t1.select()).first(),
+ connection.execute(t2.select()).first(),
+ ):
+ for i, (val, type_) in enumerate(
+ (
+ (1, int),
+ (decimal.Decimal("5.2"), decimal.Decimal),
+ (6.5, float),
+ (8.5, float),
+ (9.5, float),
+ (12, int),
+ (decimal.Decimal("14.85"), decimal.Decimal),
+ (15.76, float),
+ )
):
- for i, (val, type_) in enumerate(
- (
- (1, int),
- (decimal.Decimal("5.2"), decimal.Decimal),
- (6.5, float),
- (8.5, float),
- (9.5, float),
- (12, int),
- (decimal.Decimal("14.85"), decimal.Decimal),
- (15.76, float),
- )
- ):
- eq_(row[i], val)
- assert isinstance(row[i], type_), "%r is not %r" % (
- row[i],
- type_,
- )
+ eq_(row[i], val)
+ assert isinstance(row[i], type_), "%r is not %r" % (
+ row[i],
+ type_,
+ )
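# --- editorial note (not part of the diff) -----------------------------------
# The expectations above encode the dialect's numeric coercion: Integer and
# integer-valued NUMBER columns come back as int, Numeric and NUMBER(9, 3)
# as decimal.Decimal, and FLOAT/DOUBLE PRECISION (plus scale-less NUMBER
# values holding fractions) as float.
# ------------------------------------------------------------------------------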
- @testing.provide_metadata
- def test_numeric_infinity_float(self, connection):
- m = self.metadata
+ def test_numeric_infinity_float(self, metadata, connection):
+ m = metadata
t1 = Table(
"t1",
m,
[(float("inf"),), (float("-inf"),)],
)
- @testing.provide_metadata
- def test_numeric_infinity_decimal(self, connection):
- m = self.metadata
+ def test_numeric_infinity_decimal(self, metadata, connection):
+ m = metadata
t1 = Table(
"t1",
m,
[(decimal.Decimal("Infinity"),), (decimal.Decimal("-Infinity"),)],
)
- @testing.provide_metadata
- def test_numeric_nan_float(self, connection):
- m = self.metadata
+ def test_numeric_nan_float(self, metadata, connection):
+ m = metadata
t1 = Table(
"t1",
m,
# needs https://github.com/oracle/python-cx_Oracle/
# issues/184#issuecomment-391399292
- @testing.provide_metadata
- def _dont_test_numeric_nan_decimal(self, connection):
- m = self.metadata
+ def _dont_test_numeric_nan_decimal(self, metadata, connection):
+ m = metadata
t1 = Table(
"t1",
m,
[(decimal.Decimal("NaN"),), (decimal.Decimal("NaN"),)],
)
- @testing.provide_metadata
- def test_numerics_broken_inspection(self, connection):
+ def test_numerics_broken_inspection(self, metadata, connection):
"""Numeric scenarios where Oracle type info is 'broken',
returning us precision, scale of the form (0, 0) or (0, -127).
We convert to Decimal and let int()/float() processors take over.
"""
- metadata = self.metadata
-
# this test requires cx_oracle 5
foo = Table(
value = exec_sql(connection, "SELECT 'hello' FROM DUAL").scalar()
assert isinstance(value, util.text_type)
- @testing.provide_metadata
- def test_reflect_dates(self):
- metadata = self.metadata
+ def test_reflect_dates(self, metadata, connection):
Table(
"date_types",
metadata,
Column("d4", TIMESTAMP(timezone=True)),
Column("d5", oracle.INTERVAL(second_precision=5)),
)
- metadata.create_all()
+ metadata.create_all(connection)
m = MetaData()
- t1 = Table("date_types", m, autoload_with=testing.db)
+ t1 = Table("date_types", m, autoload_with=connection)
assert isinstance(t1.c.d1.type, oracle.DATE)
assert isinstance(t1.c.d1.type, DateTime)
assert isinstance(t1.c.d2.type, oracle.DATE)
for row in types_table.select().execute().fetchall():
[row[k] for k in row.keys()]
- @testing.provide_metadata
- def test_raw_roundtrip(self, connection):
- metadata = self.metadata
+ def test_raw_roundtrip(self, metadata, connection):
raw_table = Table(
"raw",
metadata,
Column("id", Integer, primary_key=True),
Column("data", oracle.RAW(35)),
)
- metadata.create_all()
+ metadata.create_all(connection)
connection.execute(raw_table.insert(), id=1, data=b("ABCDEF"))
eq_(connection.execute(raw_table.select()).first(), (1, b("ABCDEF")))
- @testing.provide_metadata
- def test_reflect_nvarchar(self, connection):
- metadata = self.metadata
+ def test_reflect_nvarchar(self, metadata, connection):
Table(
"tnv",
metadata,
assert isinstance(nv_data, util.text_type)
assert isinstance(c_data, util.text_type)
- @testing.provide_metadata
- def test_reflect_unicode_no_nvarchar(self):
- metadata = self.metadata
+ def test_reflect_unicode_no_nvarchar(self, metadata, connection):
Table("tnv", metadata, Column("data", sqltypes.Unicode(255)))
- metadata.create_all()
+ metadata.create_all(connection)
m2 = MetaData()
- t2 = Table("tnv", m2, autoload_with=testing.db)
+ t2 = Table("tnv", m2, autoload_with=connection)
assert isinstance(t2.c.data.type, sqltypes.VARCHAR)
if testing.against("oracle+cx_oracle"):
assert isinstance(
- t2.c.data.type.dialect_impl(testing.db.dialect),
+ t2.c.data.type.dialect_impl(connection.dialect),
cx_oracle._OracleString,
)
data = u("m’a réveillé.")
- with testing.db.begin() as conn:
- conn.execute(t2.insert(), {"data": data})
- res = conn.execute(t2.select()).first().data
- eq_(res, data)
- assert isinstance(res, util.text_type)
+ connection.execute(t2.insert(), {"data": data})
+ res = connection.execute(t2.select()).first().data
+ eq_(res, data)
+ assert isinstance(res, util.text_type)
- @testing.provide_metadata
- def test_char_length(self):
- metadata = self.metadata
+ def test_char_length(self, metadata, connection):
t1 = Table(
"t1",
metadata,
Column("c3", CHAR(200)),
Column("c4", NCHAR(180)),
)
- t1.create()
+ t1.create(connection)
m2 = MetaData()
- t2 = Table("t1", m2, autoload_with=testing.db)
+ t2 = Table("t1", m2, autoload_with=connection)
eq_(t2.c.c1.type.length, 50)
eq_(t2.c.c2.type.length, 250)
eq_(t2.c.c3.type.length, 200)
eq_(t2.c.c4.type.length, 180)
- @testing.provide_metadata
- def test_long_type(self, connection):
- metadata = self.metadata
+ def test_long_type(self, metadata, connection):
t = Table("t", metadata, Column("data", oracle.LONG))
- metadata.create_all(testing.db)
+ metadata.create_all(connection)
connection.execute(t.insert(), data="xyz")
eq_(connection.scalar(select(t.c.data)), "xyz")
- @testing.provide_metadata
- def test_longstring(self, connection):
- metadata = self.metadata
+ def test_longstring(self, metadata, connection):
exec_sql(
connection,
"""
(CHAR(30), "test", "FIXED_CHAR", False),
(NCHAR(30), u("test"), "FIXED_NCHAR", False),
(oracle.LONG(), "test", None, False),
+ argnames="datatype, value, sis_value_text, set_nchar_flag",
)
- @testing.provide_metadata
def test_setinputsizes(
- self, datatype, value, sis_value_text, set_nchar_flag
+ self, metadata, datatype, value, sis_value_text, set_nchar_flag
):
if isinstance(sis_value_text, str):
sis_value = getattr(testing.db.dialect.dbapi, sis_value_text)
else:
return self.impl
- m = self.metadata
+ m = metadata
# Oracle can have only one column of type LONG so we make three
# tables rather than one table w/ three columns
t1 = Table("t1", m, Column("foo", datatype))
"t2", m, Column("foo", NullType().with_variant(datatype, "oracle"))
)
t3 = Table("t3", m, Column("foo", TestTypeDec()))
- m.create_all()
+ m.create_all(testing.db)
class CursorWrapper(object):
# cx_oracle cursor can't be modified so we have to
[mock.call.setinputsizes()],
)
- def test_event_no_native_float(self):
+ def test_event_no_native_float(self, metadata):
def _remove_type(inputsizes, cursor, statement, parameters, context):
for param, dbapitype in list(inputsizes.items()):
if dbapitype is testing.db.dialect.dbapi.NATIVE_FLOAT:
event.listen(testing.db, "do_setinputsizes", _remove_type)
try:
- self.test_setinputsizes(oracle.BINARY_FLOAT, 25.34534, None, False)
+ self.test_setinputsizes(
+ metadata, oracle.BINARY_FLOAT, 25.34534, None, False
+ )
finally:
event.remove(testing.db, "do_setinputsizes", _remove_type)
Column("date1", DateTime(timezone=True)),
Column("date2", DateTime(timezone=False)),
)
- metadata.create_all()
+ metadata.create_all(testing.db)
m2 = MetaData()
t2 = Table("pgdate", m2, autoload_with=testing.db)
assert t2.c.date1.type.timezone is True
__only_on__ = "postgresql"
__backend__ = True
- @testing.fails_if(
- "postgresql < 8.4", "Better int2vector functions not available"
- )
- @testing.provide_metadata
- def test_reflected_primary_key_order(self):
- meta1 = self.metadata
+ def test_reflected_primary_key_order(self, metadata, connection):
+ meta1 = metadata
subject = Table(
"subject",
meta1,
Column("p2", Integer, primary_key=True),
PrimaryKeyConstraint("p2", "p1"),
)
- meta1.create_all()
+ meta1.create_all(connection)
meta2 = MetaData()
- subject = Table("subject", meta2, autoload_with=testing.db)
+ subject = Table("subject", meta2, autoload_with=connection)
eq_(subject.primary_key.columns.keys(), ["p2", "p1"])
@testing.provide_metadata
user_tmp.create(testing.db)
assert inspect(testing.db).has_table("some_temp_table")
- @testing.provide_metadata
- def test_cross_schema_reflection_one(self):
+ def test_cross_schema_reflection_one(self, metadata, connection):
- meta1 = self.metadata
+ meta1 = metadata
users = Table(
"users",
Column("email_address", String(20)),
schema="test_schema",
)
- meta1.create_all()
+ meta1.create_all(connection)
meta2 = MetaData()
addresses = Table(
"email_addresses",
meta2,
- autoload_with=testing.db,
+ autoload_with=connection,
schema="test_schema",
)
users = Table("users", meta2, must_exist=True, schema="test_schema")
(users.c.user_id == addresses.c.remote_user_id).compare(j.onclause)
)
- @testing.provide_metadata
- def test_cross_schema_reflection_two(self):
- meta1 = self.metadata
+ def test_cross_schema_reflection_two(self, metadata, connection):
+ meta1 = metadata
subject = Table(
"subject", meta1, Column("id", Integer, primary_key=True)
)
Column("ref", Integer, ForeignKey("subject.id")),
schema="test_schema",
)
- meta1.create_all()
+ meta1.create_all(connection)
meta2 = MetaData()
- subject = Table("subject", meta2, autoload_with=testing.db)
+ subject = Table("subject", meta2, autoload_with=connection)
referer = Table(
- "referer", meta2, schema="test_schema", autoload_with=testing.db
+ "referer", meta2, schema="test_schema", autoload_with=connection
)
self.assert_(
(subject.c.id == referer.c.ref).compare(
)
)
- @testing.provide_metadata
- def test_cross_schema_reflection_three(self):
- meta1 = self.metadata
+ def test_cross_schema_reflection_three(self, metadata, connection):
+ meta1 = metadata
subject = Table(
"subject",
meta1,
Column("ref", Integer, ForeignKey("test_schema_2.subject.id")),
schema="test_schema",
)
- meta1.create_all()
+ meta1.create_all(connection)
meta2 = MetaData()
subject = Table(
- "subject", meta2, autoload_with=testing.db, schema="test_schema_2"
+ "subject", meta2, autoload_with=connection, schema="test_schema_2"
)
referer = Table(
- "referer", meta2, autoload_with=testing.db, schema="test_schema"
+ "referer", meta2, autoload_with=connection, schema="test_schema"
)
self.assert_(
(subject.c.id == referer.c.ref).compare(
)
)
- @testing.provide_metadata
- def test_cross_schema_reflection_four(self):
- meta1 = self.metadata
+ def test_cross_schema_reflection_four(self, metadata, connection):
+ meta1 = metadata
subject = Table(
"subject",
meta1,
Column("ref", Integer, ForeignKey("test_schema_2.subject.id")),
schema="test_schema",
)
- meta1.create_all()
+ meta1.create_all(connection)
- conn = testing.db.connect()
- conn.detach()
- conn.exec_driver_sql("SET search_path TO test_schema, test_schema_2")
- meta2 = MetaData(bind=conn)
+ connection.detach()
+ connection.exec_driver_sql(
+ "SET search_path TO test_schema, test_schema_2"
+ )
+ meta2 = MetaData()
subject = Table(
"subject",
meta2,
- autoload_with=testing.db,
+ autoload_with=connection,
schema="test_schema_2",
postgresql_ignore_search_path=True,
)
referer = Table(
"referer",
meta2,
- autoload_with=testing.db,
+ autoload_with=connection,
schema="test_schema",
postgresql_ignore_search_path=True,
)
subject.join(referer).onclause
)
)
- conn.close()
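# --- editorial note (not part of the diff) -----------------------------------
# Connection.detach() severs the underlying DBAPI connection from the pool,
# so the session-level state set in these tests (the modified search_path)
# is discarded with the connection rather than returned to the pool for the
# next test.  Sketch (the engine is assumed to exist):
#
#     with engine.connect() as conn:
#         conn.detach()  # closed on exit instead of being pooled
#         conn.exec_driver_sql("SET search_path TO test_schema")
# ------------------------------------------------------------------------------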
- @testing.provide_metadata
- def test_cross_schema_reflection_five(self):
- meta1 = self.metadata
+ def test_cross_schema_reflection_five(self, metadata, connection):
+ meta1 = metadata
# we assume 'public'
- default_schema = testing.db.dialect.default_schema_name
+ default_schema = connection.dialect.default_schema_name
subject = Table(
"subject", meta1, Column("id", Integer, primary_key=True)
)
Column("id", Integer, primary_key=True),
Column("ref", Integer, ForeignKey("subject.id")),
)
- meta1.create_all()
+ meta1.create_all(connection)
meta2 = MetaData()
subject = Table(
"subject",
meta2,
- autoload_with=testing.db,
+ autoload_with=connection,
schema=default_schema,
postgresql_ignore_search_path=True,
)
referer = Table(
"referer",
meta2,
- autoload_with=testing.db,
+ autoload_with=connection,
schema=default_schema,
postgresql_ignore_search_path=True,
)
)
)
- @testing.provide_metadata
- def test_cross_schema_reflection_six(self):
+ def test_cross_schema_reflection_six(self, metadata, connection):
# test that the search path *is* taken into account
# by default
- meta1 = self.metadata
+ meta1 = metadata
Table(
"some_table",
Column("sid", Integer, ForeignKey("test_schema.some_table.id")),
schema="test_schema_2",
)
- meta1.create_all()
- with testing.db.connect() as conn:
- conn.detach()
+ meta1.create_all(connection)
+ connection.detach()
- conn.exec_driver_sql(
- "set search_path to test_schema_2, test_schema, public"
- )
+ connection.exec_driver_sql(
+ "set search_path to test_schema_2, test_schema, public"
+ )
- m1 = MetaData()
+ m1 = MetaData()
- Table("some_table", m1, schema="test_schema", autoload_with=conn)
- t2_schema = Table(
- "some_other_table",
- m1,
- schema="test_schema_2",
- autoload_with=conn,
- )
+ Table("some_table", m1, schema="test_schema", autoload_with=connection)
+ t2_schema = Table(
+ "some_other_table",
+ m1,
+ schema="test_schema_2",
+ autoload_with=connection,
+ )
- t2_no_schema = Table("some_other_table", m1, autoload_with=conn)
+ t2_no_schema = Table("some_other_table", m1, autoload_with=connection)
- t1_no_schema = Table("some_table", m1, autoload_with=conn)
+ t1_no_schema = Table("some_table", m1, autoload_with=connection)
- m2 = MetaData()
- t1_schema_isp = Table(
- "some_table",
- m2,
- schema="test_schema",
- autoload_with=conn,
- postgresql_ignore_search_path=True,
- )
- t2_schema_isp = Table(
- "some_other_table",
- m2,
- schema="test_schema_2",
- autoload_with=conn,
- postgresql_ignore_search_path=True,
- )
+ m2 = MetaData()
+ t1_schema_isp = Table(
+ "some_table",
+ m2,
+ schema="test_schema",
+ autoload_with=connection,
+ postgresql_ignore_search_path=True,
+ )
+ t2_schema_isp = Table(
+ "some_other_table",
+ m2,
+ schema="test_schema_2",
+ autoload_with=connection,
+ postgresql_ignore_search_path=True,
+ )
- # t2_schema refers to t1_schema, but since "test_schema"
- # is in the search path, we instead link to t1_no_schema
- assert t2_schema.c.sid.references(t1_no_schema.c.id)
+ # t2_schema refers to t1_schema, but since "test_schema"
+ # is in the search path, we instead link to t1_no_schema
+ assert t2_schema.c.sid.references(t1_no_schema.c.id)
- # the two no_schema tables refer to each other also.
- assert t2_no_schema.c.sid.references(t1_no_schema.c.id)
+ # the two no_schema tables refer to each other also.
+ assert t2_no_schema.c.sid.references(t1_no_schema.c.id)
- # but if we're ignoring search path, then we maintain
- # those explicit schemas vs. what the "default" schema is
- assert t2_schema_isp.c.sid.references(t1_schema_isp.c.id)
+ # but if we're ignoring search path, then we maintain
+ # those explicit schemas vs. what the "default" schema is
+ assert t2_schema_isp.c.sid.references(t1_schema_isp.c.id)
- @testing.provide_metadata
- def test_cross_schema_reflection_seven(self):
+ def test_cross_schema_reflection_seven(self, metadata, connection):
# test that the search path *is* taken into account
# by default
- meta1 = self.metadata
+ meta1 = metadata
Table(
"some_table",
Column("sid", Integer, ForeignKey("test_schema.some_table.id")),
schema="test_schema_2",
)
- meta1.create_all()
- with testing.db.connect() as conn:
- conn.detach()
+ meta1.create_all(connection)
+ connection.detach()
- conn.exec_driver_sql(
- "set search_path to test_schema_2, test_schema, public"
- )
- meta2 = MetaData(conn)
- meta2.reflect(schema="test_schema_2")
+ connection.exec_driver_sql(
+ "set search_path to test_schema_2, test_schema, public"
+ )
+ meta2 = MetaData()
+ meta2.reflect(connection, schema="test_schema_2")
- eq_(
- set(meta2.tables),
- set(["test_schema_2.some_other_table", "some_table"]),
- )
+ eq_(
+ set(meta2.tables),
+ set(["test_schema_2.some_other_table", "some_table"]),
+ )
- meta3 = MetaData(conn)
- meta3.reflect(
- schema="test_schema_2", postgresql_ignore_search_path=True
- )
+ meta3 = MetaData()
+ meta3.reflect(
+ connection,
+ schema="test_schema_2",
+ postgresql_ignore_search_path=True,
+ )
- eq_(
- set(meta3.tables),
- set(
- [
- "test_schema_2.some_other_table",
- "test_schema.some_table",
- ]
- ),
- )
+ eq_(
+ set(meta3.tables),
+ set(
+ [
+ "test_schema_2.some_other_table",
+ "test_schema.some_table",
+ ]
+ ),
+ )
- @testing.provide_metadata
- def test_cross_schema_reflection_metadata_uses_schema(self):
+ def test_cross_schema_reflection_metadata_uses_schema(
+ self, metadata, connection
+ ):
# test [ticket:3716]
- metadata = self.metadata
-
Table(
"some_table",
metadata,
Column("id", Integer, primary_key=True),
schema=None,
)
- metadata.create_all()
- with testing.db.connect() as conn:
- meta2 = MetaData(conn, schema="test_schema")
- meta2.reflect()
+ metadata.create_all(connection)
+ meta2 = MetaData(schema="test_schema")
+ meta2.reflect(connection)
- eq_(
- set(meta2.tables),
- set(["some_other_table", "test_schema.some_table"]),
- )
+ eq_(
+ set(meta2.tables),
+ set(["some_other_table", "test_schema.some_table"]),
+ )
- @testing.provide_metadata
- def test_uppercase_lowercase_table(self):
- metadata = self.metadata
+ def test_uppercase_lowercase_table(self, metadata, connection):
a_table = Table("a", metadata, Column("x", Integer))
A_table = Table("A", metadata, Column("x", Integer))
- a_table.create()
- assert inspect(testing.db).has_table("a")
- assert not inspect(testing.db).has_table("A")
- A_table.create(checkfirst=True)
- assert inspect(testing.db).has_table("A")
+ a_table.create(connection)
+ assert inspect(connection).has_table("a")
+ assert not inspect(connection).has_table("A")
+ A_table.create(connection, checkfirst=True)
+ assert inspect(connection).has_table("A")
def test_uppercase_lowercase_sequence(self):
a_seq.drop(testing.db)
A_seq.drop(testing.db)
- @testing.provide_metadata
- def test_index_reflection(self):
+ def test_index_reflection(self, metadata, connection):
"""Reflecting expression-based indexes should warn"""
- metadata = self.metadata
-
Table(
"party",
metadata,
Column("name", String(20), index=True),
Column("aname", String(20)),
)
- metadata.create_all(testing.db)
- with testing.db.begin() as conn:
- conn.exec_driver_sql("create index idx1 on party ((id || name))")
- conn.exec_driver_sql(
- "create unique index idx2 on party (id) where name = 'test'"
- )
- conn.exec_driver_sql(
- """
- create index idx3 on party using btree
- (lower(name::text), lower(aname::text))
- """
- )
+ metadata.create_all(connection)
+ connection.exec_driver_sql("create index idx1 on party ((id || name))")
+ connection.exec_driver_sql(
+ "create unique index idx2 on party (id) where name = 'test'"
+ )
+ connection.exec_driver_sql(
+ """
+ create index idx3 on party using btree
+ (lower(name::text), lower(aname::text))
+ """
+ )
def go():
m2 = MetaData()
- t2 = Table("party", m2, autoload_with=testing.db)
+ t2 = Table("party", m2, autoload_with=connection)
assert len(t2.indexes) == 2
# Make sure indexes are in the order we expect them in
"WHERE ((name)::text = 'test'::text)",
)
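# --- editorial note (not part of the diff) -----------------------------------
# Of the three raw indexes created above, the expression-based idx1 and
# idx3 are skipped during reflection (with the warning the docstring
# mentions); the partial index idx2 and the plain index on "name" remain,
# which is why go() asserts exactly two reflected indexes.
# ------------------------------------------------------------------------------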
- @testing.fails_if("postgresql < 8.3", "index ordering not supported")
- @testing.provide_metadata
- def test_index_reflection_with_sorting(self):
+ def test_index_reflection_with_sorting(self, metadata, connection):
"""reflect indexes with sorting options set"""
t1 = Table(
"party",
- self.metadata,
+ metadata,
Column("id", String(10), nullable=False),
Column("name", String(20)),
Column("aname", String(20)),
)
- with testing.db.begin() as conn:
-
- t1.create(conn)
+ t1.create(connection)
- # check ASC, DESC options alone
- conn.exec_driver_sql(
- """
- create index idx1 on party
- (id, name ASC, aname DESC)
- """
- )
+ # check ASC, DESC options alone
+ connection.exec_driver_sql(
+ """
+ create index idx1 on party
+ (id, name ASC, aname DESC)
+ """
+ )
- # check DESC w/ NULLS options
- conn.exec_driver_sql(
- """
- create index idx2 on party
- (name DESC NULLS FIRST, aname DESC NULLS LAST)
- """
- )
+ # check DESC w/ NULLS options
+ connection.exec_driver_sql(
+ """
+ create index idx2 on party
+ (name DESC NULLS FIRST, aname DESC NULLS LAST)
+ """
+ )
- # check ASC w/ NULLS options
- conn.exec_driver_sql(
- """
- create index idx3 on party
- (name ASC NULLS FIRST, aname ASC NULLS LAST)
- """
- )
+ # check ASC w/ NULLS options
+ connection.exec_driver_sql(
+ """
+ create index idx3 on party
+ (name ASC NULLS FIRST, aname ASC NULLS LAST)
+ """
+ )
# reflect data
- with testing.db.connect() as conn:
- m2 = MetaData(conn)
- t2 = Table("party", m2, autoload_with=testing.db)
+ m2 = MetaData()
+ t2 = Table("party", m2, autoload_with=connection)
eq_(len(t2.indexes), 3)
)
@testing.skip_if("postgresql < 11.0", "indnkeyatts not supported")
- @testing.provide_metadata
- def test_index_reflection_with_include(self):
+ def test_index_reflection_with_include(self, metadata, connection):
"""reflect indexes with include set"""
- metadata = self.metadata
-
Table(
"t",
metadata,
Column("x", ARRAY(Integer)),
Column("name", String(20)),
)
- metadata.create_all()
- with testing.db.begin() as conn:
- conn.exec_driver_sql("CREATE INDEX idx1 ON t (x) INCLUDE (name)")
+ metadata.create_all(connection)
+ connection.exec_driver_sql("CREATE INDEX idx1 ON t (x) INCLUDE (name)")
- # prior to #5205, this would return:
- # [{'column_names': ['x', 'name'],
- # 'name': 'idx1', 'unique': False}]
+ # prior to #5205, this would return:
+ # [{'column_names': ['x', 'name'],
+ # 'name': 'idx1', 'unique': False}]
- ind = testing.db.dialect.get_indexes(conn, "t", None)
- eq_(
- ind,
- [
- {
- "unique": False,
- "column_names": ["x"],
- "include_columns": ["name"],
- "name": "idx1",
- }
- ],
- )
+ ind = testing.db.dialect.get_indexes(connection, "t", None)
+ eq_(
+ ind,
+ [
+ {
+ "unique": False,
+ "column_names": ["x"],
+ "include_columns": ["name"],
+ "name": "idx1",
+ }
+ ],
+ )
- @testing.provide_metadata
- def test_foreign_key_option_inspection(self):
- metadata = self.metadata
+ def test_foreign_key_option_inspection(self, metadata, connection):
Table(
"person",
metadata,
"options": {"onupdate": "CASCADE", "ondelete": "CASCADE"},
},
}
- metadata.create_all()
- inspector = inspect(testing.db)
+ metadata.create_all(connection)
+ inspector = inspect(connection)
fks = inspector.get_foreign_keys(
"person"
) + inspector.get_foreign_keys("company")
t = Table("t", MetaData(), autoload_with=testing.db)
eq_(t.c.x.type.enums, [])
- @testing.provide_metadata
- @testing.only_on("postgresql >= 8.5")
- def test_reflection_with_unique_constraint(self):
- insp = inspect(testing.db)
+ def test_reflection_with_unique_constraint(self, metadata, connection):
+ insp = inspect(connection)
- meta = self.metadata
+ meta = metadata
uc_table = Table(
"pgsql_uc",
meta,
UniqueConstraint("a", name="uc_a"),
)
- uc_table.create()
+ uc_table.create(connection)
# PostgreSQL will create an implicit index for a unique
# constraint. Separately we get both
self.assert_("uc_a" in constraints)
# reflection corrects for the dupe
- reflected = Table("pgsql_uc", MetaData(), autoload_with=testing.db)
+ reflected = Table("pgsql_uc", MetaData(), autoload_with=connection)
indexes = set(i.name for i in reflected.indexes)
constraints = set(uc.name for uc in reflected.constraints)
self.assert_("uc_a" in constraints)
@testing.requires.btree_gist
- @testing.provide_metadata
- def test_reflection_with_exclude_constraint(self):
- m = self.metadata
+ def test_reflection_with_exclude_constraint(self, metadata, connection):
+ m = metadata
Table(
"t",
m,
ExcludeConstraint(("period", "&&"), name="quarters_period_excl"),
)
- m.create_all()
+ m.create_all(connection)
- insp = inspect(testing.db)
+ insp = inspect(connection)
# PostgreSQL will create an implicit index for an exclude constraint.
# we don't reflect the EXCLUDE yet.
eq_(insp.get_indexes("t"), expected)
# reflection corrects for the dupe
- reflected = Table("t", MetaData(), autoload_with=testing.db)
+ reflected = Table("t", MetaData(), autoload_with=connection)
eq_(set(reflected.indexes), set())
- @testing.provide_metadata
- def test_reflect_unique_index(self):
- insp = inspect(testing.db)
+ def test_reflect_unique_index(self, metadata, connection):
+ insp = inspect(connection)
- meta = self.metadata
+ meta = metadata
# a unique index OTOH we are able to detect is an index
# and not a unique constraint
Index("ix_a", "a", unique=True),
)
- uc_table.create()
+ uc_table.create(connection)
indexes = dict((i["name"], i) for i in insp.get_indexes("pgsql_uc"))
constraints = set(
assert indexes["ix_a"]["unique"]
self.assert_("ix_a" not in constraints)
- reflected = Table("pgsql_uc", MetaData(), autoload_with=testing.db)
+ reflected = Table("pgsql_uc", MetaData(), autoload_with=connection)
indexes = dict((i.name, i) for i in reflected.indexes)
constraints = set(uc.name for uc in reflected.constraints)
assert indexes["ix_a"].unique
self.assert_("ix_a" not in constraints)
- @testing.provide_metadata
- def test_reflect_check_constraint(self):
- meta = self.metadata
+ def test_reflect_check_constraint(self, metadata, connection):
+ meta = metadata
udf_create = """\
CREATE OR REPLACE FUNCTION is_positive(
"""
sa.event.listen(meta, "before_create", sa.DDL(udf_create))
sa.event.listen(
- meta, "after_drop", sa.DDL("DROP FUNCTION is_positive(integer)")
+ meta,
+ "after_drop",
+ sa.DDL("DROP FUNCTION IF EXISTS is_positive(integer)"),
)
Table(
CheckConstraint("b != 'hi\nim a name \nyup\n'", name="cc4"),
)
- meta.create_all()
+ meta.create_all(connection)
- reflected = Table("pgsql_cc", MetaData(), autoload_with=testing.db)
+ reflected = Table("pgsql_cc", MetaData(), autoload_with=connection)
check_constraints = dict(
(uc.name, uc.sqltext.text)
from sqlalchemy.testing.util import round_decimal
-tztable = notztable = metadata = table = None
-
-
class FloatCoercionTest(fixtures.TablesTest, AssertsExecutionResults):
__only_on__ = "postgresql"
__dialect__ = postgresql.dialect()
).scalar()
eq_(round_decimal(ret, 9), result)
- @testing.provide_metadata
- def test_arrays_pg(self, connection):
- metadata = self.metadata
+ def test_arrays_pg(self, connection, metadata):
t1 = Table(
"t",
metadata,
Column("z", postgresql.ARRAY(postgresql.DOUBLE_PRECISION)),
Column("q", postgresql.ARRAY(Numeric)),
)
- metadata.create_all()
+ metadata.create_all(connection)
connection.execute(
t1.insert(), x=[5], y=[5], z=[6], q=[decimal.Decimal("6.4")]
)
row = connection.execute(t1.select()).first()
eq_(row, ([5], [5], [6], [decimal.Decimal("6.4")]))
- @testing.provide_metadata
- def test_arrays_base(self, connection):
- metadata = self.metadata
+ def test_arrays_base(self, connection, metadata):
t1 = Table(
"t",
metadata,
Column("z", sqltypes.ARRAY(postgresql.DOUBLE_PRECISION)),
Column("q", sqltypes.ARRAY(Numeric)),
)
- metadata.create_all()
+ metadata.create_all(connection)
connection.execute(
t1.insert(), x=[5], y=[5], z=[6], q=[decimal.Decimal("6.4")]
)
]
t1.drop(conn, checkfirst=True)
- def test_name_required(self):
- metadata = MetaData(testing.db)
+ def test_name_required(self, metadata, connection):
etype = Enum("four", "five", "six", metadata=metadata)
- assert_raises(exc.CompileError, etype.create)
+ assert_raises(exc.CompileError, etype.create, connection)
assert_raises(
- exc.CompileError, etype.compile, dialect=postgresql.dialect()
+ exc.CompileError, etype.compile, dialect=connection.dialect
)
- @testing.provide_metadata
- def test_unicode_labels(self, connection):
- metadata = self.metadata
+ def test_unicode_labels(self, connection, metadata):
t1 = Table(
"table",
metadata,
),
),
)
- metadata.create_all()
+ metadata.create_all(connection)
connection.execute(t1.insert(), value=util.u("drôle"))
connection.execute(t1.insert(), value=util.u("réveillé"))
connection.execute(t1.insert(), value=util.u("S’il"))
],
)
m2 = MetaData()
- t2 = Table("table", m2, autoload_with=testing.db)
+ t2 = Table("table", m2, autoload_with=connection)
eq_(
t2.c.value.type.enums,
[util.u("réveillé"), util.u("drôle"), util.u("S’il")],
RegexSQL("DROP TYPE myenum", dialect="postgresql"),
)
- @testing.provide_metadata
- def test_generate_multiple(self):
+ def test_generate_multiple(self, metadata, connection):
"""Test that the same enum twice only generates once
for the create_all() call, without using checkfirst.
A 'memo' collection held by the DDL runner now handles this.
"""
- metadata = self.metadata
-
e1 = Enum("one", "two", "three", name="myenum")
Table("e1", metadata, Column("c1", e1))
Table("e2", metadata, Column("c1", e1))
- metadata.create_all(checkfirst=False)
- metadata.drop_all(checkfirst=False)
+ metadata.create_all(connection, checkfirst=False)
+ metadata.drop_all(connection, checkfirst=False)
assert "myenum" not in [
- e["name"] for e in inspect(testing.db).get_enums()
+ e["name"] for e in inspect(connection).get_enums()
]
- @testing.provide_metadata
- def test_generate_alone_on_metadata(self):
+ def test_generate_alone_on_metadata(self, connection, metadata):
"""Test that the same enum twice only generates once
for the create_all() call, without using checkfirst.
A 'memo' collection held by the DDL runner now handles this.
"""
- metadata = self.metadata
- Enum("one", "two", "three", name="myenum", metadata=self.metadata)
+ Enum("one", "two", "three", name="myenum", metadata=metadata)
- metadata.create_all(checkfirst=False)
- assert "myenum" in [e["name"] for e in inspect(testing.db).get_enums()]
- metadata.drop_all(checkfirst=False)
+ metadata.create_all(connection, checkfirst=False)
+ assert "myenum" in [e["name"] for e in inspect(connection).get_enums()]
+ metadata.drop_all(connection, checkfirst=False)
assert "myenum" not in [
- e["name"] for e in inspect(testing.db).get_enums()
+ e["name"] for e in inspect(connection).get_enums()
]
- @testing.provide_metadata
- def test_generate_multiple_on_metadata(self):
- metadata = self.metadata
+ def test_generate_multiple_on_metadata(self, connection, metadata):
e1 = Enum("one", "two", "three", name="myenum", metadata=metadata)
t2 = Table("e2", metadata, Column("c1", e1))
- metadata.create_all(checkfirst=False)
- assert "myenum" in [e["name"] for e in inspect(testing.db).get_enums()]
- metadata.drop_all(checkfirst=False)
+ metadata.create_all(connection, checkfirst=False)
+ assert "myenum" in [e["name"] for e in inspect(connection).get_enums()]
+ metadata.drop_all(connection, checkfirst=False)
assert "myenum" not in [
- e["name"] for e in inspect(testing.db).get_enums()
+ e["name"] for e in inspect(connection).get_enums()
]
- e1.create() # creates ENUM
- t1.create() # does not create ENUM
- t2.create() # does not create ENUM
+ e1.create(connection) # creates ENUM
+ t1.create(connection) # does not create ENUM
+ t2.create(connection) # does not create ENUM
- @testing.provide_metadata
- def test_generate_multiple_schemaname_on_metadata(self):
- metadata = self.metadata
+ def test_generate_multiple_schemaname_on_metadata(
+ self, metadata, connection
+ ):
Enum("one", "two", "three", name="myenum", metadata=metadata)
Enum(
schema="test_schema",
)
- metadata.create_all(checkfirst=False)
- assert "myenum" in [e["name"] for e in inspect(testing.db).get_enums()]
+ metadata.create_all(connection, checkfirst=False)
+ assert "myenum" in [e["name"] for e in inspect(connection).get_enums()]
assert "myenum" in [
e["name"]
- for e in inspect(testing.db).get_enums(schema="test_schema")
+ for e in inspect(connection).get_enums(schema="test_schema")
]
- metadata.drop_all(checkfirst=False)
+ metadata.drop_all(connection, checkfirst=False)
assert "myenum" not in [
- e["name"] for e in inspect(testing.db).get_enums()
+ e["name"] for e in inspect(connection).get_enums()
]
assert "myenum" not in [
e["name"]
- for e in inspect(testing.db).get_enums(schema="test_schema")
+ for e in inspect(connection).get_enums(schema="test_schema")
]
- @testing.provide_metadata
- def test_drops_on_table(self):
- metadata = self.metadata
+ def test_drops_on_table(self, connection, metadata):
e1 = Enum("one", "two", "three", name="myenum")
table = Table("e1", metadata, Column("c1", e1))
- table.create()
- table.drop()
+ table.create(connection)
+ table.drop(connection)
assert "myenum" not in [
- e["name"] for e in inspect(testing.db).get_enums()
+ e["name"] for e in inspect(connection).get_enums()
]
- table.create()
- assert "myenum" in [e["name"] for e in inspect(testing.db).get_enums()]
- table.drop()
+ table.create(connection)
+ assert "myenum" in [e["name"] for e in inspect(connection).get_enums()]
+ table.drop(connection)
assert "myenum" not in [
- e["name"] for e in inspect(testing.db).get_enums()
+ e["name"] for e in inspect(connection).get_enums()
]
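# --- editorial note (not part of the diff) -----------------------------------
# With a plain (non-metadata-attached) Enum, the CREATE TYPE / DROP TYPE
# DDL rides along with table.create() and table.drop(); the get_enums()
# checks above verify the type appears and disappears with the table.
# ------------------------------------------------------------------------------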
def test_create_drop_schema_translate_map(self, connection):
assert_raises(exc.ProgrammingError, e1.drop, conn, checkfirst=False)
- @testing.provide_metadata
- def test_remain_on_table_metadata_wide(self):
- metadata = self.metadata
+ def test_remain_on_table_metadata_wide(self, metadata, future_connection):
+ connection = future_connection
e1 = Enum("one", "two", "three", name="myenum", metadata=metadata)
table = Table("e1", metadata, Column("c1", e1))
sa.exc.ProgrammingError,
'.*type "myenum" does not exist',
table.create,
+ connection,
)
- table.create(checkfirst=True)
- table.drop()
- table.create(checkfirst=True)
- table.drop()
- assert "myenum" in [e["name"] for e in inspect(testing.db).get_enums()]
- metadata.drop_all()
+ connection.rollback()
+
+ table.create(connection, checkfirst=True)
+ table.drop(connection)
+ table.create(connection, checkfirst=True)
+ table.drop(connection)
+ assert "myenum" in [e["name"] for e in inspect(connection).get_enums()]
+ metadata.drop_all(connection)
assert "myenum" not in [
- e["name"] for e in inspect(testing.db).get_enums()
+ e["name"] for e in inspect(connection).get_enums()
]
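# --- editorial note (not part of the diff) -----------------------------------
# The connection.rollback() added above is needed because the "future"
# connection does not roll back automatically: after the expected
# ProgrammingError the transaction is in a failed state, and PostgreSQL
# rejects further statements on it until it is rolled back.
# ------------------------------------------------------------------------------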
def test_non_native_dialect(self):
finally:
metadata.drop_all(engine)
- def test_standalone_enum(self):
- metadata = MetaData(testing.db)
+ def test_standalone_enum(self, connection, metadata):
etype = Enum(
"four", "five", "six", name="fourfivesixtype", metadata=metadata
)
- etype.create()
+ etype.create(connection)
try:
- assert testing.db.dialect.has_type(testing.db, "fourfivesixtype")
+ assert testing.db.dialect.has_type(connection, "fourfivesixtype")
finally:
- etype.drop()
+ etype.drop(connection)
assert not testing.db.dialect.has_type(
- testing.db, "fourfivesixtype"
+ connection, "fourfivesixtype"
)
- metadata.create_all()
+ metadata.create_all(connection)
try:
- assert testing.db.dialect.has_type(testing.db, "fourfivesixtype")
+ assert testing.db.dialect.has_type(connection, "fourfivesixtype")
finally:
- metadata.drop_all()
+ metadata.drop_all(connection)
assert not testing.db.dialect.has_type(
- testing.db, "fourfivesixtype"
+ connection, "fourfivesixtype"
)
def test_no_support(self):
e.connect()
assert not dialect.supports_native_enum
- @testing.provide_metadata
- def test_reflection(self):
- metadata = self.metadata
+ def test_reflection(self, metadata, connection):
etype = Enum(
"four", "five", "six", name="fourfivesixtype", metadata=metadata
)
),
Column("value2", etype),
)
- metadata.create_all()
+ metadata.create_all(connection)
m2 = MetaData()
- t2 = Table("table", m2, autoload_with=testing.db)
+ t2 = Table("table", m2, autoload_with=connection)
eq_(t2.c.value.type.enums, ["one", "two", "three"])
eq_(t2.c.value.type.name, "onetwothreetype")
eq_(t2.c.value2.type.enums, ["four", "five", "six"])
eq_(t2.c.value2.type.name, "fourfivesixtype")
- @testing.provide_metadata
- def test_schema_reflection(self):
- metadata = self.metadata
+ def test_schema_reflection(self, metadata, connection):
etype = Enum(
"four",
"five",
),
Column("value2", etype),
)
- metadata.create_all()
+ metadata.create_all(connection)
m2 = MetaData()
- t2 = Table("table", m2, autoload_with=testing.db)
+ t2 = Table("table", m2, autoload_with=connection)
eq_(t2.c.value.type.enums, ["one", "two", "three"])
eq_(t2.c.value.type.name, "onetwothreetype")
eq_(t2.c.value2.type.enums, ["four", "five", "six"])
__only_on__ = "postgresql"
__backend__ = True
- @testing.provide_metadata
- def test_reflection(self):
- metadata = self.metadata
+ def test_reflection(self, connection, metadata):
Table(
"table",
metadata,
Column("x", Integer),
Column("y", postgresql.OID),
)
- metadata.create_all()
+ metadata.create_all(connection)
m2 = MetaData()
t2 = Table(
"table",
m2,
- autoload_with=testing.db,
+ autoload_with=connection,
)
assert isinstance(t2.c.y.type, postgresql.OID)
"pg_class",
)
- def test_cast_whereclause(self):
+ def test_cast_whereclause(self, connection):
pga = Table(
"pg_attribute",
- MetaData(testing.db),
+ MetaData(),
Column("attrelid", postgresql.OID),
Column("attname", String(64)),
)
- with testing.db.connect() as conn:
- oid = conn.scalar(
- select(pga.c.attrelid).where(
- pga.c.attrelid == cast("pg_class", postgresql.REGCLASS)
- )
+ oid = connection.scalar(
+ select(pga.c.attrelid).where(
+ pga.c.attrelid == cast("pg_class", postgresql.REGCLASS)
)
+ )
assert isinstance(oid, int)
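# --- editorial note (not part of the diff) -----------------------------------
# cast("pg_class", REGCLASS) leans on PostgreSQL's regclass type, which
# resolves a relation name to its pg_class OID; the scalar therefore comes
# back as a plain int comparable against pg_attribute.attrelid.
# ------------------------------------------------------------------------------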
val = proc(val)
assert val in (23.7, decimal.Decimal("23.7"))
- @testing.provide_metadata
- def test_numeric_default(self, connection):
- metadata = self.metadata
+ def test_numeric_default(self, connection, metadata):
# pg8000 appears to fail when the value is 0,
# returns an int instead of decimal.
t = Table(
Column("fd", Float(asdecimal=True), default=1),
Column("ff", Float(asdecimal=False), default=1),
)
- metadata.create_all()
+ metadata.create_all(connection)
connection.execute(t.insert())
row = connection.execute(t.select()).first()
is_(postgresql.INTERVAL().python_type, datetime.timedelta)
-class TimezoneTest(fixtures.TestBase):
+class TimezoneTest(fixtures.TablesTest):
__backend__ = True
"""Test timezone-aware datetimes.
__only_on__ = "postgresql"
@classmethod
- def setup_class(cls):
- global tztable, notztable, metadata
- metadata = MetaData(testing.db)
-
+ def define_tables(cls, metadata):
# current_timestamp() in postgresql is assumed to return
# TIMESTAMP WITH TIMEZONE
- tztable = Table(
+ Table(
"tztable",
metadata,
Column("id", Integer, primary_key=True),
),
Column("name", String(20)),
)
- notztable = Table(
+ Table(
"notztable",
metadata,
Column("id", Integer, primary_key=True),
),
Column("name", String(20)),
)
- metadata.create_all()
-
- @classmethod
- def teardown_class(cls):
- metadata.drop_all()
def test_with_timezone(self, connection):
-
+ tztable, notztable = self.tables("tztable", "notztable")
# get a date with a tzinfo
- somedate = testing.db.connect().scalar(
- func.current_timestamp().select()
- )
+ somedate = connection.scalar(func.current_timestamp().select())
assert somedate.tzinfo
connection.execute(tztable.insert(), id=1, name="row1", date=somedate)
row = connection.execute(
def test_without_timezone(self, connection):
# get a date without a tzinfo
+ tztable, notztable = self.tables("tztable", "notztable")
somedate = datetime.datetime(2005, 10, 20, 11, 52, 0)
assert not somedate.tzinfo
class TimePrecisionTest(fixtures.TestBase):
- __dialect__ = postgresql.dialect()
- __prefer__ = "postgresql"
+ __only_on__ = "postgresql"
__backend__ = True
- @testing.only_on("postgresql", "DB specific feature")
- @testing.provide_metadata
- def test_reflection(self):
- metadata = self.metadata
+ def test_reflection(self, metadata, connection):
t1 = Table(
"t1",
metadata,
Column("c5", postgresql.TIMESTAMP(precision=5)),
Column("c6", postgresql.TIMESTAMP(timezone=True, precision=5)),
)
- t1.create()
+ t1.create(connection)
m2 = MetaData()
- t2 = Table("t1", m2, autoload_with=testing.db)
+ t2 = Table("t1", m2, autoload_with=connection)
eq_(t2.c.c1.type.precision, None)
eq_(t2.c.c2.type.precision, 5)
eq_(t2.c.c3.type.precision, 5)
assert isinstance(tbl.c.intarr.type.item_type, Integer)
assert isinstance(tbl.c.strarr.type.item_type, String)
- @testing.provide_metadata
- def test_array_str_collation(self):
- m = self.metadata
-
+ def test_array_str_collation(self, metadata, connection):
t = Table(
"t",
- m,
+ metadata,
Column("data", sqltypes.ARRAY(String(50, collation="en_US"))),
)
- t.create()
+ t.create(connection)
- @testing.provide_metadata
- def test_array_agg(self, connection):
- values_table = Table("values", self.metadata, Column("value", Integer))
- self.metadata.create_all(testing.db)
+ def test_array_agg(self, metadata, connection):
+ values_table = Table("values", metadata, Column("value", Integer))
+ metadata.create_all(connection)
connection.execute(
values_table.insert(), [{"value": i} for i in range(1, 10)]
)
[4, 5, 6],
)
- @testing.provide_metadata
- def test_tuple_flag(self, connection):
- metadata = self.metadata
+ def test_tuple_flag(self, connection, metadata):
t1 = Table(
"t1",
"data2", self.ARRAY(Numeric(asdecimal=False), as_tuple=True)
),
)
- metadata.create_all()
+ metadata.create_all(connection)
connection.execute(
t1.insert(), id=1, data=["1", "2", "3"], data2=[5.4, 5.6]
)
assert t.c.precision_interval.type.precision == 3
assert t.c.bitstring.type.length == 4
- @testing.provide_metadata
- def test_tsvector_round_trip(self, connection):
- t = Table("t1", self.metadata, Column("data", postgresql.TSVECTOR))
- t.create()
+ def test_tsvector_round_trip(self, connection, metadata):
+ t = Table("t1", metadata, Column("data", postgresql.TSVECTOR))
+ t.create(connection)
connection.execute(t.insert(), data="a fat cat sat")
eq_(connection.scalar(select(t.c.data)), "'a' 'cat' 'fat' 'sat'")
"'a' 'cat' 'fat' 'mat' 'sat'",
)
- @testing.provide_metadata
- def test_bit_reflection(self):
- metadata = self.metadata
+ def test_bit_reflection(self, metadata, connection):
t1 = Table(
"t1",
metadata,
Column("bitvarying", postgresql.BIT(varying=True)),
Column("bitvarying5", postgresql.BIT(5, varying=True)),
)
- t1.create()
+ t1.create(connection)
m2 = MetaData()
- t2 = Table("t1", m2, autoload_with=testing.db)
+ t2 = Table("t1", m2, autoload_with=connection)
eq_(t2.c.bit1.type.length, 1)
eq_(t2.c.bit1.type.varying, False)
eq_(t2.c.bit5.type.length, 5)
__only_on__ = "sqlite"
- @testing.provide_metadata
- def test_boolean(self):
+ def test_boolean(self, connection, metadata):
"""Test that the boolean only treats 1 as True"""
- meta = self.metadata
t = Table(
"bool_table",
- meta,
+ metadata,
Column("id", Integer, primary_key=True),
Column("boo", Boolean(create_constraint=False)),
)
- meta.create_all(testing.db)
- exec_sql(
- testing.db,
+ metadata.create_all(connection)
+ for stmt in [
"INSERT INTO bool_table (id, boo) " "VALUES (1, 'false');",
- )
- exec_sql(
- testing.db,
"INSERT INTO bool_table (id, boo) " "VALUES (2, 'true');",
- )
- exec_sql(
- testing.db,
"INSERT INTO bool_table (id, boo) " "VALUES (3, '1');",
- )
- exec_sql(
- testing.db,
"INSERT INTO bool_table (id, boo) " "VALUES (4, '0');",
- )
- exec_sql(
- testing.db,
"INSERT INTO bool_table (id, boo) " "VALUES (5, 1);",
- )
- exec_sql(
- testing.db,
"INSERT INTO bool_table (id, boo) " "VALUES (6, 0);",
- )
+ ]:
+ connection.exec_driver_sql(stmt)
+
eq_(
- t.select(t.c.boo).order_by(t.c.id).execute().fetchall(),
+ connection.execute(
+ t.select().where(t.c.boo).order_by(t.c.id)
+ ).fetchall(),
[(3, True), (5, True)],
)
__requires__ = ("json_type",)
__only_on__ = "sqlite"
- @testing.provide_metadata
@testing.requires.reflects_json_type
- def test_reflection(self):
- Table("json_test", self.metadata, Column("foo", sqlite.JSON))
- self.metadata.create_all()
+ def test_reflection(self, connection, metadata):
+ Table("json_test", metadata, Column("foo", sqlite.JSON))
+ metadata.create_all(connection)
- reflected = Table("json_test", MetaData(), autoload_with=testing.db)
+ reflected = Table("json_test", MetaData(), autoload_with=connection)
is_(reflected.c.foo.type._type_affinity, sqltypes.JSON)
assert isinstance(reflected.c.foo.type, sqlite.JSON)
- @testing.provide_metadata
- def test_rudimentary_roundtrip(self):
- sqlite_json = Table(
- "json_test", self.metadata, Column("foo", sqlite.JSON)
- )
+ def test_rudimentary_roundtrip(self, metadata, connection):
+ sqlite_json = Table("json_test", metadata, Column("foo", sqlite.JSON))
- self.metadata.create_all()
+ metadata.create_all(connection)
value = {"json": {"foo": "bar"}, "recs": ["one", "two"]}
- with testing.db.begin() as conn:
- conn.execute(sqlite_json.insert(), foo=value)
+ connection.execute(sqlite_json.insert(), foo=value)
- eq_(conn.scalar(select(sqlite_json.c.foo)), value)
+ eq_(connection.scalar(select(sqlite_json.c.foo)), value)
- @testing.provide_metadata
- def test_extract_subobject(self):
- sqlite_json = Table(
- "json_test", self.metadata, Column("foo", sqlite.JSON)
- )
+ def test_extract_subobject(self, connection, metadata):
+ sqlite_json = Table("json_test", metadata, Column("foo", sqlite.JSON))
- self.metadata.create_all()
+ metadata.create_all(connection)
value = {"json": {"foo": "bar"}}
- with testing.db.begin() as conn:
- conn.execute(sqlite_json.insert(), foo=value)
-
- eq_(conn.scalar(select(sqlite_json.c.foo["json"])), value["json"])
+ connection.execute(sqlite_json.insert(), foo=value)
+
+ eq_(
+ connection.scalar(select(sqlite_json.c.foo["json"])), value["json"]
+ )
- @testing.provide_metadata
- def test_deprecated_serializer_args(self):
- sqlite_json = Table(
- "json_test", self.metadata, Column("foo", sqlite.JSON)
- )
+ def test_deprecated_serializer_args(self, metadata):
+ sqlite_json = Table("json_test", metadata, Column("foo", sqlite.JSON))
data_element = {"foo": "bar"}
js = mock.Mock(side_effect=json.dumps)
engine = engines.testing_engine(
options=dict(_json_serializer=js, _json_deserializer=jd)
)
- self.metadata.create_all(engine)
+ metadata.create_all(engine)
with engine.begin() as conn:
conn.execute(sqlite_json.insert(), {"foo": data_element})
__only_on__ = "sqlite"
- @testing.exclude(
- "sqlite",
- "<",
- (3, 3, 8),
- "sqlite3 changesets 3353 and 3440 modified "
- "behavior of default displayed in pragma "
- "table_info()",
- )
- def test_default_reflection(self):
-
- # (ask_for, roundtripped_as_if_different)
+ def test_default_reflection(self, connection, metadata):
specs = [
(String(3), '"foo"'),
Column("c%i" % (i + 1), t[0], server_default=text(t[1]))
for (i, t) in enumerate(specs)
]
- db = testing.db
- m = MetaData(db)
- Table("t_defaults", m, *columns)
- try:
- m.create_all()
- m2 = MetaData()
- rt = Table("t_defaults", m2, autoload_with=db)
- expected = [c[1] for c in specs]
- for i, reflected in enumerate(rt.c):
- eq_(str(reflected.server_default.arg), expected[i])
- finally:
- m.drop_all()
+ Table("t_defaults", metadata, *columns)
+ metadata.create_all(connection)
+ m2 = MetaData()
+ rt = Table("t_defaults", m2, autoload_with=connection)
+ expected = [c[1] for c in specs]
+ for i, reflected in enumerate(rt.c):
+ eq_(str(reflected.server_default.arg), expected[i])
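# --- editorial note (not part of the diff) -----------------------------------
# The old try/finally around m.create_all()/m.drop_all() disappears here
# because the ``metadata`` fixture owns teardown: everything created on the
# fixture's MetaData is dropped after the test whether it passes or fails.
# ------------------------------------------------------------------------------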
@testing.exclude(
"sqlite",
eq_(insp.get_schema_names(), ["main", "test_schema"])
def test_reflect_system_table(self):
- meta = MetaData(self.conn)
+ meta = MetaData()
alt_master = Table(
"sqlite_master",
meta,
connection.exec_driver_sql('DETACH DATABASE "default"')
def test_reflect(self, connection, db_fixture):
- meta = MetaData(bind=connection, schema="default")
- meta.reflect()
+ meta = MetaData(schema="default")
+ meta.reflect(connection)
assert "default.a" in meta.tables
class DDLExecutionTest(fixtures.TestBase):
def setup(self):
self.engine = engines.mock_engine()
- self.metadata = MetaData(self.engine)
+ self.metadata = MetaData()
self.users = Table(
"users",
self.metadata,
event.listen(users, "before_drop", DDL("xyzzy"))
event.listen(users, "after_drop", DDL("fnord"))
- users.create()
+ users.create(self.engine)
strings = [str(x) for x in engine.mock]
assert "mxyzptlk" in strings
assert "klptzyxm" in strings
assert "xyzzy" not in strings
assert "fnord" not in strings
del engine.mock[:]
- users.drop()
+ users.drop(self.engine)
strings = [str(x) for x in engine.mock]
assert "mxyzptlk" not in strings
assert "klptzyxm" not in strings
event.listen(users, "before_drop", DDL("xyzzy"))
event.listen(users, "after_drop", DDL("fnord"))
- metadata.create_all()
+ metadata.create_all(self.engine)
strings = [str(x) for x in engine.mock]
assert "mxyzptlk" in strings
assert "klptzyxm" in strings
assert "xyzzy" not in strings
assert "fnord" not in strings
del engine.mock[:]
- metadata.drop_all()
+ metadata.drop_all(self.engine)
strings = [str(x) for x in engine.mock]
assert "mxyzptlk" not in strings
assert "klptzyxm" not in strings
event.listen(metadata, "before_drop", DDL("xyzzy"))
event.listen(metadata, "after_drop", DDL("fnord"))
- metadata.create_all()
+ metadata.create_all(self.engine)
strings = [str(x) for x in engine.mock]
assert "mxyzptlk" in strings
assert "klptzyxm" in strings
assert "xyzzy" not in strings
assert "fnord" not in strings
del engine.mock[:]
- metadata.drop_all()
+ metadata.drop_all(self.engine)
strings = [str(x) for x in engine.mock]
assert "mxyzptlk" not in strings
assert "klptzyxm" not in strings
bind.begin()
try:
for args in (([bind], {}), ([], {"bind": bind})):
- metadata = MetaData(*args[0], **args[1])
+ with testing.expect_deprecated_20(
+ "The MetaData.bind argument is deprecated "
+ ):
+ metadata = MetaData(*args[0], **args[1])
table = Table(
"test_table", metadata, Column("foo", Integer)
)
):
eq_(testing.db.execute(stmt).fetchall(), [(1,)])
- @testing.provide_metadata
- def test_implicit_execute(self):
- table = Table("t", self.metadata, Column("a", Integer))
+ def test_implicit_execute(self, metadata):
+ table = Table("t", metadata, Column("a", Integer))
table.create(testing.db)
+ metadata.bind = testing.db
stmt = table.insert().values(a=1)
with testing.expect_deprecated_20(
r"The Executable.execute\(\) method is considered legacy",
is_true(testing.db.has_table("user"))
def test_engine_table_names(self):
- metadata = self.metadata
+ metadata = self.tables_test_metadata
with testing.expect_deprecated(
r"The Engine.table_names\(\) method is deprecated"
def test_reflecttable(self):
inspector = inspect(testing.db)
- metadata = self.metadata
+ metadata = MetaData()
+
table = Table("user", metadata)
with testing.expect_deprecated_20(
r"The Inspector.reflecttable\(\) method is considered "
class DDLExecutionTest(fixtures.TestBase):
def setup(self):
self.engine = engines.mock_engine()
- self.metadata = MetaData(self.engine)
+ self.metadata = MetaData()
self.users = Table(
"users",
self.metadata,
self._test_keyword("SELECT foo FROM table", False)
-class ExplicitAutoCommitTest(fixtures.TestBase):
+class ExplicitAutoCommitTest(fixtures.TablesTest):
"""test the 'autocommit' flag on select() and text() objects.
__only_on__ = "postgresql"
@classmethod
- def setup_class(cls):
- global metadata, foo
- metadata = MetaData(testing.db)
- foo = Table(
+ def define_tables(cls, metadata):
+ Table(
"foo",
metadata,
Column("id", Integer, primary_key=True),
Column("data", String(100)),
)
- with testing.db.begin() as conn:
- metadata.create_all(conn)
- conn.exec_driver_sql(
+
+ event.listen(
+ metadata,
+ "after_create",
+ DDL(
"create function insert_foo(varchar) "
"returns integer as 'insert into foo(data) "
"values ($1);select 1;' language sql"
- )
-
- def teardown(self):
- with testing.db.begin() as conn:
- conn.execute(foo.delete())
-
- @classmethod
- def teardown_class(cls):
- with testing.db.begin() as conn:
- conn.exec_driver_sql("drop function insert_foo(varchar)")
- metadata.drop_all(conn)
+ ),
+ )
+ event.listen(
+ metadata, "before_drop", DDL("drop function insert_foo(varchar)")
+ )
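# --- editorial sketch (not part of the diff) ----------------------------------
# Hanging the function DDL off metadata lifecycle events lets TablesTest
# emit it alongside the class-scoped create_all()/drop_all().  Standalone
# illustration; the function name and statements are placeholders:
from sqlalchemy import MetaData, event
from sqlalchemy.schema import DDL

md = MetaData()
event.listen(
    md,
    "after_create",
    DDL("create function f() returns integer as 'select 1;' language sql"),
)
event.listen(md, "before_drop", DDL("drop function f()"))
# md.create_all(conn) would now emit the CREATE FUNCTION after the tables
# ------------------------------------------------------------------------------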
def test_control(self):
# test that not using autocommit does not commit
+ foo = self.tables.foo
conn1 = testing.db.connect()
conn2 = testing.db.connect()
conn2.close()
def test_explicit_compiled(self):
+ foo = self.tables.foo
+
conn1 = testing.db.connect()
conn2 = testing.db.connect()
conn2.close()
def test_explicit_connection(self):
+ foo = self.tables.foo
+
conn1 = testing.db.connect()
conn2 = testing.db.connect()
with testing.expect_deprecated_20(
conn2.close()
def test_explicit_text(self):
+ foo = self.tables.foo
+
conn1 = testing.db.connect()
conn2 = testing.db.connect()
with testing.expect_deprecated_20(
conn2.close()
def test_implicit_text(self):
+ foo = self.tables.foo
+
conn1 = testing.db.connect()
conn2 = testing.db.connect()
with testing.expect_deprecated_20(
self._assert_no_data()
-class CompiledCacheTest(fixtures.TablesTest):
+class CompiledCacheTest(fixtures.TestBase):
__backend__ = True
- @classmethod
- def define_tables(cls, metadata):
- Table(
+ def test_cache(self, connection, metadata):
+ users = Table(
"users",
metadata,
Column(
Column("user_name", VARCHAR(20)),
Column("extra_data", VARCHAR(20)),
)
-
- def test_cache(self, connection):
- users = self.tables.users
+ users.create(connection)
conn = connection
cache = {}
# the statement values (only the keys).
eq_(ref_blob(), None)
- def test_keys_independent_of_ordering(self, connection):
- users = self.tables.users
+ def test_keys_independent_of_ordering(self, connection, metadata):
+ users = Table(
+ "users",
+ metadata,
+ Column(
+ "user_id", INT, primary_key=True, test_needs_autoincrement=True
+ ),
+ Column("user_name", VARCHAR(20)),
+ Column("extra_data", VARCHAR(20)),
+ )
+ users.create(connection)
connection.execute(
users.insert(),
eq_(len(cache), 1)
@testing.requires.schemas
- @testing.provide_metadata
- def test_schema_translate_in_key(self):
- Table("x", self.metadata, Column("q", Integer))
- Table(
- "x", self.metadata, Column("q", Integer), schema=config.test_schema
- )
- self.metadata.create_all()
+ def test_schema_translate_in_key(self, metadata, connection):
+ Table("x", metadata, Column("q", Integer))
+ Table("x", metadata, Column("q", Integer), schema=config.test_schema)
+ metadata.create_all(connection)
m = MetaData()
t1 = Table("x", m, Column("q", Integer))
stmt = select(t1.c.q)
cache = {}
- with config.db.begin() as conn:
- conn = conn.execution_options(compiled_cache=cache)
- conn.execute(ins, {"q": 1})
- eq_(conn.scalar(stmt), 1)
- with config.db.begin() as conn:
- conn = conn.execution_options(
- compiled_cache=cache,
- schema_translate_map={None: config.test_schema},
- )
- conn.execute(ins, {"q": 2})
- eq_(conn.scalar(stmt), 2)
+ conn = connection.execution_options(compiled_cache=cache)
+ conn.execute(ins, {"q": 1})
+ eq_(conn.scalar(stmt), 1)
- with config.db.begin() as conn:
- conn = conn.execution_options(
- compiled_cache=cache,
- schema_translate_map={None: None},
- )
- # should use default schema again even though statement
- # was compiled with test_schema in the map
- eq_(conn.scalar(stmt), 1)
+ conn = connection.execution_options(
+ compiled_cache=cache,
+ schema_translate_map={None: config.test_schema},
+ )
+ conn.execute(ins, {"q": 2})
+ eq_(conn.scalar(stmt), 2)
- with config.db.begin() as conn:
- conn = conn.execution_options(
- compiled_cache=cache,
- )
- eq_(conn.scalar(stmt), 1)
+ conn = connection.execution_options(
+ compiled_cache=cache,
+ schema_translate_map={None: None},
+ )
+ # should use default schema again even though statement
+ # was compiled with test_schema in the map
+ eq_(conn.scalar(stmt), 1)
+
+ conn = connection.execution_options(
+ compiled_cache=cache,
+ )
+ eq_(conn.scalar(stmt), 1)
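# A minimal sketch of what the assertions above depend on: the
# schema_translate_map participates in the compiled-cache key, so the same
# Table object gets a distinct cache entry per map.  The SQLite "main"
# schema and in-memory URL are illustrative assumptions.
from sqlalchemy import Column, Integer, MetaData, Table, create_engine, select

eng = create_engine("sqlite://")
md = MetaData()
t = Table("x", md, Column("q", Integer))
md.create_all(eng)

cache = {}
with eng.connect() as conn:
    c = conn.execution_options(compiled_cache=cache)
    c.execute(t.insert(), {"q": 1})

    c = conn.execution_options(
        compiled_cache=cache, schema_translate_map={None: "main"}
    )
    c.execute(select(t.c.q))  # cached separately from the un-mapped form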
class MockStrategyTest(fixtures.TestBase):
Table("t1", metadata, Column("x", Integer), schema=config.test_schema)
Table("t2", metadata, Column("x", Integer), schema=config.test_schema)
Table("t3", metadata, Column("x", Integer), schema=None)
- metadata.create_all()
+ metadata.create_all(testing.db)
def test_ddl_hastable(self):
]:
event.listen(engine, "before_execute", execute)
event.listen(engine, "before_cursor_execute", cursor_execute)
- m = MetaData(engine)
+ m = MetaData()
t1 = Table(
"t1",
m,
from sqlalchemy.testing import AssertsCompiledSQL
from sqlalchemy.testing import ComparesTables
from sqlalchemy.testing import config
-from sqlalchemy.testing import engines
from sqlalchemy.testing import eq_
from sqlalchemy.testing import eq_regex
from sqlalchemy.testing import expect_warnings
class ReflectionTest(fixtures.TestBase, ComparesTables):
__backend__ = True
- @testing.exclude(
- "mssql", "<", (10, 0, 0), "Date is only supported on MSSQL 2008+"
- )
- @testing.exclude("mysql", "<", (4, 1, 1), "early types are squirrely")
- @testing.provide_metadata
- def test_basic_reflection(self):
- meta = self.metadata
+ def test_basic_reflection(self, connection, metadata):
+ meta = metadata
users = Table(
"engine_users",
Column("email_address", sa.String(20)),
test_needs_fk=True,
)
- meta.create_all()
+ meta.create_all(connection)
meta2 = MetaData()
reflected_users = Table(
- "engine_users", meta2, autoload_with=testing.db
+ "engine_users", meta2, autoload_with=connection
)
reflected_addresses = Table(
"engine_email_addresses",
meta2,
- autoload_with=testing.db,
+ autoload_with=connection,
)
self.assert_tables_equal(users, reflected_users)
self.assert_tables_equal(addresses, reflected_addresses)
- @testing.provide_metadata
- def test_autoload_with_imply_autoload(
- self,
- ):
- meta = self.metadata
+ def test_autoload_with_imply_autoload(self, metadata, connection):
+ meta = metadata
t = Table(
"t",
meta,
Column("x", sa.String(20)),
Column("y", sa.Integer),
)
- meta.create_all()
+ meta.create_all(connection)
meta2 = MetaData()
- reflected_t = Table("t", meta2, autoload_with=testing.db)
+ reflected_t = Table("t", meta2, autoload_with=connection)
self.assert_tables_equal(t, reflected_t)
- @testing.provide_metadata
- def test_two_foreign_keys(self):
- meta = self.metadata
+ def test_two_foreign_keys(self, metadata, connection):
+ meta = metadata
Table(
"t1",
meta,
Column("id", sa.Integer, primary_key=True),
test_needs_fk=True,
)
- meta.create_all()
+ meta.create_all(connection)
meta2 = MetaData()
t1r, t2r, t3r = [
- Table(x, meta2, autoload_with=testing.db)
+ Table(x, meta2, autoload_with=connection)
for x in ("t1", "t2", "t3")
]
assert t1r.c.t2id.references(t2r.c.id)
assert t1r.c.t3id.references(t3r.c.id)
- @testing.provide_metadata
- def test_resolve_fks_false_table(self):
- meta = self.metadata
+ def test_resolve_fks_false_table(self, connection, metadata):
+ meta = metadata
Table(
"t1",
meta,
Column("id", sa.Integer, primary_key=True),
test_needs_fk=True,
)
- meta.create_all()
+ meta.create_all(connection)
meta2 = MetaData()
- t1 = Table("t1", meta2, resolve_fks=False, autoload_with=testing.db)
+ t1 = Table("t1", meta2, resolve_fks=False, autoload_with=connection)
in_("t1", meta2.tables)
not_in("t2", meta2.tables)
lambda: list(t1.c.t2id.foreign_keys)[0].column,
)
- t2 = Table("t2", meta2, autoload_with=testing.db)
+ t2 = Table("t2", meta2, autoload_with=connection)
# now it resolves
is_true(t1.c.t2id.references(t2.c.id))
- @testing.provide_metadata
- def test_resolve_fks_false_extend_existing(self):
- meta = self.metadata
+ def test_resolve_fks_false_extend_existing(self, connection, metadata):
+ meta = metadata
Table(
"t1",
meta,
Column("id", sa.Integer, primary_key=True),
test_needs_fk=True,
)
- meta.create_all()
+ meta.create_all(connection)
meta2 = MetaData()
Table("t1", meta2)
in_("t1", meta2.tables)
"t1",
meta2,
resolve_fks=False,
- autoload_with=testing.db,
+ autoload_with=connection,
extend_existing=True,
)
not_in("t2", meta2.tables)
lambda: list(t1.c.t2id.foreign_keys)[0].column,
)
- t2 = Table("t2", meta2, autoload_with=testing.db)
+ t2 = Table("t2", meta2, autoload_with=connection)
# now it resolves
is_true(t1.c.t2id.references(t2.c.id))
- @testing.provide_metadata
- def test_resolve_fks_false_metadata(self):
- meta = self.metadata
+ def test_resolve_fks_false_metadata(self, connection, metadata):
+ meta = metadata
Table(
"t1",
meta,
Column("id", sa.Integer, primary_key=True),
test_needs_fk=True,
)
- meta.create_all()
+ meta.create_all(connection)
meta2 = MetaData()
- meta2.reflect(testing.db, resolve_fks=False, only=["t1"])
+ meta2.reflect(connection, resolve_fks=False, only=["t1"])
in_("t1", meta2.tables)
not_in("t2", meta2.tables)
lambda: list(t1.c.t2id.foreign_keys)[0].column,
)
- meta2.reflect(testing.db, resolve_fks=False)
+ meta2.reflect(connection, resolve_fks=False)
t2 = meta2.tables["t2"]
is_true(t1.c.t2id.references(t2.c.id))
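# A short sketch of resolve_fks=False as exercised above: reflect one table
# without cascading into the tables its foreign keys point at.  The SQLite
# URL and table names are illustrative assumptions.
from sqlalchemy import Column, ForeignKey, Integer, MetaData, Table, create_engine

eng = create_engine("sqlite://")
src = MetaData()
Table("t2", src, Column("id", Integer, primary_key=True))
Table(
    "t1",
    src,
    Column("id", Integer, primary_key=True),
    Column("t2id", ForeignKey("t2.id")),
)
src.create_all(eng)

m2 = MetaData()
with eng.connect() as conn:
    Table("t1", m2, resolve_fks=False, autoload_with=conn)
assert "t1" in m2.tables and "t2" not in m2.tables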
- def test_nonexistent(self):
+ def test_nonexistent(self, connection):
meta = MetaData()
assert_raises(
sa.exc.NoSuchTableError,
Table,
"nonexistent",
meta,
- autoload_with=testing.db,
+ autoload_with=connection,
)
assert "nonexistent" not in meta.tables
- @testing.provide_metadata
- def test_include_columns(self):
- meta = self.metadata
+ def test_include_columns(self, connection, metadata):
+ meta = metadata
foo = Table(
"foo",
meta,
*[Column(n, sa.String(30)) for n in ["a", "b", "c", "d", "e", "f"]]
)
- meta.create_all()
+ meta.create_all(connection)
meta2 = MetaData()
foo = Table(
"foo",
meta2,
- autoload_with=testing.db,
+ autoload_with=connection,
include_columns=["b", "f", "e"],
)
# test that cols come back in original order
# test against a table which is already reflected
meta3 = MetaData()
- foo = Table("foo", meta3, autoload_with=testing.db)
+ foo = Table("foo", meta3, autoload_with=connection)
foo = Table(
"foo", meta3, include_columns=["b", "f", "e"], extend_existing=True
for c in ("a", "c", "d"):
assert c not in foo.c
- @testing.provide_metadata
- def test_extend_existing(self):
- meta = self.metadata
+ def test_extend_existing(self, connection, metadata):
+ meta = metadata
Table(
"t",
Column("y", Integer),
Column("z", Integer, server_default="5"),
)
- meta.create_all()
+ meta.create_all(connection)
m2 = MetaData()
old_z = Column("z", String, primary_key=True)
m2,
old_y,
extend_existing=True,
- autoload_with=testing.db,
+ autoload_with=connection,
)
eq_(set(t2.columns.keys()), set(["x", "y", "z", "q", "id"]))
"t",
m3,
extend_existing=False,
- autoload_with=testing.db,
+ autoload_with=connection,
)
eq_(set(t3.columns.keys()), set(["z"]))
old_y,
extend_existing=True,
autoload_replace=False,
- autoload_with=testing.db,
+ autoload_with=connection,
)
eq_(set(t4.columns.keys()), set(["x", "y", "z", "q", "id"]))
eq_(list(t4.primary_key.columns), [t4.c.z, t4.c.id])
assert t4.c.z.type._type_affinity is String
assert t4.c.q is old_q
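# Sketch of the extend_existing / autoload_replace interplay asserted above:
# with autoload_replace=False, a column declared in Python survives
# reflection, while columns only present in the database are still added.
# SQLite URL and names are illustrative assumptions.
from sqlalchemy import Column, Integer, MetaData, String, Table, create_engine

eng = create_engine("sqlite://")
md = MetaData()
Table("t", md, Column("id", Integer, primary_key=True), Column("z", Integer))
md.create_all(eng)

m2 = MetaData()
local_z = Column("z", String(10))
Table("t", m2, local_z)  # declare first, with the override column
with eng.connect() as conn:
    t2 = Table(
        "t",
        m2,
        extend_existing=True,
        autoload_with=conn,
        autoload_replace=False,
    )
assert t2.c.z is local_z  # reflected "z" did not replace the declared one
assert "id" in t2.c       # the missing column was still filled in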
- @testing.provide_metadata
- def test_extend_existing_reflect_all_dont_dupe_index(self):
- m = self.metadata
+ def test_extend_existing_reflect_all_dont_dupe_index(
+ self, connection, metadata
+ ):
+ m = metadata
d = Table(
"d",
m,
Column("id", Integer, primary_key=True),
Column("aid", ForeignKey("d.id")),
)
- m.create_all()
+ m.create_all(connection)
m2 = MetaData()
- m2.reflect(testing.db, extend_existing=True)
+ m2.reflect(connection, extend_existing=True)
eq_(
len(
)
@testing.emits_warning(r".*omitted columns")
- @testing.provide_metadata
- def test_include_columns_indexes(self):
- m = self.metadata
+ def test_include_columns_indexes(self, connection, metadata):
+ m = metadata
t1 = Table("t1", m, Column("a", sa.Integer), Column("b", sa.Integer))
sa.Index("foobar", t1.c.a, t1.c.b)
sa.Index("bat", t1.c.a)
- m.create_all()
+ m.create_all(connection)
m2 = MetaData()
- t2 = Table("t1", m2, autoload_with=testing.db)
+ t2 = Table("t1", m2, autoload_with=connection)
assert len(t2.indexes) == 2
m2 = MetaData()
- t2 = Table("t1", m2, autoload_with=testing.db, include_columns=["a"])
+ t2 = Table("t1", m2, autoload_with=connection, include_columns=["a"])
assert len(t2.indexes) == 1
m2 = MetaData()
t2 = Table(
- "t1", m2, autoload_with=testing.db, include_columns=["a", "b"]
+ "t1", m2, autoload_with=connection, include_columns=["a", "b"]
)
assert len(t2.indexes) == 2
- @testing.provide_metadata
- def test_autoload_replace_foreign_key_nonpresent(self):
+ def test_autoload_replace_foreign_key_nonpresent(
+ self, connection, metadata
+ ):
"""test autoload_replace=False with col plus FK
establishes the FK not present in the DB.
"""
- Table("a", self.metadata, Column("id", Integer, primary_key=True))
+ Table("a", metadata, Column("id", Integer, primary_key=True))
Table(
"b",
- self.metadata,
+ metadata,
Column("id", Integer, primary_key=True),
Column("a_id", Integer),
)
- self.metadata.create_all()
+ metadata.create_all(connection)
m2 = MetaData()
b2 = Table("b", m2, Column("a_id", Integer, sa.ForeignKey("a.id")))
- a2 = Table("a", m2, autoload_with=testing.db)
+ a2 = Table("a", m2, autoload_with=connection)
b2 = Table(
"b",
m2,
extend_existing=True,
- autoload_with=testing.db,
+ autoload_with=connection,
autoload_replace=False,
)
assert b2.c.a_id.references(a2.c.id)
eq_(len(b2.constraints), 2)
- @testing.provide_metadata
- def test_autoload_replace_foreign_key_ispresent(self):
+ def test_autoload_replace_foreign_key_ispresent(
+ self, connection, metadata
+ ):
"""test autoload_replace=False with col plus FK mirroring
DB-reflected FK skips the reflected FK and installs
the in-python one only.
"""
- Table("a", self.metadata, Column("id", Integer, primary_key=True))
+ Table("a", metadata, Column("id", Integer, primary_key=True))
Table(
"b",
- self.metadata,
+ metadata,
Column("id", Integer, primary_key=True),
Column("a_id", Integer, sa.ForeignKey("a.id")),
)
- self.metadata.create_all()
+ metadata.create_all(connection)
m2 = MetaData()
b2 = Table("b", m2, Column("a_id", Integer, sa.ForeignKey("a.id")))
- a2 = Table("a", m2, autoload_with=testing.db)
+ a2 = Table("a", m2, autoload_with=connection)
b2 = Table(
"b",
m2,
extend_existing=True,
- autoload_with=testing.db,
+ autoload_with=connection,
autoload_replace=False,
)
assert b2.c.a_id.references(a2.c.id)
eq_(len(b2.constraints), 2)
- @testing.provide_metadata
- def test_autoload_replace_foreign_key_removed(self):
+ def test_autoload_replace_foreign_key_removed(self, connection, metadata):
"""test autoload_replace=False with col minus FK that's in the
DB means the FK is skipped and doesn't get installed at all.
"""
- Table("a", self.metadata, Column("id", Integer, primary_key=True))
+ Table("a", metadata, Column("id", Integer, primary_key=True))
Table(
"b",
- self.metadata,
+ metadata,
Column("id", Integer, primary_key=True),
Column("a_id", Integer, sa.ForeignKey("a.id")),
)
- self.metadata.create_all()
+ metadata.create_all(connection)
m2 = MetaData()
b2 = Table("b", m2, Column("a_id", Integer))
- a2 = Table("a", m2, autoload_with=testing.db)
+ a2 = Table("a", m2, autoload_with=connection)
b2 = Table(
"b",
m2,
extend_existing=True,
- autoload_with=testing.db,
+ autoload_with=connection,
autoload_replace=False,
)
assert not b2.c.a_id.references(a2.c.id)
eq_(len(b2.constraints), 1)
- @testing.provide_metadata
- def test_autoload_replace_primary_key(self):
- Table("a", self.metadata, Column("id", Integer))
- self.metadata.create_all()
+ def test_autoload_replace_primary_key(self, connection, metadata):
+ Table("a", metadata, Column("id", Integer))
+ metadata.create_all(connection)
m2 = MetaData()
a2 = Table("a", m2, Column("id", Integer, primary_key=True))
Table(
"a",
m2,
- autoload_with=testing.db,
+ autoload_with=connection,
autoload_replace=False,
extend_existing=True,
)
def test_autoload_replace_arg(self):
Table("t", MetaData(), autoload_replace=False)
- @testing.provide_metadata
- def test_autoincrement_col(self):
+ def test_autoincrement_col(self, connection, metadata):
"""test that 'autoincrement' is reflected according to sqla's policy.
Don't mark this test as unsupported for any backend!
"""
- meta = self.metadata
+ meta = metadata
Table(
"test",
meta,
Column("data", sa.String(50)),
mysql_engine="InnoDB",
)
- meta.create_all()
+ meta.create_all(connection)
m2 = MetaData()
- t1a = Table("test", m2, autoload_with=testing.db)
+ t1a = Table("test", m2, autoload_with=connection)
assert t1a._autoincrement_column is t1a.c.id
- t2a = Table("test2", m2, autoload_with=testing.db)
+ t2a = Table("test2", m2, autoload_with=connection)
assert t2a._autoincrement_column is None
@skip("sqlite")
- @testing.provide_metadata
- def test_unknown_types(self):
+ def test_unknown_types(self, connection, metadata):
"""Test the handling of unknown types for the given dialect.
sqlite is skipped because it has special rules for unknown types using
'affinity types' - this feature is tested in that dialect's test spec.
"""
- meta = self.metadata
+ meta = metadata
t = Table("test", meta, Column("foo", sa.DateTime))
- ischema_names = testing.db.dialect.ischema_names
- t.create()
- testing.db.dialect.ischema_names = {}
- try:
- m2 = MetaData(testing.db)
+ t.create(connection)
+
+ with mock.patch.object(connection.dialect, "ischema_names", {}):
+ m2 = MetaData()
with testing.expect_warnings("Did not recognize type"):
- t3 = Table("test", m2, autoload_with=testing.db)
+ t3 = Table("test", m2, autoload_with=connection)
is_(t3.c.foo.type.__class__, sa.types.NullType)
- finally:
- testing.db.dialect.ischema_names = ischema_names
-
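# The mock.patch.object() form above is the general replacement for manual
# save/restore of dialect state; a minimal sketch (in-memory SQLite assumed):
from unittest import mock

from sqlalchemy import create_engine

eng = create_engine("sqlite://")
with eng.connect() as conn:
    with mock.patch.object(conn.dialect, "ischema_names", {}):
        assert conn.dialect.ischema_names == {}
    assert conn.dialect.ischema_names  # original mapping restored on exit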
- @testing.provide_metadata
- def test_basic_override(self):
- meta = self.metadata
+ def test_basic_override(self, connection, metadata):
+ meta = metadata
table = Table(
"override_test",
meta,
Column("col2", sa.String(20)),
Column("col3", sa.Numeric),
)
- table.create()
+ table.create(connection)
meta2 = MetaData()
table = Table(
meta2,
Column("col2", sa.Unicode()),
Column("col4", sa.String(30)),
- autoload_with=testing.db,
+ autoload_with=connection,
)
self.assert_(isinstance(table.c.col1.type, sa.Integer))
self.assert_(isinstance(table.c.col2.type, sa.Unicode))
self.assert_(isinstance(table.c.col4.type, sa.String))
- @testing.provide_metadata
- def test_override_upgrade_pk_flag(self):
- meta = self.metadata
+ def test_override_upgrade_pk_flag(self, connection, metadata):
+ meta = metadata
table = Table(
"override_test",
meta,
Column("col2", sa.String(20)),
Column("col3", sa.Numeric),
)
- table.create()
+ table.create(connection)
meta2 = MetaData()
table = Table(
"override_test",
meta2,
Column("col1", sa.Integer, primary_key=True),
- autoload_with=testing.db,
+ autoload_with=connection,
)
eq_(list(table.primary_key), [table.c.col1])
eq_(table.c.col1.primary_key, True)
- @testing.provide_metadata
- def test_override_pkfk(self):
+ def test_override_pkfk(self, connection, metadata):
"""test that you can override columns which contain foreign keys
to other reflected tables, where the foreign key column is also
a primary key column"""
- meta = self.metadata
+ meta = metadata
Table(
"users",
meta,
Column("street", sa.String(30)),
)
- meta.create_all()
+ meta.create_all(connection)
meta2 = MetaData()
a2 = Table(
"addresses",
Column(
"id", sa.Integer, sa.ForeignKey("users.id"), primary_key=True
),
- autoload_with=testing.db,
+ autoload_with=connection,
)
- u2 = Table("users", meta2, autoload_with=testing.db)
+ u2 = Table("users", meta2, autoload_with=connection)
assert list(a2.primary_key) == [a2.c.id]
assert list(u2.primary_key) == [u2.c.id]
assert u2.join(a2).onclause.compare(u2.c.id == a2.c.id)
meta3 = MetaData()
- u3 = Table("users", meta3, autoload_with=testing.db)
+ u3 = Table("users", meta3, autoload_with=connection)
a3 = Table(
"addresses",
meta3,
Column(
"id", sa.Integer, sa.ForeignKey("users.id"), primary_key=True
),
- autoload_with=testing.db,
+ autoload_with=connection,
)
assert list(a3.primary_key) == [a3.c.id]
assert list(u3.primary_key) == [u3.c.id]
assert u3.join(a3).onclause.compare(u3.c.id == a3.c.id)
- @testing.provide_metadata
- def test_override_nonexistent_fk(self):
+ def test_override_nonexistent_fk(self, connection, metadata):
"""test that you can override columns and create new foreign
keys to other reflected tables which have no foreign keys. this
is common with MySQL MyISAM tables."""
- meta = self.metadata
+ meta = metadata
Table(
"users",
meta,
Column("user_id", sa.Integer),
)
- meta.create_all()
+ meta.create_all(connection)
meta2 = MetaData()
a2 = Table(
"addresses",
meta2,
Column("user_id", sa.Integer, sa.ForeignKey("users.id")),
- autoload_with=testing.db,
+ autoload_with=connection,
)
- u2 = Table("users", meta2, autoload_with=testing.db)
+ u2 = Table("users", meta2, autoload_with=connection)
assert len(a2.c.user_id.foreign_keys) == 1
assert len(a2.foreign_keys) == 1
assert [c.parent for c in a2.foreign_keys] == [a2.c.user_id]
assert u2.join(a2).onclause.compare(u2.c.id == a2.c.user_id)
meta3 = MetaData()
- u3 = Table("users", meta3, autoload_with=testing.db)
+ u3 = Table("users", meta3, autoload_with=connection)
a3 = Table(
"addresses",
meta3,
Column("user_id", sa.Integer, sa.ForeignKey("users.id")),
- autoload_with=testing.db,
+ autoload_with=connection,
)
assert u3.join(a3).onclause.compare(u3.c.id == a3.c.user_id)
"users",
meta4,
Column("id", sa.Integer, key="u_id", primary_key=True),
- autoload_with=testing.db,
+ autoload_with=connection,
)
a4 = Table(
Column(
"user_id", sa.Integer, sa.ForeignKey("users.u_id"), key="id"
),
- autoload_with=testing.db,
+ autoload_with=connection,
)
# for the thing happening here with the column collection,
assert len(a4.columns) == 3
assert len(a4.constraints) == 2
- @testing.provide_metadata
- def test_override_composite_fk(self):
+ def test_override_composite_fk(self, connection, metadata):
"""Test double-remove of composite foreign key, when replaced."""
- metadata = self.metadata
-
Table(
"a",
metadata,
sa.ForeignKeyConstraint(["x", "y"], ["a.x", "a.y"]),
)
- metadata.create_all()
+ metadata.create_all(connection)
meta2 = MetaData()
c1 = Column("x", sa.Integer, primary_key=True)
c2 = Column("y", sa.Integer, primary_key=True)
f1 = sa.ForeignKeyConstraint(["x", "y"], ["a.x", "a.y"])
- b1 = Table("b", meta2, c1, c2, f1, autoload_with=testing.db)
+ b1 = Table("b", meta2, c1, c2, f1, autoload_with=connection)
assert b1.c.x is c1
assert b1.c.y is c2
assert f1 in b1.constraints
assert len(b1.constraints) == 2
- @testing.provide_metadata
- def test_override_keys(self):
+ def test_override_keys(self, connection, metadata):
"""test that columns can be overridden with a 'key',
and that ForeignKey targeting during reflection still works."""
- meta = self.metadata
+ meta = metadata
Table(
"a",
meta,
Column("y", sa.Integer, sa.ForeignKey("a.x")),
test_needs_fk=True,
)
- meta.create_all(testing.db)
+ meta.create_all(connection)
m2 = MetaData()
a2 = Table(
"a",
m2,
Column("x", sa.Integer, primary_key=True, key="x1"),
- autoload_with=testing.db,
+ autoload_with=connection,
)
- b2 = Table("b", m2, autoload_with=testing.db)
+ b2 = Table("b", m2, autoload_with=connection)
assert a2.join(b2).onclause.compare(a2.c.x1 == b2.c.y)
assert b2.c.y.references(a2.c.x1)
- @testing.provide_metadata
- def test_nonreflected_fk_raises(self):
+ def test_nonreflected_fk_raises(self, connection, metadata):
"""test that a NoReferencedColumnError is raised when reflecting
a table with an FK to another table which has not included the target
column in its reflection.
"""
- meta = self.metadata
+ meta = metadata
Table(
"a",
meta,
Column("y", sa.Integer, sa.ForeignKey("a.x")),
test_needs_fk=True,
)
- meta.create_all()
+ meta.create_all(connection)
m2 = MetaData()
- a2 = Table("a", m2, include_columns=["z"], autoload_with=testing.db)
- b2 = Table("b", m2, autoload_with=testing.db)
+ a2 = Table("a", m2, include_columns=["z"], autoload_with=connection)
+ b2 = Table("b", m2, autoload_with=connection)
assert_raises(sa.exc.NoReferencedColumnError, a2.join, b2)
- @testing.exclude("mysql", "<", (4, 1, 1), "innodb funkiness")
- @testing.provide_metadata
- def test_override_existing_fk(self):
+ def test_override_existing_fk(self, connection, metadata):
"""test that you can override columns and specify new foreign
keys to other reflected tables, on columns which *do* already
have that foreign key, and that the FK is not duped."""
- meta = self.metadata
+ meta = metadata
Table(
"users",
meta,
test_needs_fk=True,
)
- meta.create_all(testing.db)
+ meta.create_all(connection)
meta2 = MetaData()
a2 = Table(
"addresses",
meta2,
Column("user_id", sa.Integer, sa.ForeignKey("users.id")),
- autoload_with=testing.db,
+ autoload_with=connection,
)
- u2 = Table("users", meta2, autoload_with=testing.db)
+ u2 = Table("users", meta2, autoload_with=connection)
s = sa.select(a2).subquery()
assert s.c.user_id is not None
"users",
meta2,
Column("id", sa.Integer, primary_key=True),
- autoload_with=testing.db,
+ autoload_with=connection,
)
a2 = Table(
"addresses",
meta2,
Column("id", sa.Integer, primary_key=True),
Column("user_id", sa.Integer, sa.ForeignKey("users.id")),
- autoload_with=testing.db,
+ autoload_with=connection,
)
s = sa.select(a2).subquery()
assert u2.join(a2).onclause.compare(u2.c.id == a2.c.user_id)
@testing.only_on(["postgresql", "mysql"])
- @testing.provide_metadata
- def test_fk_options(self):
+ def test_fk_options(self, connection, metadata):
"""test that foreign key reflection includes options (on
backends with {dialect}.get_foreign_keys() support)"""
)
test_attrs = ("onupdate", "ondelete")
- meta = self.metadata
+ meta = metadata
Table(
"users",
meta,
Column("user_id", sa.Integer, addresses_user_id_fkey),
test_needs_fk=True,
)
- meta.create_all()
+ meta.create_all(connection)
meta2 = MetaData()
- meta2.reflect(testing.db)
+ meta2.reflect(connection)
for fk in meta2.tables["addresses"].foreign_keys:
ref = addresses_user_id_fkey
for attr in test_attrs:
eq_(getattr(fk, attr), getattr(ref, attr))
- @testing.provide_metadata
- def test_pks_not_uniques(self):
+ def test_pks_not_uniques(self, connection, metadata):
"""test that primary key reflection not tripped up by unique
indexes"""
- with testing.db.begin() as conn:
- conn.exec_driver_sql(
- """
- CREATE TABLE book (
- id INTEGER NOT NULL,
- title VARCHAR(100) NOT NULL,
- series INTEGER,
- series_id INTEGER,
- UNIQUE(series, series_id),
- PRIMARY KEY(id)
- )"""
- )
+ conn = connection
+ conn.exec_driver_sql(
+ """
+ CREATE TABLE book (
+ id INTEGER NOT NULL,
+ title VARCHAR(100) NOT NULL,
+ series INTEGER,
+ series_id INTEGER,
+ UNIQUE(series, series_id),
+ PRIMARY KEY(id)
+ )"""
+ )
- book = Table("book", self.metadata, autoload_with=testing.db)
+ book = Table("book", metadata, autoload_with=connection)
assert book.primary_key.contains_column(book.c.id)
assert not book.primary_key.contains_column(book.c.series)
eq_(len(book.primary_key), 1)
- def test_fk_error(self):
- metadata = MetaData(testing.db)
+ def test_fk_error(self, connection, metadata):
Table(
"slots",
metadata,
"could not find table 'pkgs' with which to generate "
"a foreign key to target column 'pkg_id'",
metadata.create_all,
+ connection,
)
- @testing.provide_metadata
- def test_composite_pks(self):
+ def test_composite_pks(self, connection, metadata):
"""test reflection of a composite primary key"""
- with testing.db.begin() as conn:
- conn.exec_driver_sql(
- """
- CREATE TABLE book (
- id INTEGER NOT NULL,
- isbn VARCHAR(50) NOT NULL,
- title VARCHAR(100) NOT NULL,
- series INTEGER NOT NULL,
- series_id INTEGER NOT NULL,
- UNIQUE(series, series_id),
- PRIMARY KEY(id, isbn)
- )"""
- )
- book = Table("book", self.metadata, autoload_with=testing.db)
+ conn = connection
+ conn.exec_driver_sql(
+ """
+ CREATE TABLE book (
+ id INTEGER NOT NULL,
+ isbn VARCHAR(50) NOT NULL,
+ title VARCHAR(100) NOT NULL,
+ series INTEGER NOT NULL,
+ series_id INTEGER NOT NULL,
+ UNIQUE(series, series_id),
+ PRIMARY KEY(id, isbn)
+ )"""
+ )
+ book = Table("book", metadata, autoload_with=connection)
assert book.primary_key.contains_column(book.c.id)
assert book.primary_key.contains_column(book.c.isbn)
assert not book.primary_key.contains_column(book.c.series)
eq_(len(book.primary_key), 2)
- @testing.exclude("mysql", "<", (4, 1, 1), "innodb funkiness")
- @testing.provide_metadata
- def test_composite_fk(self):
+ def test_composite_fk(self, connection, metadata):
"""test reflection of composite foreign keys"""
- meta = self.metadata
+ meta = metadata
multi = Table(
"multi",
meta,
),
test_needs_fk=True,
)
- meta.create_all()
+ meta.create_all(connection)
meta2 = MetaData()
- table = Table("multi", meta2, autoload_with=testing.db)
- table2 = Table("multi2", meta2, autoload_with=testing.db)
+ table = Table("multi", meta2, autoload_with=connection)
+ table2 = Table("multi2", meta2, autoload_with=connection)
self.assert_tables_equal(multi, table)
self.assert_tables_equal(multi2, table2)
j = sa.join(table, table2)
@testing.crashes("oracle", "FIXME: unknown, confirm not fails_on")
@testing.requires.check_constraints
- @testing.provide_metadata
- def test_reserved(self):
+ def test_reserved(self, connection, metadata):
# check a table that uses a SQL reserved name doesn't cause an
# error
- meta = self.metadata
+ meta = metadata
table_a = Table(
"select",
meta,
)
sa.Index("where", table_a.c["from"])
- if meta.bind.dialect.requires_name_normalize:
+ if connection.dialect.requires_name_normalize:
check_col = "TRUE"
else:
check_col = "true"
- quoter = meta.bind.dialect.identifier_preparer.quote_identifier
+ quoter = connection.dialect.identifier_preparer.quote_identifier
Table(
"false",
sa.PrimaryKeyConstraint("or", "join", name="to"),
)
index_c = sa.Index("else", table_c.c.join)
- meta.create_all()
- index_c.drop()
+ meta.create_all(connection)
+ index_c.drop(connection)
meta2 = MetaData()
- Table("select", meta2, autoload_with=testing.db)
- Table("false", meta2, autoload_with=testing.db)
- Table("is", meta2, autoload_with=testing.db)
+ Table("select", meta2, autoload_with=connection)
+ Table("false", meta2, autoload_with=connection)
+ Table("is", meta2, autoload_with=connection)
- @testing.provide_metadata
- def _test_reflect_uses_bind(self, fn):
- from sqlalchemy.pool import AssertionPool
-
- e = engines.testing_engine(options={"poolclass": AssertionPool})
- fn(e)
-
- def test_reflect_uses_bind_constructor_conn_reflect(self):
- self._test_reflect_uses_bind(lambda e: MetaData(e.connect()).reflect())
-
- def test_reflect_uses_bind_constructor_engine_reflect(self):
- self._test_reflect_uses_bind(lambda e: MetaData(e).reflect())
-
- def test_reflect_uses_bind_conn_reflect(self):
- self._test_reflect_uses_bind(lambda e: MetaData().reflect(e.connect()))
-
- def test_reflect_uses_bind_engine_reflect(self):
- self._test_reflect_uses_bind(lambda e: MetaData().reflect(e))
-
- def test_reflect_uses_bind_option_engine_reflect(self):
- self._test_reflect_uses_bind(
- lambda e: MetaData().reflect(e.execution_options(foo="bar"))
- )
-
- @testing.provide_metadata
- def test_reflect_all(self):
- existing = inspect(testing.db).get_table_names()
+ def test_reflect_all(self, connection, metadata):
names = ["rt_%s" % name for name in ("a", "b", "c", "d", "e")]
nameset = set(names)
- for name in names:
- # be sure our starting environment is sane
- self.assert_(name not in existing)
- self.assert_("rt_f" not in existing)
- baseline = self.metadata
+ baseline = metadata
for name in names:
Table(name, baseline, Column("id", sa.Integer, primary_key=True))
- baseline.create_all()
+ baseline.create_all(connection)
- m1 = MetaData(testing.db)
- self.assert_(not m1.tables)
- m1.reflect()
- self.assert_(nameset.issubset(set(m1.tables.keys())))
+ m1 = MetaData()
+ is_false(m1.tables)
+ m1.reflect(connection)
+ is_true(nameset.issubset(set(m1.tables.keys())))
m2 = MetaData()
- m2.reflect(testing.db, only=["rt_a", "rt_b"])
- self.assert_(set(m2.tables.keys()) == set(["rt_a", "rt_b"]))
+ m2.reflect(connection, only=["rt_a", "rt_b"])
+ eq_(set(m2.tables.keys()), set(["rt_a", "rt_b"]))
m3 = MetaData()
- c = testing.db.connect()
- m3.reflect(bind=c, only=lambda name, meta: name == "rt_c")
- self.assert_(set(m3.tables.keys()) == set(["rt_c"]))
+ m3.reflect(connection, only=lambda name, meta: name == "rt_c")
+ eq_(set(m3.tables.keys()), set(["rt_c"]))
- m4 = MetaData(testing.db)
+ m4 = MetaData()
assert_raises_message(
sa.exc.InvalidRequestError,
r"Could not reflect: requested table\(s\) not available in "
r"Engine\(.*?\): \(rt_f\)",
m4.reflect,
+ connection,
only=["rt_a", "rt_f"],
)
- m5 = MetaData(testing.db)
- m5.reflect(only=[])
- self.assert_(not m5.tables)
+ m5 = MetaData()
+ m5.reflect(connection, only=[])
+ is_false(m5.tables)
- m6 = MetaData(testing.db)
- m6.reflect(only=lambda n, m: False)
- self.assert_(not m6.tables)
+ m6 = MetaData()
+ m6.reflect(connection, only=lambda n, m: False)
+ is_false(m6.tables)
- m7 = MetaData(testing.db)
- m7.reflect()
- self.assert_(nameset.issubset(set(m7.tables.keys())))
+ m7 = MetaData()
+ m7.reflect(connection)
+ is_true(nameset.issubset(set(m7.tables.keys())))
- m8 = MetaData()
- assert_raises(sa.exc.UnboundExecutionError, m8.reflect)
-
- m8_e1 = MetaData(testing.db)
+ m8_e1 = MetaData()
rt_c = Table("rt_c", m8_e1)
- m8_e1.reflect(extend_existing=True)
+ m8_e1.reflect(connection, extend_existing=True)
eq_(set(m8_e1.tables.keys()), set(names))
eq_(rt_c.c.keys(), ["id"])
- m8_e2 = MetaData(testing.db)
+ m8_e2 = MetaData()
rt_c = Table("rt_c", m8_e2)
- m8_e2.reflect(extend_existing=True, only=["rt_a", "rt_c"])
+ m8_e2.reflect(connection, extend_existing=True, only=["rt_a", "rt_c"])
eq_(set(m8_e2.tables.keys()), set(["rt_a", "rt_c"]))
eq_(rt_c.c.keys(), ["id"])
- if existing:
- print("Other tables present in database, skipping some checks.")
- else:
- baseline.drop_all()
- m9 = MetaData(testing.db)
- m9.reflect()
- self.assert_(not m9.tables)
+ baseline.drop_all(connection)
+ m9 = MetaData()
+ m9.reflect(connection)
+ is_false(m9.tables)
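# A compact sketch of the reflect(only=...) filtering walked through above;
# "only" accepts either a list of table names or a callable(name, metadata).
# SQLite URL and table names are illustrative assumptions.
from sqlalchemy import Column, Integer, MetaData, Table, create_engine

eng = create_engine("sqlite://")
src = MetaData()
for name in ("rt_a", "rt_b", "rt_c"):
    Table(name, src, Column("id", Integer, primary_key=True))
src.create_all(eng)

m = MetaData()
with eng.connect() as conn:
    m.reflect(conn, only=lambda name, md: name != "rt_c")
assert set(m.tables) == {"rt_a", "rt_b"}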
- @testing.provide_metadata
- def test_reflect_all_unreflectable_table(self):
+ def test_reflect_all_unreflectable_table(self, connection, metadata):
names = ["rt_%s" % name for name in ("a", "b", "c", "d", "e")]
for name in names:
- Table(
- name, self.metadata, Column("id", sa.Integer, primary_key=True)
- )
- self.metadata.create_all()
+ Table(name, metadata, Column("id", sa.Integer, primary_key=True))
+ metadata.create_all(connection)
m = MetaData()
with mock.patch.object(inspector, "reflect_table", patched):
with expect_warnings("Skipping table rt_c: Can't reflect rt_c"):
- m.reflect(bind=testing.db)
+ m.reflect(connection)
assert_raises_message(
sa.exc.UnreflectableTableError,
Table,
"rt_c",
m,
- autoload_with=testing.db,
+ autoload_with=connection,
)
- def test_reflect_all_conn_closing(self):
- m1 = MetaData()
- c = testing.db.connect()
- m1.reflect(bind=c)
- assert not c.closed
-
- def test_inspector_conn_closing(self):
- c = testing.db.connect()
- inspect(c)
- assert not c.closed
-
- @testing.provide_metadata
- def test_index_reflection(self):
- m1 = self.metadata
+ def test_index_reflection(self, connection, metadata):
+ m1 = metadata
t1 = Table(
"party",
m1,
)
sa.Index("idx1", t1.c.id, unique=True)
sa.Index("idx2", t1.c.name, t1.c.id, unique=False)
- m1.create_all()
+ m1.create_all(connection)
m2 = MetaData()
- t2 = Table("party", m2, autoload_with=testing.db)
+ t2 = Table("party", m2, autoload_with=connection)
assert len(t2.indexes) == 3
# Make sure indexes are in the order we expect them in
assert set([t2.c.name]) == set(r3.columns)
@testing.requires.comment_reflection
- @testing.provide_metadata
- def test_comment_reflection(self):
- m1 = self.metadata
+ def test_comment_reflection(self, connection, metadata):
+ m1 = metadata
Table(
"sometable",
m1,
Column("id", sa.Integer, comment="c1 comment"),
comment="t1 comment",
)
- m1.create_all()
+ m1.create_all(connection)
m2 = MetaData()
- t2 = Table("sometable", m2, autoload_with=testing.db)
+ t2 = Table("sometable", m2, autoload_with=connection)
eq_(t2.comment, "t1 comment")
eq_(t2.c.id.comment, "c1 comment")
eq_(t3.c.id.comment, "c1 comment")
@testing.requires.check_constraint_reflection
- @testing.provide_metadata
- def test_check_constraint_reflection(self):
- m1 = self.metadata
+ def test_check_constraint_reflection(self, connection, metadata):
+ m1 = metadata
Table(
"x",
m1,
Column("q", Integer),
sa.CheckConstraint("q > 10", name="ck1"),
)
- m1.create_all()
+ m1.create_all(connection)
m2 = MetaData()
- t2 = Table("x", m2, autoload_with=testing.db)
+ t2 = Table("x", m2, autoload_with=connection)
ck = [
const
eq_regex(ck.sqltext.text, r"[\(`]*q[\)`]* > 10")
eq_(ck.name, "ck1")
- @testing.provide_metadata
- def test_index_reflection_cols_busted(self):
- t = Table(
- "x", self.metadata, Column("a", Integer), Column("b", Integer)
- )
+ def test_index_reflection_cols_busted(self, connection, metadata):
+ t = Table("x", metadata, Column("a", Integer), Column("b", Integer))
sa.Index("x_ix", t.c.a, t.c.b)
- self.metadata.create_all()
+ metadata.create_all(connection)
def mock_get_columns(self, connection, table_name, **kw):
return [{"name": "b", "type": Integer, "primary_key": False}]
with testing.mock.patch.object(
- testing.db.dialect, "get_columns", mock_get_columns
+ connection.dialect, "get_columns", mock_get_columns
):
m = MetaData()
with testing.expect_warnings(
"index key 'a' was not located in columns"
):
- t = Table("x", m, autoload_with=testing.db)
+ t = Table("x", m, autoload_with=connection)
eq_(list(t.indexes)[0].columns, [t.c.b])
@testing.requires.views
- @testing.provide_metadata
- def test_views(self):
- metadata = self.metadata
+ def test_views(self, connection, metadata):
users, addresses, dingalings = createTables(metadata)
try:
- metadata.create_all()
- _create_views(metadata.bind, None)
+ metadata.create_all(connection)
+ _create_views(connection, None)
m2 = MetaData()
- users_v = Table("users_v", m2, autoload_with=testing.db)
+ users_v = Table("users_v", m2, autoload_with=connection)
addresses_v = Table(
- "email_addresses_v", m2, autoload_with=testing.db
+ "email_addresses_v", m2, autoload_with=connection
)
for c1, c2 in zip(users_v.c, users.c):
eq_(c1.name, c2.name)
self.assert_types_base(c1, c2)
finally:
- _drop_views(metadata.bind)
+ _drop_views(connection)
@testing.requires.views
- @testing.provide_metadata
- def test_reflect_all_with_views(self):
- metadata = self.metadata
+ def test_reflect_all_with_views(self, connection, metadata):
users, addresses, dingalings = createTables(metadata, None)
try:
- metadata.create_all()
- _create_views(metadata.bind, None)
- m2 = MetaData(testing.db)
+ metadata.create_all(connection)
+ _create_views(connection, None)
+ m2 = MetaData()
- m2.reflect(views=False)
+ m2.reflect(connection, views=False)
eq_(
set(m2.tables), set(["users", "email_addresses", "dingalings"])
)
- m2 = MetaData(testing.db)
- m2.reflect(views=True)
+ m2 = MetaData()
+ m2.reflect(connection, views=True)
eq_(
set(m2.tables),
set(
),
)
finally:
- _drop_views(metadata.bind)
+ _drop_views(connection)
class CreateDropTest(fixtures.TablesTest):
run_create_tables = None
+ @classmethod
+ def teardown_class(cls):
+ # TablesTest is used here without run_create_tables, so add an
+ # explicit drop of whatever is in metadata
+ cls._tables_metadata.drop_all(testing.db)
+
@classmethod
def define_tables(cls, metadata):
Table(
Column("item_name", sa.VARCHAR(50)),
)
- def teardown(self):
- self.metadata.drop_all(testing.db)
-
def test_sorter(self):
- tables = self.metadata.sorted_tables
+ tables = self.tables_test_metadata.sorted_tables
table_names = [t.name for t in tables]
ua = [n for n in table_names if n in ("users", "email_addresses")]
oi = [n for n in table_names if n in ("orders", "items")]
eq_(ua, ["users", "email_addresses"])
eq_(oi, ["orders", "items"])
- def test_checkfirst(self):
- insp = inspect(testing.db)
+ def test_checkfirst(self, connection):
+ insp = inspect(connection)
users = self.tables.users
is_false(insp.has_table("users"))
- users.create(bind=testing.db)
+ users.create(connection)
is_true(insp.has_table("users"))
- users.create(bind=testing.db, checkfirst=True)
- users.drop(bind=testing.db)
- users.drop(bind=testing.db, checkfirst=True)
+ users.create(connection, checkfirst=True)
+ users.drop(connection)
+ users.drop(connection, checkfirst=True)
is_false(insp.has_table("users"))
- users.create(bind=testing.db, checkfirst=True)
- users.drop(bind=testing.db)
+ users.create(connection, checkfirst=True)
+ users.drop(connection)
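# Sketch of the checkfirst behavior stepped through above: checkfirst=True
# turns create()/drop() into no-ops instead of errors when the table already
# exists or is already gone.  The SQLite URL is an illustrative assumption.
from sqlalchemy import Column, Integer, MetaData, Table, create_engine

eng = create_engine("sqlite://")
users = Table("users", MetaData(), Column("id", Integer, primary_key=True))
with eng.begin() as conn:
    users.create(conn)
    users.create(conn, checkfirst=True)  # already there: silently skipped
    users.drop(conn)
    users.drop(conn, checkfirst=True)    # already gone: silently skipped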
- def test_createdrop(self):
- insp = inspect(testing.db)
- metadata = self.metadata
- metadata.create_all(bind=testing.db)
+ def test_createdrop(self, connection):
+ insp = inspect(connection)
+
+ metadata = self.tables_test_metadata
+
+ metadata.create_all(connection)
is_true(insp.has_table("items"))
is_true(insp.has_table("email_addresses"))
- metadata.create_all(bind=testing.db)
+ metadata.create_all(connection)
is_true(insp.has_table("items"))
- metadata.drop_all(bind=testing.db)
+ metadata.drop_all(connection)
is_false(insp.has_table("items"))
is_false(insp.has_table("email_addresses"))
- metadata.drop_all(bind=testing.db)
+ metadata.drop_all(connection)
is_false(insp.has_table("items"))
- def test_tablenames(self):
- metadata = self.metadata
- metadata.create_all(bind=testing.db)
- insp = inspect(testing.db)
+ def test_tablenames(self, connection):
+ metadata = self.tables_test_metadata
+ metadata.create_all(bind=connection)
+ insp = inspect(connection)
# ensure all tables we created are in the list.
is_true(set(insp.get_table_names()).issuperset(metadata.tables))
assert addresses.constraints == set([addresses.primary_key, fk])
-class UnicodeReflectionTest(fixtures.TestBase):
+class UnicodeReflectionTest(fixtures.TablesTest):
__backend__ = True
@classmethod
- def setup_class(cls):
- cls.metadata = metadata = MetaData()
+ def define_tables(cls, metadata):
no_multibyte_period = set([("plain", "col_plain", "ix_plain")])
no_has_table = [
)
schema.Index(ixname, t.c[cname])
- metadata.create_all(testing.db)
cls.names = names
- @classmethod
- def teardown_class(cls):
- cls.metadata.drop_all(testing.db, checkfirst=False)
-
@testing.requires.unicode_connections
- def test_has_table(self):
- insp = inspect(testing.db)
+ def test_has_table(self, connection):
+ insp = inspect(connection)
for tname, cname, ixname in self.names:
assert insp.has_table(tname), "Can't detect name %s" % tname
@testing.requires.unicode_connections
- def test_basic(self):
+ def test_basic(self, connection):
# the 'convert_unicode' should not get in the way of the
# reflection process. reflect_table for oracle, postgresql
# (others?) expect non-unicode strings in result sets/bind
# params
- bind = testing.db
names = set([rec[0] for rec in self.names])
- reflected = set(inspect(bind).get_table_names())
-
- # Jython 2.5 on Java 5 lacks unicodedata.normalize
+ reflected = set(inspect(connection).get_table_names())
if not names.issubset(reflected) and hasattr(unicodedata, "normalize"):
# Yep. But still ensure that bulk reflection and
# create/drop work with either normalization.
- r = MetaData(bind)
- r.reflect()
- r.drop_all(checkfirst=False)
- r.create_all(checkfirst=False)
+ r = MetaData()
+ r.reflect(connection)
+ r.drop_all(connection, checkfirst=False)
+ r.create_all(connection, checkfirst=False)
@testing.requires.unicode_connections
- def test_get_names(self):
- inspector = inspect(testing.db)
+ def test_get_names(self, connection):
+ inspector = inspect(connection)
names = dict(
(tname, (cname, ixname)) for tname, cname, ixname in self.names
)
@testing.requires.cross_schema_fk_reflection
@testing.requires.implicit_default_schema
- @testing.provide_metadata
- def test_blank_schema_arg(self):
- metadata = self.metadata
+ def test_blank_schema_arg(self, connection, metadata):
Table(
"some_table",
schema=None,
test_needs_fk=True,
)
- metadata.create_all()
- with testing.db.connect() as conn:
- meta2 = MetaData(conn, schema=testing.config.test_schema)
- meta2.reflect()
+ metadata.create_all(connection)
+ meta2 = MetaData(schema=testing.config.test_schema)
+ meta2.reflect(connection)
- eq_(
- set(meta2.tables),
- set(
- [
- "some_other_table",
- "%s.some_table" % testing.config.test_schema,
- ]
- ),
- )
+ eq_(
+ set(meta2.tables),
+ set(
+ [
+ "some_other_table",
+ "%s.some_table" % testing.config.test_schema,
+ ]
+ ),
+ )
@testing.requires.schemas
- def test_explicit_default_schema(self):
- engine = testing.db
- engine.connect().close()
-
- if testing.against("sqlite"):
- # Works for CREATE TABLE main.foo, SELECT FROM main.foo, etc.,
- # but fails on:
- # FOREIGN KEY(col2) REFERENCES main.table1 (col1)
- schema = "main"
- else:
- schema = engine.dialect.default_schema_name
+ def test_explicit_default_schema(self, connection, metadata):
+
+ schema = connection.dialect.default_schema_name
assert bool(schema)
- metadata = MetaData()
Table(
"table1",
metadata,
test_needs_fk=True,
schema=schema,
)
- try:
- metadata.create_all(engine)
- metadata.create_all(engine, checkfirst=True)
- assert len(metadata.tables) == 2
- metadata.clear()
-
- Table("table1", metadata, autoload_with=engine, schema=schema)
- Table("table2", metadata, autoload_with=engine, schema=schema)
- assert len(metadata.tables) == 2
- finally:
- metadata.drop_all(engine)
+ metadata.create_all(connection)
+ metadata.create_all(connection, checkfirst=True)
+ eq_(len(metadata.tables), 2)
+
+ m1 = MetaData()
+ Table("table1", m1, autoload_with=connection, schema=schema)
+ Table("table2", m1, autoload_with=connection, schema=schema)
+ eq_(len(m1.tables), 2)
@testing.requires.schemas
- @testing.provide_metadata
- def test_schema_translation(self):
+ def test_schema_translation(self, connection, metadata):
Table(
"foob",
- self.metadata,
+ metadata,
Column("q", Integer),
schema=config.test_schema,
)
- self.metadata.create_all()
+ metadata.create_all(connection)
m = MetaData()
map_ = {"foob": config.test_schema}
- with config.db.connect().execution_options(
- schema_translate_map=map_
- ) as conn:
- t = Table("foob", m, schema="foob", autoload_with=conn)
- eq_(t.schema, "foob")
- eq_(t.c.keys(), ["q"])
+
+ c2 = connection.execution_options(schema_translate_map=map_)
+ t = Table("foob", m, schema="foob", autoload_with=c2)
+ eq_(t.schema, "foob")
+ eq_(t.c.keys(), ["q"])
@testing.requires.schemas
@testing.fails_on("sybase", "FIXME: unknown")
- def test_explicit_default_schema_metadata(self):
- engine = testing.db
-
- if testing.against("sqlite"):
- # Works for CREATE TABLE main.foo, SELECT FROM main.foo, etc.,
- # but fails on:
- # FOREIGN KEY(col2) REFERENCES main.table1 (col1)
- schema = "main"
- else:
- schema = engine.dialect.default_schema_name
+ def test_explicit_default_schema_metadata(self, connection, metadata):
+ schema = connection.dialect.default_schema_name
- assert bool(schema)
+ is_true(schema)
- metadata = MetaData(schema=schema)
+ metadata.schema = schema
Table(
"table1",
metadata,
Column("col2", sa.Integer, sa.ForeignKey("table1.col1")),
test_needs_fk=True,
)
- try:
- metadata.create_all(engine)
- metadata.create_all(engine, checkfirst=True)
- assert len(metadata.tables) == 2
- metadata.clear()
-
- Table("table1", metadata, autoload_with=engine)
- Table("table2", metadata, autoload_with=engine)
- assert len(metadata.tables) == 2
- finally:
- metadata.drop_all(engine)
+ metadata.create_all(connection)
+ metadata.create_all(connection, checkfirst=True)
+
+ m1 = MetaData(schema=schema)
+
+ Table("table1", m1, autoload_with=connection)
+ Table("table2", m1, autoload_with=connection)
+ eq_(len(m1.tables), 2)
@testing.requires.schemas
- @testing.provide_metadata
- def test_metadata_reflect_schema(self):
- metadata = self.metadata
+ def test_metadata_reflect_schema(self, connection, metadata):
createTables(metadata, testing.config.test_schema)
- metadata.create_all()
- m2 = MetaData(schema=testing.config.test_schema, bind=testing.db)
- m2.reflect()
+ metadata.create_all(connection)
+ m2 = MetaData(schema=testing.config.test_schema)
+ m2.reflect(connection)
eq_(
set(m2.tables),
set(
@testing.requires.schemas
@testing.requires.cross_schema_fk_reflection
@testing.requires.implicit_default_schema
- @testing.provide_metadata
- def test_reflect_all_schemas_default_overlap(self):
- Table("t", self.metadata, Column("id", Integer, primary_key=True))
+ def test_reflect_all_schemas_default_overlap(self, connection, metadata):
+ Table("t", metadata, Column("id", Integer, primary_key=True))
Table(
"t",
- self.metadata,
+ metadata,
Column("id1", sa.ForeignKey("t.id")),
schema=testing.config.test_schema,
)
- self.metadata.create_all()
+ metadata.create_all(connection)
m2 = MetaData()
- m2.reflect(testing.db, schema=testing.config.test_schema)
+ m2.reflect(connection, schema=testing.config.test_schema)
m3 = MetaData()
- m3.reflect(testing.db)
- m3.reflect(testing.db, schema=testing.config.test_schema)
+ m3.reflect(connection)
+ m3.reflect(connection, schema=testing.config.test_schema)
eq_(
set((t.name, t.schema) for t in m2.tables.values()),
@testing.requires.views
-def _create_views(con, schema=None):
- with testing.db.begin() as conn:
- for table_name in ("users", "email_addresses"):
- fullname = table_name
- if schema:
- fullname = "%s.%s" % (schema, table_name)
- view_name = fullname + "_v"
- query = "CREATE VIEW %s AS SELECT * FROM %s" % (
- view_name,
- fullname,
- )
- conn.execute(sa.sql.text(query))
+def _create_views(conn, schema=None):
+ for table_name in ("users", "email_addresses"):
+ fullname = table_name
+ if schema:
+ fullname = "%s.%s" % (schema, table_name)
+ view_name = fullname + "_v"
+ query = "CREATE VIEW %s AS SELECT * FROM %s" % (
+ view_name,
+ fullname,
+ )
+ conn.execute(sa.sql.text(query))
@testing.requires.views
-def _drop_views(con, schema=None):
- with testing.db.begin() as conn:
- for table_name in ("email_addresses", "users"):
- fullname = table_name
- if schema:
- fullname = "%s.%s" % (schema, table_name)
- view_name = fullname + "_v"
- query = "DROP VIEW %s" % view_name
- conn.execute(sa.sql.text(query))
+def _drop_views(conn, schema=None):
+ for table_name in ("email_addresses", "users"):
+ fullname = table_name
+ if schema:
+ fullname = "%s.%s" % (schema, table_name)
+ view_name = fullname + "_v"
+ query = "DROP VIEW %s" % view_name
+ conn.execute(sa.sql.text(query))
class ReverseCasingReflectTest(fixtures.TestBase, AssertsCompiledSQL):
conn.exec_driver_sql("drop table weird_casing")
@testing.requires.denormalized_names
- def test_direct_quoting(self):
+ def test_direct_quoting(self, connection):
m = MetaData()
- t = Table("weird_casing", m, autoload_with=testing.db)
+ t = Table("weird_casing", m, autoload_with=connection)
self.assert_compile(
t.select(),
"SELECT weird_casing.col1, "
)
@testing.fails_if(testing.requires._has_mysql_on_windows)
- def test_table_names(self):
- x = inspect(testing.db).get_table_names()
+ def test_table_names(self, connection):
+ x = inspect(connection).get_table_names()
assert set(["SomeTable", "SomeOtherTable"]).issubset(x)
- def test_reflect_exact_name(self):
+ def test_reflect_exact_name(self, connection):
m = MetaData()
- t1 = Table("SomeTable", m, autoload_with=testing.db)
+ t1 = Table("SomeTable", m, autoload_with=connection)
eq_(t1.name, "SomeTable")
assert t1.c.x is not None
lambda: testing.against(("mysql", "<", (5, 5)))
and not testing.requires._has_mysql_fully_case_sensitive()
)
- def test_reflect_via_fk(self):
+ def test_reflect_via_fk(self, connection):
m = MetaData()
- t2 = Table("SomeOtherTable", m, autoload_with=testing.db)
+ t2 = Table("SomeOtherTable", m, autoload_with=connection)
eq_(t2.name, "SomeOtherTable")
assert "SomeTable" in m.tables
@testing.fails_if(testing.requires._has_mysql_fully_case_sensitive)
@testing.fails_on_everything_except("sqlite", "mysql", "mssql")
- def test_reflect_case_insensitive(self):
+ def test_reflect_case_insensitive(self, connection):
m = MetaData()
- t2 = Table("sOmEtAbLe", m, autoload_with=testing.db)
+ t2 = Table("sOmEtAbLe", m, autoload_with=connection)
eq_(t2.name, "sOmEtAbLe")
-class ColumnEventsTest(fixtures.RemovesEvents, fixtures.TestBase):
+class ColumnEventsTest(fixtures.RemovesEvents, fixtures.TablesTest):
__backend__ = True
@classmethod
- def setup_class(cls):
- cls.metadata = MetaData()
- cls.to_reflect = Table(
+ def define_tables(cls, metadata):
+ to_reflect = Table(
"to_reflect",
- cls.metadata,
+ metadata,
Column("x", sa.Integer, primary_key=True, autoincrement=False),
Column("y", sa.Integer),
test_needs_fk=True,
)
- cls.related = Table(
+ Table(
"related",
- cls.metadata,
+ metadata,
Column("q", sa.Integer, sa.ForeignKey("to_reflect.x")),
test_needs_fk=True,
)
- sa.Index("some_index", cls.to_reflect.c.y)
- cls.metadata.create_all(testing.db)
+ sa.Index("some_index", to_reflect.c.y)
- @classmethod
- def teardown_class(cls):
- cls.metadata.drop_all(testing.db)
-
- def _do_test(self, col, update, assert_, tablename="to_reflect"):
+ def _do_test(
+ self, connection, col, update, assert_, tablename="to_reflect"
+ ):
# load the actual Table class, not the test
# wrapper
from sqlalchemy.schema import Table
t = Table(
tablename,
m,
- autoload_with=testing.db,
+ autoload_with=connection,
listeners=[("column_reflect", column_reflect)],
)
assert_(t)
m = MetaData()
self.event_listen(Table, "column_reflect", column_reflect)
- t2 = Table(tablename, m, autoload_with=testing.db)
+ t2 = Table(tablename, m, autoload_with=connection)
assert_(t2)
- def test_override_key(self):
+ def test_override_key(self, connection):
def assertions(table):
eq_(table.c.YXZ.name, "x")
eq_(set(table.primary_key), set([table.c.YXZ]))
- self._do_test("x", {"key": "YXZ"}, assertions)
+ self._do_test(connection, "x", {"key": "YXZ"}, assertions)
- def test_override_index(self):
+ def test_override_index(self, connection):
def assertions(table):
idx = list(table.indexes)[0]
eq_(idx.columns, [table.c.YXZ])
- self._do_test("y", {"key": "YXZ"}, assertions)
+ self._do_test(connection, "y", {"key": "YXZ"}, assertions)
- def test_override_key_fk(self):
+ def test_override_key_fk(self, connection):
m = MetaData()
def column_reflect(insp, table, column_info):
to_reflect = Table(
"to_reflect",
m,
- autoload_with=testing.db,
+ autoload_with=connection,
listeners=[("column_reflect", column_reflect)],
)
related = Table(
"related",
m,
- autoload_with=testing.db,
+ autoload_with=connection,
listeners=[("column_reflect", column_reflect)],
)
assert related.c.qyz.references(to_reflect.c.xyz)
- def test_override_type(self):
+ def test_override_type(self, connection):
def assert_(table):
assert isinstance(table.c.x.type, sa.String)
- self._do_test("x", {"type": sa.String}, assert_)
+ self._do_test(connection, "x", {"type": sa.String}, assert_)
- def test_override_info(self):
+ def test_override_info(self, connection):
self._do_test(
+ connection,
"x",
{"info": {"a": "b"}},
lambda table: eq_(table.c.x.info, {"a": "b"}),
)
- def test_override_server_default_fetchedvalue(self):
+ def test_override_server_default_fetchedvalue(self, connection):
my_default = FetchedValue()
self._do_test(
+ connection,
"x",
{"default": my_default},
lambda table: eq_(table.c.x.server_default, my_default),
)
- def test_override_server_default_default_clause(self):
+ def test_override_server_default_default_clause(self, connection):
my_default = DefaultClause("1")
self._do_test(
+ connection,
"x",
{"default": my_default},
lambda table: eq_(table.c.x.server_default, my_default),
)
- def test_override_server_default_plain_text(self):
+ def test_override_server_default_plain_text(self, connection):
my_default = "1"
def assert_text_of_one(table):
)
eq_(str(table.c.x.server_default.arg), "1")
- self._do_test("x", {"default": my_default}, assert_text_of_one)
+ self._do_test(
+ connection, "x", {"default": my_default}, assert_text_of_one
+ )
- def test_override_server_default_textclause(self):
+ def test_override_server_default_textclause(self, connection):
my_default = sa.text("1")
def assert_text_of_one(table):
)
eq_(str(table.c.x.server_default.arg), "1")
- self._do_test("x", {"default": my_default}, assert_text_of_one)
+ self._do_test(
+ connection, "x", {"default": my_default}, assert_text_of_one
+ )
- def test_listen_metadata_obj(self):
+ def test_listen_metadata_obj(self, connection):
m1 = MetaData()
m2 = MetaData()
def go(insp, table, info):
canary.append(info["name"])
- Table("related", m1, autoload_with=testing.db)
+ Table("related", m1, autoload_with=connection)
- Table("related", m2, autoload_with=testing.db)
+ Table("related", m2, autoload_with=connection)
eq_(canary, ["q", "x", "y"])
- def test_listen_metadata_cls(self):
+ def test_listen_metadata_cls(self, connection):
m1 = MetaData()
m2 = MetaData()
self.event_listen(MetaData, "column_reflect", go)
- Table("related", m1, autoload_with=testing.db)
+ Table("related", m1, autoload_with=connection)
- Table("related", m2, autoload_with=testing.db)
+ Table("related", m2, autoload_with=connection)
eq_(canary, ["q", "x", "y", "q", "x", "y"])
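# A minimal sketch of the column_reflect hook these tests drive: the
# listener mutates column_info (key, type, default, info, ...) before the
# Column is constructed.  SQLite URL and names are illustrative assumptions.
from sqlalchemy import Column, Integer, MetaData, Table, create_engine

eng = create_engine("sqlite://")
src = MetaData()
Table("t", src, Column("x", Integer, primary_key=True))
src.create_all(eng)

def column_reflect(inspector, table, column_info):
    column_info["key"] = column_info["name"].upper()  # e.g. "x" -> key "X"

m = MetaData()
with eng.connect() as conn:
    t = Table(
        "t",
        m,
        autoload_with=conn,
        listeners=[("column_reflect", column_reflect)],
    )
assert t.c.X.name == "x"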
from sqlalchemy import ForeignKey
from sqlalchemy import Integer
-from sqlalchemy import MetaData
from sqlalchemy import String
from sqlalchemy import testing
from sqlalchemy.ext.declarative import DeferredReflection
def setup(self):
global Base, registry
- registry = decl.registry(metadata=MetaData(bind=testing.db))
+ registry = decl.registry()
Base = registry.generate_base()
def teardown(self):
u1 = User(
name="u1", addresses=[Address(email="one"), Address(email="two")]
)
- sess = create_session()
+ sess = create_session(testing.db)
sess.add(u1)
sess.flush()
sess.expunge_all()
return {"primary_key": cls.__table__.c.id}
DeferredReflection.prepare(testing.db)
- sess = Session()
+ sess = Session(testing.db)
sess.add_all(
[User(name="G"), User(name="Q"), User(name="A"), User(name="C")]
)
u1 = User(name="u1", items=[Item(name="i1"), Item(name="i2")])
- sess = Session()
+ sess = Session(testing.db)
sess.add(u1)
sess.commit()
def setup(self):
collection_class = self.collection_class
- metadata = MetaData(testing.db)
+ metadata = MetaData()
parents_table = Table(
"Parent",
)
mapper(Child, children_table)
- metadata.create_all()
+ metadata.create_all(testing.db)
self.metadata = metadata
- self.session = create_session()
+ self.session = create_session(testing.db)
self.Parent, self.Child = Parent, Child
def teardown(self):
- self.metadata.drop_all()
+ self.metadata.drop_all(testing.db)
def roundtrip(self, obj):
if obj not in self.session:
class ProxyFactoryTest(ListTest):
def setup(self):
- metadata = MetaData(testing.db)
+ metadata = MetaData()
parents_table = Table(
"Parent",
)
mapper(Child, children_table)
- metadata.create_all()
+ metadata.create_all(testing.db)
self.metadata = metadata
- self.session = create_session()
+ self.session = create_session(testing.db)
self.Parent, self.Child = Parent, Child
def test_sequence_ops(self):
)
mapper(Child, children_table)
- metadata.create_all()
- session = create_session()
+ metadata.create_all(testing.db)
+ session = create_session(testing.db)
def roundtrip(obj):
if obj not in session:
class LazyLoadTest(fixtures.TestBase):
def setup(self):
- metadata = MetaData(testing.db)
+ metadata = MetaData()
parents_table = Table(
"Parent",
self.name = name
mapper(Child, children_table)
- metadata.create_all()
+ metadata.create_all(testing.db)
self.metadata = metadata
- self.session = create_session()
+ self.session = create_session(testing.db)
self.Parent, self.Child = Parent, Child
self.table = parents_table
def teardown(self):
- self.metadata.drop_all()
+ self.metadata.drop_all(testing.db)
def roundtrip(self, obj):
self.session.add(obj)
FixtureTest.define_tables(metadata)
def test_relationship_o2m_default(self):
- Base = automap_base(metadata=self.metadata)
+ Base = automap_base(metadata=self.tables_test_metadata)
Base.prepare()
User = Base.classes.users
assert a1.users is u1
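+ # ``TablesTest.metadata`` is renamed to ``tables_test_metadata`` so the
+ # plain name ``metadata`` is free for the per-test fixture, which drops
+ # whatever a test created on it, e.g. (illustrative):
+ #
+ #     def test_make_table(self, metadata, connection):
+ #         t = Table("t", metadata, Column("id", Integer, primary_key=True))
+ #         t.create(connection)  # dropped automatically after the test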
def test_relationship_explicit_override_o2m(self):
- Base = automap_base(metadata=self.metadata)
+ Base = automap_base(metadata=self.tables_test_metadata)
prop = relationship("addresses", collection_class=set)
class User(Base):
assert a1.user is u1
def test_exception_prepare_not_called(self):
- Base = automap_base(metadata=self.metadata)
+ Base = automap_base(metadata=self.tables_test_metadata)
class User(Base):
__tablename__ = "users"
)
def test_relationship_explicit_override_m2o(self):
- Base = automap_base(metadata=self.metadata)
+ Base = automap_base(metadata=self.tables_test_metadata)
prop = relationship("users")
assert a1.users is u1
def test_relationship_self_referential(self):
- Base = automap_base(metadata=self.metadata)
+ Base = automap_base(metadata=self.tables_test_metadata)
Base.prepare()
Node = Base.classes.nodes
This test verifies that prepare can accept an optional schema
argument and pass it to reflect.
"""
- Base = automap_base(metadata=self.metadata)
+ Base = automap_base(metadata=self.tables_test_metadata)
engine_mock = Mock()
with patch.object(Base.metadata, "reflect") as reflect_mock:
Base.prepare(autoload_with=engine_mock, schema="some_schema")
This test verifies that prepare passes a default None if no schema is
provided.
"""
- Base = automap_base(metadata=self.metadata)
+ Base = automap_base(metadata=self.tables_test_metadata)
engine_mock = Mock()
with patch.object(Base.metadata, "reflect") as reflect_mock:
Base.prepare(autoload_with=engine_mock)
)
def test_naming_schemes(self):
- Base = automap_base(metadata=self.metadata)
+ Base = automap_base(metadata=self.tables_test_metadata)
def classname_for_table(base, tablename, table):
return str("cls_" + tablename)
assert a1.scalar_cls_users is u1
def test_relationship_m2m(self):
- Base = automap_base(metadata=self.metadata)
+ Base = automap_base(metadata=self.tables_test_metadata)
Base.prepare()
assert o1 in i1.orders_collection
def test_relationship_explicit_override_forwards_m2m(self):
- Base = automap_base(metadata=self.metadata)
+ Base = automap_base(metadata=self.tables_test_metadata)
class Order(Base):
__tablename__ = "orders"
assert o1 in i1.order_collection
def test_relationship_pass_params(self):
- Base = automap_base(metadata=self.metadata)
+ Base = automap_base(metadata=self.tables_test_metadata)
mock = Mock()
)
def test_o2m_relationship_cascade(self):
- Base = automap_base(metadata=self.metadata)
+ Base = automap_base(metadata=self.tables_test_metadata)
Base.prepare()
configure_mappers()
FixtureTest.define_tables(metadata)
def test_reflect_true(self):
- Base = automap_base(metadata=self.metadata)
+ Base = automap_base(metadata=self.tables_test_metadata)
engine_mock = mock.Mock()
with mock.patch.object(Base.metadata, "reflect") as reflect_mock:
with testing.expect_deprecated(
db1, db2, db3, db4 = self._dbs = self._init_dbs()
- meta = self.metadata = MetaData()
+ meta = self.tables_test_metadata = MetaData()
ids = Table("ids", meta, Column("nextid", Integer, nullable=False))
def id_generator(ctx):
os.remove("shard%d_%s.db" % (i, provision.FOLLOWER_IDENT))
with self.postgresql_engine.begin() as conn:
- self.metadata.drop_all(conn)
+ self.tables_test_metadata.drop_all(conn)
for i in [2, 4]:
conn.exec_driver_sql("DROP SCHEMA shard%s CASCADE" % (i,))
)
for db in (db1, db2):
- self.metadata.create_all(db)
+ self.tables_test_metadata.create_all(db)
self.dbs = [db1, db2]
from sqlalchemy import String
from sqlalchemy import testing
from sqlalchemy.ext.orderinglist import ordering_list
-from sqlalchemy.orm import create_session
from sqlalchemy.orm import mapper
from sqlalchemy.orm import relationship
from sqlalchemy.testing import eq_
from sqlalchemy.testing import fixtures
+from sqlalchemy.testing.fixtures import create_session
from sqlalchemy.testing.schema import Column
from sqlalchemy.testing.schema import Table
from sqlalchemy.testing.util import picklers
global metadata, slides_table, bullets_table, Slide, Bullet
slides_table, bullets_table = None, None
Slide, Bullet = None, None
- metadata = MetaData(testing.db)
+ metadata = MetaData()
def _setup(self, test_collection_class):
"""Build a relationship situation using the given
)
mapper(Bullet, bullets_table)
- metadata.create_all()
+ metadata.create_all(testing.db)
def teardown(self):
- metadata.drop_all()
+ metadata.drop_all(testing.db)
def test_append_no_reorder(self):
self._setup(
from sqlalchemy.orm import column_property
from sqlalchemy.orm import composite
from sqlalchemy.orm import configure_mappers
-from sqlalchemy.orm import create_session
from sqlalchemy.orm import decl_base
from sqlalchemy.orm import declarative_base
from sqlalchemy.orm import declared_attr
from sqlalchemy.testing import fixtures
from sqlalchemy.testing import is_
from sqlalchemy.testing import mock
+from sqlalchemy.testing.fixtures import create_session
from sqlalchemy.testing.schema import Column
from sqlalchemy.testing.schema import Table
from sqlalchemy.util import with_metaclass
m = MyObj(id="someid", data="somedata")
sess.add(m)
sess.flush()
- eq_(t1.select().execute().fetchall(), [("someid", "somedata")])
+ eq_(sess.execute(t1.select()).fetchall(), [("someid", "somedata")])
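+ # legacy "implicit execution" (``t1.select().execute()``) is deprecated;
+ # statements now go through an explicit Session or Connection as above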
def test_synonym_for(self):
class User(Base, fixtures.ComparableEntity):
from sqlalchemy.orm import close_all_sessions
from sqlalchemy.orm import column_property
from sqlalchemy.orm import configure_mappers
-from sqlalchemy.orm import create_session
from sqlalchemy.orm import declarative_base
from sqlalchemy.orm import declared_attr
from sqlalchemy.orm import deferred
from sqlalchemy.testing import fixtures
from sqlalchemy.testing import is_
from sqlalchemy.testing import mock
+from sqlalchemy.testing.fixtures import create_session
from sqlalchemy.testing.schema import Column
from sqlalchemy.testing.schema import Table
from sqlalchemy.testing.util import gc_collect
def setup(self):
global Base, mapper_registry
- mapper_registry = registry(metadata=MetaData(bind=testing.db))
+ mapper_registry = registry(metadata=MetaData())
Base = mapper_registry.generate_base()
def teardown(self):
close_all_sessions()
clear_mappers()
- Base.metadata.drop_all()
+ with testing.db.begin() as conn:
+ Base.metadata.drop_all(conn)
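+ # DDL teardown now runs inside an explicit ``engine.begin()`` block,
+ # which commits on success, rather than relying on a bound
+ # ``Base.metadata.drop_all()``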
class DeclarativeMixinTest(DeclarativeTestBase):
)
# do a brief round trip on this
Base.metadata.create_all(testing.db)
- session = Session()
+ session = create_session()
o1, o2 = Other(), Other()
session.add_all(
[Engineer(target=o1), Manager(target=o2), Manager(target=o1)]
if "subclasses" in value:
self._fixture_from_geometry(value["subclasses"], klass)
- if is_base and self.metadata.tables and self.run_create_tables:
- self.tables.update(self.metadata.tables)
- self.metadata.create_all(config.db)
+ if (
+ is_base
+ and self.tables_test_metadata.tables
+ and self.run_create_tables
+ ):
+ self.tables.update(self.tables_test_metadata.tables)
+ self.tables_test_metadata.create_all(config.db)
},
)
- sess = Session(
- binds={User: self.metadata.bind, Address: self.metadata.bind}
- )
+ sess = Session(binds={User: testing.db, Address: testing.db})
u1 = User(id=1, name="ed")
sess.add(u1)
Session = sessionmaker(
binds={
- users_unbound: self.metadata.bind,
- addresses_unbound: self.metadata.bind,
+ users_unbound: testing.db,
+ addresses_unbound: testing.db,
}
)
sess = Session()
def test_session_bind(self):
Foo = self.classes.Foo
- engine = self.metadata.bind
+ engine = testing.db
for bind in (engine, engine.connect()):
try:
from sqlalchemy import MetaData
from sqlalchemy import String
from sqlalchemy import Table
-from sqlalchemy import testing
from sqlalchemy import Unicode
from sqlalchemy.orm import backref
from sqlalchemy.orm import clear_mappers
clear_mappers()
def test_with_polymorphic(self):
- metadata = MetaData(testing.db)
+ metadata = MetaData()
order = Table(
"orders",
def test_conflicting_backref_one(self):
"""test that conflicting backrefs raises an exception"""
- metadata = MetaData(testing.db)
+ metadata = MetaData()
order = Table(
"orders",
sa_exc.ArgumentError, "Error creating backref", configure_mappers
)
- @testing.provide_metadata
- def test_misc_one(self, connection):
- metadata = self.metadata
+ def test_misc_one(self, connection, metadata):
node_table = Table(
"node",
metadata,
},
)
- metadata.create_all()
- sess = Session(autoflush=False)
+ metadata.create_all(testing.db)
+ sess = Session(testing.db, autoflush=False)
data = {"im": "unhashable"}
a1 = Article(id=1, data=data)
c1 = Category(id=1, data=data)
assert hasattr(Foo, "type")
assert Foo.type.property.columns[0] is t.c.type
- @testing.provide_metadata
- def test_prop_filters_defaults(self):
- metadata = self.metadata
+ def test_prop_filters_defaults(self, metadata, connection):
t = Table(
"t",
metadata,
),
Column("x", Integer(), nullable=False, server_default="0"),
)
- t.create()
+
+ t.create(connection)
class A(object):
pass
self.mapper(A, t, include_properties=["id"])
- s = Session()
+ s = Session(connection)
s.add(A())
s.commit()
class BindSensitiveStringifyTest(fixtures.TestBase):
- def _fixture(self, bind_to=None):
+ def _fixture(self):
- # building a totally separate metadata /mapping here
- # because we need to control if the MetaData is bound or not
+ # building a totally separate metadata / mapping here because the
+ # stringify behavior must be driven only by the Session bind
class User(object):
pass
- m = MetaData(bind=bind_to)
+ m = MetaData()
user_table = Table(
"users",
m,
return base.Engine(mock.Mock(), MyDialect(), mock.Mock())
- def _test(
- self, bound_metadata, bound_session, session_present, expect_bound
- ):
- if bound_metadata or bound_session:
+ def _test(self, bound_session, session_present, expect_bound):
+ if bound_session:
eng = self._dialect_fixture()
else:
eng = None
- User = self._fixture(bind_to=eng if bound_metadata else None)
+ User = self._fixture()
s = Session(eng if bound_session else None)
q = s.query(User).filter(User.id == 7)
"FROM users WHERE users.id = :id_1",
)
- def test_query_unbound_metadata_bound_session(self):
- self._test(False, True, True, True)
-
- def test_query_bound_metadata_unbound_session(self):
- self._test(True, False, True, True)
-
- def test_query_unbound_metadata_no_session(self):
- self._test(False, False, False, False)
+ def test_query_bound_session(self):
+ self._test(True, True, True)
- def test_query_unbound_metadata_unbound_session(self):
- self._test(False, False, True, False)
+ def test_query_no_session(self):
+ self._test(False, False, False)
- def test_query_bound_metadata_bound_session(self):
- self._test(True, True, True, True)
+ def test_query_unbound_session(self):
+ self._test(False, True, False)
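+ # with bound MetaData gone, only a Session bind can supply a dialect
+ # for stringification, so the former bound-metadata x bound-session
+ # matrix collapses to the three session cases above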
class GetTest(QueryTest):
__mapper_args__ = {"polymorphic_identity": "bsub2"}
configure_mappers()
- self.metadata.create_all()
+ self.metadata.create_all(testing.db)
return A, AMember, B, BSub1, BSub2
def test_autoflush(self):
User, users = self.classes.User, self.tables.users
- bind = self.metadata.bind
+ bind = testing.db
mapper(User, users)
conn1 = bind.connect()
conn2 = bind.connect()
)
self.assert_sql_execution(
testing.db,
- lambda: metadata.create_all(checkfirst=False),
+ lambda: metadata.create_all(testing.db, checkfirst=False),
CompiledSQL(
"CREATE TABLE employees ("
"id INTEGER NOT NULL, "
assertions.append(AllOf(*fk_assertions))
with self.sql_execution_asserter() as asserter:
- metadata.create_all(checkfirst=False)
+ metadata.create_all(testing.db, checkfirst=False)
asserter.assert_(*assertions)
assertions = [
]
with self.sql_execution_asserter() as asserter:
- metadata.drop_all(checkfirst=False),
+ metadata.drop_all(testing.db, checkfirst=False)
asserter.assert_(*assertions)
def _assert_cyclic_constraint_no_alter(
assertions = [AllOf(*table_assertions)]
with self.sql_execution_asserter() as asserter:
- metadata.create_all(checkfirst=False)
+ metadata.create_all(testing.db, checkfirst=False)
asserter.assert_(*assertions)
assertions = [
if sqlite_warning:
with expect_warnings("Can't sort tables for DROP; "):
with self.sql_execution_asserter() as asserter:
- metadata.drop_all(checkfirst=False),
+ metadata.drop_all(testing.db, checkfirst=False)
else:
with self.sql_execution_asserter() as asserter:
- metadata.drop_all(checkfirst=False),
+ metadata.drop_all(testing.db, checkfirst=False)
asserter.assert_(*assertions)
@testing.force_drop_names("a", "b")
def test_cycle_unnamed_fks(self):
- metadata = MetaData(testing.db)
+ metadata = MetaData()
Table(
"a",
),
]
with self.sql_execution_asserter() as asserter:
- metadata.create_all(checkfirst=False)
+ metadata.create_all(testing.db, checkfirst=False)
if testing.db.dialect.supports_alter:
asserter.assert_(*assertions)
"cycle have names so that they can be dropped using "
"DROP CONSTRAINT.",
metadata.drop_all,
+ testing.db,
checkfirst=False,
)
else:
"foreign key dependency exists between tables"
):
with self.sql_execution_asserter() as asserter:
- metadata.drop_all(checkfirst=False)
+ metadata.drop_all(testing.db, checkfirst=False)
asserter.assert_(
AllOf(CompiledSQL("DROP TABLE b"), CompiledSQL("DROP TABLE a"))
@testing.force_drop_names("a", "b")
def test_cycle_named_fks(self):
- metadata = MetaData(testing.db)
+ metadata = MetaData()
Table(
"a",
),
]
with self.sql_execution_asserter() as asserter:
- metadata.create_all(checkfirst=False)
+ metadata.create_all(testing.db, checkfirst=False)
if testing.db.dialect.supports_alter:
asserter.assert_(*assertions)
with self.sql_execution_asserter() as asserter:
- metadata.drop_all(checkfirst=False)
+ metadata.drop_all(testing.db, checkfirst=False)
asserter.assert_(
CompiledSQL("ALTER TABLE b DROP CONSTRAINT aidfk"),
)
else:
with self.sql_execution_asserter() as asserter:
- metadata.drop_all(checkfirst=False)
+ metadata.drop_all(testing.db, checkfirst=False)
asserter.assert_(
AllOf(CompiledSQL("DROP TABLE b"), CompiledSQL("DROP TABLE a"))
self.assert_sql_execution(
testing.db,
- lambda: metadata.create_all(checkfirst=False),
+ lambda: metadata.create_all(testing.db, checkfirst=False),
AllOf(
CompiledSQL(
"CREATE TABLE foo ("
self.assert_sql_execution(
testing.db,
- lambda: metadata.create_all(checkfirst=False),
+ lambda: metadata.create_all(testing.db, checkfirst=False),
AllOf(
CompiledSQL(
"CREATE TABLE foo ("
self.assert_sql_execution(
testing.db,
- lambda: metadata.create_all(checkfirst=False),
+ lambda: metadata.create_all(testing.db, checkfirst=False),
RegexSQL("^CREATE TABLE"),
AllOf(
CompiledSQL(
self.assert_sql_execution(
testing.db,
- lambda: metadata.create_all(checkfirst=False),
+ lambda: metadata.create_all(testing.db, checkfirst=False),
RegexSQL("^CREATE TABLE"),
AllOf(
CompiledSQL(
from sqlalchemy import VARCHAR
from sqlalchemy.engine import default
from sqlalchemy.testing import assert_raises_message
-from sqlalchemy.testing import engines
from sqlalchemy.testing import eq_
from sqlalchemy.testing import fixtures
from sqlalchemy.testing import is_
from sqlalchemy.testing.schema import Table
-# ongoing - these are old tests. those which are of general use
-# to test a dialect are being slowly migrated to
-# sqlalhcemy.testing.suite
-
-users = users2 = addresses = metadata = None
-
-
-class QueryTest(fixtures.TestBase):
+class QueryTest(fixtures.TablesTest):
__backend__ = True
@classmethod
- def setup_class(cls):
- global users, users2, addresses, metadata
- metadata = MetaData(testing.db)
- users = Table(
- "query_users",
+ def define_tables(cls, metadata):
+ Table(
+ "users",
metadata,
Column(
"user_id", INT, primary_key=True, test_needs_autoincrement=True
Column("user_name", VARCHAR(20)),
test_needs_acid=True,
)
- addresses = Table(
- "query_addresses",
+ Table(
+ "addresses",
metadata,
Column(
"address_id",
primary_key=True,
test_needs_autoincrement=True,
),
- Column("user_id", Integer, ForeignKey("query_users.user_id")),
+ Column("user_id", Integer, ForeignKey("users.user_id")),
Column("address", String(30)),
test_needs_acid=True,
)
- users2 = Table(
+ Table(
"u2",
metadata,
Column("user_id", INT, primary_key=True),
test_needs_acid=True,
)
- metadata.create_all()
-
- @engines.close_first
- def teardown(self):
- with testing.db.begin() as conn:
- conn.execute(addresses.delete())
- conn.execute(users.delete())
- conn.execute(users2.delete())
-
- @classmethod
- def teardown_class(cls):
- metadata.drop_all()
-
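+ # Sketch (hypothetical names) of the fixtures.TablesTest lifecycle that
+ # replaces the hand-rolled setup/teardown above: tables declared in
+ # define_tables() are created once per class, rows are cleaned up
+ # between tests, and everything is dropped at the end:
+ #
+ #     class MiniTablesTest(fixtures.TablesTest):
+ #         @classmethod
+ #         def define_tables(cls, metadata):
+ #             Table(
+ #                 "widgets",
+ #                 metadata,
+ #                 Column("id", Integer, primary_key=True),
+ #             )
+ #
+ #         def test_roundtrip(self, connection):
+ #             widgets = self.tables.widgets
+ #             connection.execute(widgets.insert(), {"id": 1})
+ #             eq_(connection.scalar(select(widgets.c.id)), 1)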
@testing.fails_on(
"firebird", "kinterbasdb doesn't send full type information"
)
"""
+ users = self.tables.users
+
connection.execute(
users.insert(),
{"user_id": 7, "user_name": "jack"},
@testing.requires.order_by_label_with_expression
def test_order_by_label_compound(self, connection):
+ users = self.tables.users
connection.execute(
users.insert(),
{"user_id": 7, "user_name": "jack"},
assert row.y == False # noqa
def test_select_tuple(self, connection):
+ users = self.tables.users
connection.execute(
users.insert(),
{"user_id": 1, "user_name": "apples"},
)
def test_like_ops(self, connection):
+ users = self.tables.users
connection.execute(
users.insert(),
{"user_id": 1, "user_name": "apples"},
eq_(connection.scalar(expr), result)
def test_ilike(self, connection):
+ users = self.tables.users
connection.execute(
users.insert(),
{"user_id": 1, "user_name": "one"},
)
def test_compiled_execute(self, connection):
+ users = self.tables.users
connection.execute(users.insert(), user_id=7, user_name="jack")
s = select(users).where(users.c.user_id == bindparam("id")).compile()
eq_(connection.execute(s, id=7).first()._mapping["user_id"], 7)
def test_compiled_insert_execute(self, connection):
+ users = self.tables.users
connection.execute(
users.insert().compile(), user_id=7, user_name="jack"
)
This should be run for DB-APIs with both positional and named
paramstyles.
"""
+ users = self.tables.users
connection.execute(users.insert(), user_id=7, user_name="jack")
connection.execute(users.insert(), user_id=8, user_name="fred")
Tests simple, compound, aliased and DESC clauses.
"""
+ users = self.tables.users
+
connection.execute(users.insert(), user_id=1, user_name="c")
connection.execute(users.insert(), user_id=2, user_name="b")
connection.execute(users.insert(), user_id=3, user_name="a")
Tests simple, compound, aliased and DESC clauses.
"""
+ users = self.tables.users
+
connection.execute(users.insert(), user_id=1)
connection.execute(users.insert(), user_id=2, user_name="b")
connection.execute(users.insert(), user_id=3, user_name="a")
def test_in_filtering(self, connection):
"""test the behavior of the in_() function."""
+ users = self.tables.users
connection.execute(users.insert(), user_id=7, user_name="jack")
connection.execute(users.insert(), user_id=8, user_name="fred")
assert len(r) == 0
def test_expanding_in(self, connection):
+ users = self.tables.users
connection.execute(
users.insert(),
[
@testing.requires.no_quoting_special_bind_names
def test_expanding_in_special_chars(self, connection):
+ users = self.tables.users
connection.execute(
users.insert(),
[
)
def test_expanding_in_multiple(self, connection):
+ users = self.tables.users
+
connection.execute(
users.insert(),
[
)
def test_expanding_in_repeated(self, connection):
+ users = self.tables.users
+
connection.execute(
users.insert(),
[
@testing.requires.tuple_in
def test_expanding_in_composite(self, connection):
+ users = self.tables.users
+
connection.execute(
users.insert(),
[
return value[3:]
users = Table(
- "query_users",
+ "users",
MetaData(),
Column("user_id", Integer, primary_key=True),
Column("user_name", NameWithProcess()),
"""
+ users = self.tables.users
+
connection.execute(users.insert(), user_id=7, user_name="jack")
connection.execute(users.insert(), user_id=8, user_name="fred")
connection.execute(users.insert(), user_id=9, user_name=None)
def test_literal_in(self, connection):
"""similar to test_bind_in but use a bind with a value."""
+ users = self.tables.users
+
connection.execute(users.insert(), user_id=7, user_name="jack")
connection.execute(users.insert(), user_id=8, user_name="fred")
connection.execute(users.insert(), user_id=9, user_name=None)
that a proper boolean value is generated.
"""
+ users = self.tables.users
connection.execute(
users.insert(),
is_(bindparam("foo", callable_=c, required=False).required, False)
-class LimitTest(fixtures.TestBase):
+class LimitTest(fixtures.TablesTest):
__backend__ = True
@classmethod
- def setup_class(cls):
- global users, addresses, metadata
- metadata = MetaData(testing.db)
- users = Table(
- "query_users",
+ def define_tables(cls, metadata):
+ Table(
+ "users",
metadata,
Column("user_id", INT, primary_key=True),
Column("user_name", VARCHAR(20)),
)
- addresses = Table(
- "query_addresses",
+ Table(
+ "addresses",
metadata,
Column("address_id", Integer, primary_key=True),
- Column("user_id", Integer, ForeignKey("query_users.user_id")),
+ Column("user_id", Integer, ForeignKey("users.user_id")),
Column("address", String(30)),
)
- metadata.create_all()
-
- with testing.db.begin() as conn:
- conn.execute(users.insert(), user_id=1, user_name="john")
- conn.execute(
- addresses.insert(), address_id=1, user_id=1, address="addr1"
- )
- conn.execute(users.insert(), user_id=2, user_name="jack")
- conn.execute(
- addresses.insert(), address_id=2, user_id=2, address="addr1"
- )
- conn.execute(users.insert(), user_id=3, user_name="ed")
- conn.execute(
- addresses.insert(), address_id=3, user_id=3, address="addr2"
- )
- conn.execute(users.insert(), user_id=4, user_name="wendy")
- conn.execute(
- addresses.insert(), address_id=4, user_id=4, address="addr3"
- )
- conn.execute(users.insert(), user_id=5, user_name="laura")
- conn.execute(
- addresses.insert(), address_id=5, user_id=5, address="addr4"
- )
- conn.execute(users.insert(), user_id=6, user_name="ralph")
- conn.execute(
- addresses.insert(), address_id=6, user_id=6, address="addr5"
- )
- conn.execute(users.insert(), user_id=7, user_name="fido")
- conn.execute(
- addresses.insert(), address_id=7, user_id=7, address="addr5"
- )
@classmethod
- def teardown_class(cls):
- metadata.drop_all()
+ def insert_data(cls, connection):
+ users, addresses = cls.tables("users", "addresses")
+ conn = connection
+ conn.execute(users.insert(), user_id=1, user_name="john")
+ conn.execute(
+ addresses.insert(), address_id=1, user_id=1, address="addr1"
+ )
+ conn.execute(users.insert(), user_id=2, user_name="jack")
+ conn.execute(
+ addresses.insert(), address_id=2, user_id=2, address="addr1"
+ )
+ conn.execute(users.insert(), user_id=3, user_name="ed")
+ conn.execute(
+ addresses.insert(), address_id=3, user_id=3, address="addr2"
+ )
+ conn.execute(users.insert(), user_id=4, user_name="wendy")
+ conn.execute(
+ addresses.insert(), address_id=4, user_id=4, address="addr3"
+ )
+ conn.execute(users.insert(), user_id=5, user_name="laura")
+ conn.execute(
+ addresses.insert(), address_id=5, user_id=5, address="addr4"
+ )
+ conn.execute(users.insert(), user_id=6, user_name="ralph")
+ conn.execute(
+ addresses.insert(), address_id=6, user_id=6, address="addr5"
+ )
+ conn.execute(users.insert(), user_id=7, user_name="fido")
+ conn.execute(
+ addresses.insert(), address_id=7, user_id=7, address="addr5"
+ )
def test_select_limit(self, connection):
+ users, addresses = self.tables("users", "addresses")
r = connection.execute(
users.select(limit=3, order_by=[users.c.user_id])
).fetchall()
def test_select_limit_offset(self, connection):
"""Test the interaction between limit and offset"""
+ users, addresses = self.tables("users", "addresses")
+
r = connection.execute(
users.select(limit=3, offset=2, order_by=[users.c.user_id])
).fetchall()
def test_select_distinct_limit(self, connection):
"""Test the interaction between limit and distinct"""
+ users, addresses = self.tables("users", "addresses")
+
r = sorted(
[
x[0]
def test_select_distinct_offset(self, connection):
"""Test the interaction between distinct and offset"""
+ users, addresses = self.tables("users", "addresses")
+
r = sorted(
[
x[0]
def test_select_distinct_limit_offset(self, connection):
"""Test the interaction between limit and limit/offset"""
+ users, addresses = self.tables("users", "addresses")
+
r = connection.execute(
select(addresses.c.address)
.order_by(addresses.c.address)
self.assert_(r[0] != r[1] and r[1] != r[2], repr(r))
-class CompoundTest(fixtures.TestBase):
+class CompoundTest(fixtures.TablesTest):
"""test compound statements like UNION, INTERSECT, particularly their
ability to nest on different databases."""
__backend__ = True
+ run_inserts = "each"
+
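+ # run_inserts="each" reloads the fixture rows before every test so each
+ # test sees identical starting data
+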
@classmethod
- def setup_class(cls):
- global metadata, t1, t2, t3
- metadata = MetaData(testing.db)
- t1 = Table(
+ def define_tables(cls, metadata):
+ Table(
"t1",
metadata,
Column(
Column("col3", String(40)),
Column("col4", String(30)),
)
- t2 = Table(
+ Table(
"t2",
metadata,
Column(
Column("col3", String(40)),
Column("col4", String(30)),
)
- t3 = Table(
+ Table(
"t3",
metadata,
Column(
Column("col3", String(40)),
Column("col4", String(30)),
)
- metadata.create_all()
-
- with testing.db.begin() as conn:
- conn.execute(
- t1.insert(),
- [
- dict(col2="t1col2r1", col3="aaa", col4="aaa"),
- dict(col2="t1col2r2", col3="bbb", col4="bbb"),
- dict(col2="t1col2r3", col3="ccc", col4="ccc"),
- ],
- )
- conn.execute(
- t2.insert(),
- [
- dict(col2="t2col2r1", col3="aaa", col4="bbb"),
- dict(col2="t2col2r2", col3="bbb", col4="ccc"),
- dict(col2="t2col2r3", col3="ccc", col4="aaa"),
- ],
- )
- conn.execute(
- t3.insert(),
- [
- dict(col2="t3col2r1", col3="aaa", col4="ccc"),
- dict(col2="t3col2r2", col3="bbb", col4="aaa"),
- dict(col2="t3col2r3", col3="ccc", col4="bbb"),
- ],
- )
-
- @engines.close_first
- def teardown(self):
- pass
@classmethod
- def teardown_class(cls):
- metadata.drop_all()
+ def insert_data(cls, connection):
+ t1, t2, t3 = cls.tables("t1", "t2", "t3")
+ conn = connection
+ conn.execute(
+ t1.insert(),
+ [
+ dict(col2="t1col2r1", col3="aaa", col4="aaa"),
+ dict(col2="t1col2r2", col3="bbb", col4="bbb"),
+ dict(col2="t1col2r3", col3="ccc", col4="ccc"),
+ ],
+ )
+ conn.execute(
+ t2.insert(),
+ [
+ dict(col2="t2col2r1", col3="aaa", col4="bbb"),
+ dict(col2="t2col2r2", col3="bbb", col4="ccc"),
+ dict(col2="t2col2r3", col3="ccc", col4="aaa"),
+ ],
+ )
+ conn.execute(
+ t3.insert(),
+ [
+ dict(col2="t3col2r1", col3="aaa", col4="ccc"),
+ dict(col2="t3col2r2", col3="bbb", col4="aaa"),
+ dict(col2="t3col2r3", col3="ccc", col4="bbb"),
+ ],
+ )
def _fetchall_sorted(self, executed):
return sorted([tuple(row) for row in executed.fetchall()])
@testing.requires.subqueries
def test_union(self, connection):
+ t1, t2, t3 = self.tables("t1", "t2", "t3")
(s1, s2) = (
select(t1.c.col3.label("col3"), t1.c.col4.label("col4")).where(
t1.c.col2.in_(["t1col2r1", "t1col2r2"]),
@testing.fails_on("firebird", "doesn't like ORDER BY with UNIONs")
def test_union_ordered(self, connection):
+ t1, t2, t3 = self.tables("t1", "t2", "t3")
+
(s1, s2) = (
select(t1.c.col3.label("col3"), t1.c.col4.label("col4")).where(
t1.c.col2.in_(["t1col2r1", "t1col2r2"]),
@testing.fails_on("firebird", "doesn't like ORDER BY with UNIONs")
@testing.requires.subqueries
def test_union_ordered_alias(self, connection):
+ t1, t2, t3 = self.tables("t1", "t2", "t3")
+
(s1, s2) = (
select(t1.c.col3.label("col3"), t1.c.col4.label("col4")).where(
t1.c.col2.in_(["t1col2r1", "t1col2r2"]),
)
@testing.fails_on("sqlite", "FIXME: unknown")
def test_union_all(self, connection):
+ t1, t2, t3 = self.tables("t1", "t2", "t3")
+
e = union_all(
select(t1.c.col3),
union(select(t1.c.col3), select(t1.c.col3)),
"""
+ t1, t2, t3 = self.tables("t1", "t2", "t3")
+
u = union(select(t1.c.col3), select(t1.c.col3)).alias()
e = union_all(select(t1.c.col3), select(u.c.col3))
@testing.requires.intersect
def test_intersect(self, connection):
+ t1, t2, t3 = self.tables("t1", "t2", "t3")
+
i = intersect(
select(t2.c.col3, t2.c.col4),
select(t2.c.col3, t2.c.col4).where(t2.c.col4 == t3.c.col3),
@testing.requires.except_
@testing.fails_on("sqlite", "Can't handle this style of nesting")
def test_except_style1(self, connection):
+ t1, t2, t3 = self.tables("t1", "t2", "t3")
+
e = except_(
union(
select(t1.c.col3, t1.c.col4),
# same as style1, but add alias().select() to the except_().
# sqlite can handle it now.
+ t1, t2, t3 = self.tables("t1", "t2", "t3")
+
e = except_(
union(
select(t1.c.col3, t1.c.col4),
@testing.requires.except_
def test_except_style3(self, connection):
# aaa, bbb, ccc - (aaa, bbb, ccc - (ccc)) = ccc
+ t1, t2, t3 = self.tables("t1", "t2", "t3")
+
e = except_(
select(t1.c.col3), # aaa, bbb, ccc
except_(
@testing.requires.except_
def test_except_style4(self, connection):
# aaa, bbb, ccc - (aaa, bbb, ccc - (ccc)) = ccc
+ t1, t2, t3 = self.tables("t1", "t2", "t3")
+
e = except_(
select(t1.c.col3), # aaa, bbb, ccc
except_(
"sqlite can't handle leading parenthesis",
)
def test_intersect_unions(self, connection):
+ t1, t2, t3 = self.tables("t1", "t2", "t3")
+
u = intersect(
union(select(t1.c.col3, t1.c.col4), select(t3.c.col3, t3.c.col4)),
union(select(t2.c.col3, t2.c.col4), select(t3.c.col3, t3.c.col4))
@testing.requires.intersect
def test_intersect_unions_2(self, connection):
+ t1, t2, t3 = self.tables("t1", "t2", "t3")
+
u = intersect(
union(select(t1.c.col3, t1.c.col4), select(t3.c.col3, t3.c.col4))
.alias()
@testing.requires.intersect
def test_intersect_unions_3(self, connection):
+ t1, t2, t3 = self.tables("t1", "t2", "t3")
+
u = intersect(
select(t2.c.col3, t2.c.col4),
union(
@testing.requires.intersect
def test_composite_alias(self, connection):
+ t1, t2, t3 = self.tables("t1", "t2", "t3")
+
ua = intersect(
select(t2.c.col3, t2.c.col4),
union(
eq_(found, wanted)
-t1 = t2 = t3 = None
-
-
-class JoinTest(fixtures.TestBase):
+class JoinTest(fixtures.TablesTest):
"""Tests join execution.
__backend__ = True
@classmethod
- def setup_class(cls):
- global metadata
- global t1, t2, t3
-
- metadata = MetaData(testing.db)
- t1 = Table(
+ def define_tables(cls, metadata):
+ Table(
"t1",
metadata,
Column("t1_id", Integer, primary_key=True),
Column("name", String(32)),
)
- t2 = Table(
+ Table(
"t2",
metadata,
Column("t2_id", Integer, primary_key=True),
Column("t1_id", Integer, ForeignKey("t1.t1_id")),
Column("name", String(32)),
)
- t3 = Table(
+ Table(
"t3",
metadata,
Column("t3_id", Integer, primary_key=True),
Column("t2_id", Integer, ForeignKey("t2.t2_id")),
Column("name", String(32)),
)
- metadata.drop_all()
- metadata.create_all()
-
- with testing.db.begin() as conn:
- # t1.10 -> t2.20 -> t3.30
- # t1.11 -> t2.21
- # t1.12
- conn.execute(
- t1.insert(),
- {"t1_id": 10, "name": "t1 #10"},
- {"t1_id": 11, "name": "t1 #11"},
- {"t1_id": 12, "name": "t1 #12"},
- )
- conn.execute(
- t2.insert(),
- {"t2_id": 20, "t1_id": 10, "name": "t2 #20"},
- {"t2_id": 21, "t1_id": 11, "name": "t2 #21"},
- )
- conn.execute(
- t3.insert(), {"t3_id": 30, "t2_id": 20, "name": "t3 #30"}
- )
@classmethod
- def teardown_class(cls):
- metadata.drop_all()
+ def insert_data(cls, connection):
+ conn = connection
+ # t1.10 -> t2.20 -> t3.30
+ # t1.11 -> t2.21
+ # t1.12
+ t1, t2, t3 = cls.tables("t1", "t2", "t3")
+
+ conn.execute(
+ t1.insert(),
+ {"t1_id": 10, "name": "t1 #10"},
+ {"t1_id": 11, "name": "t1 #11"},
+ {"t1_id": 12, "name": "t1 #12"},
+ )
+ conn.execute(
+ t2.insert(),
+ {"t2_id": 20, "t1_id": 10, "name": "t2 #20"},
+ {"t2_id": 21, "t1_id": 11, "name": "t2 #21"},
+ )
+ conn.execute(t3.insert(), {"t3_id": 30, "t2_id": 20, "name": "t3 #30"})
def assertRows(self, statement, expected):
"""Execute a statement and assert that rows returned equal expected."""
def test_join_x1(self):
"""Joins t1->t2."""
+ t1, t2, t3 = self.tables("t1", "t2", "t3")
for criteria in (t1.c.t1_id == t2.c.t1_id, t2.c.t1_id == t1.c.t1_id):
expr = select(t1.c.t1_id, t2.c.t2_id).select_from(
def test_join_x2(self):
"""Joins t1->t2->t3."""
+ t1, t2, t3 = self.tables("t1", "t2", "t3")
for criteria in (t1.c.t1_id == t2.c.t1_id, t2.c.t1_id == t1.c.t1_id):
expr = select(t1.c.t1_id, t2.c.t2_id).select_from(
def test_outerjoin_x1(self):
"""Outer joins t1->t2."""
+ t1, t2, t3 = self.tables("t1", "t2", "t3")
for criteria in (t2.c.t2_id == t3.c.t2_id, t3.c.t2_id == t2.c.t2_id):
expr = select(t1.c.t1_id, t2.c.t2_id).select_from(
def test_outerjoin_x2(self):
"""Outer joins t1->t2,t3."""
+ t1, t2, t3 = self.tables("t1", "t2", "t3")
for criteria in (t2.c.t2_id == t3.c.t2_id, t3.c.t2_id == t2.c.t2_id):
expr = select(t1.c.t1_id, t2.c.t2_id, t3.c.t3_id).select_from(
def test_outerjoin_where_x2_t1(self):
"""Outer joins t1->t2,t3, where on t1."""
+ t1, t2, t3 = self.tables("t1", "t2", "t3")
for criteria in (t2.c.t2_id == t3.c.t2_id, t3.c.t2_id == t2.c.t2_id):
expr = (
def test_outerjoin_where_x2_t2(self):
"""Outer joins t1->t2,t3, where on t2."""
+ t1, t2, t3 = self.tables("t1", "t2", "t3")
for criteria in (t2.c.t2_id == t3.c.t2_id, t3.c.t2_id == t2.c.t2_id):
expr = (
def test_outerjoin_where_x2_t3(self):
"""Outer joins t1->t2,t3, where on t3."""
+ t1, t2, t3 = self.tables("t1", "t2", "t3")
for criteria in (t2.c.t2_id == t3.c.t2_id, t3.c.t2_id == t2.c.t2_id):
expr = (
def test_outerjoin_where_x2_t1t3(self):
"""Outer joins t1->t2,t3, where on t1 and t3."""
+ t1, t2, t3 = self.tables("t1", "t2", "t3")
+
for criteria in (t2.c.t2_id == t3.c.t2_id, t3.c.t2_id == t2.c.t2_id):
expr = (
select(t1.c.t1_id, t2.c.t2_id, t3.c.t3_id)
def test_outerjoin_where_x2_t1t2(self):
"""Outer joins t1->t2,t3, where on t1 and t2."""
+ t1, t2, t3 = self.tables("t1", "t2", "t3")
+
for criteria in (t2.c.t2_id == t3.c.t2_id, t3.c.t2_id == t2.c.t2_id):
expr = (
select(t1.c.t1_id, t2.c.t2_id, t3.c.t3_id)
def test_outerjoin_where_x2_t1t2t3(self):
"""Outer joins t1->t2,t3, where on t1, t2 and t3."""
+ t1, t2, t3 = self.tables("t1", "t2", "t3")
for criteria in (t2.c.t2_id == t3.c.t2_id, t3.c.t2_id == t2.c.t2_id):
expr = (
def test_mixed(self):
"""Joins t1->t2, outer t2->t3."""
+ t1, t2, t3 = self.tables("t1", "t2", "t3")
for criteria in (t2.c.t2_id == t3.c.t2_id, t3.c.t2_id == t2.c.t2_id):
expr = select(t1.c.t1_id, t2.c.t2_id, t3.c.t3_id).select_from(
def test_mixed_where(self):
"""Joins t1->t2, outer t2->t3, plus a where on each table in turn."""
+ t1, t2, t3 = self.tables("t1", "t2", "t3")
for criteria in (t2.c.t2_id == t3.c.t2_id, t3.c.t2_id == t2.c.t2_id):
expr = (
self.assertRows(expr, [(10, 20, 30)])
-metadata = flds = None
-
-
-class OperatorTest(fixtures.TestBase):
+class OperatorTest(fixtures.TablesTest):
__backend__ = True
@classmethod
- def setup_class(cls):
- global metadata, flds
- metadata = MetaData(testing.db)
- flds = Table(
+ def define_tables(cls, metadata):
+ Table(
"flds",
metadata,
Column(
Column("intcol", Integer),
Column("strcol", String(50)),
)
- metadata.create_all()
-
- with testing.db.begin() as conn:
- conn.execute(
- flds.insert(),
- [dict(intcol=5, strcol="foo"), dict(intcol=13, strcol="bar")],
- )
@classmethod
- def teardown_class(cls):
- metadata.drop_all()
+ def insert_data(cls, connection):
+ flds = cls.tables.flds
+ connection.execute(
+ flds.insert(),
+ [dict(intcol=5, strcol="foo"), dict(intcol=13, strcol="bar")],
+ )
# TODO: seems like more tests warranted for this setup.
def test_modulo(self, connection):
+ flds = self.tables.flds
+
eq_(
connection.execute(
select(flds.c.intcol % 3).order_by(flds.c.idcol)
@testing.requires.window_functions
def test_over(self, connection):
+ flds = self.tables.flds
+
eq_(
connection.execute(
select(
class QuoteTest(fixtures.TestBase, AssertsCompiledSQL):
__dialect__ = "default"
- @classmethod
- def setup_class(cls):
- # TODO: figure out which databases/which identifiers allow special
- # characters to be used, such as: spaces, quote characters,
- # punctuation characters, set up tests for those as well.
-
- global table1, table2
- metadata = MetaData(testing.db)
-
- table1 = Table(
- "WorstCase1",
- metadata,
- Column("lowercase", Integer, primary_key=True),
- Column("UPPERCASE", Integer),
- Column("MixedCase", Integer),
- Column("ASC", Integer, key="a123"),
- )
- table2 = Table(
- "WorstCase2",
- metadata,
- Column("desc", Integer, primary_key=True, key="d123"),
- Column("Union", Integer, key="u123"),
- Column("MixedCase", Integer),
- )
-
@testing.crashes("oracle", "FIXME: unknown, verify not fails_on")
@testing.requires.subqueries
def test_labels(self):
where the "UPPERCASE" column of "LaLa" doesn't exist.
"""
+ metadata = MetaData()
+ table1 = Table(
+ "WorstCase1",
+ metadata,
+ Column("lowercase", Integer, primary_key=True),
+ Column("UPPERCASE", Integer),
+ Column("MixedCase", Integer),
+ Column("ASC", Integer, key="a123"),
+ )
+ Table(
+ "WorstCase2",
+ metadata,
+ Column("desc", Integer, primary_key=True, key="d123"),
+ Column("Union", Integer, key="u123"),
+ Column("MixedCase", Integer),
+ )
+
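+ # these Table objects feed compile-only assertions, so an unbound
+ # MetaData suffices and no create/drop is required
+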
self.assert_compile(
table1.select(distinct=True).alias("LaLa").select(),
"SELECT "
def test_drop_ordering(self):
with self.sql_execution_asserter(testing.db) as asserter:
- self.metadata.drop_all(checkfirst=False)
+ self.tables_test_metadata.drop_all(testing.db, checkfirst=False)
asserter.assert_(
AllOf(
assert p1.compare_values(p1.copy_value(obj), obj)
-meta = None
-
-
class CallableTest(fixtures.TestBase):
- @classmethod
- def setup_class(cls):
- global meta
- meta = MetaData(testing.db)
-
- @classmethod
- def teardown_class(cls):
- meta.drop_all()
-
- def test_callable_as_arg(self):
+ @testing.provide_metadata
+ def test_callable_as_arg(self, connection):
ucode = util.partial(Unicode)
- thing_table = Table("thing", meta, Column("name", ucode(20)))
+ thing_table = Table("thing", self.metadata, Column("name", ucode(20)))
assert isinstance(thing_table.c.name.type, Unicode)
- thing_table.create()
+ thing_table.create(connection)
- def test_callable_as_kwarg(self):
+ @testing.provide_metadata
+ def test_callable_as_kwarg(self, connection):
ucode = util.partial(Unicode)
thang_table = Table(
- "thang", meta, Column("name", type_=ucode(20), primary_key=True)
+ "thang",
+ self.metadata,
+ Column("name", type_=ucode(20), primary_key=True),
)
assert isinstance(thang_table.c.name.type, Unicode)
- thang_table.create()
+ thang_table.create(connection)
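+ # @testing.provide_metadata supplies a fresh self.metadata per test and
+ # drops any tables created on it afterwards, replacing the old
+ # module-level ``meta`` global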
class LiteralTest(fixtures.TestBase):