--- /dev/null
+.. change::
+ :tags: bug, reflection
+ :tickets: 5684
+
+ Fixed bug where the now-deprecated ``autoload`` parameter was being used
+ internally within the reflection routines when a related table was
+ reflected.
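
For reference, a minimal sketch of the non-deprecated reflection pattern these
changes migrate to, assuming SQLAlchemy 1.4 (the table name and in-memory
SQLite URL below are placeholders for illustration): passing an Engine or
Connection via ``autoload_with`` triggers reflection on its own, so neither
user code nor the internal routines need the ``autoload=True`` flag or a
bound ``MetaData``::

    from sqlalchemy import Column, Integer, MetaData, Table, create_engine

    engine = create_engine("sqlite://")

    # create a table so there is something to reflect
    setup_md = MetaData()
    Table("some_table", setup_md, Column("id", Integer, primary_key=True))
    setup_md.create_all(engine)

    # reflection is triggered by ``autoload_with`` alone; the deprecated
    # ``autoload=True`` flag and bound MetaData are no longer involved
    reflected = Table("some_table", MetaData(), autoload_with=engine)
    assert "id" in reflected.c
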
+
sa_schema.Table(
referred_table,
table.metadata,
- autoload=True,
schema=referred_schema,
autoload_with=self.bind,
_extend_on=_extend_on,
sa_schema.Table(
referred_table,
table.metadata,
- autoload=True,
autoload_with=self.bind,
schema=sa_schema.BLANK_SCHEMA,
_extend_on=_extend_on,
table.metadata,
extend_existing=True,
autoload_replace=False,
- autoload=True,
autoload_with=inspector,
schema=table.schema,
)
else:
args = table_args
+ autoload_with = dict_.get("__autoload_with__")
+ if autoload_with:
+ table_kw["autoload_with"] = autoload_with
+
autoload = dict_.get("__autoload__")
if autoload:
table_kw["autoload"] = True
"""Compile and execute this :class:`.Executable`."""
e = self.bind
if e is None:
- label = getattr(self, "description", self.__class__.__name__)
+ label = (
+ getattr(self, "description", None) or self.__class__.__name__
+ )
msg = (
"This %s is not directly bound to a Connection or Engine. "
"Use the .execute() method of a Connection or Engine "
def test_reflect_lowercase_forced_tables(self):
- m2 = MetaData(testing.db)
- t2_ref = Table(quoted_name("t2", quote=True), m2, autoload=True)
+ m2 = MetaData()
+ t2_ref = Table(
+ quoted_name("t2", quote=True), m2, autoload_with=testing.db
+ )
t1_ref = m2.tables["t1"]
assert t2_ref.c.t1id.references(t1_ref.c.id)
- m3 = MetaData(testing.db)
- m3.reflect(only=lambda name, m: name.lower() in ("t1", "t2"))
+ m3 = MetaData()
+ m3.reflect(
+ testing.db, only=lambda name, m: name.lower() in ("t1", "t2")
+ )
assert m3.tables["t2"].c.t1id.references(m3.tables["t1"].c.id)
def test_get_table_names(self):
"dictionaries only.",
r"The Connection.connect\(\) method is considered legacy",
r".*DefaultGenerator.execute\(\)",
- r"The autoload parameter is deprecated and will be removed ",
- #
#
# bound metadata
#
meta2 = MetaData()
reflected_users = Table(
- "engine_users", meta2, autoload=True, autoload_with=testing.db
+ "engine_users", meta2, autoload_with=testing.db
)
reflected_addresses = Table(
"engine_email_addresses",
meta2,
- autoload=True,
autoload_with=testing.db,
)
self.assert_tables_equal(users, reflected_users)
):
table.create()
- meta2 = MetaData(testing.db)
- table2 = Table("identity_test", meta2, autoload=True)
+ meta2 = MetaData()
+ table2 = Table("identity_test", meta2, autoload_with=testing.db)
eq_(table2.c["col1"].dialect_options["mssql"]["identity_start"], None)
eq_(
table2.c["col1"].dialect_options["mssql"]["identity_increment"],
"bar",
m2,
schema=referred_schema,
- autoload=True,
autoload_with=testing.db,
)
eq_(m2.tables["%s.foo" % referred_schema].schema, referred_schema)
metadata.create_all()
m2 = MetaData()
- t2 = Table("t", m2, autoload=True, autoload_with=testing.db)
+ t2 = Table("t", m2, autoload_with=testing.db)
eq_(set(list(t2.indexes)[0].columns), set([t2.c["x"], t2.c.y]))
metadata.create_all()
m2 = MetaData()
- t2 = Table("t", m2, autoload=True, autoload_with=testing.db)
+ t2 = Table("t", m2, autoload_with=testing.db)
eq_(set(list(t2.indexes)[0].columns), set([t2.c["x, col"], t2.c.y]))
metadata.create_all()
m2 = MetaData()
- t2 = Table("t", m2, autoload=True, autoload_with=testing.db)
+ t2 = Table("t", m2, autoload_with=testing.db)
eq_(set(list(t2.indexes)[0].columns), set([t2.c["x col"], t2.c.y]))
self.assert_(repr(col))
-metadata = None
-
-
class TypeRoundTripTest(
fixtures.TestBase, AssertsExecutionResults, ComparesTables
):
__backend__ = True
- @classmethod
- def setup_class(cls):
- global metadata
- metadata = MetaData(testing.db)
-
- def teardown(self):
- metadata.drop_all()
-
+ @testing.provide_metadata
def test_decimal_notation(self):
+ metadata = self.metadata
numeric_table = Table(
"numeric_table",
metadata,
)
eq_(value, returned)
+ @testing.provide_metadata
def test_float(self):
+ metadata = self.metadata
+
float_table = Table(
"float_table",
metadata,
# todo this should suppress warnings, but it does not
@emits_warning_on("mssql+mxodbc", r".*does not have any indexes.*")
+ @testing.provide_metadata
def test_dates(self):
"Exercise type specification for date types."
(mssql.MSDateTime2, [1], {}, "DATETIME2(1)", [">=", (10,)]),
]
+ metadata = self.metadata
+
table_args = ["test_mssql_dates", metadata]
for index, spec in enumerate(columns):
type_, args, kw, res, requires = spec[0:5]
self.assert_(repr(col))
dates_table.create(checkfirst=True)
reflected_dates = Table(
- "test_mssql_dates", MetaData(testing.db), autoload=True
+ "test_mssql_dates", MetaData(), autoload_with=testing.db
)
for col in reflected_dates.c:
self.assert_types_base(col, dates_table.c[col.key])
@emits_warning_on("mssql+mxodbc", r".*does not have any indexes.*")
@testing.provide_metadata
- def _test_binary_reflection(self, deprecate_large_types):
+ @testing.combinations(
+ ("legacy_large_types", False),
+ ("sql2012_large_types", True, lambda: testing.only_on("mssql >= 11")),
+ id_="ia",
+ )
+ def test_binary_reflection(self, deprecate_large_types):
"Exercise type specification for binary types."
columns = [
binary_table = Table(*table_args)
metadata.create_all()
reflected_binary = Table(
- "test_mssql_binary", MetaData(testing.db), autoload=True
+ "test_mssql_binary", MetaData(), autoload_with=testing.db
)
for col, spec in zip(reflected_binary.c, columns):
eq_(
col.type.length, binary_table.c[col.name].type.length
)
- def test_binary_reflection_legacy_large_types(self):
- self._test_binary_reflection(False)
-
- @testing.only_on("mssql >= 11")
- def test_binary_reflection_sql2012_large_types(self):
- self._test_binary_reflection(True)
-
+ @testing.provide_metadata
def test_autoincrement(self):
+ metadata = self.metadata
Table(
"ai_1",
metadata,
"ai_7",
"ai_8",
]
- mr = MetaData(testing.db)
+ mr = MetaData()
for name in table_names:
- tbl = Table(name, mr, autoload=True)
+ tbl = Table(name, mr, autoload_with=testing.db)
tbl = metadata.tables[name]
# test that the flag itself reflects appropriately
)
m.create_all()
- m2 = MetaData(testing.db)
- tables = [Table("mysql_types", m2, autoload=True)]
+ m2 = MetaData()
+ tables = [Table("mysql_types", m2, autoload_with=testing.db)]
if use_views:
- tables.append(Table("mysql_types_v", m2, autoload=True))
+ tables.append(Table("mysql_types_v", m2, autoload_with=testing.db))
for table in tables:
for i, (reflected_col, spec) in enumerate(zip(table.c, specs)):
def_table = Table(
"mysql_def",
- MetaData(testing.db),
+ MetaData(),
Column(
"c1",
VARCHAR(10, collation="utf8_unicode_ci"),
),
),
)
- def_table.create()
+
+ def_table.create(testing.db)
try:
- reflected = Table("mysql_def", MetaData(testing.db), autoload=True)
+ reflected = Table(
+ "mysql_def", MetaData(), autoload_with=testing.db
+ )
finally:
- def_table.drop()
+ def_table.drop(testing.db)
assert def_table.c.c1.server_default.arg == ""
assert def_table.c.c2.server_default.arg == "0"
assert def_table.c.c3.server_default.arg == "abc"
r"CURRENT_TIMESTAMP(\(\))? ON UPDATE CURRENT_TIMESTAMP(\(\))?",
str(reflected.c.c6.server_default.arg).upper(),
)
- reflected.create()
+ reflected.create(testing.db)
try:
reflected2 = Table(
- "mysql_def", MetaData(testing.db), autoload=True
+ "mysql_def", MetaData(), autoload_with=testing.db
)
finally:
- reflected.drop()
+ reflected.drop(testing.db)
assert str(reflected2.c.c1.server_default.arg) == "''"
assert str(reflected2.c.c2.server_default.arg) == "'0'"
assert str(reflected2.c.c3.server_default.arg) == "'abc'"
def_table = Table(
"mysql_def",
- MetaData(testing.db),
+ MetaData(),
Column("c1", Integer()),
comment=comment,
**kwargs
)
- def_table.create()
- try:
- reflected = Table("mysql_def", MetaData(testing.db), autoload=True)
- finally:
- def_table.drop()
+ with testing.db.connect() as conn:
+ def_table.create(conn)
+ try:
+ reflected = Table("mysql_def", MetaData(), autoload_with=conn)
+ finally:
+ def_table.drop(conn)
if testing.against("mariadb"):
assert def_table.kwargs["mariadb_engine"] == "MEMORY"
# This is explicitly ignored when reflecting schema.
# assert reflected.kwargs['mysql_auto_increment'] == '5'
+ @testing.provide_metadata
def test_reflection_on_include_columns(self):
"""Test reflection of include_columns to be sure they respect case."""
+ meta = self.metadata
case_table = Table(
"mysql_case",
- MetaData(testing.db),
+ meta,
Column("c1", String(10)),
Column("C2", String(10)),
Column("C3", String(10)),
)
- try:
- case_table.create()
- reflected = Table(
- "mysql_case",
- MetaData(testing.db),
- autoload=True,
- include_columns=["c1", "C2"],
- )
- for t in case_table, reflected:
- assert "c1" in t.c.keys()
- assert "C2" in t.c.keys()
- reflected2 = Table(
- "mysql_case",
- MetaData(testing.db),
- autoload=True,
- include_columns=["c1", "c2"],
- )
- assert "c1" in reflected2.c.keys()
- for c in ["c2", "C2", "C3"]:
- assert c not in reflected2.c.keys()
- finally:
- case_table.drop()
+ case_table.create(testing.db)
+ reflected = Table(
+ "mysql_case",
+ MetaData(),
+ autoload_with=testing.db,
+ include_columns=["c1", "C2"],
+ )
+ for t in case_table, reflected:
+ assert "c1" in t.c.keys()
+ assert "C2" in t.c.keys()
+ reflected2 = Table(
+ "mysql_case",
+ MetaData(),
+ autoload_with=testing.db,
+ include_columns=["c1", "c2"],
+ )
+ assert "c1" in reflected2.c.keys()
+ for c in ["c2", "C2", "C3"]:
+ assert c not in reflected2.c.keys()
+ @testing.provide_metadata
def test_autoincrement(self):
- meta = MetaData(testing.db)
- try:
- Table(
- "ai_1",
- meta,
- Column("int_y", Integer, primary_key=True, autoincrement=True),
- Column("int_n", Integer, DefaultClause("0"), primary_key=True),
- mysql_engine="MyISAM",
- )
- Table(
- "ai_2",
- meta,
- Column("int_y", Integer, primary_key=True, autoincrement=True),
- Column("int_n", Integer, DefaultClause("0"), primary_key=True),
- mysql_engine="MyISAM",
- )
- Table(
- "ai_3",
- meta,
- Column(
- "int_n",
- Integer,
- DefaultClause("0"),
- primary_key=True,
- autoincrement=False,
- ),
- Column("int_y", Integer, primary_key=True, autoincrement=True),
- mysql_engine="MyISAM",
- )
- Table(
- "ai_4",
- meta,
- Column(
- "int_n",
- Integer,
- DefaultClause("0"),
- primary_key=True,
- autoincrement=False,
- ),
- Column(
- "int_n2",
- Integer,
- DefaultClause("0"),
- primary_key=True,
- autoincrement=False,
- ),
- mysql_engine="MyISAM",
- )
- Table(
- "ai_5",
- meta,
- Column("int_y", Integer, primary_key=True, autoincrement=True),
- Column(
- "int_n",
- Integer,
- DefaultClause("0"),
- primary_key=True,
- autoincrement=False,
- ),
- mysql_engine="MyISAM",
- )
- Table(
- "ai_6",
- meta,
- Column("o1", String(1), DefaultClause("x"), primary_key=True),
- Column("int_y", Integer, primary_key=True, autoincrement=True),
- mysql_engine="MyISAM",
- )
- Table(
- "ai_7",
- meta,
- Column("o1", String(1), DefaultClause("x"), primary_key=True),
- Column("o2", String(1), DefaultClause("x"), primary_key=True),
- Column("int_y", Integer, primary_key=True, autoincrement=True),
- mysql_engine="MyISAM",
- )
- Table(
- "ai_8",
- meta,
- Column("o1", String(1), DefaultClause("x"), primary_key=True),
- Column("o2", String(1), DefaultClause("x"), primary_key=True),
- mysql_engine="MyISAM",
- )
- meta.create_all()
-
- table_names = [
- "ai_1",
- "ai_2",
- "ai_3",
- "ai_4",
- "ai_5",
- "ai_6",
- "ai_7",
- "ai_8",
- ]
- mr = MetaData(testing.db)
- mr.reflect(only=table_names)
+ meta = self.metadata
+ Table(
+ "ai_1",
+ meta,
+ Column("int_y", Integer, primary_key=True, autoincrement=True),
+ Column("int_n", Integer, DefaultClause("0"), primary_key=True),
+ mysql_engine="MyISAM",
+ )
+ Table(
+ "ai_2",
+ meta,
+ Column("int_y", Integer, primary_key=True, autoincrement=True),
+ Column("int_n", Integer, DefaultClause("0"), primary_key=True),
+ mysql_engine="MyISAM",
+ )
+ Table(
+ "ai_3",
+ meta,
+ Column(
+ "int_n",
+ Integer,
+ DefaultClause("0"),
+ primary_key=True,
+ autoincrement=False,
+ ),
+ Column("int_y", Integer, primary_key=True, autoincrement=True),
+ mysql_engine="MyISAM",
+ )
+ Table(
+ "ai_4",
+ meta,
+ Column(
+ "int_n",
+ Integer,
+ DefaultClause("0"),
+ primary_key=True,
+ autoincrement=False,
+ ),
+ Column(
+ "int_n2",
+ Integer,
+ DefaultClause("0"),
+ primary_key=True,
+ autoincrement=False,
+ ),
+ mysql_engine="MyISAM",
+ )
+ Table(
+ "ai_5",
+ meta,
+ Column("int_y", Integer, primary_key=True, autoincrement=True),
+ Column(
+ "int_n",
+ Integer,
+ DefaultClause("0"),
+ primary_key=True,
+ autoincrement=False,
+ ),
+ mysql_engine="MyISAM",
+ )
+ Table(
+ "ai_6",
+ meta,
+ Column("o1", String(1), DefaultClause("x"), primary_key=True),
+ Column("int_y", Integer, primary_key=True, autoincrement=True),
+ mysql_engine="MyISAM",
+ )
+ Table(
+ "ai_7",
+ meta,
+ Column("o1", String(1), DefaultClause("x"), primary_key=True),
+ Column("o2", String(1), DefaultClause("x"), primary_key=True),
+ Column("int_y", Integer, primary_key=True, autoincrement=True),
+ mysql_engine="MyISAM",
+ )
+ Table(
+ "ai_8",
+ meta,
+ Column("o1", String(1), DefaultClause("x"), primary_key=True),
+ Column("o2", String(1), DefaultClause("x"), primary_key=True),
+ mysql_engine="MyISAM",
+ )
+ meta.create_all(testing.db)
+
+ table_names = [
+ "ai_1",
+ "ai_2",
+ "ai_3",
+ "ai_4",
+ "ai_5",
+ "ai_6",
+ "ai_7",
+ "ai_8",
+ ]
+ mr = MetaData()
+ mr.reflect(testing.db, only=table_names)
+ with testing.db.begin() as conn:
for tbl in [mr.tables[name] for name in table_names]:
for c in tbl.c:
if c.name.startswith("int_y"):
assert c.autoincrement
elif c.name.startswith("int_n"):
assert not c.autoincrement
- tbl.insert().execute()
+ conn.execute(tbl.insert())
if "int_y" in tbl.c:
- assert select(tbl.c.int_y).scalar() == 1
- assert list(tbl.select().execute().first()).count(1) == 1
+ assert conn.scalar(select(tbl.c.int_y)) == 1
+ assert (
+ list(conn.execute(tbl.select()).first()).count(1) == 1
+ )
else:
- assert 1 not in list(tbl.select().execute().first())
- finally:
- meta.drop_all()
+ assert 1 not in list(conn.execute(tbl.select()).first())
@testing.provide_metadata
def test_view_reflection(self):
# reflection here favors the unique index, as that's the
# more "official" MySQL construct
- reflected = Table("mysql_uc", MetaData(testing.db), autoload=True)
+ reflected = Table("mysql_uc", MetaData(), autoload_with=testing.db)
indexes = dict((i.name, i) for i in reflected.indexes)
constraints = set(uc.name for uc in reflected.constraints)
Column("pid", Integer, ForeignKey("%s.parent.pid" % schema)),
schema=schema,
)
- meta.create_all()
- parent.insert().execute({"pid": 1})
- child.insert().execute({"cid": 1, "pid": 1})
- eq_(child.select().execute().fetchall(), [(1, 1)])
+ with testing.db.begin() as conn:
+ meta.create_all(conn)
+ conn.execute(parent.insert(), {"pid": 1})
+ conn.execute(child.insert(), {"cid": 1, "pid": 1})
+ eq_(conn.execute(child.select()).fetchall(), [(1, 1)])
def test_reflect_alt_table_owner_local_synonym(self):
- meta = MetaData(testing.db)
+ meta = MetaData()
parent = Table(
"%s_pt" % testing.config.test_schema,
meta,
- autoload=True,
+ autoload_with=testing.db,
oracle_resolve_synonyms=True,
)
self.assert_compile(
"%(test_schema)s_pt.data FROM %(test_schema)s_pt"
% {"test_schema": testing.config.test_schema},
)
- select(parent).execute().fetchall()
def test_reflect_alt_synonym_owner_local_table(self):
- meta = MetaData(testing.db)
+ meta = MetaData()
parent = Table(
"local_table",
meta,
- autoload=True,
+ autoload_with=testing.db,
oracle_resolve_synonyms=True,
schema=testing.config.test_schema,
)
"FROM %(test_schema)s.local_table"
% {"test_schema": testing.config.test_schema},
)
- select(parent).execute().fetchall()
@testing.provide_metadata
def test_create_same_names_implicit_schema(self):
eq_(child.select().execute().fetchall(), [(1, 1)])
def test_reflect_alt_owner_explicit(self):
- meta = MetaData(testing.db)
+ meta = MetaData()
parent = Table(
- "parent", meta, autoload=True, schema=testing.config.test_schema
+ "parent",
+ meta,
+ autoload_with=testing.db,
+ schema=testing.config.test_schema,
)
child = Table(
- "child", meta, autoload=True, schema=testing.config.test_schema
+ "child",
+ meta,
+ autoload_with=testing.db,
+ schema=testing.config.test_schema,
)
self.assert_compile(
"%(test_schema)s.parent.id = %(test_schema)s.child.parent_id"
% {"test_schema": testing.config.test_schema},
)
- select(parent, child).select_from(
- parent.join(child)
- ).execute().fetchall()
+ with testing.db.connect() as conn:
+ conn.execute(
+ select(parent, child).select_from(parent.join(child))
+ ).fetchall()
# check table comment (#5146)
eq_(parent.comment, "my table comment")
% {"test_schema": testing.config.test_schema},
)
try:
- meta = MetaData(testing.db)
- lcl = Table("localtable", meta, autoload=True)
+ meta = MetaData()
+ lcl = Table("localtable", meta, autoload_with=testing.db)
parent = meta.tables["%s.parent" % testing.config.test_schema]
self.assert_compile(
parent.join(lcl),
"localtable.parent_id"
% {"test_schema": testing.config.test_schema},
)
- select(parent, lcl).select_from(
- parent.join(lcl)
- ).execute().fetchall()
finally:
exec_sql(testing.db, "DROP TABLE localtable")
def test_reflect_alt_owner_implicit(self):
- meta = MetaData(testing.db)
+ meta = MetaData()
parent = Table(
- "parent", meta, autoload=True, schema=testing.config.test_schema
+ "parent",
+ meta,
+ autoload_with=testing.db,
+ schema=testing.config.test_schema,
)
child = Table(
- "child", meta, autoload=True, schema=testing.config.test_schema
+ "child",
+ meta,
+ autoload_with=testing.db,
+ schema=testing.config.test_schema,
)
self.assert_compile(
parent.join(child),
"%(test_schema)s.child.parent_id"
% {"test_schema": testing.config.test_schema},
)
- select(parent, child).select_from(
- parent.join(child)
- ).execute().fetchall()
+ with testing.db.connect() as conn:
+ conn.execute(
+ select(parent, child).select_from(parent.join(child))
+ ).fetchall()
def test_reflect_alt_owner_synonyms(self):
exec_sql(
"%s.ptable(id))" % testing.config.test_schema,
)
try:
- meta = MetaData(testing.db)
+ meta = MetaData()
lcl = Table(
- "localtable", meta, autoload=True, oracle_resolve_synonyms=True
+ "localtable",
+ meta,
+ autoload_with=testing.db,
+ oracle_resolve_synonyms=True,
)
parent = meta.tables["%s.ptable" % testing.config.test_schema]
self.assert_compile(
"localtable.parent_id"
% {"test_schema": testing.config.test_schema},
)
- select(parent, lcl).select_from(
- parent.join(lcl)
- ).execute().fetchall()
+ with testing.db.connect() as conn:
+ conn.execute(
+ select(parent, lcl).select_from(parent.join(lcl))
+ ).fetchall()
finally:
exec_sql(testing.db, "DROP TABLE localtable")
def test_reflect_remote_synonyms(self):
- meta = MetaData(testing.db)
+ meta = MetaData()
parent = Table(
"ptable",
meta,
- autoload=True,
+ autoload_with=testing.db,
schema=testing.config.test_schema,
oracle_resolve_synonyms=True,
)
child = Table(
"ctable",
meta,
- autoload=True,
+ autoload_with=testing.db,
schema=testing.config.test_schema,
oracle_resolve_synonyms=True,
)
"%(test_schema)s.ctable.parent_id"
% {"test_schema": testing.config.test_schema},
)
- select(parent, child).select_from(
- parent.join(child)
- ).execute().fetchall()
class ConstraintTest(fixtures.TablesTest):
)
metadata.create_all()
- m2 = MetaData(testing.db)
+ m2 = MetaData()
- tbl = Table("test_compress", m2, autoload=True)
+ tbl = Table("test_compress", m2, autoload_with=testing.db)
# Don't hardcode the exact value, but it must be non-empty
assert tbl.dialect_options["oracle"]["compress"]
)
metadata.create_all()
- m2 = MetaData(testing.db)
+ m2 = MetaData()
- tbl = Table("test_compress", m2, autoload=True)
+ tbl = Table("test_compress", m2, autoload_with=testing.db)
assert tbl.dialect_options["oracle"]["compress"] == "OLTP"
t = Table(
"test_table_syn",
m,
- autoload=True,
autoload_with=testing.db,
oracle_resolve_synonyms=True,
)
m = self.metadata
Table("oracle_types", m, *columns)
m.create_all()
- m2 = MetaData(testing.db)
- table = Table("oracle_types", m2, autoload=True)
+ m2 = MetaData()
+ table = Table("oracle_types", m2, autoload_with=testing.db)
for i, (reflected_col, spec) in enumerate(zip(table.c, specs)):
expected_spec = spec[1]
reflected_type = reflected_col.type
Column("numbercol2", oracle.NUMBER(9, 3)),
Column("numbercol3", oracle.NUMBER),
)
- t1.create()
- t1.insert().execute(
- intcol=1,
- numericcol=5.2,
- floatcol1=6.5,
- floatcol2=8.5,
- doubleprec=9.5,
- numbercol1=12,
- numbercol2=14.85,
- numbercol3=15.76,
- )
+ with testing.db.begin() as conn:
+ t1.create(conn)
+ conn.execute(
+ t1.insert(),
+ dict(
+ intcol=1,
+ numericcol=5.2,
+ floatcol1=6.5,
+ floatcol2=8.5,
+ doubleprec=9.5,
+ numbercol1=12,
+ numbercol2=14.85,
+ numbercol3=15.76,
+ ),
+ )
- m2 = MetaData(testing.db)
- t2 = Table("t1", m2, autoload=True)
+ m2 = MetaData()
+ t2 = Table("t1", m2, autoload_with=testing.db)
- for row in (
- t1.select().execute().first(),
- t2.select().execute().first(),
- ):
- for i, (val, type_) in enumerate(
- (
- (1, int),
- (decimal.Decimal("5.2"), decimal.Decimal),
- (6.5, float),
- (8.5, float),
- (9.5, float),
- (12, int),
- (decimal.Decimal("14.85"), decimal.Decimal),
- (15.76, float),
- )
+ with testing.db.connect() as conn:
+ for row in (
+ conn.execute(t1.select()).first(),
+ conn.execute(t2.select()).first(),
):
- eq_(row[i], val)
- assert isinstance(row[i], type_), "%r is not %r" % (
- row[i],
- type_,
- )
+ for i, (val, type_) in enumerate(
+ (
+ (1, int),
+ (decimal.Decimal("5.2"), decimal.Decimal),
+ (6.5, float),
+ (8.5, float),
+ (9.5, float),
+ (12, int),
+ (decimal.Decimal("14.85"), decimal.Decimal),
+ (15.76, float),
+ )
+ ):
+ eq_(row[i], val)
+ assert isinstance(row[i], type_), "%r is not %r" % (
+ row[i],
+ type_,
+ )
@testing.provide_metadata
def test_numeric_infinity_float(self, connection):
Column("d5", oracle.INTERVAL(second_precision=5)),
)
metadata.create_all()
- m = MetaData(testing.db)
- t1 = Table("date_types", m, autoload=True)
+ m = MetaData()
+ t1 = Table("date_types", m, autoload_with=testing.db)
assert isinstance(t1.c.d1.type, oracle.DATE)
assert isinstance(t1.c.d1.type, DateTime)
assert isinstance(t1.c.d2.type, oracle.DATE)
def _dont_test_reflect_all_types_schema(self):
types_table = Table(
"all_types",
- MetaData(testing.db),
+ MetaData(),
Column("owner", String(30), primary_key=True),
Column("type_name", String(30), primary_key=True),
- autoload=True,
+ autoload_with=testing.db,
oracle_resolve_synonyms=True,
)
for row in types_table.select().execute().fetchall():
Column("c_data", sqltypes.NCHAR(20)),
)
metadata.create_all()
- m2 = MetaData(testing.db)
- t2 = Table("tnv", m2, autoload=True)
+ m2 = MetaData()
+ t2 = Table("tnv", m2, autoload_with=testing.db)
assert isinstance(t2.c.nv_data.type, sqltypes.NVARCHAR)
assert isinstance(t2.c.c_data.type, sqltypes.NCHAR)
metadata = self.metadata
Table("tnv", metadata, Column("data", sqltypes.Unicode(255)))
metadata.create_all()
- m2 = MetaData(testing.db)
- t2 = Table("tnv", m2, autoload=True)
+ m2 = MetaData()
+ t2 = Table("tnv", m2, autoload_with=testing.db)
assert isinstance(t2.c.data.type, sqltypes.VARCHAR)
if testing.against("oracle+cx_oracle"):
)
data = u("m’a réveillé.")
- t2.insert().execute(data=data)
- res = t2.select().execute().first()["data"]
- eq_(res, data)
- assert isinstance(res, util.text_type)
+ with testing.db.begin() as conn:
+ conn.execute(t2.insert(), {"data": data})
+ res = conn.execute(t2.select()).first().data
+ eq_(res, data)
+ assert isinstance(res, util.text_type)
@testing.provide_metadata
def test_char_length(self):
Column("c4", NCHAR(180)),
)
t1.create()
- m2 = MetaData(testing.db)
- t2 = Table("t1", m2, autoload=True)
+ m2 = MetaData()
+ t2 = Table("t1", m2, autoload_with=testing.db)
eq_(t2.c.c1.type.length, 50)
eq_(t2.c.c2.type.length, 250)
eq_(t2.c.c3.type.length, 200)
Column("date2", DateTime(timezone=False)),
)
metadata.create_all()
- m2 = MetaData(testing.db)
- t2 = Table("pgdate", m2, autoload=True)
+ m2 = MetaData()
+ t2 = Table("pgdate", m2, autoload_with=testing.db)
assert t2.c.date1.type.timezone is True
assert t2.c.date2.type.timezone is False
# test the same series of events using a reflected version of
# the table
- m2 = MetaData(engine)
- table = Table(table.name, m2, autoload=True)
+ m2 = MetaData()
+ table = Table(table.name, m2, autoload_with=engine)
with self.sql_execution_asserter(engine) as asserter:
with engine.connect() as conn:
# test the same series of events using a reflected version of
# the table
- m2 = MetaData(engine)
- table = Table(table.name, m2, autoload=True)
+ m2 = MetaData()
+ table = Table(table.name, m2, autoload_with=engine)
with self.sql_execution_asserter(engine) as asserter:
with engine.connect() as conn:
# test the same series of events using a reflected version of
# the table
- m2 = MetaData(engine)
- table = Table(table.name, m2, autoload=True)
+ m2 = MetaData()
+ table = Table(table.name, m2, autoload_with=engine)
with engine.connect() as conn:
conn.execute(table.insert(), {"id": 30, "data": "d1"})
sa.event.listen(metadata, "before_drop", sa.DDL(ddl))
def test_foreign_table_is_reflected(self):
- metadata = MetaData(testing.db)
- table = Table("test_foreigntable", metadata, autoload=True)
+ metadata = MetaData()
+ table = Table("test_foreigntable", metadata, autoload_with=testing.db)
eq_(
set(table.columns.keys()),
set(["id", "data"]),
)
def test_mview_is_reflected(self):
- metadata = MetaData(testing.db)
- table = Table("test_mview", metadata, autoload=True)
+ metadata = MetaData()
+ table = Table("test_mview", metadata, autoload_with=testing.db)
eq_(
set(table.columns.keys()),
set(["id", "data"]),
)
def test_mview_select(self):
- metadata = MetaData(testing.db)
- table = Table("test_mview", metadata, autoload=True)
- eq_(table.select().execute().fetchall(), [(89, "d1")])
+ metadata = MetaData()
+ table = Table("test_mview", metadata, autoload_with=testing.db)
+ with testing.db.connect() as conn:
+ eq_(conn.execute(table.select()).fetchall(), [(89, "d1")])
def test_get_view_names(self):
insp = inspect(testing.db)
con.exec_driver_sql('DROP SCHEMA "SomeSchema"')
def test_table_is_reflected(self):
- metadata = MetaData(testing.db)
- table = Table("testtable", metadata, autoload=True)
+ metadata = MetaData()
+ table = Table("testtable", metadata, autoload_with=testing.db)
eq_(
set(table.columns.keys()),
set(["question", "answer"]),
assert isinstance(table.c.answer.type, Integer)
def test_domain_is_reflected(self):
- metadata = MetaData(testing.db)
- table = Table("testtable", metadata, autoload=True)
+ metadata = MetaData()
+ table = Table("testtable", metadata, autoload_with=testing.db)
eq_(
str(table.columns.answer.server_default.arg),
"42",
), "Expected reflected column to not be nullable."
def test_enum_domain_is_reflected(self):
- metadata = MetaData(testing.db)
- table = Table("enum_test", metadata, autoload=True)
+ metadata = MetaData()
+ table = Table("enum_test", metadata, autoload_with=testing.db)
eq_(table.c.data.type.enums, ["test"])
def test_array_domain_is_reflected(self):
- metadata = MetaData(testing.db)
- table = Table("array_test", metadata, autoload=True)
+ metadata = MetaData()
+ table = Table("array_test", metadata, autoload_with=testing.db)
eq_(table.c.data.type.__class__, ARRAY)
eq_(table.c.data.type.item_type.__class__, INTEGER)
def test_quoted_remote_schema_domain_is_reflected(self):
- metadata = MetaData(testing.db)
- table = Table("quote_test", metadata, autoload=True)
+ metadata = MetaData()
+ table = Table("quote_test", metadata, autoload_with=testing.db)
eq_(table.c.data.type.__class__, INTEGER)
def test_table_is_reflected_test_schema(self):
- metadata = MetaData(testing.db)
+ metadata = MetaData()
table = Table(
- "testtable", metadata, autoload=True, schema="test_schema"
+ "testtable",
+ metadata,
+ autoload_with=testing.db,
+ schema="test_schema",
)
eq_(
set(table.columns.keys()),
assert isinstance(table.c.anything.type, Integer)
def test_schema_domain_is_reflected(self):
- metadata = MetaData(testing.db)
+ metadata = MetaData()
table = Table(
- "testtable", metadata, autoload=True, schema="test_schema"
+ "testtable",
+ metadata,
+ autoload_with=testing.db,
+ schema="test_schema",
)
eq_(
str(table.columns.answer.server_default.arg),
), "Expected reflected column to be nullable."
def test_crosschema_domain_is_reflected(self):
- metadata = MetaData(testing.db)
- table = Table("crosschema", metadata, autoload=True)
+ metadata = MetaData()
+ table = Table("crosschema", metadata, autoload_with=testing.db)
eq_(
str(table.columns.answer.server_default.arg),
"0",
ischema_names = base.PGDialect.ischema_names
base.PGDialect.ischema_names = {}
try:
- m2 = MetaData(testing.db)
- assert_raises(exc.SAWarning, Table, "testtable", m2, autoload=True)
+ m2 = MetaData()
+ assert_raises(
+ exc.SAWarning, Table, "testtable", m2, autoload_with=testing.db
+ )
@testing.emits_warning("Did not recognize type")
def warns():
- m3 = MetaData(testing.db)
- t3 = Table("testtable", m3, autoload=True)
+ m3 = MetaData()
+ t3 = Table("testtable", m3, autoload_with=testing.db)
assert t3.c.answer.type.__class__ == sa.types.NullType
finally:
PrimaryKeyConstraint("p2", "p1"),
)
meta1.create_all()
- meta2 = MetaData(testing.db)
- subject = Table("subject", meta2, autoload=True)
+ meta2 = MetaData()
+ subject = Table("subject", meta2, autoload_with=testing.db)
eq_(subject.primary_key.columns.keys(), ["p2", "p1"])
@testing.provide_metadata
Column("ref", Integer, ForeignKey("subject.id$")),
)
meta1.create_all()
- meta2 = MetaData(testing.db)
- subject = Table("subject", meta2, autoload=True)
- referer = Table("referer", meta2, autoload=True)
+ meta2 = MetaData()
+ subject = Table("subject", meta2, autoload_with=testing.db)
+ referer = Table("referer", meta2, autoload_with=testing.db)
self.assert_(
(subject.c["id$"] == referer.c.ref).compare(
subject.join(referer).onclause
).create(testing.db)
m = MetaData()
- t = Table("t", m, autoload=True, autoload_with=testing.db)
+ t = Table("t", m, autoload_with=testing.db)
eq_(
t.c.x.server_default.arg.text,
"'%s'::character varying" % ("abcd" * 40),
def test_renamed_sequence_reflection(self):
metadata = self.metadata
Table("t", metadata, Column("id", Integer, primary_key=True))
- metadata.create_all()
- m2 = MetaData(testing.db)
- t2 = Table("t", m2, autoload=True, implicit_returning=False)
+ metadata.create_all(testing.db)
+ m2 = MetaData()
+ t2 = Table("t", m2, autoload_with=testing.db, implicit_returning=False)
eq_(t2.c.id.server_default.arg.text, "nextval('t_id_seq'::regclass)")
- r = t2.insert().execute()
- eq_(r.inserted_primary_key, (1,))
+ with testing.db.begin() as conn:
+ r = conn.execute(t2.insert())
+ eq_(r.inserted_primary_key, (1,))
testing.db.connect().execution_options(
autocommit=True
).exec_driver_sql("alter table t_id_seq rename to foobar_id_seq")
- m3 = MetaData(testing.db)
- t3 = Table("t", m3, autoload=True, implicit_returning=False)
+ m3 = MetaData()
+ t3 = Table("t", m3, autoload_with=testing.db, implicit_returning=False)
eq_(
t3.c.id.server_default.arg.text,
"nextval('foobar_id_seq'::regclass)",
)
- r = t3.insert().execute()
- eq_(r.inserted_primary_key, (2,))
+ with testing.db.begin() as conn:
+ r = conn.execute(t3.insert())
+ eq_(r.inserted_primary_key, (2,))
@testing.provide_metadata
def test_altered_type_autoincrement_pk_reflection(self):
testing.db.connect().execution_options(
autocommit=True
).exec_driver_sql("alter table t alter column id type varchar(50)")
- m2 = MetaData(testing.db)
- t2 = Table("t", m2, autoload=True)
+ m2 = MetaData()
+ t2 = Table("t", m2, autoload_with=testing.db)
eq_(t2.c.id.autoincrement, False)
eq_(t2.c.x.autoincrement, False)
testing.db.connect().execution_options(
autocommit=True
).exec_driver_sql("alter table t rename id to t_id")
- m2 = MetaData(testing.db)
- t2 = Table("t", m2, autoload=True)
+ m2 = MetaData()
+ t2 = Table("t", m2, autoload_with=testing.db)
eq_([c.name for c in t2.primary_key], ["t_id"])
@testing.provide_metadata
schema="test_schema",
)
meta1.create_all()
- meta2 = MetaData(testing.db)
+ meta2 = MetaData()
addresses = Table(
- "email_addresses", meta2, autoload=True, schema="test_schema"
+ "email_addresses",
+ meta2,
+ autoload_with=testing.db,
+ schema="test_schema",
)
users = Table("users", meta2, must_exist=True, schema="test_schema")
j = join(users, addresses)
schema="test_schema",
)
meta1.create_all()
- meta2 = MetaData(testing.db)
- subject = Table("subject", meta2, autoload=True)
- referer = Table("referer", meta2, schema="test_schema", autoload=True)
+ meta2 = MetaData()
+ subject = Table("subject", meta2, autoload_with=testing.db)
+ referer = Table(
+ "referer", meta2, schema="test_schema", autoload_with=testing.db
+ )
self.assert_(
(subject.c.id == referer.c.ref).compare(
subject.join(referer).onclause
schema="test_schema",
)
meta1.create_all()
- meta2 = MetaData(testing.db)
+ meta2 = MetaData()
subject = Table(
- "subject", meta2, autoload=True, schema="test_schema_2"
+ "subject", meta2, autoload_with=testing.db, schema="test_schema_2"
+ )
+ referer = Table(
+ "referer", meta2, autoload_with=testing.db, schema="test_schema"
)
- referer = Table("referer", meta2, autoload=True, schema="test_schema")
self.assert_(
(subject.c.id == referer.c.ref).compare(
subject.join(referer).onclause
subject = Table(
"subject",
meta2,
- autoload=True,
+ autoload_with=testing.db,
schema="test_schema_2",
postgresql_ignore_search_path=True,
)
referer = Table(
"referer",
meta2,
- autoload=True,
+ autoload_with=testing.db,
schema="test_schema",
postgresql_ignore_search_path=True,
)
)
meta1.create_all()
- meta2 = MetaData(testing.db)
+ meta2 = MetaData()
subject = Table(
"subject",
meta2,
- autoload=True,
+ autoload_with=testing.db,
schema=default_schema,
postgresql_ignore_search_path=True,
)
referer = Table(
"referer",
meta2,
- autoload=True,
+ autoload_with=testing.db,
schema=default_schema,
postgresql_ignore_search_path=True,
)
"set search_path to test_schema_2, test_schema, public"
)
- m1 = MetaData(conn)
+ m1 = MetaData()
- Table("some_table", m1, schema="test_schema", autoload=True)
+ Table("some_table", m1, schema="test_schema", autoload_with=conn)
t2_schema = Table(
- "some_other_table", m1, schema="test_schema_2", autoload=True
+ "some_other_table",
+ m1,
+ schema="test_schema_2",
+ autoload_with=conn,
)
- t2_no_schema = Table("some_other_table", m1, autoload=True)
+ t2_no_schema = Table("some_other_table", m1, autoload_with=conn)
- t1_no_schema = Table("some_table", m1, autoload=True)
+ t1_no_schema = Table("some_table", m1, autoload_with=conn)
- m2 = MetaData(conn)
+ m2 = MetaData()
t1_schema_isp = Table(
"some_table",
m2,
schema="test_schema",
- autoload=True,
+ autoload_with=conn,
postgresql_ignore_search_path=True,
)
t2_schema_isp = Table(
"some_other_table",
m2,
schema="test_schema_2",
- autoload=True,
+ autoload_with=conn,
postgresql_ignore_search_path=True,
)
)
def go():
- m2 = MetaData(testing.db)
- t2 = Table("party", m2, autoload=True)
+ m2 = MetaData()
+ t2 = Table("party", m2, autoload_with=testing.db)
assert len(t2.indexes) == 2
# Make sure indexes are in the order we expect them in
# reflect data
with testing.db.connect() as conn:
m2 = MetaData(conn)
- t2 = Table("party", m2, autoload=True)
+ t2 = Table("party", m2, autoload_with=testing.db)
eq_(len(t2.indexes), 3)
"t", self.metadata, Column("x", postgresql.ENUM(name="empty"))
).create(testing.db)
- t = Table("t", MetaData(testing.db), autoload_with=testing.db)
+ t = Table("t", MetaData(), autoload_with=testing.db)
eq_(t.c.x.type.enums, [])
@testing.provide_metadata
self.assert_("uc_a" in constraints)
# reflection corrects for the dupe
- reflected = Table("pgsql_uc", MetaData(testing.db), autoload=True)
+ reflected = Table("pgsql_uc", MetaData(), autoload_with=testing.db)
indexes = set(i.name for i in reflected.indexes)
constraints = set(uc.name for uc in reflected.constraints)
eq_(insp.get_indexes("t"), expected)
# reflection corrects for the dupe
- reflected = Table("t", MetaData(testing.db), autoload=True)
+ reflected = Table("t", MetaData(), autoload_with=testing.db)
eq_(set(reflected.indexes), set())
assert indexes["ix_a"]["unique"]
self.assert_("ix_a" not in constraints)
- reflected = Table("pgsql_uc", MetaData(testing.db), autoload=True)
+ reflected = Table("pgsql_uc", MetaData(), autoload_with=testing.db)
indexes = dict((i.name, i) for i in reflected.indexes)
constraints = set(uc.name for uc in reflected.constraints)
(3, util.u("S’il")),
],
)
- m2 = MetaData(testing.db)
- t2 = Table("table", m2, autoload=True)
+ m2 = MetaData()
+ t2 = Table("table", m2, autoload_with=testing.db)
eq_(
t2.c.value.type.enums,
[util.u("réveillé"), util.u("drôle"), util.u("S’il")],
Column("value2", etype),
)
metadata.create_all()
- m2 = MetaData(testing.db)
- t2 = Table("table", m2, autoload=True)
+ m2 = MetaData()
+ t2 = Table("table", m2, autoload_with=testing.db)
eq_(t2.c.value.type.enums, ["one", "two", "three"])
eq_(t2.c.value.type.name, "onetwothreetype")
eq_(t2.c.value2.type.enums, ["four", "five", "six"])
Column("value2", etype),
)
metadata.create_all()
- m2 = MetaData(testing.db)
- t2 = Table("table", m2, autoload=True)
+ m2 = MetaData()
+ t2 = Table("table", m2, autoload_with=testing.db)
eq_(t2.c.value.type.enums, ["one", "two", "three"])
eq_(t2.c.value.type.name, "onetwothreetype")
eq_(t2.c.value2.type.enums, ["four", "five", "six"])
)
metadata.create_all()
m2 = MetaData()
- t2 = Table("table", m2, autoload_with=testing.db, autoload=True)
+ t2 = Table(
+ "table",
+ m2,
+ autoload_with=testing.db,
+ )
assert isinstance(t2.c.y.type, postgresql.OID)
Column("c6", postgresql.TIMESTAMP(timezone=True, precision=5)),
)
t1.create()
- m2 = MetaData(testing.db)
- t2 = Table("t1", m2, autoload=True)
+ m2 = MetaData()
+ t2 = Table("t1", m2, autoload_with=testing.db)
eq_(t2.c.c1.type.precision, None)
eq_(t2.c.c2.type.precision, 5)
eq_(t2.c.c3.type.precision, 5)
conn.execute(table.insert(), intarr=[4, 5, 6])
def test_reflect_array_column(self):
- metadata2 = MetaData(testing.db)
- tbl = Table("arrtable", metadata2, autoload=True)
+ metadata2 = MetaData()
+ tbl = Table("arrtable", metadata2, autoload_with=testing.db)
assert isinstance(tbl.c.intarr.type, self.ARRAY)
assert isinstance(tbl.c.strarr.type, self.ARRAY)
assert isinstance(tbl.c.intarr.type.item_type, Integer)
special_types_table.c.year_interval.type = postgresql.INTERVAL()
special_types_table.c.month_interval.type = postgresql.INTERVAL()
- m = MetaData(testing.db)
- t = Table("sometable", m, autoload=True)
+ m = MetaData()
+ t = Table("sometable", m, autoload_with=testing.db)
self.assert_tables_equal(special_types_table, t, strict_types=True)
assert t.c.plain_interval.type.precision is None
Column("bitvarying5", postgresql.BIT(5, varying=True)),
)
t1.create()
- m2 = MetaData(testing.db)
- t2 = Table("t1", m2, autoload=True)
+ m2 = MetaData()
+ t2 = Table("t1", m2, autoload_with=testing.db)
eq_(t2.c.bit1.type.length, 1)
eq_(t2.c.bit1.type.varying, False)
eq_(t2.c.bit5.type.length, 5)
Table("t_defaults", m, *columns)
try:
m.create_all()
- m2 = MetaData(db)
- rt = Table("t_defaults", m2, autoload=True)
+ m2 = MetaData()
+ rt = Table("t_defaults", m2, autoload_with=db)
expected = [c[1] for c in specs]
for i, reflected in enumerate(rt.c):
eq_(str(reflected.server_default.arg), expected[i])
def test_default_reflection_2(self):
db = testing.db
- m = MetaData(db)
+ m = MetaData()
expected = ["'my_default'", "0"]
table = """CREATE TABLE r_defaults (
data VARCHAR(40) DEFAULT 'my_default',
)"""
try:
exec_sql(db, table)
- rt = Table("r_defaults", m, autoload=True)
+ rt = Table("r_defaults", m, autoload_with=db)
for i, reflected in enumerate(rt.c):
eq_(str(reflected.server_default.arg), expected[i])
finally:
)"""
try:
exec_sql(db, table)
- m1 = MetaData(db)
- t1 = Table("r_defaults", m1, autoload=True)
+ m1 = MetaData()
+ t1 = Table("r_defaults", m1, autoload_with=db)
exec_sql(db, "DROP TABLE r_defaults")
- t1.create()
- m2 = MetaData(db)
- t2 = Table("r_defaults", m2, autoload=True)
+ t1.create(db)
+ m2 = MetaData()
+ t2 = Table("r_defaults", m2, autoload_with=db)
self.assert_compile(
CreateTable(t2),
"CREATE TABLE r_defaults (data VARCHAR(40) "
)
""",
)
- table1 = Table("django_admin_log", metadata, autoload=True)
- table2 = Table("django_content_type", metadata, autoload=True)
+ table1 = Table("django_admin_log", metadata, autoload_with=testing.db)
+ table2 = Table(
+ "django_content_type", metadata, autoload_with=testing.db
+ )
j = table1.join(table2)
assert j.onclause.compare(table1.c.content_type_id == table2.c.id)
# )
# ''')
- table1 = Table(r'"a"', metadata, autoload=True)
+ table1 = Table(r'"a"', metadata, autoload_with=testing.db)
assert '"id"' in table1.c
- # table2 = Table(r'"b"', metadata, autoload=True)
- # j = table1.join(table2)
- # assert j.onclause.compare(table1.c['"id"']
- # == table2.c['"aid"'])
-
@testing.provide_metadata
def test_description_encoding(self, connection):
# amazingly, pysqlite seems to still deliver cursor.description
alt_master = Table(
"sqlite_master",
meta,
- autoload=True,
autoload_with=self.conn,
schema="test_schema",
)
self._fixture()
m2 = MetaData()
- c2 = Table("created", m2, autoload=True, autoload_with=self.conn)
+ c2 = Table("created", m2, autoload_with=self.conn)
eq_(len(c2.c), 2)
def test_crud(self):
@classmethod
def setup_class(cls):
global metadata, cattable, matchtable
- metadata = MetaData(testing.db)
+ metadata = MetaData()
exec_sql(
testing.db,
"""
)
""",
)
- cattable = Table("cattable", metadata, autoload=True)
+ cattable = Table("cattable", metadata, autoload_with=testing.db)
exec_sql(
testing.db,
"""
)
""",
)
- matchtable = Table("matchtable", metadata, autoload=True)
- metadata.create_all()
- cattable.insert().execute(
- [
- {"id": 1, "description": "Python"},
- {"id": 2, "description": "Ruby"},
- ]
- )
- matchtable.insert().execute(
- [
- {
- "id": 1,
- "title": "Agile Web Development with Rails",
- "category_id": 2,
- },
- {"id": 2, "title": "Dive Into Python", "category_id": 1},
- {
- "id": 3,
- "title": "Programming Matz's Ruby",
- "category_id": 2,
- },
- {
- "id": 4,
- "title": "The Definitive Guide to Django",
- "category_id": 1,
- },
- {"id": 5, "title": "Python in a Nutshell", "category_id": 1},
- ]
- )
+ matchtable = Table("matchtable", metadata, autoload_with=testing.db)
+ with testing.db.begin() as conn:
+ metadata.create_all(conn)
+
+ conn.execute(
+ cattable.insert(),
+ [
+ {"id": 1, "description": "Python"},
+ {"id": 2, "description": "Ruby"},
+ ],
+ )
+ conn.execute(
+ matchtable.insert(),
+ [
+ {
+ "id": 1,
+ "title": "Agile Web Development with Rails",
+ "category_id": 2,
+ },
+ {"id": 2, "title": "Dive Into Python", "category_id": 1},
+ {
+ "id": 3,
+ "title": "Programming Matz's Ruby",
+ "category_id": 2,
+ },
+ {
+ "id": 4,
+ "title": "The Definitive Guide to Django",
+ "category_id": 1,
+ },
+ {
+ "id": 5,
+ "title": "Python in a Nutshell",
+ "category_id": 1,
+ },
+ ],
+ )
@classmethod
def teardown_class(cls):
- metadata.drop_all()
+ metadata.drop_all(testing.db)
def test_expression(self):
self.assert_compile(
dialect=sqlite.dialect(),
)
- def test_simple_match(self):
- results = (
+ def test_simple_match(self, connection):
+ results = connection.execute(
matchtable.select()
.where(matchtable.c.title.match("python"))
.order_by(matchtable.c.id)
- .execute()
- .fetchall()
- )
+ ).fetchall()
eq_([2, 5], [r.id for r in results])
- def test_simple_prefix_match(self):
- results = (
- matchtable.select()
- .where(matchtable.c.title.match("nut*"))
- .execute()
- .fetchall()
- )
+ def test_simple_prefix_match(self, connection):
+ results = connection.execute(
+ matchtable.select().where(matchtable.c.title.match("nut*"))
+ ).fetchall()
eq_([5], [r.id for r in results])
- def test_or_match(self):
- results2 = (
+ def test_or_match(self, connection):
+ results2 = connection.execute(
matchtable.select()
.where(matchtable.c.title.match("nutshell OR ruby"))
.order_by(matchtable.c.id)
- .execute()
- .fetchall()
- )
+ ).fetchall()
eq_([3, 5], [r.id for r in results2])
- def test_and_match(self):
- results2 = (
- matchtable.select()
- .where(matchtable.c.title.match("python nutshell"))
- .execute()
- .fetchall()
- )
+ def test_and_match(self, connection):
+ results2 = connection.execute(
+ matchtable.select().where(
+ matchtable.c.title.match("python nutshell")
+ )
+ ).fetchall()
eq_([5], [r.id for r in results2])
- def test_match_across_joins(self):
- results = (
+ def test_match_across_joins(self, connection):
+ results = connection.execute(
matchtable.select()
.where(
and_(
)
)
.order_by(matchtable.c.id)
- .execute()
- .fetchall()
- )
+ ).fetchall()
eq_([1, 3], [r.id for r in results])
def test_reflect_tables_fk_no_colref(self):
meta = MetaData()
- a = Table("a", meta, autoload=True, autoload_with=testing.db)
- b = Table("b", meta, autoload=True, autoload_with=testing.db)
+ a = Table("a", meta, autoload_with=testing.db)
+ b = Table("b", meta, autoload_with=testing.db)
assert b.c.id.references(a.c.id)
meta2 = MetaData()
reflected_users = Table(
- "engine_users", meta2, autoload=True, autoload_with=testing.db
+ "engine_users", meta2, autoload_with=testing.db
)
reflected_addresses = Table(
"engine_email_addresses",
meta2,
- autoload=True,
autoload_with=testing.db,
)
self.assert_tables_equal(users, reflected_users)
meta.create_all()
meta2 = MetaData()
t1r, t2r, t3r = [
- Table(x, meta2, autoload=True, autoload_with=testing.db)
+ Table(x, meta2, autoload_with=testing.db)
for x in ("t1", "t2", "t3")
]
assert t1r.c.t2id.references(t2r.c.id)
is_true(t1.c.t2id.references(t2.c.id))
def test_nonexistent(self):
- meta = MetaData(testing.db)
+ meta = MetaData()
assert_raises(
- sa.exc.NoSuchTableError, Table, "nonexistent", meta, autoload=True
+ sa.exc.NoSuchTableError,
+ Table,
+ "nonexistent",
+ meta,
+ autoload_with=testing.db,
)
assert "nonexistent" not in meta.tables
*[Column(n, sa.String(30)) for n in ["a", "b", "c", "d", "e", "f"]]
)
meta.create_all()
- meta2 = MetaData(testing.db)
+ meta2 = MetaData()
foo = Table(
- "foo", meta2, autoload=True, include_columns=["b", "f", "e"]
+ "foo",
+ meta2,
+ autoload_with=testing.db,
+ include_columns=["b", "f", "e"],
)
# test that cols come back in original order
eq_([c.name for c in foo.c], ["b", "e", "f"])
assert c not in foo.c
# test against a table which is already reflected
- meta3 = MetaData(testing.db)
- foo = Table("foo", meta3, autoload=True)
+ meta3 = MetaData()
+ foo = Table("foo", meta3, autoload_with=testing.db)
foo = Table(
"foo", meta3, include_columns=["b", "f", "e"], extend_existing=True
m2,
old_y,
extend_existing=True,
- autoload=True,
autoload_with=testing.db,
)
eq_(set(t2.columns.keys()), set(["x", "y", "z", "q", "id"]))
"t",
m3,
extend_existing=False,
- autoload=True,
autoload_with=testing.db,
)
eq_(set(t3.columns.keys()), set(["z"]))
m4,
old_y,
extend_existing=True,
- autoload=True,
autoload_replace=False,
autoload_with=testing.db,
)
sa.Index("foobar", t1.c.a, t1.c.b)
sa.Index("bat", t1.c.a)
m.create_all()
- m2 = MetaData(testing.db)
- t2 = Table("t1", m2, autoload=True)
+ m2 = MetaData()
+ t2 = Table("t1", m2, autoload_with=testing.db)
assert len(t2.indexes) == 2
- m2 = MetaData(testing.db)
- t2 = Table("t1", m2, autoload=True, include_columns=["a"])
+ m2 = MetaData()
+ t2 = Table("t1", m2, autoload_with=testing.db, include_columns=["a"])
assert len(t2.indexes) == 1
- m2 = MetaData(testing.db)
- t2 = Table("t1", m2, autoload=True, include_columns=["a", "b"])
+ m2 = MetaData()
+ t2 = Table(
+ "t1", m2, autoload_with=testing.db, include_columns=["a", "b"]
+ )
assert len(t2.indexes) == 2
@testing.provide_metadata
m2 = MetaData()
b2 = Table("b", m2, Column("a_id", Integer, sa.ForeignKey("a.id")))
- a2 = Table("a", m2, autoload=True, autoload_with=testing.db)
+ a2 = Table("a", m2, autoload_with=testing.db)
b2 = Table(
"b",
m2,
extend_existing=True,
- autoload=True,
autoload_with=testing.db,
autoload_replace=False,
)
m2 = MetaData()
b2 = Table("b", m2, Column("a_id", Integer, sa.ForeignKey("a.id")))
- a2 = Table("a", m2, autoload=True, autoload_with=testing.db)
+ a2 = Table("a", m2, autoload_with=testing.db)
b2 = Table(
"b",
m2,
extend_existing=True,
- autoload=True,
autoload_with=testing.db,
autoload_replace=False,
)
m2 = MetaData()
b2 = Table("b", m2, Column("a_id", Integer))
- a2 = Table("a", m2, autoload=True, autoload_with=testing.db)
+ a2 = Table("a", m2, autoload_with=testing.db)
b2 = Table(
"b",
m2,
extend_existing=True,
- autoload=True,
autoload_with=testing.db,
autoload_replace=False,
)
Table(
"a",
m2,
- autoload=True,
autoload_with=testing.db,
autoload_replace=False,
extend_existing=True,
mysql_engine="InnoDB",
)
meta.create_all()
- m2 = MetaData(testing.db)
- t1a = Table("test", m2, autoload=True)
+ m2 = MetaData()
+ t1a = Table("test", m2, autoload_with=testing.db)
assert t1a._autoincrement_column is t1a.c.id
- t2a = Table("test2", m2, autoload=True)
+ t2a = Table("test2", m2, autoload_with=testing.db)
assert t2a._autoincrement_column is None
@skip("sqlite")
)
table.create()
- meta2 = MetaData(testing.db)
+ meta2 = MetaData()
table = Table(
"override_test",
meta2,
Column("col2", sa.Unicode()),
Column("col4", sa.String(30)),
- autoload=True,
+ autoload_with=testing.db,
)
self.assert_(isinstance(table.c.col1.type, sa.Integer))
)
table.create()
- meta2 = MetaData(testing.db)
+ meta2 = MetaData()
table = Table(
"override_test",
meta2,
Column("col1", sa.Integer, primary_key=True),
- autoload=True,
+ autoload_with=testing.db,
)
eq_(list(table.primary_key), [table.c.col1])
)
meta.create_all()
- meta2 = MetaData(testing.db)
+ meta2 = MetaData()
a2 = Table(
"addresses",
meta2,
Column(
"id", sa.Integer, sa.ForeignKey("users.id"), primary_key=True
),
- autoload=True,
+ autoload_with=testing.db,
)
- u2 = Table("users", meta2, autoload=True)
+ u2 = Table("users", meta2, autoload_with=testing.db)
assert list(a2.primary_key) == [a2.c.id]
assert list(u2.primary_key) == [u2.c.id]
assert u2.join(a2).onclause.compare(u2.c.id == a2.c.id)
- meta3 = MetaData(testing.db)
- u3 = Table("users", meta3, autoload=True)
+ meta3 = MetaData()
+ u3 = Table("users", meta3, autoload_with=testing.db)
a3 = Table(
"addresses",
meta3,
Column(
"id", sa.Integer, sa.ForeignKey("users.id"), primary_key=True
),
- autoload=True,
+ autoload_with=testing.db,
)
assert list(a3.primary_key) == [a3.c.id]
)
meta.create_all()
- meta2 = MetaData(testing.db)
+ meta2 = MetaData()
a2 = Table(
"addresses",
meta2,
Column("user_id", sa.Integer, sa.ForeignKey("users.id")),
- autoload=True,
+ autoload_with=testing.db,
)
- u2 = Table("users", meta2, autoload=True)
+ u2 = Table("users", meta2, autoload_with=testing.db)
assert len(a2.c.user_id.foreign_keys) == 1
assert len(a2.foreign_keys) == 1
assert [c.parent for c in a2.foreign_keys] == [a2.c.user_id]
assert [c.parent for c in a2.c.user_id.foreign_keys] == [a2.c.user_id]
assert list(a2.c.user_id.foreign_keys)[0].parent is a2.c.user_id
assert u2.join(a2).onclause.compare(u2.c.id == a2.c.user_id)
- meta3 = MetaData(testing.db)
+ meta3 = MetaData()
- u3 = Table("users", meta3, autoload=True)
+ u3 = Table("users", meta3, autoload_with=testing.db)
a3 = Table(
"addresses",
meta3,
Column("user_id", sa.Integer, sa.ForeignKey("users.id")),
- autoload=True,
+ autoload_with=testing.db,
)
assert u3.join(a3).onclause.compare(u3.c.id == a3.c.user_id)
- meta4 = MetaData(testing.db)
+ meta4 = MetaData()
u4 = Table(
"users",
meta4,
Column("id", sa.Integer, key="u_id", primary_key=True),
- autoload=True,
+ autoload_with=testing.db,
)
a4 = Table(
Column(
"user_id", sa.Integer, sa.ForeignKey("users.u_id"), key="id"
),
- autoload=True,
+ autoload_with=testing.db,
)
# for the thing happening here with the column collection,
c1 = Column("x", sa.Integer, primary_key=True)
c2 = Column("y", sa.Integer, primary_key=True)
f1 = sa.ForeignKeyConstraint(["x", "y"], ["a.x", "a.y"])
- b1 = Table(
- "b", meta2, c1, c2, f1, autoload=True, autoload_with=testing.db
- )
+ b1 = Table("b", meta2, c1, c2, f1, autoload_with=testing.db)
assert b1.c.x is c1
assert b1.c.y is c2
Column("y", sa.Integer, sa.ForeignKey("a.x")),
test_needs_fk=True,
)
- meta.create_all()
- m2 = MetaData(testing.db)
+ meta.create_all(testing.db)
+ m2 = MetaData()
a2 = Table(
"a",
m2,
Column("x", sa.Integer, primary_key=True, key="x1"),
- autoload=True,
+ autoload_with=testing.db,
)
- b2 = Table("b", m2, autoload=True)
+ b2 = Table("b", m2, autoload_with=testing.db)
assert a2.join(b2).onclause.compare(a2.c.x1 == b2.c.y)
assert b2.c.y.references(a2.c.x1)
test_needs_fk=True,
)
meta.create_all()
- m2 = MetaData(testing.db)
- a2 = Table("a", m2, include_columns=["z"], autoload=True)
- b2 = Table("b", m2, autoload=True)
+ m2 = MetaData()
+ a2 = Table("a", m2, include_columns=["z"], autoload_with=testing.db)
+ b2 = Table("b", m2, autoload_with=testing.db)
assert_raises(sa.exc.NoReferencedColumnError, a2.join, b2)
meta.create_all()
meta2 = MetaData()
- table = Table("multi", meta2, autoload=True, autoload_with=testing.db)
- table2 = Table(
- "multi2", meta2, autoload=True, autoload_with=testing.db
- )
+ table = Table("multi", meta2, autoload_with=testing.db)
+ table2 = Table("multi2", meta2, autoload_with=testing.db)
self.assert_tables_equal(multi, table)
self.assert_tables_equal(multi2, table2)
j = sa.join(table, table2)
index_c = sa.Index("else", table_c.c.join)
meta.create_all()
index_c.drop()
- meta2 = MetaData(testing.db)
- Table("select", meta2, autoload=True)
- Table("false", meta2, autoload=True)
- Table("is", meta2, autoload=True)
+ meta2 = MetaData()
+ Table("select", meta2, autoload_with=testing.db)
+ Table("false", meta2, autoload_with=testing.db)
+ Table("is", meta2, autoload_with=testing.db)
@testing.provide_metadata
def _test_reflect_uses_bind(self, fn):
sa.Index("idx1", t1.c.id, unique=True)
sa.Index("idx2", t1.c.name, t1.c.id, unique=False)
m1.create_all()
- m2 = MetaData(testing.db)
- t2 = Table("party", m2, autoload=True)
+ m2 = MetaData()
+ t2 = Table("party", m2, autoload_with=testing.db)
assert len(t2.indexes) == 3
# Make sure indexes are in the order we expect them in
comment="t1 comment",
)
m1.create_all()
- m2 = MetaData(testing.db)
- t2 = Table("sometable", m2, autoload=True)
+ m2 = MetaData()
+ t2 = Table("sometable", m2, autoload_with=testing.db)
eq_(t2.comment, "t1 comment")
eq_(t2.c.id.comment, "c1 comment")
sa.CheckConstraint("q > 10", name="ck1"),
)
m1.create_all()
- m2 = MetaData(testing.db)
- t2 = Table("x", m2, autoload=True)
+ m2 = MetaData()
+ t2 = Table("x", m2, autoload_with=testing.db)
ck = [
const
with testing.expect_warnings(
"index key 'a' was not located in columns"
):
- t = Table("x", m, autoload=True, autoload_with=testing.db)
+ t = Table("x", m, autoload_with=testing.db)
eq_(list(t.indexes)[0].columns, [t.c.b])
try:
metadata.create_all()
_create_views(metadata.bind, None)
- m2 = MetaData(testing.db)
- users_v = Table("users_v", m2, autoload=True)
- addresses_v = Table("email_addresses_v", m2, autoload=True)
+ m2 = MetaData()
+ users_v = Table("users_v", m2, autoload_with=testing.db)
+ addresses_v = Table(
+ "email_addresses_v", m2, autoload_with=testing.db
+ )
for c1, c2 in zip(users_v.c, users.c):
eq_(c1.name, c2.name)
assert bool(schema)
- metadata = MetaData(engine)
+ metadata = MetaData()
Table(
"table1",
metadata,
schema=schema,
)
try:
- metadata.create_all()
- metadata.create_all(checkfirst=True)
+ metadata.create_all(engine)
+ metadata.create_all(engine, checkfirst=True)
assert len(metadata.tables) == 2
metadata.clear()
- Table("table1", metadata, autoload=True, schema=schema)
- Table("table2", metadata, autoload=True, schema=schema)
+ Table("table1", metadata, autoload_with=engine, schema=schema)
+ Table("table2", metadata, autoload_with=engine, schema=schema)
assert len(metadata.tables) == 2
finally:
- metadata.drop_all()
+ metadata.drop_all(engine)
@testing.requires.schemas
@testing.provide_metadata
assert bool(schema)
- metadata = MetaData(engine, schema=schema)
+ metadata = MetaData(schema=schema)
Table(
"table1",
metadata,
test_needs_fk=True,
)
try:
- metadata.create_all()
- metadata.create_all(checkfirst=True)
+ metadata.create_all(engine)
+ metadata.create_all(engine, checkfirst=True)
assert len(metadata.tables) == 2
metadata.clear()
- Table("table1", metadata, autoload=True)
- Table("table2", metadata, autoload=True)
+ Table("table1", metadata, autoload_with=engine)
+ Table("table2", metadata, autoload_with=engine)
assert len(metadata.tables) == 2
finally:
- metadata.drop_all()
+ metadata.drop_all(engine)
@testing.requires.schemas
@testing.provide_metadata
@testing.requires.denormalized_names
def test_direct_quoting(self):
- m = MetaData(testing.db)
- t = Table("weird_casing", m, autoload=True)
+ m = MetaData()
+ t = Table("weird_casing", m, autoload_with=testing.db)
self.assert_compile(
t.select(),
"SELECT weird_casing.col1, "
def test_reflect_exact_name(self):
m = MetaData()
- t1 = Table("SomeTable", m, autoload=True, autoload_with=testing.db)
+ t1 = Table("SomeTable", m, autoload_with=testing.db)
eq_(t1.name, "SomeTable")
assert t1.c.x is not None
)
def test_reflect_via_fk(self):
m = MetaData()
- t2 = Table(
- "SomeOtherTable", m, autoload=True, autoload_with=testing.db
- )
+ t2 = Table("SomeOtherTable", m, autoload_with=testing.db)
eq_(t2.name, "SomeOtherTable")
assert "SomeTable" in m.tables
@testing.fails_on_everything_except("sqlite", "mysql", "mssql")
def test_reflect_case_insensitive(self):
m = MetaData()
- t2 = Table("sOmEtAbLe", m, autoload=True, autoload_with=testing.db)
+ t2 = Table("sOmEtAbLe", m, autoload_with=testing.db)
eq_(t2.name, "sOmEtAbLe")
# wrapper
from sqlalchemy.schema import Table
- m = MetaData(testing.db)
+ m = MetaData()
def column_reflect(insp, table, column_info):
if column_info["name"] == col:
t = Table(
tablename,
m,
- autoload=True,
+ autoload_with=testing.db,
listeners=[("column_reflect", column_reflect)],
)
assert_(t)
- m = MetaData(testing.db)
+ m = MetaData()
self.event_listen(Table, "column_reflect", column_reflect)
- t2 = Table(tablename, m, autoload=True)
+ t2 = Table(tablename, m, autoload_with=testing.db)
assert_(t2)
def test_override_key(self):
self._do_test("y", {"key": "YXZ"}, assertions)
def test_override_key_fk(self):
- m = MetaData(testing.db)
+ m = MetaData()
def column_reflect(insp, table, column_info):
to_reflect = Table(
"to_reflect",
m,
- autoload=True,
+ autoload_with=testing.db,
listeners=[("column_reflect", column_reflect)],
)
related = Table(
"related",
m,
- autoload=True,
+ autoload_with=testing.db,
listeners=[("column_reflect", column_reflect)],
)
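The ``column_reflect`` hunks above keep the per-``Table`` ``listeners`` hookup and only swap ``autoload=True`` for ``autoload_with``. A self-contained sketch of that hook (the table name and the key-rewriting listener are purely illustrative)::

    from sqlalchemy import Column, Integer, MetaData, Table, create_engine

    engine = create_engine("sqlite://")

    prep = MetaData()
    Table("to_reflect", prep, Column("x", Integer, primary_key=True))
    prep.create_all(engine)

    def column_reflect(inspector, table, column_info):
        # rewrite the attribute key of every reflected column
        column_info["key"] = "attr_%s" % column_info["name"]

    m = MetaData()
    t = Table(
        "to_reflect",
        m,
        autoload_with=engine,
        listeners=[("column_reflect", column_reflect)],
    )
    assert t.c.attr_x.name == "x"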
from sqlalchemy import Index
from sqlalchemy import inspect
from sqlalchemy import Integer
-from sqlalchemy import MetaData
from sqlalchemy import String
from sqlalchemy import testing
from sqlalchemy import UniqueConstraint
assert Bar.__mapper__.primary_key[0] is Bar.__table__.c.id
assert Bar.__mapper__.primary_key[1] is Bar.__table__.c.ex
+ @testing.provide_metadata
def test_with_explicit_autoloaded(self):
- meta = MetaData(testing.db)
+ meta = self.metadata
t1 = Table(
"t1",
meta,
Column("id", String(50), primary_key=True),
Column("data", String(50)),
)
- meta.create_all()
- try:
+ meta.create_all(testing.db)
- class MyObj(Base):
+ class MyObj(Base):
- __table__ = Table("t1", Base.metadata, autoload=True)
+ __table__ = Table("t1", Base.metadata, autoload_with=testing.db)
- sess = create_session()
- m = MyObj(id="someid", data="somedata")
- sess.add(m)
- sess.flush()
- eq_(t1.select().execute().fetchall(), [("someid", "somedata")])
- finally:
- meta.drop_all()
+ sess = create_session()
+ m = MyObj(id="someid", data="somedata")
+ sess.add(m)
+ sess.flush()
+ eq_(t1.select().execute().fetchall(), [("someid", "somedata")])
def test_synonym_for(self):
class User(Base, fixtures.ComparableEntity):
from sqlalchemy import String
from sqlalchemy import testing
from sqlalchemy.orm import clear_mappers
-from sqlalchemy.orm import create_session
from sqlalchemy.orm import decl_api as decl
from sqlalchemy.orm import relationship
from sqlalchemy.testing import assert_raises
from sqlalchemy.testing import eq_
from sqlalchemy.testing import fixtures
+from sqlalchemy.testing.fixtures import create_session
from sqlalchemy.testing.schema import Column
from sqlalchemy.testing.schema import Table
def setup(self):
global Base, registry
- registry = decl.registry(metadata=MetaData(bind=testing.db))
+ registry = decl.registry(metadata=MetaData())
Base = registry.generate_base()
def teardown(self):
class User(Base, fixtures.ComparableEntity):
__tablename__ = "users"
- __autoload__ = True
+ __autoload_with__ = testing.db
addresses = relationship("Address", backref="user")
class Address(Base, fixtures.ComparableEntity):
__tablename__ = "addresses"
- __autoload__ = True
+ __autoload_with__ = testing.db
u1 = User(
name="u1", addresses=[Address(email="one"), Address(email="two")]
class User(Base, fixtures.ComparableEntity):
__tablename__ = "users"
- __autoload__ = True
+ __autoload_with__ = testing.db
nom = Column("name", String(50), key="nom")
addresses = relationship("Address", backref="user")
class Address(Base, fixtures.ComparableEntity):
__tablename__ = "addresses"
- __autoload__ = True
+ __autoload_with__ = testing.db
u1 = User(
nom="u1", addresses=[Address(email="one"), Address(email="two")]
class User(fixtures.ComparableMixin):
__tablename__ = "users"
- __autoload__ = True
+ __autoload_with__ = testing.db
nom = Column("name", String(50), key="nom")
addresses = relationship("Address", backref="user")
class Address(fixtures.ComparableMixin):
__tablename__ = "addresses"
- __autoload__ = True
+ __autoload_with__ = testing.db
u1 = User(
nom="u1", addresses=[Address(email="one"), Address(email="two")]
class IMHandle(Base, fixtures.ComparableEntity):
__tablename__ = "imhandles"
- __autoload__ = True
+ __autoload_with__ = testing.db
user_id = Column("user_id", Integer, ForeignKey("users.id"))
class User(Base, fixtures.ComparableEntity):
__tablename__ = "users"
- __autoload__ = True
+ __autoload_with__ = testing.db
handles = relationship("IMHandle", backref="user")
u1 = User(
Column("col2", String(20)),
)
- metadata.create_all()
- Table("table1", metadata, autoload=True)
+ metadata.create_all(testing.db)
+ Table("table1", metadata, autoload_with=testing.db)
def go():
Table(
tsa.exc.NoSuchTableError,
Table,
"fake_table",
- MetaData(testing.db),
- autoload=True,
+ MetaData(),
+ autoload_with=testing.db,
)
def test_assorted_repr(self):
def test_pickle_via_reflect():
# this is the most common use case, pickling the results of a
# database reflection
- meta2 = MetaData(bind=testing.db)
- t1 = Table("mytable", meta2, autoload=True)
- Table("othertable", meta2, autoload=True)
- Table("has_comments", meta2, autoload=True)
+ meta2 = MetaData()
+ t1 = Table("mytable", meta2, autoload_with=testing.db)
+ Table("othertable", meta2, autoload_with=testing.db)
+ Table("has_comments", meta2, autoload_with=testing.db)
meta3 = pickle.loads(pickle.dumps(meta2))
assert meta3.bind is None
assert meta3.tables["mytable"] is not t1
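The pickle round trip above works precisely because the reflected ``MetaData`` carries no bound engine; a minimal sketch of the same round trip with placeholder names::

    import pickle

    from sqlalchemy import Column, Integer, MetaData, Table, create_engine

    engine = create_engine("sqlite://")

    prep = MetaData()
    Table("mytable", prep, Column("id", Integer, primary_key=True))
    prep.create_all(engine)

    meta2 = MetaData()
    t1 = Table("mytable", meta2, autoload_with=engine)

    meta3 = pickle.loads(pickle.dumps(meta2))
    assert "mytable" in meta3.tables
    assert meta3.tables["mytable"] is not t1  # same definition, new object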
@testing.fixture
def existing_meta(self):
- meta2 = MetaData(testing.db)
- Table("users", meta2, autoload=True)
+ meta2 = MetaData()
+ Table("users", meta2, autoload_with=testing.db)
return meta2
@testing.fixture
def empty_meta(self):
- return MetaData(testing.db)
+ return MetaData()
def test_exception_no_flags(self, existing_meta):
def go():
Table(
- "users", existing_meta, Column("name", Unicode), autoload=True
+ "users",
+ existing_meta,
+ Column("name", Unicode),
+ autoload_with=testing.db,
)
assert_raises_message(
def test_autoload_replace_column(self, empty_meta):
users = Table(
- "users", empty_meta, Column("name", Unicode), autoload=True
+ "users",
+ empty_meta,
+ Column("name", Unicode),
+ autoload_with=testing.db,
)
assert isinstance(users.c.name.type, Unicode)
"users",
existing_meta,
Column("name", Unicode),
- autoload=True,
+ autoload_with=testing.db,
keep_existing=True,
)
assert not isinstance(users.c.name.type, Unicode)
"users",
existing_meta,
quote=True,
- autoload=True,
+ autoload_with=testing.db,
keep_existing=True,
)
assert not users.name.quote
"users",
existing_meta,
Column("foo", Integer),
- autoload=True,
+ autoload_with=testing.db,
keep_existing=True,
)
assert "foo" not in users.c
"users",
empty_meta,
Column("name", Unicode),
- autoload=True,
+ autoload_with=testing.db,
keep_existing=True,
)
assert isinstance(users.c.name.type, Unicode)
)
def test_keep_existing_quote_no_orig(self, empty_meta):
users = Table(
- "users", empty_meta, quote=True, autoload=True, keep_existing=True
+ "users",
+ empty_meta,
+ quote=True,
+ autoload_with=testing.db,
+ keep_existing=True,
)
assert users.name.quote
"users",
empty_meta,
Column("foo", Integer),
- autoload=True,
+ autoload_with=testing.db,
keep_existing=True,
)
assert "foo" in users.c
"users",
existing_meta,
Column("name", Unicode),
- autoload=True,
+ autoload_with=testing.db,
extend_existing=True,
)
assert isinstance(users.c.name.type, Unicode)
"users",
existing_meta,
quote=True,
- autoload=True,
+ autoload_with=testing.db,
extend_existing=True,
)
"users",
existing_meta,
Column("foo", Integer),
- autoload=True,
+ autoload_with=testing.db,
extend_existing=True,
)
assert "foo" in users.c
"users",
empty_meta,
Column("name", Unicode),
- autoload=True,
+ autoload_with=testing.db,
extend_existing=True,
)
assert isinstance(users.c.name.type, Unicode)
"users",
empty_meta,
quote=True,
- autoload=True,
+ autoload_with=testing.db,
extend_existing=True,
)
assert users.name.quote
"users",
empty_meta,
Column("foo", Integer),
- autoload=True,
+ autoload_with=testing.db,
extend_existing=True,
)
assert "foo" in users.c
table2.drop()
def test_reflect(self):
- meta2 = MetaData(testing.db)
- t2 = Table("WorstCase1", meta2, autoload=True, quote=True)
+ meta2 = MetaData()
+ t2 = Table("WorstCase1", meta2, autoload_with=testing.db, quote=True)
assert "lowercase" in t2.c
# indicates the DB returns unquoted names as