"""
return False
- def prep_table_for_batch(self, table):
+ def prep_table_for_batch(self, batch_impl, table):
"""perform any operations needed on a table before a new
one is created to replace it in batch mode.
)
identity_attrs_ignore = ("on_null", "order")
- def prep_table_for_batch(self, table):
+ def prep_table_for_batch(self, batch_impl, table):
+
for constraint in table.constraints:
- if constraint.name is not None:
+ if (
+ constraint.name is not None
+ and constraint.name in batch_impl.named_constraints
+ ):
self.drop_constraint(constraint)
def compare_server_default(
def _create(self, op_impl):
self._transfer_elements_to_new_table()
- op_impl.prep_table_for_batch(self.table)
+ op_impl.prep_table_for_batch(self, self.table)
op_impl.create_table(self.new_table)
try:
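For orientation, here is a rough sketch of the idea behind the new membership
check (hypothetical code, not Alembic's internals; the is_type_bound predicate
and collect_named_constraints helper are made up for illustration).  The
named_constraints mapping made available through batch_impl is assumed to
track only constraints the batch operation genuinely knows to exist on the
table being recreated, so a CHECK constraint generated implicitly by a
datatype, which PostgreSQL may never have created, no longer triggers an
unconditional DROP CONSTRAINT::

    # hypothetical sketch of what a mapping like batch_impl.named_constraints
    # conceptually holds for the table being recreated
    def collect_named_constraints(existing_table, is_type_bound):
        return {
            const.name: const
            for const in existing_table.constraints
            # skip constraints that exist only because a datatype (such as
            # Boolean or a non-native Enum) generates them implicitly; on
            # PostgreSQL these usually were never emitted at all
            if const.name is not None and not is_type_bound(const)
        }

    # prep_table_for_batch() then drops a constraint only when its name
    # appears in this mapping, rather than assuming that every constraint
    # attached to the Table object also exists in the database.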
--- /dev/null
+.. change::
+ :tags: bug, batch
+ :tickets: 773
+
+    Adjusted the PostgreSQL dialect to work more effectively in batch mode
+    with datatypes such as Boolean or non-native Enum that may carry
+    embedded rules to generate CHECK constraints.  These constraints
+    usually will not have been generated on the PostgreSQL backend, yet the
+    PostgreSQL-only "drop constraint" step of the batch process previously
+    assumed unconditionally that they existed; a constraint is now dropped
+    only if it is actually present on the table being recreated.
+
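As a user-facing illustration of the scenario this note describes (a
hypothetical migration sketch; the "accounts" table and its columns are made
up for illustration)::

    from alembic import op
    import sqlalchemy as sa


    def upgrade():
        # "accounts" was created with a column such as
        #     sa.Column("active", sa.Boolean(create_constraint=True))
        # PostgreSQL has a native BOOLEAN type, so no CHECK constraint was
        # ever emitted for that column; the batch recreate previously could
        # try to drop that nonexistent constraint in its PostgreSQL-only
        # prep step, and now skips it.
        with op.batch_alter_table("accounts", recreate="always") as batch_op:
            batch_op.add_column(sa.Column("notes", sa.Text()))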
__only_on__ = "postgresql"
__backend__ = True
+ def _native_boolean_fixture(self):
+ t = Table(
+ "has_native_bool",
+ self.metadata,
+ Column(
+ "x",
+ Boolean(create_constraint=True),
+ server_default="false",
+ nullable=False,
+ ),
+ Column("y", Integer),
+ )
+ t.create(self.conn)
+
def _datetime_server_default_fixture(self):
return func.current_timestamp()
super(
BatchRoundTripPostgresqlTest, self
).test_change_type_boolean_to_int()
+
+ def test_add_col_table_has_native_boolean(self):
+ self._native_boolean_fixture()
+
+        # to ensure test coverage on SQLAlchemy 1.4 and above, force the
+        # create_constraint flag to True even though it defaults to False
+        # in 1.4; this test wants to ensure that the "should create" rule
+        # is consulted.
+ def listen_for_reflect(inspector, table, column_info):
+ if isinstance(column_info["type"], Boolean):
+ column_info["type"].create_constraint = True
+
+ with self.op.batch_alter_table(
+ "has_native_bool",
+ recreate="always",
+ reflect_kwargs={
+ "listeners": [("column_reflect", listen_for_reflect)]
+ },
+ ) as batch_op:
+ batch_op.add_column(Column("data", Integer))
+
+ insp = inspect(config.db)
+
+ eq_(
+ [
+ c["type"]._type_affinity
+ for c in insp.get_columns("has_native_bool")
+ if c["name"] == "data"
+ ],
+ [Integer],
+ )
+ eq_(
+ [
+ c["type"]._type_affinity
+ for c in insp.get_columns("has_native_bool")
+ if c["name"] == "x"
+ ],
+ [Boolean],
+ )