)
-def _make_index(params: Dict[str, Any], conn_table: Table) -> Optional[Index]:
+def _make_index(
+ impl: DefaultImpl, params: Dict[str, Any], conn_table: Table
+) -> Optional[Index]:
exprs: list[Union[Column[Any], TextClause]] = []
sorting = params.get("column_sorting")
item = _IndexColumnSortingOps[operator](item)
exprs.append(item)
ix = sa_schema.Index(
- params["name"], *exprs, unique=params["unique"], _table=conn_table
+ params["name"],
+ *exprs,
+ unique=params["unique"],
+ _table=conn_table,
+ **impl.adjust_reflected_dialect_options(params, "index"),
)
if "duplicates_constraint" in params:
ix.info["duplicates_constraint"] = params["duplicates_constraint"]
def _make_unique_constraint(
- params: Dict[str, Any], conn_table: Table
+ impl: DefaultImpl, params: Dict[str, Any], conn_table: Table
) -> UniqueConstraint:
uq = sa_schema.UniqueConstraint(
*[conn_table.c[cname] for cname in params["column_names"]],
name=params["name"],
+ **impl.adjust_reflected_dialect_options(params, "unique_constraint"),
)
if "duplicates_index" in params:
uq.info["duplicates_index"] = params["duplicates_index"]
inspector = autogen_context.inspector
is_create_table = conn_table is None
is_drop_table = metadata_table is None
+ impl = autogen_context.migration_context.impl
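+ # the impl is retrieved up front so reflected dialect options can be
+ # adjusted when building Index / UniqueConstraint objects below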
# 1a. get raw indexes and unique constraints from metadata ...
if metadata_table is not None:
conn_uniques = set() # type:ignore[assignment]
else:
conn_uniques = { # type:ignore[assignment]
- _make_unique_constraint(uq_def, conn_table)
+ _make_unique_constraint(impl, uq_def, conn_table)
for uq_def in conn_uniques
}
conn_indexes = { # type:ignore[assignment]
index
- for index in (_make_index(ix, conn_table) for ix in conn_indexes)
+ for index in (
+ _make_index(impl, ix, conn_table) for ix in conn_indexes
+ )
if index is not None
}
# 2a. if the dialect dupes unique indexes as unique constraints
# (mysql and oracle), correct for that
- impl = autogen_context.migration_context.impl
if unique_constraints_duplicate_unique_indexes:
_correct_for_uq_duplicates_uix(
conn_uniques,
)
metadata_indexes.discard(idx)
+ def adjust_reflected_dialect_options(
+ self, reflected_object: Dict[str, Any], kind: str
+ ) -> Dict[str, Any]:
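+ # default: pass through the reflected dialect options unchanged;
+ # dialects may override this to drop entries that would otherwise
+ # show up as spurious differences during autogenerate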
+ return reflected_object.get("dialect_options", {})
+
def _compare_identity_options(
attributes, metadata_io, inspector_io, default_io
import re
from typing import Any
+from typing import Dict
from typing import List
from typing import Optional
from typing import TYPE_CHECKING
return diff, ignored, is_alter
+ def adjust_reflected_dialect_options(
+ self, reflected_object: Dict[str, Any], kind: str
+ ) -> Dict[str, Any]:
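+ # drop mssql_include / mssql_clustered entries that were reflected
+ # as empty or unset so they don't register as differences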
+ options: Dict[str, Any]
+ options = reflected_object.get("dialect_options", {}).copy()
+ if not options.get("mssql_include"):
+ options.pop("mssql_include", None)
+ if not options.get("mssql_clustered"):
+ options.pop("mssql_clustered", None)
+ return options
+
class _ExecDropConstraint(Executable, ClauseElement):
inherit_cache = False
import re
from typing import Any
from typing import cast
+from typing import Dict
from typing import List
from typing import Optional
from typing import Sequence
def _dialect_sig(
self, item: Union[Index, UniqueConstraint]
) -> Tuple[Any, ...]:
- if (
- item.dialect_kwargs.get("postgresql_nulls_not_distinct")
- is not None
- ):
- return (
- (
- "nulls_not_distinct",
- item.dialect_kwargs["postgresql_nulls_not_distinct"],
- ),
- )
+ # only the positive case is returned by SQLAlchemy reflection, so
+ # None and False are treated the same
+ if item.dialect_kwargs.get("postgresql_nulls_not_distinct"):
+ return ("nulls_not_distinct",)
return ()
def create_index_sig(self, index: Index) -> Tuple[Any, ...]:
sorted([col.name for col in const.columns])
) + self._dialect_sig(const)
+ def adjust_reflected_dialect_options(
+ self, reflected_options: Dict[str, Any], kind: str
+ ) -> Dict[str, Any]:
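+ # drop an empty/unset postgresql_include entry reflected from the
+ # database so it compares cleanly against the metadata definition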
+ options: Dict[str, Any]
+ options = reflected_options.get("dialect_options", {}).copy()
+ if not options.get("postgresql_include"):
+ options.pop("postgresql_include", None)
+ return options
+
def _compile_element(self, element: ClauseElement) -> str:
return element.compile(
dialect=self.dialect,
:tags: usecase, autogenerate
:tickets: 1248
- Added support in autogenerate for NULLS NOT DISTINCT in
+ Added support in autogenerate for ``NULLS NOT DISTINCT`` in
the PostgreSQL dialect.
.. change::
--- /dev/null
+.. change::
+ :tags: bug, autogenerate
+ :tickets: 1291
+
+ Fixed issue with ``NULLS NOT DISTINCT`` detection in the PostgreSQL
+ dialect where autogenerate would repeatedly detect changes on indexes
+ or unique constraints using this option.
-import re
-
from sqlalchemy import Column
from sqlalchemy import Integer
from sqlalchemy import MetaData
autogenerate._render_migration_diffs(context, template_args)
eq_(
- re.sub(r"u'", "'", template_args["upgrades"]),
+ template_args["upgrades"],
"""# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###""",
)
eq_(
- re.sub(r"u'", "'", template_args["downgrades"]),
+ template_args["downgrades"],
"""# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###""",
autogenerate._render_migration_diffs(context, template_args)
eq_(
- re.sub(r"u'", "'", template_args["upgrades"]),
+ template_args["upgrades"],
"""# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###""",
)
eq_(
- re.sub(r"u'", "'", template_args["downgrades"]),
+ template_args["downgrades"],
"""# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###""",
template_args = {}
autogenerate._render_migration_diffs(self.context, template_args)
eq_(
- re.sub(r"u'", "'", template_args["upgrades"]),
+ template_args["upgrades"],
"""# ### commands auto generated by Alembic - please adjust! ###
op.create_table('item',
sa.Column('id', sa.Integer(), nullable=False),
)
eq_(
- re.sub(r"u'", "'", template_args["downgrades"]),
+ template_args["downgrades"],
"""# ### commands auto generated by Alembic - please adjust! ###
op.add_column('user', sa.Column('pw', sa.VARCHAR(length=50), \
nullable=True))
autogenerate._render_migration_diffs(self.context, template_args)
eq_(
- re.sub(r"u'", "'", template_args["upgrades"]),
+ template_args["upgrades"],
"""# ### commands auto generated by Alembic - please adjust! ###
op.create_table('item',
sa.Column('id', sa.Integer(), nullable=False),
)
eq_(
- re.sub(r"u'", "'", template_args["downgrades"]),
+ template_args["downgrades"],
"""# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('user', schema=None) as batch_op:
batch_op.add_column(sa.Column('pw', sa.VARCHAR(length=50), nullable=True))
template_args = {}
autogenerate._render_migration_diffs(self.context, template_args)
eq_(
- re.sub(r"u'", "'", template_args["upgrades"]),
+ template_args["upgrades"],
"""# ### commands auto generated by Alembic - please adjust! ###
op.add_column('user', sa.Column('username', sa.String(length=50), nullable=True))
op.add_column('user', sa.Column('password_hash', sa.String(length=32), nullable=True))
)
eq_(
- re.sub(r"u'", "'", template_args["downgrades"]),
+ template_args["downgrades"],
"""# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('user', 'timestamp')
op.drop_column('user', 'password_hash')
autogenerate._render_migration_diffs(context, template_args)
eq_(
- re.sub(r"u'", "'", template_args["upgrades"]),
+ template_args["upgrades"],
"""# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###""",
)
eq_(
- re.sub(r"u'", "'", template_args["downgrades"]),
+ template_args["downgrades"],
"""# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###""",
autogenerate._render_migration_diffs(self.context, template_args)
eq_(
- re.sub(r"u'", "'", template_args["upgrades"]),
+ template_args["upgrades"],
"""# ### commands auto generated by Alembic - please adjust! ###
op.create_table('item',
sa.Column('id', sa.Integer(), nullable=False),
)
eq_(
- re.sub(r"u'", "'", template_args["downgrades"]),
+ template_args["downgrades"],
"""# ### commands auto generated by Alembic - please adjust! ###
op.add_column('user', sa.Column('pw', sa.VARCHAR(length=50), \
autoincrement=False, nullable=True), schema='%(schema)s')
eq_(len(diffs), 1)
-case = combinations(False, True, None, argnames="case", id_="s")
+case = combinations(
+ ("nulls_not_distinct=False", False),
+ ("nulls_not_distinct=True", True),
+ ("nulls_not_distinct=None", None),
+ argnames="case",
+ id_="ia",
+)
name_type = combinations(
(
"index",
eq_(diffs[1][0], f"add_{name}")
eq_(diffs[1][1].name, "nnd_obj")
eq_(diffs[1][1].dialect_kwargs["postgresql_nulls_not_distinct"], to)
+
+ @case
+ @name_type
+ def test_no_change(self, case, name, type_):
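+ # identical nulls_not_distinct settings on both sides should not
+ # produce any autogenerate diff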
+ m1 = MetaData()
+ m2 = MetaData()
+ Table(
+ "tbl",
+ m1,
+ Column("id", Integer, primary_key=True),
+ Column("name", String),
+ type_(case),
+ )
+ Table(
+ "tbl",
+ m2,
+ Column("id", Integer, primary_key=True),
+ Column("name", String),
+ type_(case),
+ )
+ diffs = self._fixture(m1, m2)
+ eq_(len(diffs), 0, str(diffs))