def _make_index(params, conn_table):
- # TODO: add .info such as 'duplicates_constraint'
- return sa_schema.Index(
+ # Build the Index object first so reflection metadata can be attached
+ # to it below before returning.
+ ix = sa_schema.Index(
params['name'],
*[conn_table.c[cname] for cname in params['column_names']],
unique=params['unique']
)
+ # Propagate the reflected 'duplicates_constraint' flag into Index.info
+ # so downstream comparison logic can recognize indexes that merely
+ # mirror a constraint and skip them rather than emit a DROP.
+ if 'duplicates_constraint' in params:
+ ix.info['duplicates_constraint'] = params['duplicates_constraint']
+ return ix
def _make_unique_constraint(params, conn_table):
conn_indexes,
metadata_unique_constraints,
metadata_indexes):
+
conn_uniques_by_name = dict(
(c.name, c) for c in conn_unique_constraints)
conn_indexes_by_name = dict(
(c.name, c) for c in conn_indexes)
- # TODO: if SQLA 1.0, make use of "duplicates_constraint"
- # metadata
- doubled_constraints = dict(
- (name, (conn_uniques_by_name[name], conn_indexes_by_name[name]))
- for name in set(conn_uniques_by_name).intersection(
- conn_indexes_by_name)
- )
- for name, (uq, ix) in doubled_constraints.items():
+ if not util.sqla_100:
+ doubled_constraints = set(
+ conn_indexes_by_name[name]
+ for name in set(conn_uniques_by_name).intersection(
+ conn_indexes_by_name)
+ )
+ else:
+ doubled_constraints = set(
+ index for index in
+ conn_indexes if index.info.get('duplicates_constraint')
+ )
+
+ for ix in doubled_constraints:
conn_indexes.remove(ix)
for idx in list(metadata_indexes):
"SQLAlchemy 0.9.4 or greater required"
)
+ @property
+ def sqlalchemy_100(self):
+ # Test requirement: skip unless SQLAlchemy 1.0.0 or greater is
+ # installed. util.sqla_100 is a version flag defined elsewhere in
+ # the project -- presumably True on SQLA >= 1.0; confirm there.
+ return exclusions.skip_if(
+ lambda config: not util.sqla_100,
+ "SQLAlchemy 1.0.0 or greater required"
+ )
+
@property
def sqlalchemy_1014(self):
return exclusions.skip_if(
--- /dev/null
+.. change::
+ :tags: bug, postgresql, autogenerate
+ :tickets: 461
+
+ Fixed bug where autogenerate would produce a DROP statement for the index
+ implicitly created by a PostgreSQL EXCLUDE constraint, rather than skipping
+ it as is the case for indexes implicitly generated by unique constraints.
+ Makes use of SQLAlchemy 1.0.x's improved "duplicates index" metadata, and
+ therefore requires at least SQLAlchemy 1.0 for this fix to take effect.
+
+
return exclusions.only_if(check_uuid_ossp)
+ def _has_pg_extension(self, name):
+ # Build an exclusions rule that enables a test only when the named
+ # PostgreSQL extension is installed in the target database; tests on
+ # any other backend are excluded outright.
+ def check(config):
+ if not exclusions.against(config, "postgresql"):
+ return False
+ # NOTE(review): the extension name is %-interpolated straight into
+ # the SQL string. Acceptable while callers pass fixed literals
+ # (see hstore/btree_gist below), but confirm no untrusted input
+ # can reach this helper.
+ count = config.db.scalar(
+ "SELECT count(*) FROM pg_extension "
+ "WHERE extname='%s'" % name)
+ return bool(count)
+ return exclusions.only_if(check, "needs %s extension" % name)
+
+ @property
+ def hstore(self):
+ # Requirement satisfied only when the PG "hstore" extension exists.
+ return self._has_pg_extension("hstore")
+
+ @property
+ def btree_gist(self):
+ # Requirement satisfied only when the PG "btree_gist" extension exists.
+ return self._has_pg_extension("btree_gist")
+
+
@property
def autoincrement_on_composite_pk(self):
# Skipped on SQLite, which per the skip reason does not support
# autoincrement within a composite primary key.
return exclusions.skip_if(["sqlite"], "not supported by database")
diffs = self._fixture(m1, m2, include_schemas=True)
eq_(diffs, [])
+ @config.requirements.sqlalchemy_100
+ @config.requirements.btree_gist
+ def test_exclude_const_unchanged(self):
+ # Two metadata collections carrying an identical ExcludeConstraint
+ # must yield zero autogenerate diffs -- in particular the index
+ # implicitly created by the EXCLUDE constraint must not show up as a
+ # DROP. Gated on SQLA >= 1.0 and the btree_gist extension per the
+ # requirement decorators above.
+ from sqlalchemy.dialects.postgresql import TSRANGE, ExcludeConstraint
+
+ m1 = MetaData()
+ m2 = MetaData()
+
+ Table(
+ 'add_excl', m1,
+ Column('id', Integer, primary_key=True),
+ Column('period', TSRANGE),
+ ExcludeConstraint(('period', '&&'), name='quarters_period_excl')
+ )
+
+ Table(
+ 'add_excl', m2,
+ Column('id', Integer, primary_key=True),
+ Column('period', TSRANGE),
+ ExcludeConstraint(('period', '&&'), name='quarters_period_excl')
+ )
+
+ # No differences expected between the two identical models.
+ diffs = self._fixture(m1, m2)
+ eq_(diffs, [])
def test_same_tname_two_schemas(self):
m1 = MetaData()
m2 = MetaData()