Several extensions to the :class:`.Index` construct are available, specific
to the PostgreSQL dialect.
+.. _postgresql_covering_indexes:
+
Covering Indexes
^^^^^^^^^^^^^^^^
Note that this feature requires PostgreSQL 11 or later.
+.. seealso::
+
+ :ref:`postgresql_constraint_options`
+
.. versionadded:: 1.4
.. _postgresql_partial_indexes:
<https://www.postgresql.org/docs/current/static/sql-altertable.html>`_ -
in the PostgreSQL documentation.
+* ``INCLUDE``: This option adds one or more columns as a "payload" to the
+ unique index created automatically by PostgreSQL for the constraint.
+ For example, the following table definition::
+
+ Table(
+ "mytable",
+ metadata,
+ Column("id", Integer, nullable=False),
+ Column("value", Integer, nullable=False),
+ UniqueConstraint("id", postgresql_include=["value"]),
+ )
+
+ would produce the DDL statement:
+
+ .. sourcecode:: sql
+
+ CREATE TABLE mytable (
+ id INTEGER NOT NULL,
+ value INTEGER NOT NULL,
+ UNIQUE (id) INCLUDE (value)
+ )
+
+ Note that this feature requires PostgreSQL 11 or later.
+
+ .. versionadded:: 2.0.41
+
+ .. seealso::
+
+ :ref:`postgresql_covering_indexes`
+
+ .. seealso::
+
+ `PostgreSQL CREATE TABLE options
+ <https://www.postgresql.org/docs/current/static/sql-createtable.html>`_ -
+ in the PostgreSQL documentation.
+
* Column list with foreign key ``ON DELETE SET`` actions: This applies to
  :class:`.ForeignKey` and :class:`.ForeignKeyConstraint`; the :paramref:`.ForeignKey.ondelete`
parameter will accept on the PostgreSQL backend only a string list of column
not_valid = constraint.dialect_options["postgresql"]["not_valid"]
return " NOT VALID" if not_valid else ""
+ def _define_include(self, obj):
+ includeclause = obj.dialect_options["postgresql"]["include"]
+ if not includeclause:
+ return ""
+ inclusions = [
+ obj.table.c[col] if isinstance(col, str) else col
+ for col in includeclause
+ ]
+ return " INCLUDE (%s)" % ", ".join(
+ [self.preparer.quote(c.name) for c in inclusions]
+ )
+
def visit_check_constraint(self, constraint, **kw):
if constraint._type_bound:
typ = list(constraint.columns)[0].type
text += self._define_constraint_validity(constraint)
return text
def visit_primary_key_constraint(self, constraint, **kw):
    """Render PRIMARY KEY DDL, appending PostgreSQL's INCLUDE clause."""
    ddl = super().visit_primary_key_constraint(constraint)
    return ddl + self._define_include(constraint)
def visit_unique_constraint(self, constraint, **kw):
    """Render UNIQUE DDL, appending PostgreSQL's INCLUDE clause."""
    ddl = super().visit_unique_constraint(constraint)
    return ddl + self._define_include(constraint)
@util.memoized_property
def _fk_ondelete_pattern(self):
return re.compile(
)
)
- includeclause = index.dialect_options["postgresql"]["include"]
- if includeclause:
- inclusions = [
- index.table.c[col] if isinstance(col, str) else col
- for col in includeclause
- ]
- text += " INCLUDE (%s)" % ", ".join(
- [preparer.quote(c.name) for c in inclusions]
- )
+ text += self._define_include(index)
nulls_not_distinct = index.dialect_options["postgresql"][
"nulls_not_distinct"
"not_valid": False,
},
),
+ (
+ schema.PrimaryKeyConstraint,
+ {"include": None},
+ ),
(
schema.UniqueConstraint,
- {"nulls_not_distinct": None},
+ {
+ "include": None,
+ "nulls_not_distinct": None,
+ },
),
]
pg_catalog.pg_constraint.c.conkey, 1
).label("ord"),
pg_catalog.pg_description.c.description,
+ pg_catalog.pg_get_constraintdef(
+ pg_catalog.pg_constraint.c.oid, True
+ ).label("condef"),
)
.outerjoin(
pg_catalog.pg_description,
con_sq.c.conindid,
con_sq.c.description,
con_sq.c.ord,
+ con_sq.c.condef,
pg_catalog.pg_attribute.c.attname,
)
.select_from(pg_catalog.pg_attribute)
).label("cols"),
attr_sq.c.conname,
sql.func.min(attr_sq.c.description).label("description"),
+ attr_sq.c.condef,
)
- .group_by(attr_sq.c.conrelid, attr_sq.c.conname)
+ .group_by(attr_sq.c.conrelid, attr_sq.c.conname, attr_sq.c.condef)
.order_by(attr_sq.c.conrelid, attr_sq.c.conname)
)
)
return constraint_query
@util.memoized_property
def _include_regex_pattern(self):
    """Compiled pattern that captures the column list out of an
    ``INCLUDE (...)`` clause in ``pg_get_constraintdef()`` output.

    NOTE(review): ``(.+)`` is greedy and the caller later splits the
    captured group on ``", "`` — this assumes the comma-space separated,
    INCLUDE-last formatting PostgreSQL emits; confirm behavior for
    quoted identifiers containing commas or parentheses.
    """
    return re.compile(r"INCLUDE \((.+)\)")
def _reflect_constraint(
self, connection, contype, schema, filter_names, scope, kind, **kw
):
batches = list(table_oids)
is_unique = contype == "u"
+ INCLUDE_REGEX = self._include_regex_pattern
+
while batches:
batch = batches[0:3000]
batches[0:3000] = []
)
result_by_oid = defaultdict(list)
- for oid, cols, constraint_name, comment, extra in result:
+ for oid, cols, constraint_name, comment, condef, extra in result:
result_by_oid[oid].append(
- (cols, constraint_name, comment, extra)
+ (cols, constraint_name, comment, condef, extra)
)
for oid, tablename in batch:
for_oid = result_by_oid.get(oid, ())
if for_oid:
- for cols, constraint, comment, extra in for_oid:
+ for cols, constraint, comment, condef, extra in for_oid:
+ opts = {}
if is_unique:
- yield tablename, cols, constraint, comment, {
- "nullsnotdistinct": extra
- }
- else:
- yield tablename, cols, constraint, comment, None
+ opts["nullsnotdistinct"] = extra
+ m = INCLUDE_REGEX.search(condef)
+ if m:
+ opts["include"] = [
+ v.strip() for v in m.group(1).split(", ")
+ ]
+ if not opts:
+ opts = None
+ yield tablename, cols, constraint, comment, opts
else:
yield tablename, None, None, None, None
# only a single pk can be present for each table. Return an entry
# even if a table has no primary key
default = ReflectionDefaults.pk_constraint
+
def pk_constraint(pk_name, cols, comment, opts):
    """Build one reflected primary-key constraint record.

    When *opts* carries an ``include`` entry (reflected INCLUDE
    columns), it is surfaced under ``dialect_options`` as the
    ``postgresql_include`` option.
    """
    record = {
        "constrained_columns": [] if cols is None else cols,
        "name": pk_name,
        "comment": comment,
    }
    if opts and "include" in opts:
        record["dialect_options"] = {
            "postgresql_include": opts["include"]
        }
    return record
+
return (
(
(schema, table_name),
(
- {
- "constrained_columns": [] if cols is None else cols,
- "name": pk_name,
- "comment": comment,
- }
+ pk_constraint(pk_name, cols, comment, opts)
if pk_name is not None
else default()
),
)
- for table_name, cols, pk_name, comment, _ in result
+ for table_name, cols, pk_name, comment, opts in result
)
@reflection.cache
}
if options:
if options["nullsnotdistinct"]:
- uc_dict["dialect_options"] = {
- "postgresql_nulls_not_distinct": options[
- "nullsnotdistinct"
- ]
- }
+ uc_dict.setdefault("dialect_options", {})[
+ "postgresql_nulls_not_distinct"
+ ] = options["nullsnotdistinct"]
+ if "include" in options:
+ uc_dict.setdefault("dialect_options", {})[
+ "postgresql_include"
+ ] = options["include"]
uniques[(schema, table_name)].append(uc_dict)
return uniques.items()
from sqlalchemy import literal
from sqlalchemy import MetaData
from sqlalchemy import null
+from sqlalchemy import PrimaryKeyConstraint
from sqlalchemy import schema
from sqlalchemy import select
from sqlalchemy import Sequence
expr = testing.resolve_lambda(expr_fn, tbl=tbl)
self.assert_compile(expr, expected, dialect=dd)
@testing.combinations(
    # Each combination pairs a DDL-construct factory (given the shared
    # table fixture) with the exact PostgreSQL DDL string it must
    # compile to; covers the UNIQUE, PRIMARY KEY and CREATE INDEX
    # forms of the INCLUDE clause.
    (
        lambda tbl: schema.AddConstraint(
            UniqueConstraint(tbl.c.id, postgresql_include=[tbl.c.value])
        ),
        "ALTER TABLE foo ADD UNIQUE (id) INCLUDE (value)",
    ),
    (
        lambda tbl: schema.AddConstraint(
            PrimaryKeyConstraint(
                tbl.c.id, postgresql_include=[tbl.c.value, "misc"]
            )
        ),
        "ALTER TABLE foo ADD PRIMARY KEY (id) INCLUDE (value, misc)",
    ),
    (
        lambda tbl: schema.CreateIndex(
            Index("idx", tbl.c.id, postgresql_include=[tbl.c.value])
        ),
        "CREATE INDEX idx ON foo (id) INCLUDE (value)",
    ),
)
def test_include(self, expr_fn, expected):
    """INCLUDE renders identically for constraints and indexes."""
    dd = PGDialect()
    m = MetaData()
    tbl = Table(
        "foo",
        m,
        Column("id", Integer, nullable=False),
        Column("value", Integer, nullable=False),
        Column("misc", String),
    )
    # resolve_lambda binds the shared table into the lambda fixture.
    expr = testing.resolve_lambda(expr_fn, tbl=tbl)
    self.assert_compile(expr, expected, dialect=dd)
+
def test_create_index_with_labeled_ops(self):
m = MetaData()
tbl = Table(