git.ipfire.org Git - thirdparty/sqlalchemy/sqlalchemy.git/commitdiff
Format code in documentation
author Doctor <thirvondukr@gmail.com>
Sun, 22 May 2022 20:05:25 +0000 (16:05 -0400)
committer Mike Bayer <mike_mp@zzzcomputing.com>
Sun, 22 May 2022 20:06:46 +0000 (16:06 -0400)
Closes: #7959
Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/7959
Pull-request-sha: fd8f60fcfe9cda0c2ba6dc9ddd171bf85a180295

Change-Id: I9a96c6e3e56cfd550672db4c1da4d68a961f970a

15 files changed:
doc/build/orm/backref.rst
doc/build/orm/basic_relationships.rst
doc/build/orm/cascades.rst
doc/build/orm/collections.rst
doc/build/orm/composites.rst
doc/build/orm/constructors.rst
doc/build/orm/declarative_config.rst
doc/build/orm/declarative_mixins.rst
doc/build/orm/declarative_styles.rst
doc/build/orm/declarative_tables.rst
doc/build/orm/extensions/associationproxy.rst
doc/build/orm/extensions/asyncio.rst
doc/build/orm/extensions/baked.rst
doc/build/orm/extensions/mypy.rst
doc/build/orm/inheritance.rst

index 3c1f7f8e646c26297425d2850c414d11efe55337..24587003ea5d79478bbed915a9b4783128f84beb 100644 (file)
@@ -7,24 +7,25 @@ The :paramref:`_orm.relationship.backref` keyword argument was first introduced
 mentioned throughout many of the examples here.   What does it actually do?   Let's start
 with the canonical ``User`` and ``Address`` scenario::
 
-    from sqlalchemy import Integer, ForeignKey, String, Column
-    from sqlalchemy.ext.declarative import declarative_base
-    from sqlalchemy.orm import relationship
+    from sqlalchemy import Column, ForeignKey, Integer, String
+    from sqlalchemy.orm import declarative_base, relationship
 
     Base = declarative_base()
 
+
     class User(Base):
-        __tablename__ = 'user'
+        __tablename__ = "user"
         id = Column(Integer, primary_key=True)
         name = Column(String)
 
         addresses = relationship("Address", backref="user")
 
+
     class Address(Base):
-        __tablename__ = 'address'
+        __tablename__ = "address"
         id = Column(Integer, primary_key=True)
         email = Column(String)
-        user_id = Column(Integer, ForeignKey('user.id'))
+        user_id = Column(Integer, ForeignKey("user.id"))
 
 The above configuration establishes a collection of ``Address`` objects on ``User`` called
 ``User.addresses``.   It also establishes a ``.user`` attribute on ``Address`` which will
@@ -35,24 +36,25 @@ In fact, the :paramref:`_orm.relationship.backref` keyword is only a common shor
 of an event listener on both sides which will mirror attribute operations
 in both directions.   The above configuration is equivalent to::
 
-    from sqlalchemy import Integer, ForeignKey, String, Column
-    from sqlalchemy.ext.declarative import declarative_base
-    from sqlalchemy.orm import relationship
+    from sqlalchemy import Column, ForeignKey, Integer, String
+    from sqlalchemy.orm import declarative_base, relationship
 
     Base = declarative_base()
 
+
     class User(Base):
-        __tablename__ = 'user'
+        __tablename__ = "user"
         id = Column(Integer, primary_key=True)
         name = Column(String)
 
         addresses = relationship("Address", back_populates="user")
 
+
     class Address(Base):
-        __tablename__ = 'address'
+        __tablename__ = "address"
         id = Column(Integer, primary_key=True)
         email = Column(String)
-        user_id = Column(Integer, ForeignKey('user.id'))
+        user_id = Column(Integer, ForeignKey("user.id"))
 
         user = relationship("User", back_populates="addresses")
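
A rough sketch of the mirroring behavior that either form provides, assuming the
``User`` / ``Address`` mapping above (the variable names here are illustrative)::

    u = User(name="jack")
    a = Address(email="jack@example.com")

    u.addresses.append(a)  # appending to the one-to-many collection...
    assert a.user is u  # ...also populates the many-to-one attribute

    a2 = Address(email="ed@example.com", user=u)  # and setting .user...
    assert a2 in u.addresses  # ...appends to the collection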
 
@@ -119,27 +121,31 @@ or a one-to-many or many-to-one which has a :paramref:`_orm.relationship.primary
 :paramref:`_orm.relationship.primaryjoin` argument is discussed in :ref:`relationship_primaryjoin`).  Such
 as if we limited the list of ``Address`` objects to those which start with "tony"::
 
-    from sqlalchemy import Integer, ForeignKey, String, Column
-    from sqlalchemy.ext.declarative import declarative_base
-    from sqlalchemy.orm import relationship
+    from sqlalchemy import Column, ForeignKey, Integer, String
+    from sqlalchemy.orm import declarative_base, relationship
 
     Base = declarative_base()
 
+
     class User(Base):
-        __tablename__ = 'user'
+        __tablename__ = "user"
         id = Column(Integer, primary_key=True)
         name = Column(String)
 
-        addresses = relationship("Address",
-                        primaryjoin="and_(User.id==Address.user_id, "
-                            "Address.email.startswith('tony'))",
-                        backref="user")
+        addresses = relationship(
+            "Address",
+            primaryjoin=(
+                "and_(User.id==Address.user_id, Address.email.startswith('tony'))"
+            ),
+            backref="user",
+        )
+
 
     class Address(Base):
-        __tablename__ = 'address'
+        __tablename__ = "address"
         id = Column(Integer, primary_key=True)
         email = Column(String)
-        user_id = Column(Integer, ForeignKey('user.id'))
+        user_id = Column(Integer, ForeignKey("user.id"))
 
 We can observe, by inspecting the resulting property, that both sides
 of the relationship have this join condition applied::
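A minimal sketch of what that inspection might look like, assuming the
``User`` / ``Address`` mapping above (the ``inspect()`` calls here are
illustrative)::

    from sqlalchemy import inspect

    # both sides carry the custom primaryjoin, including the startswith() filter
    print(inspect(User).relationships["addresses"].primaryjoin)
    print(inspect(Address).relationships["user"].primaryjoin)
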
@@ -171,13 +177,16 @@ the :func:`.backref` function in place of a string::
     # <other imports>
     from sqlalchemy.orm import backref
 
+
     class User(Base):
-        __tablename__ = 'user'
+        __tablename__ = "user"
         id = Column(Integer, primary_key=True)
         name = Column(String)
 
-        addresses = relationship("Address",
-                        backref=backref("user", lazy="joined"))
+        addresses = relationship(
+            "Address",
+            backref=backref("user", lazy="joined"),
+        )
 
 Where above, we placed a ``lazy="joined"`` directive only on the ``Address.user``
 side, indicating that when a query against ``Address`` is made, a join to the ``User``
@@ -243,26 +252,31 @@ present, due to the filtering condition.   But we can do away with this unwanted
 of the "backref" behavior on the Python side by using two separate :func:`_orm.relationship` constructs,
 placing :paramref:`_orm.relationship.back_populates` only on one side::
 
-    from sqlalchemy import Integer, ForeignKey, String, Column
-    from sqlalchemy.ext.declarative import declarative_base
-    from sqlalchemy.orm import relationship
+    from sqlalchemy import Column, ForeignKey, Integer, String
+    from sqlalchemy.orm import declarative_base, relationship
 
     Base = declarative_base()
 
+
     class User(Base):
-        __tablename__ = 'user'
+        __tablename__ = "user"
         id = Column(Integer, primary_key=True)
         name = Column(String)
-        addresses = relationship("Address",
-                        primaryjoin="and_(User.id==Address.user_id, "
-                            "Address.email.startswith('tony'))",
-                        back_populates="user")
+
+        addresses = relationship(
+            "Address",
+            primaryjoin="and_(User.id==Address.user_id, "
+            "Address.email.startswith('tony'))",
+            back_populates="user",
+        )
+
 
     class Address(Base):
-        __tablename__ = 'address'
+        __tablename__ = "address"
         id = Column(Integer, primary_key=True)
         email = Column(String)
-        user_id = Column(Integer, ForeignKey('user.id'))
+        user_id = Column(Integer, ForeignKey("user.id"))
+
         user = relationship("User")
 
 With the above scenario, appending an ``Address`` object to the ``.addresses``
index 40b3590b6fa67ae4df090aae2d51e187047b5d22..ad57d4ca0791bbad2728313bc88dd80f59e55851 100644 (file)
@@ -7,13 +7,11 @@ A quick walkthrough of the basic relational patterns.
 
 The imports used for each of the following sections are as follows::
 
-    from sqlalchemy import Table, Column, Integer, ForeignKey
-    from sqlalchemy.orm import relationship
-    from sqlalchemy.ext.declarative import declarative_base
+    from sqlalchemy import Column, ForeignKey, Integer, Table
+    from sqlalchemy.orm import declarative_base, relationship
 
     Base = declarative_base()
 
-
 .. _relationship_patterns_o2m:
 
 One To Many
@@ -24,28 +22,30 @@ the parent.  :func:`_orm.relationship` is then specified on the parent, as refer
 a collection of items represented by the child::
 
     class Parent(Base):
-        __tablename__ = 'parent'
+        __tablename__ = "parent"
         id = Column(Integer, primary_key=True)
         children = relationship("Child")
 
+
     class Child(Base):
-        __tablename__ = 'child'
+        __tablename__ = "child"
         id = Column(Integer, primary_key=True)
-        parent_id = Column(Integer, ForeignKey('parent.id'))
+        parent_id = Column(Integer, ForeignKey("parent.id"))
 
 To establish a bidirectional relationship in one-to-many, where the "reverse"
 side is a many to one, specify an additional :func:`_orm.relationship` and connect
 the two using the :paramref:`_orm.relationship.back_populates` parameter::
 
     class Parent(Base):
-        __tablename__ = 'parent'
+        __tablename__ = "parent"
         id = Column(Integer, primary_key=True)
         children = relationship("Child", back_populates="parent")
 
+
     class Child(Base):
-        __tablename__ = 'child'
+        __tablename__ = "child"
         id = Column(Integer, primary_key=True)
-        parent_id = Column(Integer, ForeignKey('parent.id'))
+        parent_id = Column(Integer, ForeignKey("parent.id"))
         parent = relationship("Parent", back_populates="children")
 
 ``Child`` will get a ``parent`` attribute with many-to-one semantics.
@@ -55,7 +55,7 @@ on a single :func:`_orm.relationship` instead of using
 :paramref:`_orm.relationship.back_populates`::
 
     class Parent(Base):
-        __tablename__ = 'parent'
+        __tablename__ = "parent"
         id = Column(Integer, primary_key=True)
         children = relationship("Child", backref="parent")
 
@@ -88,13 +88,14 @@ Many to one places a foreign key in the parent table referencing the child.
 attribute will be created::
 
     class Parent(Base):
-        __tablename__ = 'parent'
+        __tablename__ = "parent"
         id = Column(Integer, primary_key=True)
-        child_id = Column(Integer, ForeignKey('child.id'))
+        child_id = Column(Integer, ForeignKey("child.id"))
         child = relationship("Child")
 
+
     class Child(Base):
-        __tablename__ = 'child'
+        __tablename__ = "child"
         id = Column(Integer, primary_key=True)
 
 Bidirectional behavior is achieved by adding a second :func:`_orm.relationship`
@@ -102,13 +103,14 @@ and applying the :paramref:`_orm.relationship.back_populates` parameter
 in both directions::
 
     class Parent(Base):
-        __tablename__ = 'parent'
+        __tablename__ = "parent"
         id = Column(Integer, primary_key=True)
-        child_id = Column(Integer, ForeignKey('child.id'))
+        child_id = Column(Integer, ForeignKey("child.id"))
         child = relationship("Child", back_populates="parents")
 
+
     class Child(Base):
-        __tablename__ = 'child'
+        __tablename__ = "child"
         id = Column(Integer, primary_key=True)
         parents = relationship("Parent", back_populates="child")
 
@@ -116,9 +118,9 @@ Alternatively, the :paramref:`_orm.relationship.backref` parameter
 may be applied to a single :func:`_orm.relationship`, such as ``Parent.child``::
 
     class Parent(Base):
-        __tablename__ = 'parent'
+        __tablename__ = "parent"
         id = Column(Integer, primary_key=True)
-        child_id = Column(Integer, ForeignKey('child.id'))
+        child_id = Column(Integer, ForeignKey("child.id"))
         child = relationship("Child", backref="parents")
 
 .. _relationships_one_to_one:
@@ -143,16 +145,17 @@ a :ref:`many-to-one <relationship_patterns_m2o>` (``Child.parent``)
 relationships::
 
     class Parent(Base):
-        __tablename__ = 'parent'
+        __tablename__ = "parent"
         id = Column(Integer, primary_key=True)
 
         # one-to-many collection
         children = relationship("Child", back_populates="parent")
 
+
     class Child(Base):
-        __tablename__ = 'child'
+        __tablename__ = "child"
         id = Column(Integer, primary_key=True)
-        parent_id = Column(Integer, ForeignKey('parent.id'))
+        parent_id = Column(Integer, ForeignKey("parent.id"))
 
         # many-to-one scalar
         parent = relationship("Parent", back_populates="children")
@@ -164,17 +167,18 @@ is converted into a scalar relationship using the ``uselist=False`` flag,
 renaming ``Parent.children`` to ``Parent.child`` for clarity::
 
     class Parent(Base):
-        __tablename__ = 'parent'
+        __tablename__ = "parent"
         id = Column(Integer, primary_key=True)
 
         # previously one-to-many Parent.children is now
         # one-to-one Parent.child
         child = relationship("Child", back_populates="parent", uselist=False)
 
+
     class Child(Base):
-        __tablename__ = 'child'
+        __tablename__ = "child"
         id = Column(Integer, primary_key=True)
-        parent_id = Column(Integer, ForeignKey('parent.id'))
+        parent_id = Column(Integer, ForeignKey("parent.id"))
 
         # many-to-one side remains, see tip below
         parent = relationship("Parent", back_populates="child")
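
As a brief usage sketch, assuming the mapping above, ``uselist=False`` makes
``Parent.child`` behave as a scalar attribute rather than a list::

    p = Parent()
    p.child = Child()  # scalar assignment; no .append()
    assert p.child.parent is p  # back_populates still mirrors the change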
@@ -212,18 +216,18 @@ in this case the ``uselist`` parameter::
 
     from sqlalchemy.orm import backref
 
+
     class Parent(Base):
-        __tablename__ = 'parent'
+        __tablename__ = "parent"
         id = Column(Integer, primary_key=True)
 
+
     class Child(Base):
-        __tablename__ = 'child'
+        __tablename__ = "child"
         id = Column(Integer, primary_key=True)
-        parent_id = Column(Integer, ForeignKey('parent.id'))
+        parent_id = Column(Integer, ForeignKey("parent.id"))
         parent = relationship("Parent", backref=backref("child", uselist=False))
 
-
-
 .. _relationships_many_to_many:
 
 Many To Many
@@ -236,19 +240,22 @@ table is indicated by the :paramref:`_orm.relationship.secondary` argument to
 class, so that the :class:`_schema.ForeignKey` directives can locate the
 remote tables with which to link::
 
-    association_table = Table('association', Base.metadata,
-        Column('left_id', ForeignKey('left.id')),
-        Column('right_id', ForeignKey('right.id'))
+    association_table = Table(
+        "association",
+        Base.metadata,
+        Column("left_id", ForeignKey("left.id")),
+        Column("right_id", ForeignKey("right.id")),
     )
 
+
     class Parent(Base):
-        __tablename__ = 'left'
+        __tablename__ = "left"
         id = Column(Integer, primary_key=True)
-        children = relationship("Child",
-                        secondary=association_table)
+        children = relationship("Child", secondary=association_table)
+
 
     class Child(Base):
-        __tablename__ = 'right'
+        __tablename__ = "right"
         id = Column(Integer, primary_key=True)
 
 .. tip::
@@ -263,55 +270,64 @@ remote tables with which to link::
     this ensures that duplicate rows won't be persisted within the table regardless
     of issues on the application side::
 
-        association_table = Table('association', Base.metadata,
-            Column('left_id', ForeignKey('left.id'), primary_key=True),
-            Column('right_id', ForeignKey('right.id'), primary_key=True)
+        association_table = Table(
+            "association",
+            Base.metadata,
+            Column("left_id", ForeignKey("left.id"), primary_key=True),
+            Column("right_id", ForeignKey("right.id"), primary_key=True),
         )
 
 For a bidirectional relationship, both sides of the relationship contain a
 collection.  Specify using :paramref:`_orm.relationship.back_populates`, and
 for each :func:`_orm.relationship` specify the common association table::
 
-    association_table = Table('association', Base.metadata,
-        Column('left_id', ForeignKey('left.id'), primary_key=True),
-        Column('right_id', ForeignKey('right.id'), primary_key=True)
+    association_table = Table(
+        "association",
+        Base.metadata,
+        Column("left_id", ForeignKey("left.id"), primary_key=True),
+        Column("right_id", ForeignKey("right.id"), primary_key=True),
     )
 
+
     class Parent(Base):
-        __tablename__ = 'left'
+        __tablename__ = "left"
         id = Column(Integer, primary_key=True)
         children = relationship(
-            "Child",
-            secondary=association_table,
-            back_populates="parents")
+            "Child", secondary=association_table, back_populates="parents"
+        )
+
 
     class Child(Base):
-        __tablename__ = 'right'
+        __tablename__ = "right"
         id = Column(Integer, primary_key=True)
         parents = relationship(
-            "Parent",
-            secondary=association_table,
-            back_populates="children")
+            "Parent", secondary=association_table, back_populates="children"
+        )
+
 
 When using the :paramref:`_orm.relationship.backref` parameter instead of
 :paramref:`_orm.relationship.back_populates`, the backref will automatically
 use the same :paramref:`_orm.relationship.secondary` argument for the
 reverse relationship::
 
-    association_table = Table('association', Base.metadata,
-        Column('left_id', ForeignKey('left.id'), primary_key=True),
-        Column('right_id', ForeignKey('right.id'), primary_key=True)
+    association_table = Table(
+        "association",
+        Base.metadata,
+        Column("left_id", ForeignKey("left.id"), primary_key=True),
+        Column("right_id", ForeignKey("right.id"), primary_key=True),
     )
 
+
     class Parent(Base):
-        __tablename__ = 'left'
+        __tablename__ = "left"
         id = Column(Integer, primary_key=True)
-        children = relationship("Child",
-                        secondary=association_table,
-                        backref="parents")
+        children = relationship(
+            "Child", secondary=association_table, backref="parents"
+        )
+
 
     class Child(Base):
-        __tablename__ = 'right'
+        __tablename__ = "right"
         id = Column(Integer, primary_key=True)
 
 The :paramref:`_orm.relationship.secondary` argument of
@@ -321,21 +337,21 @@ can define the ``association_table`` at a later point, as long as it's
 available to the callable after all module initialization is complete::
 
     class Parent(Base):
-        __tablename__ = 'left'
+        __tablename__ = "left"
         id = Column(Integer, primary_key=True)
-        children = relationship("Child",
-                        secondary=lambda: association_table,
-                        backref="parents")
+        children = relationship(
+            "Child",
+            secondary=lambda: association_table,
+            backref="parents",
+        )
 
 With the declarative extension in use, the traditional "string name of the table"
 is accepted as well, matching the name of the table as stored in ``Base.metadata.tables``::
 
     class Parent(Base):
-        __tablename__ = 'left'
+        __tablename__ = "left"
         id = Column(Integer, primary_key=True)
-        children = relationship("Child",
-                        secondary="association",
-                        backref="parents")
+        children = relationship("Child", secondary="association", backref="parents")
 
 .. warning:: When passed as a Python-evaluable string, the
     :paramref:`_orm.relationship.secondary` argument is interpreted using Python's
@@ -421,39 +437,43 @@ is stored along with each association between ``Parent`` and
 ``Child``::
 
     class Association(Base):
-        __tablename__ = 'association'
-        left_id = Column(ForeignKey('left.id'), primary_key=True)
-        right_id = Column(ForeignKey('right.id'), primary_key=True)
+        __tablename__ = "association"
+        left_id = Column(ForeignKey("left.id"), primary_key=True)
+        right_id = Column(ForeignKey("right.id"), primary_key=True)
         extra_data = Column(String(50))
         child = relationship("Child")
 
+
     class Parent(Base):
-        __tablename__ = 'left'
+        __tablename__ = "left"
         id = Column(Integer, primary_key=True)
         children = relationship("Association")
 
+
     class Child(Base):
-        __tablename__ = 'right'
+        __tablename__ = "right"
         id = Column(Integer, primary_key=True)
 
 As always, the bidirectional version makes use of :paramref:`_orm.relationship.back_populates`
 or :paramref:`_orm.relationship.backref`::
 
     class Association(Base):
-        __tablename__ = 'association'
-        left_id = Column(ForeignKey('left.id'), primary_key=True)
-        right_id = Column(ForeignKey('right.id'), primary_key=True)
+        __tablename__ = "association"
+        left_id = Column(ForeignKey("left.id"), primary_key=True)
+        right_id = Column(ForeignKey("right.id"), primary_key=True)
         extra_data = Column(String(50))
         child = relationship("Child", back_populates="parents")
         parent = relationship("Parent", back_populates="children")
 
+
     class Parent(Base):
-        __tablename__ = 'left'
+        __tablename__ = "left"
         id = Column(Integer, primary_key=True)
         children = relationship("Association", back_populates="parent")
 
+
     class Child(Base):
-        __tablename__ = 'right'
+        __tablename__ = "right"
         id = Column(Integer, primary_key=True)
         parents = relationship("Association", back_populates="child")
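
A rough usage sketch for the association object pattern, assuming the mapping
above (``p`` and ``a`` are illustrative names); each ``Child`` is linked to a
``Parent`` through an ``Association`` instance::

    p = Parent()
    a = Association(extra_data="some data")
    a.child = Child()
    p.children.append(a)

    # the extra_data and the related Child are both reachable
    # through the association objects
    for assoc in p.children:
        print(assoc.extra_data, assoc.child)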
 
@@ -494,23 +514,25 @@ associated object, and a second to a target attribute.
   after :meth:`.Session.commit`::
 
         class Association(Base):
-            __tablename__ = 'association'
+            __tablename__ = "association"
 
-            left_id = Column(ForeignKey('left.id'), primary_key=True)
-            right_id = Column(ForeignKey('right.id'), primary_key=True)
+            left_id = Column(ForeignKey("left.id"), primary_key=True)
+            right_id = Column(ForeignKey("right.id"), primary_key=True)
             extra_data = Column(String(50))
 
             child = relationship("Child", backref="parent_associations")
             parent = relationship("Parent", backref="child_associations")
 
+
         class Parent(Base):
-            __tablename__ = 'left'
+            __tablename__ = "left"
             id = Column(Integer, primary_key=True)
 
             children = relationship("Child", secondary="association")
 
+
         class Child(Base):
-            __tablename__ = 'right'
+            __tablename__ = "right"
             id = Column(Integer, primary_key=True)
 
   Additionally, just as changes to one relationship aren't reflected in the
@@ -548,6 +570,7 @@ classes using a string name, rather than the class itself::
 
         children = relationship("Child", back_populates="parent")
 
+
     class Child(Base):
         # ...
 
@@ -584,7 +607,7 @@ package, including expression functions like :func:`_sql.desc` and
         children = relationship(
             "Child",
             order_by="desc(Child.email_address)",
-            primaryjoin="Parent.id == Child.parent_id"
+            primaryjoin="Parent.id == Child.parent_id",
         )
 
 For the case where more than one module contains a class of the same name,
@@ -597,7 +620,7 @@ within any of these string expressions::
         children = relationship(
             "myapp.mymodel.Child",
             order_by="desc(myapp.mymodel.Child.email_address)",
-            primaryjoin="myapp.mymodel.Parent.id == myapp.mymodel.Child.parent_id"
+            primaryjoin="myapp.mymodel.Parent.id == myapp.mymodel.Child.parent_id",
         )
 
 The qualified path can be any partial path that removes ambiguity between
@@ -611,7 +634,7 @@ we can specify ``model1.Child`` or ``model2.Child``::
         children = relationship(
             "model1.Child",
             order_by="desc(mymodel1.Child.email_address)",
-            primaryjoin="Parent.id == model1.Child.parent_id"
+            primaryjoin="Parent.id == model1.Child.parent_id",
         )
 
 The :func:`_orm.relationship` construct also accepts Python functions or
@@ -622,9 +645,12 @@ A Python functional approach might look like the following::
 
     from sqlalchemy import desc
 
+
     def _resolve_child_model():
-         from myapplication import Child
-         return Child
+        from myapplication import Child
+
+        return Child
+
 
     class Parent(Base):
         # ...
@@ -632,7 +658,7 @@ A Python functional approach might look like the following::
         children = relationship(
             _resolve_child_model(),
             order_by=lambda: desc(_resolve_child_model().email_address),
-            primaryjoin=lambda: Parent.id == _resolve_child_model().parent_id
+            primaryjoin=lambda: Parent.id == _resolve_child_model().parent_id,
         )
 
 The full list of parameters which accept Python functions/lambdas or strings
@@ -674,23 +700,23 @@ class were available, we could also apply it afterwards::
     # first, module A, where Child has not been created yet,
     # we create a Parent class which knows nothing about Child
 
+
     class Parent(Base):
-        # ...
+        ...
+
 
+    # ... later, in Module B, which is imported after module A:
 
-    #... later, in Module B, which is imported after module A:
 
     class Child(Base):
-        # ...
+        ...
+
 
     from module_a import Parent
 
     # assign the User.addresses relationship as a class variable.  The
     # declarative base class will intercept this and map the relationship.
-    Parent.children = relationship(
-        Child,
-        primaryjoin=Child.parent_id==Parent.id
-    )
+    Parent.children = relationship(Child, primaryjoin=Child.parent_id == Parent.id)
 
 .. note:: assignment of mapped properties to a declaratively mapped class will only
     function correctly if the "declarative base" class is used, which also
@@ -718,13 +744,15 @@ declarative base and its :class:`_orm.registry`.  We can then refer to this
 parameter::
 
     keyword_author = Table(
-        'keyword_author', Base.metadata,
-        Column('author_id', Integer, ForeignKey('authors.id')),
-        Column('keyword_id', Integer, ForeignKey('keywords.id'))
-        )
+        "keyword_author",
+        Base.metadata,
+        Column("author_id", Integer, ForeignKey("authors.id")),
+        Column("keyword_id", Integer, ForeignKey("keywords.id")),
+    )
+
 
     class Author(Base):
-        __tablename__ = 'authors'
+        __tablename__ = "authors"
         id = Column(Integer, primary_key=True)
         keywords = relationship("Keyword", secondary="keyword_author")
 
index c9e16cbacab8de26812818095b45abd9ea89adf9..8d8e1f7763052aa565c4b9f0926bd44ff683796e 100644 (file)
@@ -22,7 +22,7 @@ Cascade behavior is configured using the
 :func:`~sqlalchemy.orm.relationship`::
 
     class Order(Base):
-        __tablename__ = 'order'
+        __tablename__ = "order"
 
         items = relationship("Item", cascade="all, delete-orphan")
         customer = relationship("User", cascade="save-update")
@@ -32,11 +32,11 @@ To set cascades on a backref, the same flag can be used with the
 its arguments back into :func:`~sqlalchemy.orm.relationship`::
 
     class Item(Base):
-        __tablename__ = 'item'
+        __tablename__ = "item"
 
-        order = relationship("Order",
-                        backref=backref("items", cascade="all, delete-orphan")
-                    )
+        order = relationship(
+            "Order", backref=backref("items", cascade="all, delete-orphan")
+        )
 
 .. sidebar:: The Origins of Cascade
 
@@ -147,13 +147,17 @@ To illustrate, given a mapping of ``Order`` objects which relate
 bi-directionally to a series of ``Item`` objects via relationships
 ``Order.items`` and ``Item.order``::
 
-    mapper_registry.map_imperatively(Order, order_table, properties={
-        'items' : relationship(Item, back_populates='order')
-    })
+    mapper_registry.map_imperatively(
+        Order,
+        order_table,
+        properties={"items": relationship(Item, back_populates="order")},
+    )
 
-    mapper_registry.map_imperatively(Item, item_table, properties={
-        'order' : relationship(Order, back_populates='items')
-    })
+    mapper_registry.map_imperatively(
+        Item,
+        item_table,
+        properties={"order": relationship(Order, back_populates="items")},
+    )
 
 If an ``Order`` is already associated with a :class:`_orm.Session`, and
 an ``Item`` object is then created and appended to the ``Order.items``
@@ -319,23 +323,27 @@ The following example adapts that of :ref:`relationships_many_to_many` to
 illustrate the ``cascade="all, delete"`` setting on **one** side of the
 association::
 
-    association_table = Table('association', Base.metadata,
-        Column('left_id', Integer, ForeignKey('left.id')),
-        Column('right_id', Integer, ForeignKey('right.id'))
+    association_table = Table(
+        "association",
+        Base.metadata,
+        Column("left_id", Integer, ForeignKey("left.id")),
+        Column("right_id", Integer, ForeignKey("right.id")),
     )
 
+
     class Parent(Base):
-        __tablename__ = 'left'
+        __tablename__ = "left"
         id = Column(Integer, primary_key=True)
         children = relationship(
             "Child",
             secondary=association_table,
             back_populates="parents",
-            cascade="all, delete"
+            cascade="all, delete",
         )
 
+
     class Child(Base):
-        __tablename__ = 'right'
+        __tablename__ = "right"
         id = Column(Integer, primary_key=True)
         parents = relationship(
             "Parent",
@@ -398,18 +406,20 @@ on the relevant ``FOREIGN KEY`` constraint as well::
 
 
     class Parent(Base):
-        __tablename__ = 'parent'
+        __tablename__ = "parent"
         id = Column(Integer, primary_key=True)
         children = relationship(
-            "Child", back_populates="parent",
+            "Child",
+            back_populates="parent",
             cascade="all, delete",
-            passive_deletes=True
+            passive_deletes=True,
         )
 
+
     class Child(Base):
-        __tablename__ = 'child'
+        __tablename__ = "child"
         id = Column(Integer, primary_key=True)
-        parent_id = Column(Integer, ForeignKey('parent.id', ondelete="CASCADE"))
+        parent_id = Column(Integer, ForeignKey("parent.id", ondelete="CASCADE"))
         parent = relationship("Parent", back_populates="children")
 
 The behavior of the above configuration when a parent row is deleted
@@ -548,13 +558,16 @@ on the parent->child side of the relationship, and we can then configure
 ``passive_deletes=True`` on the **other** side of the bidirectional
 relationship as illustrated below::
 
-    association_table = Table('association', Base.metadata,
-        Column('left_id', Integer, ForeignKey('left.id', ondelete="CASCADE")),
-        Column('right_id', Integer, ForeignKey('right.id', ondelete="CASCADE"))
+    association_table = Table(
+        "association",
+        Base.metadata,
+        Column("left_id", Integer, ForeignKey("left.id", ondelete="CASCADE")),
+        Column("right_id", Integer, ForeignKey("right.id", ondelete="CASCADE")),
     )
 
+
     class Parent(Base):
-        __tablename__ = 'left'
+        __tablename__ = "left"
         id = Column(Integer, primary_key=True)
         children = relationship(
             "Child",
@@ -563,14 +576,15 @@ relationship as illustrated below::
             cascade="all, delete",
         )
 
+
     class Child(Base):
-        __tablename__ = 'right'
+        __tablename__ = "right"
         id = Column(Integer, primary_key=True)
         parents = relationship(
             "Parent",
             secondary=association_table,
             back_populates="children",
-            passive_deletes=True
+            passive_deletes=True,
         )
 
 Using the above configuration, the deletion of a ``Parent`` object proceeds
@@ -712,12 +726,11 @@ parent collection.  The ``delete-orphan`` cascade accomplishes this, as
 illustrated in the example below::
 
     class User(Base):
-        __tablename__ = 'user'
+        __tablename__ = "user"
 
         # ...
 
-        addresses = relationship(
-            "Address", cascade="all, delete-orphan")
+        addresses = relationship("Address", cascade="all, delete-orphan")
 
     # ...
 
@@ -739,9 +752,8 @@ that this related object is not to be shared with any other parent simultaneously::
         # ...
 
         preference = relationship(
-            "Preference", cascade="all, delete-orphan",
-            single_parent=True)
-
+            "Preference", cascade="all, delete-orphan", single_parent=True
+        )
 
 Above, if a hypothetical ``Preference`` object is removed from a ``User``,
 it will be deleted on flush::
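Roughly, assuming ``some_user`` is a persistent ``User`` configured with the
``preference`` relationship above and ``session`` is its active session, the
sequence might look like::

    some_user.preference = None  # the Preference is now an orphan
    session.commit()  # the flush emits a DELETE for the orphaned Preference row
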
index b34ce7f1456a021f7ab018b4bf4f8072a95acebc..b19c7250533202e6cf70483951944e0fb66dbdcb 100644 (file)
@@ -48,14 +48,15 @@ when accessed. Filtering criterion may be applied as well as limits and
 offsets, either explicitly or via array slices::
 
     class User(Base):
-        __tablename__ = 'user'
+        __tablename__ = "user"
 
         posts = relationship(Post, lazy="dynamic")
 
+
     jack = session.get(User, id)
 
     # filter Jack's blog posts
-    posts = jack.posts.filter(Post.headline=='this is a post')
+    posts = jack.posts.filter(Post.headline == "this is a post")
 
     # apply array slices
     posts = jack.posts[5:20]
@@ -63,10 +64,10 @@ offsets, either explicitly or via array slices::
 The dynamic relationship supports limited write operations, via the
 :meth:`_orm.AppenderQuery.append` and :meth:`_orm.AppenderQuery.remove` methods::
 
-    oldpost = jack.posts.filter(Post.headline=='old post').one()
+    oldpost = jack.posts.filter(Post.headline == "old post").one()
     jack.posts.remove(oldpost)
 
-    jack.posts.append(Post('new post'))
+    jack.posts.append(Post("new post"))
 
 Since the read side of the dynamic relationship always queries the
 database, changes to the underlying collection will not be visible
@@ -81,9 +82,7 @@ function in conjunction with ``lazy='dynamic'``::
     class Post(Base):
         __table__ = posts_table
 
-        user = relationship(User,
-                    backref=backref('posts', lazy='dynamic')
-                )
+        user = relationship(User, backref=backref("posts", lazy="dynamic"))
 
 Note that eager/lazy loading options cannot be used in conjunction with dynamic relationships at this time.
 
@@ -111,9 +110,9 @@ A "noload" relationship never loads from the database, even when
 accessed.   It is configured using ``lazy='noload'``::
 
     class MyClass(Base):
-        __tablename__ = 'some_table'
+        __tablename__ = "some_table"
 
-        children = relationship(MyOtherClass, lazy='noload')
+        children = relationship(MyOtherClass, lazy="noload")
 
 Above, the ``children`` collection is fully writeable, and changes to it will
 be persisted to the database as well as locally available for reading at the
@@ -127,9 +126,9 @@ Alternatively, a "raise"-loaded relationship will raise an
 emit a lazy load::
 
     class MyClass(Base):
-        __tablename__ = 'some_table'
+        __tablename__ = "some_table"
 
-        children = relationship(MyOtherClass, lazy='raise')
+        children = relationship(MyOtherClass, lazy="raise")
 
 Above, attribute access on the ``children`` collection will raise an exception
 if it was not previously eagerloaded.  This includes read access but for
@@ -166,11 +165,12 @@ values accessible through an attribute on the parent instance. By default,
 this collection is a ``list``::
 
     class Parent(Base):
-        __tablename__ = 'parent'
+        __tablename__ = "parent"
         parent_id = Column(Integer, primary_key=True)
 
         children = relationship(Child)
 
+
     parent = Parent()
     parent.children.append(Child())
     print(parent.children[0])
@@ -181,12 +181,13 @@ default list, by specifying the :paramref:`_orm.relationship.collection_class` o
 :func:`~sqlalchemy.orm.relationship`::
 
     class Parent(Base):
-        __tablename__ = 'parent'
+        __tablename__ = "parent"
         parent_id = Column(Integer, primary_key=True)
 
         # use a set
         children = relationship(Child, collection_class=set)
 
+
     parent = Parent()
     child = Child()
     parent.children.add(child)
@@ -203,24 +204,27 @@ to achieve a simple dictionary collection.  It produces a dictionary class that
 of the mapped class as a key.   Below we map an ``Item`` class containing
 a dictionary of ``Note`` items keyed to the ``Note.keyword`` attribute::
 
-    from sqlalchemy import Column, Integer, String, ForeignKey
-    from sqlalchemy.orm import relationship
+    from sqlalchemy import Column, ForeignKey, Integer, String
+    from sqlalchemy.orm import declarative_base, relationship
     from sqlalchemy.orm.collections import attribute_mapped_collection
-    from sqlalchemy.ext.declarative import declarative_base
 
     Base = declarative_base()
 
+
     class Item(Base):
-        __tablename__ = 'item'
+        __tablename__ = "item"
         id = Column(Integer, primary_key=True)
-        notes = relationship("Note",
-                    collection_class=attribute_mapped_collection('keyword'),
-                    cascade="all, delete-orphan")
+        notes = relationship(
+            "Note",
+            collection_class=attribute_mapped_collection("keyword"),
+            cascade="all, delete-orphan",
+        )
+
 
     class Note(Base):
-        __tablename__ = 'note'
+        __tablename__ = "note"
         id = Column(Integer, primary_key=True)
-        item_id = Column(Integer, ForeignKey('item.id'), nullable=False)
+        item_id = Column(Integer, ForeignKey("item.id"), nullable=False)
         keyword = Column(String)
         text = Column(String)
 
@@ -231,7 +235,7 @@ a dictionary of ``Note`` items keyed to the ``Note.keyword`` attribute::
 ``Item.notes`` is then a dictionary::
 
     >>> item = Item()
-    >>> item.notes['a'] = Note('a', 'atext')
+    >>> item.notes["a"] = Note("a", "atext")
     >>> item.notes.items()
     {'a': <__main__.Note object at 0x2eaaf0>}
 
@@ -242,9 +246,9 @@ key we supply must match that of the actual ``Note`` object::
 
     item = Item()
     item.notes = {
-                'a': Note('a', 'atext'),
-                'b': Note('b', 'btext')
-            }
+        "a": Note("a", "atext"),
+        "b": Note("b", "btext"),
+    }
 
 The attribute which :func:`.attribute_mapped_collection` uses as a key
 does not need to be mapped at all!  Using a regular Python ``@property`` allows virtually
@@ -253,17 +257,20 @@ below when we establish it as a tuple of ``Note.keyword`` and the first ten lett
 of the ``Note.text`` field::
 
     class Item(Base):
-        __tablename__ = 'item'
+        __tablename__ = "item"
         id = Column(Integer, primary_key=True)
-        notes = relationship("Note",
-                    collection_class=attribute_mapped_collection('note_key'),
-                    backref="item",
-                    cascade="all, delete-orphan")
+        notes = relationship(
+            "Note",
+            collection_class=attribute_mapped_collection("note_key"),
+            backref="item",
+            cascade="all, delete-orphan",
+        )
+
 
     class Note(Base):
-        __tablename__ = 'note'
+        __tablename__ = "note"
         id = Column(Integer, primary_key=True)
-        item_id = Column(Integer, ForeignKey('item.id'), nullable=False)
+        item_id = Column(Integer, ForeignKey("item.id"), nullable=False)
         keyword = Column(String)
         text = Column(String)
 
@@ -290,12 +297,15 @@ object directly::
 
     from sqlalchemy.orm.collections import column_mapped_collection
 
+
     class Item(Base):
-        __tablename__ = 'item'
+        __tablename__ = "item"
         id = Column(Integer, primary_key=True)
-        notes = relationship("Note",
-                    collection_class=column_mapped_collection(Note.__table__.c.keyword),
-                    cascade="all, delete-orphan")
+        notes = relationship(
+            "Note",
+            collection_class=column_mapped_collection(Note.__table__.c.keyword),
+            cascade="all, delete-orphan",
+        )
 
 as well as :func:`.mapped_collection` which is passed any callable function.
 Note that it's usually easier to use :func:`.attribute_mapped_collection` along
@@ -303,12 +313,15 @@ with a ``@property`` as mentioned earlier::
 
     from sqlalchemy.orm.collections import mapped_collection
 
+
     class Item(Base):
-        __tablename__ = 'item'
+        __tablename__ = "item"
         id = Column(Integer, primary_key=True)
-        notes = relationship("Note",
-                    collection_class=mapped_collection(lambda note: note.text[0:10]),
-                    cascade="all, delete-orphan")
+        notes = relationship(
+            "Note",
+            collection_class=mapped_collection(lambda note: note.text[0:10]),
+            cascade="all, delete-orphan",
+        )
 
 Dictionary mappings are often combined with the "Association Proxy" extension to produce
 streamlined dictionary views.  See :ref:`proxying_dictionaries` and :ref:`composite_association_proxy`
@@ -357,7 +370,7 @@ if the value of ``B.data`` is not set yet, the key will be ``None``::
 
 Setting ``b1.data`` after the fact does not update the collection::
 
-    >>> b1.data = 'the key'
+    >>> b1.data = "the key"
     >>> a1.bs
     {None: <test3.B object at 0x7f7b1023ef70>}
 
@@ -365,14 +378,14 @@ Setting ``b1.data`` after the fact does not update the collection::
 This can also be seen if one attempts to set up ``B()`` in the constructor.
 The order of arguments changes the result::
 
-    >>> B(a=a1, data='the key')
+    >>> B(a=a1, data="the key")
     <test3.B object at 0x7f7b10114280>
     >>> a1.bs
     {None: <test3.B object at 0x7f7b10114280>}
 
 vs::
 
-    >>> B(data='the key', a=a1)
+    >>> B(data="the key", a=a1)
     <test3.B object at 0x7f7b10114340>
     >>> a1.bs
     {'the key': <test3.B object at 0x7f7b10114340>}
@@ -384,9 +397,9 @@ An event handler such as the following may also be used to track changes in the
 collection as well::
 
     from sqlalchemy import event
-
     from sqlalchemy.orm import attributes
 
+
     @event.listens_for(B.data, "set")
     def set_item(obj, value, previous, initiator):
         if obj.a is not None:
@@ -394,8 +407,6 @@ collection as well::
             obj.a.bs[value] = obj
             obj.a.bs.pop(previous)
 
-
-
 .. autofunction:: attribute_mapped_collection
 
 .. autofunction:: column_mapped_collection
@@ -585,8 +596,8 @@ from within an already instrumented call can cause events to be fired off
 repeatedly, or inappropriately, leading to internal state corruption in
 rare cases::
 
-    from sqlalchemy.orm.collections import MappedCollection,\
-                                        collection
+    from sqlalchemy.orm.collections import MappedCollection, collection
+
 
     class MyMappedCollection(MappedCollection):
         """Use @internally_instrumented when your methods
@@ -618,7 +629,8 @@ Iteration will go through ``itervalues()`` unless otherwise decorated.
    of :class:`.MappedCollection` which uses :meth:`.collection.internally_instrumented`
    can be used::
 
-    from sqlalchemy.orm.collections import _instrument_class, MappedCollection
+    from sqlalchemy.orm.collections import MappedCollection, _instrument_class
+
     _instrument_class(MappedCollection)
 
    This will ensure that the :class:`.MappedCollection` has been properly
index 4e533f7cf7cef79370e36041269d5fc00f808593..56f48b00bfd384d84c19b792ec0e8746f45cc07a 100644 (file)
@@ -26,12 +26,14 @@ A simple example represents pairs of columns as a ``Point`` object.
             return self.x, self.y
 
         def __repr__(self):
-            return "Point(x=%r, y=%r)" % (self.x, self.y)
+            return f"Point(x={self.x!r}, y={self.y!r})"
 
         def __eq__(self, other):
-            return isinstance(other, Point) and \
-                other.x == self.x and \
-                other.y == self.y
+            return (
+                isinstance(other, Point)
+                and other.x == self.x
+                and other.y == self.y
+            )
 
         def __ne__(self, other):
             return not self.__eq__(other)
@@ -49,13 +51,13 @@ objects. Then, the :func:`.composite` function is used to assign new
 attributes that will represent sets of columns via the ``Point`` class::
 
     from sqlalchemy import Column, Integer
-    from sqlalchemy.orm import composite
-    from sqlalchemy.orm import declarative_base
+    from sqlalchemy.orm import composite, declarative_base
 
     Base = declarative_base()
 
+
     class Vertex(Base):
-        __tablename__ = 'vertices'
+        __tablename__ = "vertices"
 
         id = Column(Integer, primary_key=True)
         x1 = Column(Integer)
@@ -69,10 +71,14 @@ attributes that will represent sets of columns via the ``Point`` class::
 A classical mapping above would define each :func:`.composite`
 against the existing table::
 
-    mapper_registry.map_imperatively(Vertex, vertices_table, properties={
-        'start':composite(Point, vertices_table.c.x1, vertices_table.c.y1),
-        'end':composite(Point, vertices_table.c.x2, vertices_table.c.y2),
-    })
+    mapper_registry.map_imperatively(
+        Vertex,
+        vertices_table,
+        properties={
+            "start": composite(Point, vertices_table.c.x1, vertices_table.c.y1),
+            "end": composite(Point, vertices_table.c.x2, vertices_table.c.y2),
+        },
+    )
 
 We can now persist and use ``Vertex`` instances, as well as query for them,
 using the ``.start`` and ``.end`` attributes against ad-hoc ``Point`` instances:
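A brief sketch of that usage, assuming the ``Vertex`` / ``Point`` mapping above
and an available ``session``::

    session.add(Vertex(start=Point(3, 4), end=Point(5, 6)))
    session.commit()

    # composite attributes may be compared against ad-hoc Point instances
    v = session.query(Vertex).filter(Vertex.start == Point(3, 4)).first()
    print(v.start, v.end)
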
@@ -118,19 +124,27 @@ to define existing or new operations.
 Below we illustrate the "greater than" operator, implementing
 the same expression that the base "greater than" does::
 
-    from sqlalchemy.orm.properties import CompositeProperty
     from sqlalchemy import sql
+    from sqlalchemy.orm.properties import CompositeProperty
+
 
     class PointComparator(CompositeProperty.Comparator):
         def __gt__(self, other):
             """redefine the 'greater than' operation"""
 
-            return sql.and_(*[a>b for a, b in
-                              zip(self.__clause_element__().clauses,
-                                  other.__composite_values__())])
+            return sql.and_(
+                *[
+                    a > b
+                    for a, b in zip(
+                        self.__clause_element__().clauses,
+                        other.__composite_values__(),
+                    )
+                ]
+            )
+
 
     class Vertex(Base):
-        ___tablename__ = 'vertices'
+        ___tablename__ = "vertices"
 
         id = Column(Integer, primary_key=True)
         x1 = Column(Integer)
@@ -138,10 +152,8 @@ the same expression that the base "greater than" does::
         x2 = Column(Integer)
         y2 = Column(Integer)
 
-        start = composite(Point, x1, y1,
-                            comparator_factory=PointComparator)
-        end = composite(Point, x2, y2,
-                            comparator_factory=PointComparator)
+        start = composite(Point, x1, y1, comparator_factory=PointComparator)
+        end = composite(Point, x2, y2, comparator_factory=PointComparator)
 
 Nesting Composites
 -------------------
@@ -155,6 +167,7 @@ itself be a composite object, which is then mapped to a class ``HasVertex``::
 
     from sqlalchemy.orm import composite
 
+
     class Point:
         def __init__(self, x, y):
             self.x = x
@@ -164,16 +177,19 @@ itself be a composite object, which is then mapped to a class ``HasVertex``::
             return self.x, self.y
 
         def __repr__(self):
-            return "Point(x=%r, y=%r)" % (self.x, self.y)
+            return f"Point(x={self.x!r}, y={self.y!r})"
 
         def __eq__(self, other):
-            return isinstance(other, Point) and \
-                other.x == self.x and \
-                other.y == self.y
+            return (
+                isinstance(other, Point)
+                and other.x == self.x
+                and other.y == self.y
+            )
 
         def __ne__(self, other):
             return not self.__eq__(other)
 
+
     class Vertex:
         def __init__(self, start, end):
             self.start = start
@@ -182,18 +198,17 @@ itself be a composite object, which is then mapped to a class ``HasVertex``::
         @classmethod
         def _generate(cls, x1, y1, x2, y2):
             """generate a Vertex from a row"""
-            return Vertex(
-                Point(x1, y1),
-                Point(x2, y2)
-            )
+            return Vertex(Point(x1, y1), Point(x2, y2))
 
         def __composite_values__(self):
-            return \
-                self.start.__composite_values__() + \
-                self.end.__composite_values__()
+            return (
+                self.start.__composite_values__()
+                + self.end.__composite_values__()
+            )
+
 
     class HasVertex(Base):
-        __tablename__ = 'has_vertex'
+        __tablename__ = "has_vertex"
         id = Column(Integer, primary_key=True)
         x1 = Column(Integer)
         y1 = Column(Integer)
index c55d381c503dc98039a48b1d64018f8cf52a2c57..f03ce3a1a38f7224896071cfd5fb73fff3dee165 100644 (file)
@@ -29,6 +29,7 @@ useful for recreating transient properties that are normally assigned in
 
     from sqlalchemy import orm
 
+
     class MyMappedClass:
         def __init__(self, data):
             self.data = data
index d19e9ffc0c72021f8f0fe2c2986df121810029eb..38defaa9e4aa6416fbe4f3de53dcab6315bcd193 100644 (file)
@@ -42,14 +42,19 @@ objects but also relationships and SQL expressions::
     # mapping attributes using declarative with declarative table
     # i.e. __tablename__
 
-    from sqlalchemy import Column, Integer, String, Text, ForeignKey
-    from sqlalchemy.orm import column_property, relationship, deferred
-    from sqlalchemy.orm import declarative_base
+    from sqlalchemy import Column, ForeignKey, Integer, String, Text
+    from sqlalchemy.orm import (
+        column_property,
+        declarative_base,
+        deferred,
+        relationship,
+    )
 
     Base = declarative_base()
 
+
     class User(Base):
-        __tablename__ = 'user'
+        __tablename__ = "user"
 
         id = Column(Integer, primary_key=True)
         name = Column(String)
@@ -60,8 +65,9 @@ objects but also relationships and SQL expressions::
 
         addresses = relationship("Address", back_populates="user")
 
+
     class Address(Base):
-        __tablename__ = 'address'
+        __tablename__ = "address"
 
         id = Column(Integer, primary_key=True)
         user_id = Column(ForeignKey("user.id"))
@@ -90,13 +96,17 @@ hybrid table style::
     # mapping attributes using declarative with imperative table
     # i.e. __table__
 
-    from sqlalchemy import Table
-    from sqlalchemy import Column, Integer, String, Text, ForeignKey
-    from sqlalchemy.orm import column_property, relationship, deferred
-    from sqlalchemy.orm import declarative_base
+    from sqlalchemy import Column, ForeignKey, Integer, String, Table, Text
+    from sqlalchemy.orm import (
+        column_property,
+        declarative_base,
+        deferred,
+        relationship,
+    )
 
     Base = declarative_base()
 
+
     class User(Base):
         __table__ = Table(
             "user",
@@ -104,13 +114,16 @@ hybrid table style::
             Column("id", Integer, primary_key=True),
             Column("name", String),
             Column("firstname", String(50)),
-            Column("lastname", String(50))
+            Column("lastname", String(50)),
         )
 
-        fullname = column_property(__table__.c.firstname + " " + __table__.c.lastname)
+        fullname = column_property(
+            __table__.c.firstname + " " + __table__.c.lastname
+        )
 
         addresses = relationship("Address", back_populates="user")
 
+
     class Address(Base):
         __table__ = Table(
             "address",
@@ -118,7 +131,7 @@ hybrid table style::
             Column("id", Integer, primary_key=True),
             Column("user_id", ForeignKey("user.id")),
             Column("email_address", String),
-            Column("address_statistics", Text)
+            Column("address_statistics", Text),
         )
 
         address_statistics = deferred(__table__.c.address_statistics)
@@ -168,15 +181,16 @@ The :paramref:`_orm.Mapper.version_id_col` and
 
     from datetime import datetime
 
+
     class Widget(Base):
-        __tablename__ = 'widgets'
+        __tablename__ = "widgets"
 
         id = Column(Integer, primary_key=True)
         timestamp = Column(DateTime, nullable=False)
 
         __mapper_args__ = {
-            'version_id_col': timestamp,
-            'version_id_generator': lambda v:datetime.now()
+            "version_id_col": timestamp,
+            "version_id_generator": lambda v: datetime.now(),
         }
 
 **Single Table Inheritance**
@@ -185,19 +199,20 @@ The :paramref:`_orm.Mapper.polymorphic_on` and
 :paramref:`_orm.Mapper.polymorphic_identity` parameters::
 
     class Person(Base):
-        __tablename__ = 'person'
+        __tablename__ = "person"
 
         person_id = Column(Integer, primary_key=True)
         type = Column(String, nullable=False)
 
         __mapper_args__ = dict(
             polymorphic_on=type,
-            polymorphic_identity="person"
+            polymorphic_identity="person",
         )
 
+
     class Employee(Person):
         __mapper_args__ = dict(
-            polymorphic_identity="employee"
+            polymorphic_identity="employee",
         )
 
 The ``__mapper_args__`` dictionary may be generated from a class-bound
@@ -268,31 +283,35 @@ be illustrated using :meth:`_orm.registry.mapped` as follows::
 
     reg = registry()
 
+
     class BaseOne:
         metadata = MetaData()
 
+
     class BaseTwo:
         metadata = MetaData()
 
+
     @reg.mapped
     class ClassOne:
-        __tablename__ = 't1'  # will use reg.metadata
+        __tablename__ = "t1"  # will use reg.metadata
 
         id = Column(Integer, primary_key=True)
 
+
     @reg.mapped
     class ClassTwo(BaseOne):
-        __tablename__ = 't1'  # will use BaseOne.metadata
+        __tablename__ = "t1"  # will use BaseOne.metadata
 
         id = Column(Integer, primary_key=True)
 
+
     @reg.mapped
     class ClassThree(BaseTwo):
-        __tablename__ = 't1'  # will use BaseTwo.metadata
+        __tablename__ = "t1"  # will use BaseTwo.metadata
 
         id = Column(Integer, primary_key=True)
 
-
 .. versionchanged:: 1.4.3  The :meth:`_orm.registry.mapped` decorator will
    honor an attribute named ``.metadata`` on the class as an alternate
    :class:`_schema.MetaData` collection to be used in place of the
@@ -322,24 +341,27 @@ subclasses to extend just from the special class::
         __abstract__ = True
 
         def some_helpful_method(self):
-            ""
+            """"""
 
         @declared_attr
         def __mapper_args__(cls):
-            return {"helpful mapper arguments":True}
+            return {"helpful mapper arguments": True}
+
 
     class MyMappedClass(SomeAbstractBase):
-        ""
+        pass
 
 One possible use of ``__abstract__`` is to use a distinct
 :class:`_schema.MetaData` for different bases::
 
     Base = declarative_base()
 
+
     class DefaultBase(Base):
         __abstract__ = True
         metadata = MetaData()
 
+
     class OtherBase(Base):
         __abstract__ = True
         metadata = MetaData()
@@ -352,7 +374,6 @@ created perhaps within distinct databases::
     DefaultBase.metadata.create_all(some_engine)
     OtherBase.metadata.create_all(some_other_engine)
 
-
 ``__table_cls__``
 ~~~~~~~~~~~~~~~~~
 
@@ -363,10 +384,7 @@ to a :class:`_schema.Table` that one generates here::
     class MyMixin:
         @classmethod
         def __table_cls__(cls, name, metadata_obj, *arg, **kw):
-            return Table(
-                "my_" + name,
-                metadata_obj, *arg, **kw
-            )
+            return Table(f"my_{name}", metadata_obj, *arg, **kw)
 
 The above mixin would cause all :class:`_schema.Table` objects generated to include
 the prefix ``"my_"``, followed by the name normally specified using the
@@ -386,15 +404,18 @@ such as, define as single-inheritance if there is no primary key present::
         @classmethod
         def __table_cls__(cls, *arg, **kw):
             for obj in arg[1:]:
-                if (isinstance(obj, Column) and obj.primary_key) or \
-                        isinstance(obj, PrimaryKeyConstraint):
+                if (isinstance(obj, Column) and obj.primary_key) or isinstance(
+                    obj, PrimaryKeyConstraint
+                ):
                     return Table(*arg, **kw)
 
             return None
 
+
     class Person(AutoTable, Base):
         id = Column(Integer, primary_key=True)
 
+
     class Employee(Person):
         employee_name = Column(String)
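
Assuming the recipe behaves as described, ``Employee`` receives no
:class:`_schema.Table` of its own and is instead mapped via single-table
inheritance against ``Person``; a rough sanity check might look like::

    # Employee shares the Table generated for Person; its column is added
    # to that same table by single-table inheritance
    assert Employee.__table__ is Person.__table__
    assert "employee_name" in Person.__table__.c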
 
index e78b9669862e9f77860027621e4520a17a2fa0f8..2e07646e43ef6f7e2baa5b62ce7ad95667195886 100644 (file)
@@ -16,20 +16,20 @@ or :func:`_orm.declarative_base` functions.
 
 An example of some commonly mixed-in idioms is below::
 
-    from sqlalchemy.orm import declarative_mixin
-    from sqlalchemy.orm import declared_attr
+    from sqlalchemy.orm import declarative_mixin, declared_attr
+
 
     @declarative_mixin
     class MyMixin:
-
         @declared_attr
         def __tablename__(cls):
             return cls.__name__.lower()
 
-        __table_args__ = {'mysql_engine': 'InnoDB'}
-        __mapper_args__= {'always_refresh': True}
+        __table_args__ = {"mysql_engine": "InnoDB"}
+        __mapper_args__ = {"always_refresh": True}
+
+        id = Column(Integer, primary_key=True)
 
-        id =  Column(Integer, primary_key=True)
 
     class MyModel(MyMixin, Base):
         name = Column(String(1000))
@@ -69,21 +69,22 @@ section can also be applied to the base class itself, for patterns that
 should apply to all classes derived from a particular base.  This is achieved
 using the ``cls`` argument of the :func:`_orm.declarative_base` function::
 
-    from sqlalchemy.orm import declared_attr
+    from sqlalchemy.orm import declarative_base, declared_attr
+
 
     class Base:
         @declared_attr
         def __tablename__(cls):
             return cls.__name__.lower()
 
-        __table_args__ = {'mysql_engine': 'InnoDB'}
+        __table_args__ = {"mysql_engine": "InnoDB"}
 
-        id =  Column(Integer, primary_key=True)
+        id = Column(Integer, primary_key=True)
 
-    from sqlalchemy.orm import declarative_base
 
     Base = declarative_base(cls=Base)
 
+
     class MyModel(Base):
         name = Column(String(1000))
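
Under this arrangement, ``MyModel`` picks up its table name, its ``id``
column and the InnoDB table option from the custom base; assuming the mapping
configures as expected, a quick check might look like::

    # the mixed-in __tablename__ rule lowercases the class name
    assert MyModel.__tablename__ == "mymodel"

    # the id column declared on the custom base is part of the mapped table
    assert "id" in MyModel.__table__.c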
 
@@ -101,10 +102,11 @@ declaration::
     class TimestampMixin:
         created_at = Column(DateTime, default=func.now())
 
+
     class MyModel(TimestampMixin, Base):
-        __tablename__ = 'test'
+        __tablename__ = "test"
 
-        id =  Column(Integer, primary_key=True)
+        id = Column(Integer, primary_key=True)
         name = Column(String(1000))
 
 Where above, all declarative classes that include ``TimestampMixin``
@@ -144,14 +146,16 @@ many classes can be defined as callables::
 
     from sqlalchemy.orm import declared_attr
 
+
     @declarative_mixin
     class HasRelatedDataMixin:
         @declared_attr
         def related_data(cls):
-            return deferred(Column(Text())
+            return deferred(Column(Text()))
+
 
     class User(HasRelatedDataMixin, Base):
-        __tablename__ = 'user'
+        __tablename__ = "user"
         id = Column(Integer, primary_key=True)
 
 Where above, the ``related_data`` class-level callable is executed at the
@@ -170,12 +174,13 @@ within the :class:`_orm.declared_attr`::
             def parent_id(cls):
                 return Column(Integer, ForeignKey(cls.id))
 
+
         class A(SelfReferentialMixin, Base):
-            __tablename__ = 'a'
+            __tablename__ = "a"
 
 
         class B(SelfReferentialMixin, Base):
-            __tablename__ = 'b'
+            __tablename__ = "b"
 
 Above, both classes ``A`` and ``B`` will contain columns ``id`` and
 ``parent_id``, where ``parent_id`` refers to the ``id`` column local to the
@@ -196,22 +201,25 @@ reference a common target class via many-to-one::
 
     @declarative_mixin
     class RefTargetMixin:
-        target_id = Column('target_id', ForeignKey('target.id'))
+        target_id = Column("target_id", ForeignKey("target.id"))
 
         @declared_attr
         def target(cls):
             return relationship("Target")
 
+
     class Foo(RefTargetMixin, Base):
-        __tablename__ = 'foo'
+        __tablename__ = "foo"
         id = Column(Integer, primary_key=True)
 
+
     class Bar(RefTargetMixin, Base):
-        __tablename__ = 'bar'
+        __tablename__ = "bar"
         id = Column(Integer, primary_key=True)
 
+
     class Target(Base):
-        __tablename__ = 'target'
+        __tablename__ = "target"
         id = Column(Integer, primary_key=True)
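
Each implementing class then carries its own independent copy of the
``target_id`` column and ``target`` relationship; assuming the default
declarative constructor, usage might look like::

    # both Foo and Bar expose a many-to-one .target towards Target
    f = Foo(target=Target())
    b = Bar(target=Target())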
 
 
@@ -232,16 +240,17 @@ Declarative will be using as it calls the methods on its own, thus using
 The canonical example is the primaryjoin condition that depends upon
 another mixed-in column::
 
-  @declarative_mixin
-  class RefTargetMixin:
+    @declarative_mixin
+    class RefTargetMixin:
         @declared_attr
         def target_id(cls):
-            return Column('target_id', ForeignKey('target.id'))
+            return Column("target_id", ForeignKey("target.id"))
 
         @declared_attr
         def target(cls):
-            return relationship(Target,
-                primaryjoin=Target.id==cls.target_id   # this is *incorrect*
+            return relationship(
+                Target,
+                primaryjoin=Target.id == cls.target_id,  # this is *incorrect*
             )
 
 Mapping a class using the above mixin, we will get an error like::
@@ -273,12 +282,12 @@ or alternatively, the string form (which ultimately generates a lambda)::
     class RefTargetMixin:
         @declared_attr
         def target_id(cls):
-            return Column('target_id', ForeignKey('target.id'))
+            return Column("target_id", ForeignKey("target.id"))
 
         @declared_attr
         def target(cls):
-            return relationship("Target",
-                primaryjoin="Target.id==%s.target_id" % cls.__name__
+            return relationship(
+                "Target", primaryjoin=f"Target.id=={cls.__name__}.target_id"
             )
 
 .. seealso::
@@ -297,11 +306,11 @@ requirement so that no reliance on copying is needed::
 
     @declarative_mixin
     class SomethingMixin:
-
         @declared_attr
         def dprop(cls):
             return deferred(Column(Integer))
 
+
     class Something(SomethingMixin, Base):
         __tablename__ = "something"
 
@@ -312,14 +321,12 @@ the :class:`_orm.declared_attr` is invoked::
     @declarative_mixin
     class SomethingMixin:
         x = Column(Integer)
-
         y = Column(Integer)
 
         @declared_attr
         def x_plus_y(cls):
             return column_property(cls.x + cls.y)
 
-
 .. versionchanged:: 1.0.0 mixin columns are copied to the final mapped class
    so that :class:`_orm.declared_attr` methods can access the actual column
    that will be mapped.
@@ -336,15 +343,18 @@ target a different type of child object.  Below is an
 :func:`.association_proxy` mixin example which provides a scalar list of
 string values to an implementing class::
 
-    from sqlalchemy import Column, Integer, ForeignKey, String
+    from sqlalchemy import Column, ForeignKey, Integer, String
     from sqlalchemy.ext.associationproxy import association_proxy
-    from sqlalchemy.orm import declarative_base
-    from sqlalchemy.orm import declarative_mixin
-    from sqlalchemy.orm import declared_attr
-    from sqlalchemy.orm import relationship
+    from sqlalchemy.orm import (
+        declarative_base,
+        declarative_mixin,
+        declared_attr,
+        relationship,
+    )
 
     Base = declarative_base()
 
+
     @declarative_mixin
     class HasStringCollection:
         @declared_attr
@@ -353,9 +363,12 @@ string values to an implementing class::
                 __tablename__ = cls.string_table_name
                 id = Column(Integer, primary_key=True)
                 value = Column(String(50), nullable=False)
-                parent_id = Column(Integer,
-                                ForeignKey('%s.id' % cls.__tablename__),
-                                nullable=False)
+                parent_id = Column(
+                    Integer,
+                    ForeignKey(f"{cls.__tablename__}.id"),
+                    nullable=False,
+                )
+
                 def __init__(self, value):
                     self.value = value
 
@@ -363,16 +376,18 @@ string values to an implementing class::
 
         @declared_attr
         def strings(cls):
-            return association_proxy('_strings', 'value')
+            return association_proxy("_strings", "value")
+
 
     class TypeA(HasStringCollection, Base):
-        __tablename__ = 'type_a'
-        string_table_name = 'type_a_strings'
+        __tablename__ = "type_a"
+        string_table_name = "type_a_strings"
         id = Column(Integer(), primary_key=True)
 
+
     class TypeB(HasStringCollection, Base):
-        __tablename__ = 'type_b'
-        string_table_name = 'type_b_strings'
+        __tablename__ = "type_b"
+        string_table_name = "type_b_strings"
         id = Column(Integer(), primary_key=True)
 
 Above, the ``HasStringCollection`` mixin produces a :func:`_orm.relationship`
@@ -386,8 +401,8 @@ attribute of each ``StringAttribute`` instance.
 ``TypeA`` or ``TypeB`` can be instantiated given the constructor
 argument ``strings``, a list of strings::
 
-    ta = TypeA(strings=['foo', 'bar'])
-    tb = TypeB(strings=['bat', 'bar'])
+    ta = TypeA(strings=["foo", "bar"])
+    tb = TypeB(strings=["bat", "bar"])
 
 This list will generate a collection
 of ``StringAttribute`` objects, which are persisted into a table that's
@@ -423,8 +438,8 @@ correct answer for each.
 For example, to create a mixin that gives every class a simple table
 name based on class name::
 
-    from sqlalchemy.orm import declarative_mixin
-    from sqlalchemy.orm import declared_attr
+    from sqlalchemy.orm import declarative_mixin, declared_attr
+
 
     @declarative_mixin
     class Tablename:
@@ -432,14 +447,16 @@ name based on class name::
         def __tablename__(cls):
             return cls.__name__.lower()
 
+
     class Person(Tablename, Base):
         id = Column(Integer, primary_key=True)
-        discriminator = Column('type', String(50))
-        __mapper_args__ = {'polymorphic_on': discriminator}
+        discriminator = Column("type", String(50))
+        __mapper_args__ = {"polymorphic_on": discriminator}
+
 
     class Engineer(Person):
         __tablename__ = None
-        __mapper_args__ = {'polymorphic_identity': 'engineer'}
+        __mapper_args__ = {"polymorphic_identity": "engineer"}
         primary_language = Column(String(50))
 
 Alternatively, we can modify our ``__tablename__`` function to return
@@ -447,9 +464,12 @@ Alternatively, we can modify our ``__tablename__`` function to return
 the effect of those subclasses being mapped with single table inheritance
 against the parent::
 
-    from sqlalchemy.orm import declarative_mixin
-    from sqlalchemy.orm import declared_attr
-    from sqlalchemy.orm import has_inherited_table
+    from sqlalchemy.orm import (
+        declarative_mixin,
+        declared_attr,
+        has_inherited_table,
+    )
+
 
     @declarative_mixin
     class Tablename:
@@ -459,14 +479,16 @@ against the parent::
                 return None
             return cls.__name__.lower()
 
+
     class Person(Tablename, Base):
         id = Column(Integer, primary_key=True)
-        discriminator = Column('type', String(50))
-        __mapper_args__ = {'polymorphic_on': discriminator}
+        discriminator = Column("type", String(50))
+        __mapper_args__ = {"polymorphic_on": discriminator}
+
 
     class Engineer(Person):
         primary_language = Column(String(50))
-        __mapper_args__ = {'polymorphic_identity': 'engineer'}
+        __mapper_args__ = {"polymorphic_identity": "engineer"}
 
 .. _mixin_inheritance_columns:
 
@@ -485,17 +507,19 @@ a primary key::
     class HasId:
         @declared_attr
         def id(cls):
-            return Column('id', Integer, primary_key=True)
+            return Column("id", Integer, primary_key=True)
+
 
     class Person(HasId, Base):
-        __tablename__ = 'person'
-        discriminator = Column('type', String(50))
-        __mapper_args__ = {'polymorphic_on': discriminator}
+        __tablename__ = "person"
+        discriminator = Column("type", String(50))
+        __mapper_args__ = {"polymorphic_on": discriminator}
+
 
     class Engineer(Person):
-        __tablename__ = 'engineer'
+        __tablename__ = "engineer"
         primary_language = Column(String(50))
-        __mapper_args__ = {'polymorphic_identity': 'engineer'}
+        __mapper_args__ = {"polymorphic_identity": "engineer"}
 
 It is usually the case in joined-table inheritance that we want distinctly
 named columns on each subclass.  However in this case, we may want to have
@@ -510,19 +534,21 @@ function should be invoked **for each class in the hierarchy**, in *almost*
         @declared_attr.cascading
         def id(cls):
             if has_inherited_table(cls):
-                return Column(ForeignKey('person.id'), primary_key=True)
+                return Column(ForeignKey("person.id"), primary_key=True)
             else:
                 return Column(Integer, primary_key=True)
 
+
     class Person(HasIdMixin, Base):
-        __tablename__ = 'person'
-        discriminator = Column('type', String(50))
-        __mapper_args__ = {'polymorphic_on': discriminator}
+        __tablename__ = "person"
+        discriminator = Column("type", String(50))
+        __mapper_args__ = {"polymorphic_on": discriminator}
+
 
     class Engineer(Person):
-        __tablename__ = 'engineer'
+        __tablename__ = "engineer"
         primary_language = Column(String(50))
-        __mapper_args__ = {'polymorphic_identity': 'engineer'}
+        __mapper_args__ = {"polymorphic_identity": "engineer"}
 
 .. warning::
 
@@ -549,19 +575,21 @@ define on the class itself. The
 here to create user-defined collation routines that pull
 from multiple collections::
 
-    from sqlalchemy.orm import declarative_mixin
-    from sqlalchemy.orm import declared_attr
+    from sqlalchemy.orm import declarative_mixin, declared_attr
+
 
     @declarative_mixin
     class MySQLSettings:
-        __table_args__ = {'mysql_engine':'InnoDB'}
+        __table_args__ = {"mysql_engine": "InnoDB"}
+
 
     @declarative_mixin
     class MyOtherMixin:
-        __table_args__ = {'info':'foo'}
+        __table_args__ = {"info": "foo"}
+
 
     class MyModel(MySQLSettings, MyOtherMixin, Base):
-        __tablename__='my_model'
+        __tablename__ = "my_model"
 
         @declared_attr
         def __table_args__(cls):
@@ -570,7 +598,7 @@ from multiple collections::
             args.update(MyOtherMixin.__table_args__)
             return args
 
-        id =  Column(Integer, primary_key=True)
+        id = Column(Integer, primary_key=True)
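
Since the ``declared_attr`` above merges both mixin dictionaries, the
``my_model`` table would presumably be created with arguments equivalent
to::

    {"mysql_engine": "InnoDB", "info": "foo"}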
 
 Creating Indexes with Mixins
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -581,13 +609,17 @@ establish it as part of ``__table_args__``::
 
     @declarative_mixin
     class MyMixin:
-        a =  Column(Integer)
-        b =  Column(Integer)
+        a = Column(Integer)
+        b = Column(Integer)
 
         @declared_attr
         def __table_args__(cls):
-            return (Index('test_idx_%s' % cls.__tablename__, 'a', 'b'),)
+            return (
+                Index(f"test_idx_{cls.__tablename__}", "a", "b"),
+            )
+
 
     class MyModel(MyMixin, Base):
-        __tablename__ = 'atable'
-        c =  Column(Integer,primary_key=True)
+        __tablename__ = "atable"
+        c = Column(Integer, primary_key=True)
+
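
For ``MyModel`` as declared here, the mixin would presumably emit an index
named after the table, roughly equivalent to declaring::

    Index("test_idx_atable", "a", "b")
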
index b171fc31771dbe03ec8a57cba0d96d06c26edaa9..e39456c52a41728c1dc91fc227ec718343089a52 100644 (file)
@@ -38,15 +38,16 @@ method::
 With the declarative base class, new mapped classes are declared as subclasses
 of the base::
 
-    from sqlalchemy import Column, Integer, String, ForeignKey
+    from sqlalchemy import Column, ForeignKey, Integer, String
     from sqlalchemy.orm import declarative_base
 
     # declarative base class
     Base = declarative_base()
 
+
     # an example mapping using the base
     class User(Base):
-        __tablename__ = 'user'
+        __tablename__ = "user"
 
         id = Column(Integer, primary_key=True)
         name = Column(String)
@@ -118,25 +119,25 @@ a decorator.  The :meth:`_orm.registry.mapped` function is a class decorator
 that can be applied to any Python class with no hierarchy in place.  The
 Python class otherwise is configured in declarative style normally::
 
-    from sqlalchemy import Column, Integer, String, Text, ForeignKey
-
-    from sqlalchemy.orm import registry
-    from sqlalchemy.orm import relationship
+    from sqlalchemy import Column, ForeignKey, Integer, String, Text
+    from sqlalchemy.orm import registry, relationship
 
     mapper_registry = registry()
 
+
     @mapper_registry.mapped
     class User:
-        __tablename__ = 'user'
+        __tablename__ = "user"
 
         id = Column(Integer, primary_key=True)
         name = Column(String)
 
         addresses = relationship("Address", back_populates="user")
 
+
     @mapper_registry.mapped
     class Address:
-        __tablename__ = 'address'
+        __tablename__ = "address"
 
         id = Column(Integer, primary_key=True)
         user_id = Column(ForeignKey("user.id"))
@@ -152,8 +153,10 @@ if the decorator is applied to that class directly.   For inheritance
 mappings, the decorator should be applied to each subclass::
 
     from sqlalchemy.orm import registry
+
     mapper_registry = registry()
 
+
     @mapper_registry.mapped
     class Person:
         __tablename__ = "person"
@@ -162,9 +165,8 @@ mappings, the decorator should be applied to each subclass::
         type = Column(String, nullable=False)
 
         __mapper_args__ = {
-
             "polymorphic_on": type,
-            "polymorphic_identity": "person"
+            "polymorphic_identity": "person",
         }
 
 
@@ -175,7 +177,7 @@ mappings, the decorator should be applied to each subclass::
         person_id = Column(ForeignKey("person.person_id"), primary_key=True)
 
         __mapper_args__ = {
-            "polymorphic_identity": "employee"
+            "polymorphic_identity": "employee",
         }
 
 Both the "declarative table" and "imperative table" styles of declarative
@@ -241,18 +243,11 @@ An example of a mapping using ``@dataclass`` using
 
     from __future__ import annotations
 
-    from dataclasses import dataclass
-    from dataclasses import field
-    from typing import List
-    from typing import Optional
+    from dataclasses import dataclass, field
+    from typing import List, Optional
 
-    from sqlalchemy import Column
-    from sqlalchemy import ForeignKey
-    from sqlalchemy import Integer
-    from sqlalchemy import String
-    from sqlalchemy import Table
-    from sqlalchemy.orm import registry
-    from sqlalchemy.orm import relationship
+    from sqlalchemy import Column, ForeignKey, Integer, String, Table
+    from sqlalchemy.orm import registry, relationship
 
     mapper_registry = registry()
 
@@ -274,12 +269,13 @@ An example of a mapping using ``@dataclass`` using
         nickname: Optional[str] = None
         addresses: List[Address] = field(default_factory=list)
 
-        __mapper_args__ = {   # type: ignore
-            "properties" : {
-                "addresses": relationship("Address")
+        __mapper_args__ = {  # type: ignore
+            "properties": {
+                "addresses": relationship("Address"),
             }
         }
 
+
     @mapper_registry.mapped
     @dataclass
     class Address:
@@ -326,16 +322,11 @@ association::
 
     from __future__ import annotations
 
-    from dataclasses import dataclass
-    from dataclasses import field
+    from dataclasses import dataclass, field
     from typing import List
 
-    from sqlalchemy import Column
-    from sqlalchemy import ForeignKey
-    from sqlalchemy import Integer
-    from sqlalchemy import String
-    from sqlalchemy.orm import registry
-    from sqlalchemy.orm import relationship
+    from sqlalchemy import Column, ForeignKey, Integer, String
+    from sqlalchemy.orm import registry, relationship
 
     mapper_registry = registry()
 
@@ -386,7 +377,7 @@ example at :ref:`orm_declarative_mixins_relationships`::
     class RefTargetMixin:
         @declared_attr
         def target_id(cls):
-            return Column('target_id', ForeignKey('target.id'))
+            return Column("target_id", ForeignKey("target.id"))
 
         @declared_attr
         def target(cls):
@@ -412,6 +403,7 @@ came from a mixin that is itself a dataclass, the form would be::
             default_factory=list, metadata={"sa": lambda: relationship("Address")}
         )
 
+
     @dataclass
     class AddressMixin:
         __tablename__ = "address"
@@ -426,13 +418,15 @@ came from a mixin that is itself a dataclass, the form would be::
             default=None, metadata={"sa": Column(String(50))}
         )
 
+
     @mapper_registry.mapped
     class User(UserMixin):
         pass
 
+
     @mapper_registry.mapped
     class Address(AddressMixin):
-      pass
+        pass
 
 .. versionadded:: 1.4.2  Added support for "declared attr" style mixin attributes,
    namely :func:`_orm.relationship` constructs as well as :class:`_schema.Column`
@@ -447,10 +441,10 @@ Example Three - attrs with Imperative Table
 A mapping using ``@attr.s``, in conjunction with imperative table::
 
     import attr
+    from sqlalchemy.orm import registry
 
     # other imports
 
-    from sqlalchemy.orm import registry
 
     mapper_registry = registry()
 
@@ -472,8 +466,10 @@ A mapping using ``@attr.s``, in conjunction with imperative table::
         nickname = attr.ib()
         addresses = attr.ib()
 
+
     # other classes...
 
+
 ``@dataclass`` and attrs_ mappings may also be used with classical mappings, i.e.
 with the :meth:`_orm.registry.map_imperatively` function.  See the section
 :ref:`orm_imperative_dataclasses` for a similar example.
index 3c28ee4e47d1b64174d82f4e095c4ba7088490e2..72a48078d2402922f864d2376df36f7165300dc1 100644 (file)
@@ -29,13 +29,14 @@ With the declarative base class, the typical form of mapping includes an
 attribute ``__tablename__`` that indicates the name of a :class:`_schema.Table`
 that should be generated along with the mapping::
 
-    from sqlalchemy import Column, Integer, String, ForeignKey
+    from sqlalchemy import Column, ForeignKey, Integer, String
     from sqlalchemy.orm import declarative_base
 
     Base = declarative_base()
 
+
     class User(Base):
-        __tablename__ = 'user'
+        __tablename__ = "user"
 
         id = Column(Integer, primary_key=True)
         name = Column(String)
@@ -114,29 +115,29 @@ The attribute can be specified in one of two forms. One is as a
 dictionary::
 
     class MyClass(Base):
-        __tablename__ = 'sometable'
-        __table_args__ = {'mysql_engine':'InnoDB'}
+        __tablename__ = "sometable"
+        __table_args__ = {"mysql_engine": "InnoDB"}
 
 The other, a tuple, where each argument is positional
 (usually constraints)::
 
     class MyClass(Base):
-        __tablename__ = 'sometable'
+        __tablename__ = "sometable"
         __table_args__ = (
-                ForeignKeyConstraint(['id'], ['remote_table.id']),
-                UniqueConstraint('foo'),
-                )
+            ForeignKeyConstraint(["id"], ["remote_table.id"]),
+            UniqueConstraint("foo"),
+        )
 
 Keyword arguments can be specified with the above form by
 specifying the last argument as a dictionary::
 
     class MyClass(Base):
-        __tablename__ = 'sometable'
+        __tablename__ = "sometable"
         __table_args__ = (
-                ForeignKeyConstraint(['id'], ['remote_table.id']),
-                UniqueConstraint('foo'),
-                {'autoload':True}
-                )
+            ForeignKeyConstraint(["id"], ["remote_table.id"]),
+            UniqueConstraint("foo"),
+            {"autoload": True},
+        )
 
 A class may also specify the ``__table_args__`` declarative attribute,
 as well as the ``__tablename__`` attribute, in a dynamic style using the
@@ -156,9 +157,8 @@ dictionary::
 
 
     class MyClass(Base):
-        __tablename__ = 'sometable'
-        __table_args__ = {'schema': 'some_schema'}
-
+        __tablename__ = "sometable"
+        __table_args__ = {"schema": "some_schema"}
 
 The schema name can also be applied to all :class:`_schema.Table` objects
 globally by using the :paramref:`_schema.MetaData.schema` parameter documented
@@ -167,15 +167,15 @@ may be constructed separately and passed either to :func:`_orm.registry`
 or :func:`_orm.declarative_base`::
 
     from sqlalchemy import MetaData
+
     metadata_obj = MetaData(schema="some_schema")
 
-    Base = declarative_base(metadata = metadata_obj)
+    Base = declarative_base(metadata=metadata_obj)
 
 
     class MyClass(Base):
         # will use "some_schema" by default
-        __tablename__ = 'sometable'
-
+        __tablename__ = "sometable"
 
 .. seealso::
 
@@ -191,7 +191,7 @@ The declarative table configuration allows the addition of new
 is that of simply assigning new :class:`_schema.Column` objects to the
 class::
 
-    MyClass.some_new_column = Column('data', Unicode)
+    MyClass.some_new_column = Column("data", Unicode)
 
 The above operation performed against a declarative class that has been
 mapped using the declarative base (note, not the decorator form of declarative)
@@ -231,9 +231,8 @@ object is produced separately and passed to the declarative process
 directly::
 
 
+    from sqlalchemy import Column, ForeignKey, Integer, String
     from sqlalchemy.orm import declarative_base
-    from sqlalchemy import Column, Integer, String, ForeignKey
-
 
     Base = declarative_base()
 
@@ -250,6 +249,7 @@ directly::
         Column("nickname", String),
     )
 
+
     # construct the User class using this table.
     class User(Base):
         __table__ = user_table
@@ -278,33 +278,40 @@ mapper configuration::
 
     class Person(Base):
         __table__ = Table(
-            'person',
+            "person",
             Base.metadata,
-            Column('id', Integer, primary_key=True),
-            Column('name', String(50)),
-            Column('type', String(50))
+            Column("id", Integer, primary_key=True),
+            Column("name", String(50)),
+            Column("type", String(50)),
         )
 
         __mapper_args__ = {
             "polymorphic_on": __table__.c.type,
-            "polymorhpic_identity": "person"
+            "polymorhpic_identity": "person",
         }
 
 The "imperative table" form is also used when a non-:class:`_schema.Table`
 construct, such as a :class:`_sql.Join` or :class:`_sql.Subquery` object,
 is to be mapped.  An example below::
 
-    from sqlalchemy import select, func
+    from sqlalchemy import func, select
 
-    subq = select(
-        func.count(orders.c.id).label('order_count'),
-        func.max(orders.c.price).label('highest_order'),
-        orders.c.customer_id
-    ).group_by(orders.c.customer_id).subquery()
+    subq = (
+        select(
+            func.count(orders.c.id).label("order_count"),
+            func.max(orders.c.price).label("highest_order"),
+            orders.c.customer_id,
+        )
+        .group_by(orders.c.customer_id)
+        .subquery()
+    )
+
+    customer_select = (
+        select(customers, subq)
+        .join_from(customers, subq, customers.c.id == subq.c.customer_id)
+        .subquery()
+    )
 
-    customer_select = select(customers, subq).join_from(
-        customers, subq, customers.c.id == subq.c.customer_id
-    ).subquery()
 
     class Customer(Base):
         __table__ = customer_select
@@ -337,13 +344,16 @@ use a declarative hybrid mapping, passing the
 :paramref:`_schema.Table.autoload_with` parameter to the
 :class:`_schema.Table`::
 
-    engine = create_engine("postgresql+psycopg2://user:pass@hostname/my_existing_database")
+    engine = create_engine(
+        "postgresql+psycopg2://user:pass@hostname/my_existing_database"
+    )
+
 
     class MyClass(Base):
         __table__ = Table(
-            'mytable',
+            "mytable",
             Base.metadata,
-            autoload_with=engine
+            autoload_with=engine,
         )
 
 A major downside of the above approach however is that it requires the database
@@ -364,22 +374,25 @@ the reflection process against a target database, and will integrate the
 results with the declarative table mapping process, that is, classes which
 use the ``__tablename__`` attribute::
 
-    from sqlalchemy.orm import declarative_base
     from sqlalchemy.ext.declarative import DeferredReflection
+    from sqlalchemy.orm import declarative_base
 
     Base = declarative_base()
 
+
     class Reflected(DeferredReflection):
         __abstract__ = True
 
+
     class Foo(Reflected, Base):
-        __tablename__ = 'foo'
+        __tablename__ = "foo"
         bars = relationship("Bar")
 
+
     class Bar(Reflected, Base):
-        __tablename__ = 'bar'
+        __tablename__ = "bar"
 
-        foo_id = Column(Integer, ForeignKey('foo.id'))
+        foo_id = Column(Integer, ForeignKey("foo.id"))
 
 Above, we create a mixin class ``Reflected`` that will serve as a base
 for classes in our declarative hierarchy that should become mapped when
@@ -387,7 +400,9 @@ the ``Reflected.prepare`` method is called.   The above mapping is not
 complete until we do so, given an :class:`_engine.Engine`::
 
 
-    engine = create_engine("postgresql+psycopg2://user:pass@hostname/my_existing_database")
+    engine = create_engine(
+        "postgresql+psycopg2://user:pass@hostname/my_existing_database"
+    )
     Reflected.prepare(engine)
 
 The purpose of the ``Reflected`` class is to define the scope at which
index b788c3b617287595b194c801af578a035d5db99c..2f26517b26d2db655ad6a76c2bd0a65dba18db7d 100644 (file)
@@ -24,13 +24,13 @@ Consider a many-to-many mapping between two classes, ``User`` and ``Keyword``.
 Each ``User`` can have any number of ``Keyword`` objects, and vice-versa
 (the many-to-many pattern is described at :ref:`relationships_many_to_many`)::
 
-    from sqlalchemy import Column, Integer, String, ForeignKey, Table
+    from sqlalchemy import Column, ForeignKey, Integer, String, Table
     from sqlalchemy.orm import declarative_base, relationship
 
     Base = declarative_base()
 
     class User(Base):
-        __tablename__ = 'user'
+        __tablename__ = "user"
         id = Column(Integer, primary_key=True)
         name = Column(String(64))
         kw = relationship("Keyword", secondary=lambda: userkeywords_table)
@@ -38,27 +38,29 @@ Each ``User`` can have any number of ``Keyword`` objects, and vice-versa
         def __init__(self, name):
             self.name = name
 
+
     class Keyword(Base):
-        __tablename__ = 'keyword'
+        __tablename__ = "keyword"
         id = Column(Integer, primary_key=True)
-        keyword = Column('keyword', String(64))
+        keyword = Column("keyword", String(64))
 
         def __init__(self, keyword):
             self.keyword = keyword
 
-    userkeywords_table = Table('userkeywords', Base.metadata,
-        Column('user_id', Integer, ForeignKey("user.id"),
-               primary_key=True),
-        Column('keyword_id', Integer, ForeignKey("keyword.id"),
-               primary_key=True)
+
+    userkeywords_table = Table(
+        "userkeywords",
+        Base.metadata,
+        Column("user_id", Integer, ForeignKey("user.id"), primary_key=True),
+        Column("keyword_id", Integer, ForeignKey("keyword.id"), primary_key=True),
     )
 
 Reading and manipulating the collection of "keyword" strings associated
 with ``User`` requires traversal from each collection element to the ``.keyword``
 attribute, which can be awkward::
 
-    >>> user = User('jek')
-    >>> user.kw.append(Keyword('cheese-inspector'))
+    >>> user = User("jek")
+    >>> user.kw.append(Keyword("cheese-inspector"))
     >>> print(user.kw)
     [<__main__.Keyword object at 0x12bf830>]
     >>> print(user.kw[0].keyword)
@@ -72,8 +74,9 @@ value of ``.keyword`` associated with each ``Keyword`` object::
 
     from sqlalchemy.ext.associationproxy import association_proxy
 
+
     class User(Base):
-        __tablename__ = 'user'
+        __tablename__ = "user"
         id = Column(Integer, primary_key=True)
         name = Column(String(64))
         kw = relationship("Keyword", secondary=lambda: userkeywords_table)
@@ -82,17 +85,17 @@ value of ``.keyword`` associated with each ``Keyword`` object::
             self.name = name
 
         # proxy the 'keyword' attribute from the 'kw' relationship
-        keywords = association_proxy('kw', 'keyword')
+        keywords = association_proxy("kw", "keyword")
 
 We can now reference the ``.keywords`` collection as a listing of strings,
 which is both readable and writable.  New ``Keyword`` objects are created
 for us transparently::
 
-    >>> user = User('jek')
-    >>> user.keywords.append('cheese-inspector')
+    >>> user = User("jek")
+    >>> user.keywords.append("cheese-inspector")
     >>> user.keywords
     ['cheese-inspector']
-    >>> user.keywords.append('snack ninja')
+    >>> user.keywords.append("snack ninja")
     >>> user.kw
     [<__main__.Keyword object at 0x12cdd30>, <__main__.Keyword object at 0x12cde30>]
 
@@ -121,11 +124,11 @@ assignment event) is intercepted by the association proxy, it instantiates a
 new instance of the "intermediary" object using its constructor, passing as a
 single argument the given value. In our example above, an operation like::
 
-    user.keywords.append('cheese-inspector')
+    user.keywords.append("cheese-inspector")
 
 Is translated by the association proxy into the operation::
 
-    user.kw.append(Keyword('cheese-inspector'))
+    user.kw.append(Keyword("cheese-inspector"))
 
 The example works here because we have designed the constructor for ``Keyword``
 to accept a single positional argument, ``keyword``.   For those cases where a
@@ -138,8 +141,9 @@ singular argument.  Below we illustrate this using a lambda as is typical::
         # ...
 
         # use Keyword(keyword=kw) on append() events
-        keywords = association_proxy('kw', 'keyword',
-                        creator=lambda kw: Keyword(keyword=kw))
+        keywords = association_proxy(
+            "kw", "keyword", creator=lambda kw: Keyword(keyword=kw)
+        )
 
 The ``creator`` function accepts a single argument in the case of a list-
 or set-based collection, or a scalar attribute.  In the case of a dictionary-based
@@ -166,35 +170,36 @@ create an association proxy on the ``User`` class called
 collection of ``User`` to the ``.keyword`` attribute present on each
 ``UserKeyword``::
 
-    from sqlalchemy import Column, Integer, String, ForeignKey
+    from sqlalchemy import Column, ForeignKey, Integer, String
     from sqlalchemy.ext.associationproxy import association_proxy
     from sqlalchemy.orm import backref, declarative_base, relationship
 
     Base = declarative_base()
 
+
     class User(Base):
-        __tablename__ = 'user'
+        __tablename__ = "user"
         id = Column(Integer, primary_key=True)
         name = Column(String(64))
 
         # association proxy of "user_keywords" collection
         # to "keyword" attribute
-        keywords = association_proxy('user_keywords', 'keyword')
+        keywords = association_proxy("user_keywords", "keyword")
 
         def __init__(self, name):
             self.name = name
 
+
     class UserKeyword(Base):
-        __tablename__ = 'user_keyword'
-        user_id = Column(Integer, ForeignKey('user.id'), primary_key=True)
-        keyword_id = Column(Integer, ForeignKey('keyword.id'), primary_key=True)
+        __tablename__ = "user_keyword"
+        user_id = Column(Integer, ForeignKey("user.id"), primary_key=True)
+        keyword_id = Column(Integer, ForeignKey("keyword.id"), primary_key=True)
         special_key = Column(String(50))
 
         # bidirectional attribute/collection of "user"/"user_keywords"
-        user = relationship(User,
-                    backref=backref("user_keywords",
-                                    cascade="all, delete-orphan")
-                )
+        user = relationship(
+            User, backref=backref("user_keywords", cascade="all, delete-orphan")
+        )
 
         # reference to the "Keyword" object
         keyword = relationship("Keyword")
@@ -204,24 +209,25 @@ collection of ``User`` to the ``.keyword`` attribute present on each
             self.keyword = keyword
             self.special_key = special_key
 
+
     class Keyword(Base):
-        __tablename__ = 'keyword'
+        __tablename__ = "keyword"
         id = Column(Integer, primary_key=True)
-        keyword = Column('keyword', String(64))
+        keyword = Column("keyword", String(64))
 
         def __init__(self, keyword):
             self.keyword = keyword
 
         def __repr__(self):
-            return 'Keyword(%s)' % repr(self.keyword)
+            return "Keyword(%s)" % repr(self.keyword)
 
 With the above configuration, we can operate upon the ``.keywords`` collection
 of each ``User`` object, each of which exposes a collection of ``Keyword``
 objects that are obtained from the underlying ``UserKeyword`` elements::
 
 
-    >>> user = User('log')
-    >>> for kw in (Keyword('new_from_blammo'), Keyword('its_big')):
+    >>> user = User("log")
+    >>> for kw in (Keyword("new_from_blammo"), Keyword("its_big")):
     ...     user.keywords.append(kw)
     ...
     >>> print(user.keywords)
@@ -232,7 +238,7 @@ This example is in contrast to the example illustrated previously at
 a collection of strings, rather than a collection of composed objects.
 In this case, each ``.keywords.append()`` operation is equivalent to::
 
-    >>> user.user_keywords.append(UserKeyword(Keyword('its_heavy')))
+    >>> user.user_keywords.append(UserKeyword(Keyword("its_heavy")))
 
 The ``UserKeyword`` association object has two attributes that are both
 populated within the scope of the ``append()`` operation of the association
@@ -254,7 +260,7 @@ three attributes, wherein the assignment of ``.user`` during
 construction, has the effect of appending the new ``UserKeyword`` to
 the ``User.user_keywords`` collection (via the relationship)::
 
-    >>> UserKeyword(Keyword('its_wood'), user, special_key='my special key')
+    >>> UserKeyword(Keyword("its_wood"), user, special_key="my special key")
 
 The association proxy returns to us a collection of ``Keyword`` objects represented
 by all these operations::
@@ -285,63 +291,69 @@ argument will be used as the key for the dictionary.   We then apply a ``creator
 argument to the ``User.keywords`` proxy so that these values are assigned appropriately
 when new elements are added to the dictionary::
 
-    from sqlalchemy import Column, Integer, String, ForeignKey
+    from sqlalchemy import Column, ForeignKey, Integer, String
     from sqlalchemy.ext.associationproxy import association_proxy
     from sqlalchemy.orm import backref, declarative_base, relationship
     from sqlalchemy.orm.collections import attribute_mapped_collection
 
     Base = declarative_base()
 
+
     class User(Base):
-        __tablename__ = 'user'
+        __tablename__ = "user"
         id = Column(Integer, primary_key=True)
         name = Column(String(64))
 
         # proxy to 'user_keywords', instantiating UserKeyword
         # assigning the new key to 'special_key', values to
         # 'keyword'.
-        keywords = association_proxy('user_keywords', 'keyword',
-                        creator=lambda k, v:
-                                    UserKeyword(special_key=k, keyword=v)
-                    )
+        keywords = association_proxy(
+            "user_keywords",
+            "keyword",
+            creator=lambda k, v: UserKeyword(special_key=k, keyword=v),
+        )
 
         def __init__(self, name):
             self.name = name
 
+
     class UserKeyword(Base):
-        __tablename__ = 'user_keyword'
-        user_id = Column(Integer, ForeignKey('user.id'), primary_key=True)
-        keyword_id = Column(Integer, ForeignKey('keyword.id'), primary_key=True)
+        __tablename__ = "user_keyword"
+        user_id = Column(Integer, ForeignKey("user.id"), primary_key=True)
+        keyword_id = Column(Integer, ForeignKey("keyword.id"), primary_key=True)
         special_key = Column(String)
 
         # bidirectional user/user_keywords relationships, mapping
         # user_keywords with a dictionary against "special_key" as key.
-        user = relationship(User, backref=backref(
-                        "user_keywords",
-                        collection_class=attribute_mapped_collection("special_key"),
-                        cascade="all, delete-orphan"
-                        )
-                    )
+        user = relationship(
+            User,
+            backref=backref(
+                "user_keywords",
+                collection_class=attribute_mapped_collection("special_key"),
+                cascade="all, delete-orphan",
+            ),
+        )
         keyword = relationship("Keyword")
 
+
     class Keyword(Base):
-        __tablename__ = 'keyword'
+        __tablename__ = "keyword"
         id = Column(Integer, primary_key=True)
-        keyword = Column('keyword', String(64))
+        keyword = Column("keyword", String(64))
 
         def __init__(self, keyword):
             self.keyword = keyword
 
         def __repr__(self):
-            return 'Keyword(%s)' % repr(self.keyword)
+            return "Keyword(%s)" % repr(self.keyword)
 
 We illustrate the ``.keywords`` collection as a dictionary, mapping the
 ``UserKeyword.special_key`` value to ``Keyword`` objects::
 
-    >>> user = User('log')
+    >>> user = User("log")
 
-    >>> user.keywords['sk1'] = Keyword('kw1')
-    >>> user.keywords['sk2'] = Keyword('kw2')
+    >>> user.keywords["sk1"] = Keyword("kw1")
+    >>> user.keywords["sk2"] = Keyword("kw2")
 
     >>> print(user.keywords)
     {'sk1': Keyword('kw1'), 'sk2': Keyword('kw2')}
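
Individual entries are presumably readable back through the same dictionary
interface, returning the proxied ``Keyword`` objects::

    >>> user.keywords["sk1"]
    Keyword('kw1')
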
@@ -360,24 +372,25 @@ and ``Keyword`` classes are entirely concealed.  This is achieved by building
 an association proxy on ``User`` that refers to an association proxy
 present on ``UserKeyword``::
 
-    from sqlalchemy import Column, Integer, String, ForeignKey
+    from sqlalchemy import Column, ForeignKey, Integer, String
     from sqlalchemy.ext.associationproxy import association_proxy
     from sqlalchemy.orm import backref, declarative_base, relationship
     from sqlalchemy.orm.collections import attribute_mapped_collection
 
     Base = declarative_base()
 
+
     class User(Base):
-        __tablename__ = 'user'
+        __tablename__ = "user"
         id = Column(Integer, primary_key=True)
         name = Column(String(64))
 
         # the same 'user_keywords'->'keyword' proxy as in
         # the basic dictionary example.
         keywords = association_proxy(
-            'user_keywords',
-            'keyword',
-            creator=lambda k, v: UserKeyword(special_key=k, keyword=v)
+            "user_keywords",
+            "keyword",
+            creator=lambda k, v: UserKeyword(special_key=k, keyword=v),
         )
 
         # another proxy that is directly column-targeted
@@ -386,18 +399,19 @@ present on ``UserKeyword``::
         def __init__(self, name):
             self.name = name
 
+
     class UserKeyword(Base):
-        __tablename__ = 'user_keyword'
-        user_id = Column(ForeignKey('user.id'), primary_key=True)
-        keyword_id = Column(ForeignKey('keyword.id'), primary_key=True)
+        __tablename__ = "user_keyword"
+        user_id = Column(ForeignKey("user.id"), primary_key=True)
+        keyword_id = Column(ForeignKey("keyword.id"), primary_key=True)
         special_key = Column(String)
         user = relationship(
             User,
             backref=backref(
                 "user_keywords",
                 collection_class=attribute_mapped_collection("special_key"),
-                cascade="all, delete-orphan"
-            )
+                cascade="all, delete-orphan",
+            ),
         )
 
         # the relationship to Keyword is now called
@@ -406,17 +420,17 @@ present on ``UserKeyword``::
 
         # 'keyword' is changed to be a proxy to the
         # 'keyword' attribute of 'Keyword'
-        keyword = association_proxy('kw', 'keyword')
+        keyword = association_proxy("kw", "keyword")
+
 
     class Keyword(Base):
-        __tablename__ = 'keyword'
+        __tablename__ = "keyword"
         id = Column(Integer, primary_key=True)
-        keyword = Column('keyword', String(64))
+        keyword = Column("keyword", String(64))
 
         def __init__(self, keyword):
             self.keyword = keyword
 
-
 ``User.keywords`` is now a dictionary of string to string, where
 ``UserKeyword`` and ``Keyword`` objects are created and removed for us
 transparently using the association proxy. In the example below, we illustrate
@@ -526,23 +540,22 @@ Cascading Scalar Deletes
 Given a mapping as::
 
     class A(Base):
-        __tablename__ = 'test_a'
+        __tablename__ = "test_a"
         id = Column(Integer, primary_key=True)
-        ab = relationship(
-            'AB', backref='a', uselist=False)
+        ab = relationship("AB", backref="a", uselist=False)
         b = association_proxy(
-            'ab', 'b', creator=lambda b: AB(b=b),
-            cascade_scalar_deletes=True)
+            "ab", "b", creator=lambda b: AB(b=b), cascade_scalar_deletes=True
+        )
 
 
     class B(Base):
-        __tablename__ = 'test_b'
+        __tablename__ = "test_b"
         id = Column(Integer, primary_key=True)
-        ab = relationship('AB', backref='b', cascade='all, delete-orphan')
+        ab = relationship("AB", backref="b", cascade="all, delete-orphan")
 
 
     class AB(Base):
-        __tablename__ = 'test_ab'
+        __tablename__ = "test_ab"
         a_id = Column(Integer, ForeignKey(A.id), primary_key=True)
         b_id = Column(Integer, ForeignKey(B.id), primary_key=True)
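
With ``cascade_scalar_deletes=True`` on the proxy, setting the proxied scalar
to ``None`` presumably removes the intermediary ``AB`` object as well,
roughly::

    a = A()
    a.b = B()   # the creator produces an AB linking a to the new B
    a.b = None  # cascade_scalar_deletes marks that AB for deletion too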
 
index 82ba7cabb2b22f6be9e90c67ce519b4407026111..034ace01de072d9466600bcb32da0ee89706694e 100644 (file)
@@ -71,9 +71,11 @@ to deliver a streaming server-side :class:`_asyncio.AsyncResult`::
 
     from sqlalchemy.ext.asyncio import create_async_engine
 
+
     async def async_main():
         engine = create_async_engine(
-            "postgresql+asyncpg://scott:tiger@localhost/test", echo=True,
+            "postgresql+asyncpg://scott:tiger@localhost/test",
+            echo=True,
         )
 
         async with engine.begin() as conn:
@@ -85,7 +87,6 @@ to deliver a streaming server-side :class:`_asyncio.AsyncResult`::
             )
 
         async with engine.connect() as conn:
-
             # select a Result, which will be delivered with buffered
             # results
             result = await conn.execute(select(t1).where(t1.c.name == "some name 1"))
@@ -96,6 +97,7 @@ to deliver a streaming server-side :class:`_asyncio.AsyncResult`::
         # clean-up pooled connections
         await engine.dispose()
 
+
     asyncio.run(async_main())
 
 Above, the :meth:`_asyncio.AsyncConnection.run_sync` method may be used to
@@ -123,7 +125,7 @@ cursor and provides an async/await API, such as an async iterator::
         async_result = await conn.stream(select(t1))
 
         async for row in async_result:
-            print("row: %s" % (row, ))
+            print("row: %s" % (row,))
 
 .. _asyncio_orm:
 
@@ -140,19 +142,17 @@ illustrates a complete example including mapper and session configuration::
 
     import asyncio
 
-    from sqlalchemy import Column
-    from sqlalchemy import DateTime
-    from sqlalchemy import ForeignKey
-    from sqlalchemy import func
-    from sqlalchemy import Integer
-    from sqlalchemy import String
-    from sqlalchemy.ext.asyncio import AsyncSession
-    from sqlalchemy.ext.asyncio import async_sessionmaker
-    from sqlalchemy.ext.asyncio import create_async_engine
-    from sqlalchemy.future import select
-    from sqlalchemy.orm import declarative_base
-    from sqlalchemy.orm import relationship
-    from sqlalchemy.orm import selectinload
+    from sqlalchemy import (
+        Column,
+        DateTime,
+        ForeignKey,
+        Integer,
+        String,
+        func,
+        select,
+    )
+    from sqlalchemy.ext.asyncio import async_sessionmaker, create_async_engine
+    from sqlalchemy.orm import declarative_base, relationship, selectinload
 
     Base = declarative_base()
 
@@ -282,12 +282,9 @@ prevent this:
       async_session = AsyncSession(engine, expire_on_commit=False)
 
       # sessionmaker version
-      async_session = async_sessionmaker(
-          engine, expire_on_commit=False
-      )
+      async_session = async_sessionmaker(engine, expire_on_commit=False)
 
       async with async_session() as session:
-
           result = await session.execute(select(A).order_by(A.id))
 
           a1 = result.scalars().first()
@@ -392,8 +389,9 @@ attribute accesses within a separate function::
 
     import asyncio
 
-    from sqlalchemy.ext.asyncio import create_async_engine
-    from sqlalchemy.ext.asyncio import AsyncSession
+    from sqlalchemy import select
+    from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
+
 
     def fetch_and_update_objects(session):
         """run traditional sync-style ORM code in a function that will be
@@ -422,7 +420,8 @@ attribute accesses within a separate function::
 
     async def async_main():
         engine = create_async_engine(
-            "postgresql+asyncpg://scott:tiger@localhost/test", echo=True,
+            "postgresql+asyncpg://scott:tiger@localhost/test",
+            echo=True,
         )
         async with engine.begin() as conn:
             await conn.run_sync(Base.metadata.drop_all)
@@ -446,6 +445,7 @@ attribute accesses within a separate function::
         # clean-up pooled connections
         await engine.dispose()
 
+
     asyncio.run(async_main())
 
 The above approach of running certain functions within a "sync" runner
@@ -522,18 +522,15 @@ constructs are illustrated below::
 
     import asyncio
 
-    from sqlalchemy import text
+    from sqlalchemy import event, text
     from sqlalchemy.engine import Engine
-    from sqlalchemy import event
-    from sqlalchemy.ext.asyncio import AsyncSession
-    from sqlalchemy.ext.asyncio import create_async_engine
+    from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
     from sqlalchemy.orm import Session
 
     ## Core events ##
 
-    engine = create_async_engine(
-        "postgresql+asyncpg://scott:tiger@localhost:5432/test"
-    )
+    engine = create_async_engine("postgresql+asyncpg://scott:tiger@localhost:5432/test")
+
 
     # connect event on instance of Engine
     @event.listens_for(engine.sync_engine, "connect")
@@ -545,10 +542,15 @@ constructs are illustrated below::
         cursor.execute("select 'execute from event'")
         print(cursor.fetchone()[0])
 
+
     # before_execute event on all Engine instances
     @event.listens_for(Engine, "before_execute")
     def my_before_execute(
-        conn, clauseelement, multiparams, params, execution_options
+        conn,
+        clauseelement,
+        multiparams,
+        params,
+        execution_options,
     ):
         print("before execute!")
 
@@ -557,6 +559,7 @@ constructs are illustrated below::
 
     session = AsyncSession(engine)
 
+
     # before_commit event on instance of Session
     @event.listens_for(session.sync_session, "before_commit")
     def my_before_commit(session):
@@ -569,11 +572,13 @@ constructs are illustrated below::
         result = connection.execute(text("select 'execute from event'"))
         print(result.first())
 
+
     # after_commit event on all Session instances
     @event.listens_for(Session, "after_commit")
     def my_after_commit(session):
         print("after commit!")
 
+
     async def go():
         await session.execute(text("select 1"))
         await session.commit()
@@ -581,8 +586,10 @@ constructs are illustrated below::
         await session.close()
         await engine.dispose()
 
+
     asyncio.run(go())
 
+
 The above example prints something along the lines of::
 
     New DBAPI connection: <AdaptedConnection <asyncpg.connection.Connection ...>>
@@ -663,15 +670,18 @@ method.  The given function itself does not need to be declared as ``async``;
 it's perfectly fine for it to be a Python ``lambda:``, as the awaitable it
 returns will be invoked once it has been returned::
 
-    from sqlalchemy.ext.asyncio import create_async_engine
     from sqlalchemy import event
+    from sqlalchemy.ext.asyncio import create_async_engine
 
     engine = create_async_engine(...)
 
+
     @event.listens_for(engine.sync_engine, "connect")
     def register_custom_types(dbapi_connection, ...):
         dbapi_connection.run_async(
-            lambda connection: connection.set_type_codec('MyCustomType', encoder, decoder, ...)
+            lambda connection: connection.set_type_codec(
+                "MyCustomType", encoder, decoder, ...
+            )
         )
 
 Above, the object passed to the ``register_custom_types`` event handler
@@ -702,12 +712,14 @@ If the same engine must be shared between different loops, it should be configured
 to disable pooling using :class:`~sqlalchemy.pool.NullPool`, preventing the Engine
 from using any connection more than once::
 
+    from sqlalchemy.ext.asyncio import create_async_engine
     from sqlalchemy.pool import NullPool
+
     engine = create_async_engine(
-        "postgresql+asyncpg://user:pass@host/dbname", poolclass=NullPool
+        "postgresql+asyncpg://user:pass@host/dbname",
+        poolclass=NullPool,
     )
 
-
 .. _asyncio_scoped_session:
 
 Using asyncio scoped session
@@ -720,13 +732,19 @@ constructor::
 
     from asyncio import current_task
 
-    from sqlalchemy.ext.asyncio import async_sessionmaker
-    from sqlalchemy.ext.asyncio import async_scoped_session
-    from sqlalchemy.ext.asyncio import AsyncSession
-
-    async_session_factory = async_sessionmaker(some_async_engine, expire_on_commit=False)
-    AsyncScopedSession = async_scoped_session(async_session_factory, scopefunc=current_task)
+    from sqlalchemy.ext.asyncio import (
+        async_scoped_session,
+        async_sessionmaker,
+    )
 
+    async_session_factory = async_sessionmaker(
+        some_async_engine,
+        expire_on_commit=False,
+    )
+    AsyncScopedSession = async_scoped_session(
+        async_session_factory,
+        scopefunc=current_task,
+    )
     some_async_session = AsyncScopedSession()
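
As with the synchronous scoped session, the task-local scope presumably needs
an explicit tear-down once the task is done::

    # dispose of the Session scoped to the current task
    await AsyncScopedSession.remove()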
 
 :class:`_asyncio.async_scoped_session` also includes **proxy
@@ -763,13 +781,11 @@ leveraging the :meth:`_asyncio.AsyncConnection.run_sync` method of
 
     import asyncio
 
-    from sqlalchemy.ext.asyncio import create_async_engine
-    from sqlalchemy.ext.asyncio import AsyncSession
     from sqlalchemy import inspect
+    from sqlalchemy.ext.asyncio import create_async_engine
+
+    engine = create_async_engine("postgresql+asyncpg://scott:tiger@localhost/test")
 
-    engine = create_async_engine(
-      "postgresql+asyncpg://scott:tiger@localhost/test"
-    )
 
     def use_inspector(conn):
         inspector = inspect(conn)
@@ -778,10 +794,12 @@ leveraging the :meth:`_asyncio.AsyncConnection.run_sync` method of
         # return any value to the caller
         return inspector.get_table_names()
 
+
     async def async_main():
         async with engine.connect() as conn:
             tables = await conn.run_sync(use_inspector)
 
+
     asyncio.run(async_main())
 
 .. seealso::
index 4751fef3638c02d5c4b0791f8d2c3ed77b9f32bb..f22e28fa5acf5d5eb395b9553d2a677a891c9f78 100644 (file)
@@ -57,15 +57,15 @@ query build-up looks like the following::
 
     from sqlalchemy import bindparam
 
-    def search_for_user(session, username, email=None):
 
+    def search_for_user(session, username, email=None):
         baked_query = bakery(lambda session: session.query(User))
-        baked_query += lambda q: q.filter(User.name == bindparam('username'))
+        baked_query += lambda q: q.filter(User.name == bindparam("username"))
 
         baked_query += lambda q: q.order_by(User.id)
 
         if email:
-            baked_query += lambda q: q.filter(User.email == bindparam('email'))
+            baked_query += lambda q: q.filter(User.email == bindparam("email"))
 
         result = baked_query(session).params(username=username, email=email).all()
 
@@ -130,7 +130,7 @@ compared to the equivalent "baked" query::
     s = Session(bind=engine)
     for id_ in random.sample(ids, n):
         q = bakery(lambda s: s.query(Customer))
-        q += lambda q: q.filter(Customer.id == bindparam('id'))
+        q += lambda q: q.filter(Customer.id == bindparam("id"))
         q(s).params(id=id_).one()
 
 The difference in Python function call count for an iteration of 10000
@@ -178,9 +178,10 @@ just building up the query, and removing its :class:`.Session` by calling
 
     my_simple_cache = {}
 
+
     def lookup(session, id_argument):
         if "my_key" not in my_simple_cache:
-            query = session.query(Model).filter(Model.id == bindparam('id'))
+            query = session.query(Model).filter(Model.id == bindparam("id"))
             my_simple_cache["my_key"] = query.with_session(None)
         else:
             query = my_simple_cache["my_key"].with_session(session)
@@ -213,9 +214,8 @@ Our example becomes::
     my_simple_cache = {}
 
     def lookup(session, id_argument):
-
         if "my_key" not in my_simple_cache:
-            query = session.query(Model).filter(Model.id == bindparam('id'))
+            query = session.query(Model).filter(Model.id == bindparam("id"))
             my_simple_cache["my_key"] = query.with_session(None).bake()
         else:
             query = my_simple_cache["my_key"].with_session(session)
@@ -231,9 +231,10 @@ a straightforward improvement upon the simple "reuse a query" approach::
 
     bakery = baked.bakery()
 
+
     def lookup(session, id_argument):
         def create_model_query(session):
-            return session.query(Model).filter(Model.id == bindparam('id'))
+            return session.query(Model).filter(Model.id == bindparam("id"))
 
         parameterized_query = bakery.bake(create_model_query)
         return parameterized_query(session).params(id=id_argument).all()
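 
 For example (a rough sketch; ``session`` and the mapped ``Model`` class are
 assumed from the surrounding discussion, and the id values are illustrative),
 each call runs through the same cached compilation, with only the bound
 parameter changing::
 
     # the first call bakes and caches the compiled statement;
     # subsequent calls reuse it
     first_result = lookup(session, 12345)
     second_result = lookup(session, 67890)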
@@ -256,6 +257,7 @@ query on a conditional basis::
 
     my_simple_cache = {}
 
+
     def lookup(session, id_argument, include_frobnizzle=False):
         if include_frobnizzle:
             cache_key = "my_key_with_frobnizzle"
@@ -263,7 +265,7 @@ query on a conditional basis::
             cache_key = "my_key_without_frobnizzle"
 
         if cache_key not in my_simple_cache:
-            query = session.query(Model).filter(Model.id == bindparam('id'))
+            query = session.query(Model).filter(Model.id == bindparam("id"))
             if include_frobnizzle:
                 query = query.filter(Model.frobnizzle == True)
 
@@ -284,9 +286,10 @@ into a direct use of "bakery" as follows::
 
     bakery = baked.bakery()
 
+
     def lookup(session, id_argument, include_frobnizzle=False):
         def create_model_query(session):
-            return session.query(Model).filter(Model.id == bindparam('id'))
+            return session.query(Model).filter(Model.id == bindparam("id"))
 
         parameterized_query = bakery.bake(create_model_query)
 
@@ -295,7 +298,8 @@ into a direct use of "bakery" as follows::
                 return query.filter(Model.frobnizzle == True)
 
             parameterized_query = parameterized_query.with_criteria(
-                include_frobnizzle_in_query)
+                include_frobnizzle_in_query
+            )
 
         return parameterized_query(session).params(id=id_argument).all()
 
@@ -315,10 +319,11 @@ means to reduce verbosity::
 
     bakery = baked.bakery()
 
+
     def lookup(session, id_argument, include_frobnizzle=False):
         parameterized_query = bakery.bake(
-            lambda s: s.query(Model).filter(Model.id == bindparam('id'))
-          )
+            lambda s: s.query(Model).filter(Model.id == bindparam("id"))
+        )
 
         if include_frobnizzle:
             parameterized_query += lambda q: q.filter(Model.frobnizzle == True)
@@ -358,10 +363,10 @@ statement compilation time::
 
     baked_query = bakery(lambda session: session.query(User))
     baked_query += lambda q: q.filter(
-      User.name.in_(bindparam('username', expanding=True)))
+        User.name.in_(bindparam("username", expanding=True))
+    )
 
-    result = baked_query.with_session(session).params(
-      username=['ed', 'fred']).all()
+    result = baked_query.with_session(session).params(username=["ed", "fred"]).all()
 
 .. seealso::
 
@@ -388,8 +393,7 @@ of the baked query::
 
     # select a correlated subquery in the top columns list,
     # we have the "session" argument, pass that
-    my_q = bakery(
-      lambda s: s.query(Address.id, my_subq.to_query(s).as_scalar()))
+    my_q = bakery(lambda s: s.query(Address.id, my_subq.to_query(s).as_scalar()))
 
     # use a correlated subquery in some of the criteria, we have
     # the "query" argument, pass that.
@@ -413,12 +417,11 @@ alter the query differently each time.    To allow a
 still to allow the result to be cached, the event can be registered
 passing the ``bake_ok=True`` flag::
 
-    @event.listens_for(
-        Query, "before_compile", retval=True, bake_ok=True)
+    @event.listens_for(Query, "before_compile", retval=True, bake_ok=True)
     def my_event(query):
         for desc in query.column_descriptions:
-            if desc['type'] is User:
-                entity = desc['entity']
+            if desc["type"] is User:
+                entity = desc["entity"]
                 query = query.filter(entity.deleted == False)
         return query
 
index e5fc89ab97ea733118e5f0b03500a7b7a3a33f27..2fe23331138c257bd12ee1ecfce80c5c2b847e6f 100644 (file)
@@ -94,32 +94,33 @@ alter classes dynamically at runtime.
 To cover the major areas where this occurs, consider the following ORM
 mapping, using the typical example of the ``User`` class::
 
-    from sqlalchemy import Column
-    from sqlalchemy import Integer
-    from sqlalchemy import String
-    from sqlalchemy import select
+    from sqlalchemy import Column, Integer, String, select
     from sqlalchemy.orm import declarative_base
 
     # "Base" is a class that is created dynamically from the
     # declarative_base() function
     Base = declarative_base()
 
+
     class User(Base):
-        __tablename__ = 'user'
+        __tablename__ = "user"
 
         id = Column(Integer, primary_key=True)
         name = Column(String)
 
+
     # "some_user" is an instance of the User class, which
     # accepts "id" and "name" kwargs based on the mapping
-    some_user = User(id=5, name='user')
+    some_user = User(id=5, name="user")
 
     # it has an attribute called .name that's a string
     print(f"Username: {some_user.name}")
 
     # a select() construct makes use of SQL expressions derived from the
     # User class itself
-    select_stmt = select(User).where(User.id.in_([3, 4, 5])).where(User.name.contains('s'))
+    select_stmt = (
+        select(User).where(User.id.in_([3, 4, 5])).where(User.name.contains("s"))
+    )
 
 Above, the steps that the Mypy extension can take include:
 
@@ -145,35 +146,37 @@ When the Mypy plugin processes the above file, the resulting static class
 definition and Python code passed to the Mypy tool are equivalent to the
 following::
 
-    from sqlalchemy import Column
-    from sqlalchemy import Integer
-    from sqlalchemy import String
-    from sqlalchemy import select
-    from sqlalchemy.orm import declarative_base
-    from sqlalchemy.orm.decl_api import DeclarativeMeta
+    from typing import Optional
+
+    from sqlalchemy import Column, Integer, String, select
     from sqlalchemy.orm import Mapped
+    from sqlalchemy.orm.decl_api import DeclarativeMeta
+
 
     class Base(metaclass=DeclarativeMeta):
         __abstract__ = True
 
+
     class User(Base):
-        __tablename__ = 'user'
+        __tablename__ = "user"
 
         id: Mapped[Optional[int]] = Mapped._special_method(
             Column(Integer, primary_key=True)
         )
-        name: Mapped[Optional[str]] = Mapped._special_method(
-            Column(String)
-        )
+        name: Mapped[Optional[str]] = Mapped._special_method(Column(String))
 
-        def __init__(self, id: Optional[int] = ..., name: Optional[str] = ...) -> None:
+        def __init__(
+            self, id: Optional[int] = ..., name: Optional[str] = ...
+        ) -> None:
             ...
 
-    some_user = User(id=5, name='user')
+
+    some_user = User(id=5, name="user")
 
     print(f"Username: {some_user.name}")
 
-    select_stmt = select(User).where(User.id.in_([3, 4, 5])).where(User.name.contains('s'))
+    select_stmt = (
+        select(User).where(User.id.in_([3, 4, 5])).where(User.name.contains("s"))
+    )
+
 
 The key steps which have been taken above include:
 
@@ -253,6 +256,7 @@ and convert them to include the ``Mapped[]`` type surrounding them.  The
 
     from sqlalchemy.orm import Mapped
 
+
     class MyClass(Base):
         # ...
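 
 A rough illustration of the explicit spelling referred to above (reusing
 ``MyClass``; the attribute names and types are examples only)::
 
     class MyClass(Base):
         # ...
 
         # under the plugin, a plain Column assignment behaves as though it
         # carried an explicit Mapped[] annotation such as these
         id: Mapped[int] = Column(Integer, primary_key=True)
         name: Mapped[Optional[str]] = Column(String)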
 
@@ -309,14 +313,16 @@ needs an explicit type to be sent::
 
     Base = declarative_base()
 
+
     class User(Base):
-        __tablename__ = 'user'
+        __tablename__ = "user"
 
         id = Column(Integer, primary_key=True)
         name = Column(String)
 
+
     class Address(Base):
-        __tablename__ = 'address'
+        __tablename__ = "address"
 
         id = Column(Integer, primary_key=True)
         user_id = Column(ForeignKey("user.id"))
@@ -333,7 +339,7 @@ To resolve, apply an explicit type annotation to the ``Address.user_id``
 column::
 
     class Address(Base):
-        __tablename__ = 'address'
+        __tablename__ = "address"
 
         id = Column(Integer, primary_key=True)
         user_id: int = Column(ForeignKey("user.id"))
@@ -354,7 +360,7 @@ the attributes can be explicitly stated with a complete annotation that
             Base.metadata,
             Column(Integer, primary_key=True),
             Column("employee_name", String(50), nullable=False),
-            Column(String(50))
+            Column(String(50)),
         )
 
         id: Mapped[int]
@@ -381,13 +387,14 @@ present, as well as if the target type of the :func:`_orm.relationship`
 is a string or callable, and not a class::
 
     class User(Base):
-        __tablename__ = 'user'
+        __tablename__ = "user"
 
         id = Column(Integer, primary_key=True)
         name = Column(String)
 
+
     class Address(Base):
-        __tablename__ = 'address'
+        __tablename__ = "address"
 
         id = Column(Integer, primary_key=True)
         user_id: int = Column(ForeignKey("user.id"))
@@ -406,7 +413,7 @@ The error can be resolved either by using ``relationship(User, uselist=False)``
 or by providing the type, in this case the scalar ``User`` object::
 
     class Address(Base):
-        __tablename__ = 'address'
+        __tablename__ = "address"
 
         id = Column(Integer, primary_key=True)
         user_id: int = Column(ForeignKey("user.id"))
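         # illustrative continuation (not part of this hunk): annotating the
         # relationship with the scalar User type resolves the error
         user: "User" = relationship("User")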
@@ -421,7 +428,8 @@ by pep-484, ensuring the class is imported within
 the `TYPE_CHECKING block <https://www.python.org/dev/peps/pep-0484/#runtime-or-type-checking>`_
 as appropriate::
 
-    from typing import List, TYPE_CHECKING
+    from typing import TYPE_CHECKING, List
+
     from .mymodel import Base
 
     if TYPE_CHECKING:
@@ -429,8 +437,9 @@ as appropriate::
         # that cannot normally be imported at runtime
         from .myaddressmodel import Address
 
+
     class User(Base):
-        __tablename__ = 'user'
+        __tablename__ = "user"
 
         id = Column(Integer, primary_key=True)
         name = Column(String)
@@ -440,15 +449,18 @@ As is the case with columns, the :class:`_orm.Mapped` class may also be
 applied explicitly::
 
     class User(Base):
-        __tablename__ = 'user'
+        __tablename__ = "user"
 
         id = Column(Integer, primary_key=True)
         name = Column(String)
 
-        addresses: Mapped[List["Address"]] = relationship("Address", back_populates="user")
+        addresses: Mapped[List["Address"]] = relationship(
+            "Address", back_populates="user"
+        )
+
 
     class Address(Base):
-        __tablename__ = 'address'
+        __tablename__ = "address"
 
         id = Column(Integer, primary_key=True)
         user_id: int = Column(ForeignKey("user.id"))
@@ -471,8 +483,8 @@ such as :meth:`_orm.registry.mapped`) should be decorated with the
 :func:`_orm.declarative_mixin` decorator, which provides a hint to the Mypy
 plugin that a particular class intends to serve as a declarative mixin::
 
-    from sqlalchemy.orm import declared_attr
-    from sqlalchemy.orm import declarative_mixin
+    from sqlalchemy.orm import declarative_mixin, declared_attr
+
 
     @declarative_mixin
     class HasUpdatedAt:
@@ -480,9 +492,9 @@ plugin that a particular class intends to serve as a declarative mixin::
         def updated_at(cls) -> Column[DateTime]:  # uses Column
             return Column(DateTime)
 
+
     @declarative_mixin
     class HasCompany:
-
         @declared_attr
         def company_id(cls) -> Mapped[int]:  # uses Mapped
             return Column(ForeignKey("company.id"))
@@ -491,8 +503,9 @@ plugin that a particular class intends to serve as a declarative mixin::
         def company(cls) -> Mapped["Company"]:
             return relationship("Company")
 
+
     class Employee(HasUpdatedAt, HasCompany, Base):
-        __tablename__ = 'employee'
+        __tablename__ = "employee"
 
         id = Column(Integer, primary_key=True)
         name = Column(String)
@@ -507,7 +520,6 @@ this complexity::
         company_id: Mapped[int]
         company: Mapped["Company"]
 
-
 Combining with Dataclasses or Other Type-Sensitive Attribute Systems
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
@@ -517,7 +529,7 @@ use to build the class, and the value given in each assignment statement
 is significant.    That is, a class as follows has to be stated exactly
 as it is in order to be accepted by dataclasses::
 
-    mapper_registry : registry = registry()
+    mapper_registry: registry = registry()
 
 
     @mapper_registry.mapped
@@ -538,9 +550,7 @@ as it is in order to be accepted by dataclasses::
         addresses: List[Address] = field(default_factory=list)
 
         __mapper_args__ = {  # type: ignore
-            "properties" : {
-                "addresses": relationship("Address")
-            }
+            "properties": {"addresses": relationship("Address")}
         }
 
 We can't apply our ``Mapped[]`` types to the attributes ``id``, ``name``,
@@ -580,9 +590,7 @@ This attribute can be conditional within the ``TYPE_CHECKING`` variable::
             _mypy_mapped_attrs = [id, name, "fullname", "nickname", addresses]
 
         __mapper_args__ = {  # type: ignore
-            "properties" : {
-                "addresses": relationship("Address")
-            }
+            "properties": {"addresses": relationship("Address")}
         }
 
 With the above recipe, the attributes listed in ``_mypy_mapped_attrs``
index 4d4455b6705e5e5259cbc6f015e71d17ba7bf5b2..18bf98c4ed9982179545e25e680c06b5b845cb00 100644 (file)
@@ -45,14 +45,14 @@ additional arguments that will refer to the polymorphic discriminator
 column as well as the identifier for the base class::
 
     class Employee(Base):
-        __tablename__ = 'employee'
+        __tablename__ = "employee"
         id = Column(Integer, primary_key=True)
         name = Column(String(50))
         type = Column(String(50))
 
         __mapper_args__ = {
-            'polymorphic_identity':'employee',
-            'polymorphic_on':type
+            "polymorphic_identity": "employee",
+            "polymorphic_on": type,
         }
 
 Above, an additional column ``type`` is established to act as the
@@ -82,21 +82,22 @@ they represent. Each table also must contain a primary key column (or
 columns), as well as a foreign key reference to the parent table::
 
     class Engineer(Employee):
-        __tablename__ = 'engineer'
-        id = Column(Integer, ForeignKey('employee.id'), primary_key=True)
+        __tablename__ = "engineer"
+        id = Column(Integer, ForeignKey("employee.id"), primary_key=True)
         engineer_name = Column(String(30))
 
         __mapper_args__ = {
-            'polymorphic_identity':'engineer',
+            "polymorphic_identity": "engineer",
         }
 
+
     class Manager(Employee):
-        __tablename__ = 'manager'
-        id = Column(Integer, ForeignKey('employee.id'), primary_key=True)
+        __tablename__ = "manager"
+        id = Column(Integer, ForeignKey("employee.id"), primary_key=True)
         manager_name = Column(String(30))
 
         __mapper_args__ = {
-            'polymorphic_identity':'manager',
+            "polymorphic_identity": "manager",
         }
 
 In the above example, each mapping specifies the
@@ -159,29 +160,32 @@ the ``company`` table, the relationships are set up between ``Company``
 and ``Employee``::
 
     class Company(Base):
-        __tablename__ = 'company'
+        __tablename__ = "company"
         id = Column(Integer, primary_key=True)
         name = Column(String(50))
         employees = relationship("Employee", back_populates="company")
 
+
     class Employee(Base):
-        __tablename__ = 'employee'
+        __tablename__ = "employee"
         id = Column(Integer, primary_key=True)
         name = Column(String(50))
         type = Column(String(50))
-        company_id = Column(ForeignKey('company.id'))
+        company_id = Column(ForeignKey("company.id"))
         company = relationship("Company", back_populates="employees")
 
         __mapper_args__ = {
-            'polymorphic_identity':'employee',
-            'polymorphic_on':type
+            "polymorphic_identity": "employee",
+            "polymorphic_on": type,
         }
 
+
     class Manager(Employee):
-        # ...
+        ...
+
 
     class Engineer(Employee):
-        ...
+        ...
 
 If the foreign key constraint is on a table corresponding to a subclass,
 the relationship should target that subclass instead.  In the example
@@ -190,36 +194,39 @@ key constraint from ``manager`` to ``company``, so the relationships are
 established between the ``Manager`` and ``Company`` classes::
 
     class Company(Base):
-        __tablename__ = 'company'
+        __tablename__ = "company"
         id = Column(Integer, primary_key=True)
         name = Column(String(50))
         managers = relationship("Manager", back_populates="company")
 
+
     class Employee(Base):
-        __tablename__ = 'employee'
+        __tablename__ = "employee"
         id = Column(Integer, primary_key=True)
         name = Column(String(50))
         type = Column(String(50))
 
         __mapper_args__ = {
-            'polymorphic_identity':'employee',
-            'polymorphic_on':type
+            "polymorphic_identity": "employee",
+            "polymorphic_on": type,
         }
 
+
     class Manager(Employee):
-        __tablename__ = 'manager'
-        id = Column(Integer, ForeignKey('employee.id'), primary_key=True)
+        __tablename__ = "manager"
+        id = Column(Integer, ForeignKey("employee.id"), primary_key=True)
         manager_name = Column(String(30))
 
-        company_id = Column(ForeignKey('company.id'))
+        company_id = Column(ForeignKey("company.id"))
         company = relationship("Company", back_populates="managers")
 
         __mapper_args__ = {
-            'polymorphic_identity':'manager',
+            "polymorphic_identity": "manager",
         }
 
+
     class Engineer(Employee):
-        ...
+        ...
 
 Above, the ``Manager`` class will have a ``Manager.company`` attribute;
 ``Company`` will have a ``Company.managers`` attribute that always
@@ -263,28 +270,30 @@ subclasses, indicating that the column is to be mapped only to that subclass;
 the :class:`_schema.Column` will be applied to the same base :class:`_schema.Table` object::
 
     class Employee(Base):
-        __tablename__ = 'employee'
+        __tablename__ = "employee"
         id = Column(Integer, primary_key=True)
         name = Column(String(50))
         type = Column(String(20))
 
         __mapper_args__ = {
-            'polymorphic_on':type,
-            'polymorphic_identity':'employee'
+            "polymorphic_on": type,
+            "polymorphic_identity": "employee",
         }
 
+
     class Manager(Employee):
         manager_data = Column(String(50))
 
         __mapper_args__ = {
-            'polymorphic_identity':'manager'
+            "polymorphic_identity": "manager",
         }
 
+
     class Engineer(Employee):
         engineer_info = Column(String(50))
 
         __mapper_args__ = {
-            'polymorphic_identity':'engineer'
+            "polymorphic_identity": "engineer",
         }
 
 Note that the mappers for the derived classes Manager and Engineer omit the
@@ -302,22 +311,28 @@ declaration on a subclass that has no table of its own.   A tricky case
 comes up when two subclasses want to specify *the same* column, as below::
 
     class Employee(Base):
-        __tablename__ = 'employee'
+        __tablename__ = "employee"
         id = Column(Integer, primary_key=True)
         name = Column(String(50))
         type = Column(String(20))
 
         __mapper_args__ = {
-            'polymorphic_on':type,
-            'polymorphic_identity':'employee'
+            "polymorphic_on": type,
+            "polymorphic_identity": "employee",
         }
 
+
     class Engineer(Employee):
-        __mapper_args__ = {'polymorphic_identity': 'engineer'}
+        __mapper_args__ = {
+            "polymorphic_identity": "engineer",
+        }
         start_date = Column(DateTime)
 
+
     class Manager(Employee):
-        __mapper_args__ = {'polymorphic_identity': 'manager'}
+        __mapper_args__ = {
+            "polymorphic_identity": "manager",
+        }
         start_date = Column(DateTime)
 
 Above, the ``start_date`` column declared on both ``Engineer`` and ``Manager``
@@ -335,32 +350,39 @@ if it already exists::
 
     from sqlalchemy.orm import declared_attr
 
+
     class Employee(Base):
-        __tablename__ = 'employee'
+        __tablename__ = "employee"
         id = Column(Integer, primary_key=True)
         name = Column(String(50))
         type = Column(String(20))
 
         __mapper_args__ = {
-            'polymorphic_on':type,
-            'polymorphic_identity':'employee'
+            "polymorphic_on": type,
+            "polymorphic_identity": "employee",
         }
 
+
     class Engineer(Employee):
-        __mapper_args__ = {'polymorphic_identity': 'engineer'}
+        __mapper_args__ = {
+            "polymorphic_identity": "engineer",
+        }
 
         @declared_attr
         def start_date(cls):
             "Start date column, if not present already."
-            return Employee.__table__.c.get('start_date', Column(DateTime))
+            return Employee.__table__.c.get("start_date", Column(DateTime))
+
 
     class Manager(Employee):
-        __mapper_args__ = {'polymorphic_identity': 'manager'}
+        __mapper_args__ = {
+            "polymorphic_identity": "manager",
+        }
 
         @declared_attr
         def start_date(cls):
             "Start date column, if not present already."
-            return Employee.__table__.c.get('start_date', Column(DateTime))
+            return Employee.__table__.c.get("start_date", Column(DateTime))
 
 Above, when ``Manager`` is mapped, the ``start_date`` column is
 already present on the ``Employee`` class; by returning the existing
@@ -372,26 +394,33 @@ to define a particular series of columns and/or other mapped attributes
 from a reusable mixin class::
 
     class Employee(Base):
-        __tablename__ = 'employee'
+        __tablename__ = "employee"
         id = Column(Integer, primary_key=True)
         name = Column(String(50))
         type = Column(String(20))
 
         __mapper_args__ = {
-            'polymorphic_on':type,
-            'polymorphic_identity':'employee'
+            "polymorphic_on": type,
+            "polymorphic_identity": "employee",
         }
 
+
     class HasStartDate:
         @declared_attr
         def start_date(cls):
-            return cls.__table__.c.get('start_date', Column(DateTime))
+            return cls.__table__.c.get("start_date", Column(DateTime))
+
 
     class Engineer(HasStartDate, Employee):
-        __mapper_args__ = {'polymorphic_identity': 'engineer'}
+        __mapper_args__ = {
+            "polymorphic_identity": "engineer",
+        }
+
 
     class Manager(HasStartDate, Employee):
-        __mapper_args__ = {'polymorphic_identity': 'manager'}
+        __mapper_args__ = {
+            "polymorphic_identity": "manager",
+        }
 
 Relationships with Single Table Inheritance
 +++++++++++++++++++++++++++++++++++++++++++
@@ -402,22 +431,23 @@ attribute should be on the same class that's the "foreign" side of the
 relationship::
 
     class Company(Base):
-        __tablename__ = 'company'
+        __tablename__ = "company"
         id = Column(Integer, primary_key=True)
         name = Column(String(50))
         employees = relationship("Employee", back_populates="company")
 
+
     class Employee(Base):
-        __tablename__ = 'employee'
+        __tablename__ = "employee"
         id = Column(Integer, primary_key=True)
         name = Column(String(50))
         type = Column(String(50))
-        company_id = Column(ForeignKey('company.id'))
+        company_id = Column(ForeignKey("company.id"))
         company = relationship("Company", back_populates="employees")
 
         __mapper_args__ = {
-            'polymorphic_identity':'employee',
-            'polymorphic_on':type
+            "polymorphic_identity": "employee",
+            "polymorphic_on": type,
         }
 
 
@@ -425,14 +455,15 @@ relationship::
         manager_data = Column(String(50))
 
         __mapper_args__ = {
-            'polymorphic_identity':'manager'
+            "polymorphic_identity": "manager",
         }
 
+
     class Engineer(Employee):
         engineer_info = Column(String(50))
 
         __mapper_args__ = {
-            'polymorphic_identity':'engineer'
+            "polymorphic_identity": "engineer",
         }
 
 Also, like the case of joined inheritance, we can create relationships
@@ -441,31 +472,32 @@ include a WHERE clause that limits the class selection to that subclass
 or subclasses::
 
     class Company(Base):
-        __tablename__ = 'company'
+        __tablename__ = "company"
         id = Column(Integer, primary_key=True)
         name = Column(String(50))
         managers = relationship("Manager", back_populates="company")
 
+
     class Employee(Base):
-        __tablename__ = 'employee'
+        __tablename__ = "employee"
         id = Column(Integer, primary_key=True)
         name = Column(String(50))
         type = Column(String(50))
 
         __mapper_args__ = {
-            'polymorphic_identity':'employee',
-            'polymorphic_on':type
+            "polymorphic_identity": "employee",
+            "polymorphic_on": type,
         }
 
 
     class Manager(Employee):
         manager_name = Column(String(30))
 
-        company_id = Column(ForeignKey('company.id'))
+        company_id = Column(ForeignKey("company.id"))
         company = relationship("Company", back_populates="managers")
 
         __mapper_args__ = {
-            'polymorphic_identity':'manager',
+            "polymorphic_identity": "manager",
         }
 
 
@@ -473,7 +505,7 @@ or subclasses::
         engineer_info = Column(String(50))
 
         __mapper_args__ = {
-            'polymorphic_identity':'engineer'
+            "polymorphic_identity": "engineer",
         }
 
 Above, the ``Manager`` class will have a ``Manager.company`` attribute;
@@ -533,31 +565,33 @@ This indicates to Declarative as well as the mapping that the superclass
 table should not be considered as part of the mapping::
 
     class Employee(Base):
-        __tablename__ = 'employee'
+        __tablename__ = "employee"
 
         id = Column(Integer, primary_key=True)
         name = Column(String(50))
 
+
     class Manager(Employee):
-        __tablename__ = 'manager'
+        __tablename__ = "manager"
 
         id = Column(Integer, primary_key=True)
         name = Column(String(50))
         manager_data = Column(String(50))
 
         __mapper_args__ = {
-            'concrete': True
+            "concrete": True,
         }
 
+
     class Engineer(Employee):
-        __tablename__ = 'engineer'
+        __tablename__ = "engineer"
 
         id = Column(Integer, primary_key=True)
         name = Column(String(50))
         engineer_info = Column(String(50))
 
         __mapper_args__ = {
-            'concrete': True
+            "concrete": True,
         }
 
 Two critical points should be noted:
@@ -604,36 +638,39 @@ almost the same way as we do other forms of inheritance mappings::
 
     from sqlalchemy.ext.declarative import ConcreteBase
 
+
     class Employee(ConcreteBase, Base):
-        __tablename__ = 'employee'
+        __tablename__ = "employee"
         id = Column(Integer, primary_key=True)
         name = Column(String(50))
 
         __mapper_args__ = {
-            'polymorphic_identity': 'employee',
-            'concrete': True
+            "polymorphic_identity": "employee",
+            "concrete": True,
         }
 
+
     class Manager(Employee):
-        __tablename__ = 'manager'
+        __tablename__ = "manager"
         id = Column(Integer, primary_key=True)
         name = Column(String(50))
         manager_data = Column(String(40))
 
         __mapper_args__ = {
-            'polymorphic_identity': 'manager',
-            'concrete': True
+            "polymorphic_identity": "manager",
+            "concrete": True,
         }
 
+
     class Engineer(Employee):
-        __tablename__ = 'engineer'
+        __tablename__ = "engineer"
         id = Column(Integer, primary_key=True)
         name = Column(String(50))
         engineer_info = Column(String(40))
 
         __mapper_args__ = {
-            'polymorphic_identity': 'engineer',
-            'concrete': True
+            "polymorphic_identity": "engineer",
+            "concrete": True,
         }
 
 Above, Declarative sets up the polymorphic selectable for the
@@ -703,24 +740,26 @@ base class with the ``__abstract__`` indicator::
     class Employee(Base):
         __abstract__ = True
 
+
     class Manager(Employee):
-        __tablename__ = 'manager'
+        __tablename__ = "manager"
         id = Column(Integer, primary_key=True)
         name = Column(String(50))
         manager_data = Column(String(40))
 
         __mapper_args__ = {
-            'polymorphic_identity': 'manager',
+            "polymorphic_identity": "manager",
         }
 
+
     class Engineer(Employee):
-        __tablename__ = 'engineer'
+        __tablename__ = "engineer"
         id = Column(Integer, primary_key=True)
         name = Column(String(50))
         engineer_info = Column(String(40))
 
         __mapper_args__ = {
-            'polymorphic_identity': 'engineer',
+            "polymorphic_identity": "engineer",
         }
 
 Above, we are not actually making use of SQLAlchemy's inheritance mapping
@@ -751,29 +790,32 @@ class called :class:`.AbstractConcreteBase` which achieves this automatically::
 
     from sqlalchemy.ext.declarative import AbstractConcreteBase
 
+
     class Employee(AbstractConcreteBase, Base):
         pass
 
+
     class Manager(Employee):
-        __tablename__ = 'manager'
+        __tablename__ = "manager"
         id = Column(Integer, primary_key=True)
         name = Column(String(50))
         manager_data = Column(String(40))
 
         __mapper_args__ = {
-            'polymorphic_identity': 'manager',
-            'concrete': True
+            "polymorphic_identity": "manager",
+            "concrete": True,
         }
 
+
     class Engineer(Employee):
-        __tablename__ = 'engineer'
+        __tablename__ = "engineer"
         id = Column(Integer, primary_key=True)
         name = Column(String(50))
         engineer_info = Column(String(40))
 
         __mapper_args__ = {
-            'polymorphic_identity': 'engineer',
-            'concrete': True
+            "polymorphic_identity": "engineer",
+            "concrete": True,
         }
 
 The :class:`.AbstractConcreteBase` helper class has a more complex internal
@@ -801,34 +843,41 @@ establishes the :class:`_schema.Table` objects separately::
     metadata_obj = Base.metadata
 
     employees_table = Table(
-        'employee', metadata_obj,
-        Column('id', Integer, primary_key=True),
-        Column('name', String(50)),
+        "employee",
+        metadata_obj,
+        Column("id", Integer, primary_key=True),
+        Column("name", String(50)),
     )
 
     managers_table = Table(
-        'manager', metadata_obj,
-        Column('id', Integer, primary_key=True),
-        Column('name', String(50)),
-        Column('manager_data', String(50)),
+        "manager",
+        metadata_obj,
+        Column("id", Integer, primary_key=True),
+        Column("name", String(50)),
+        Column("manager_data", String(50)),
     )
 
     engineers_table = Table(
-        'engineer', metadata_obj,
-        Column('id', Integer, primary_key=True),
-        Column('name', String(50)),
-        Column('engineer_info', String(50)),
+        "engineer",
+        metadata_obj,
+        Column("id", Integer, primary_key=True),
+        Column("name", String(50)),
+        Column("engineer_info", String(50)),
     )
 
 Next, the UNION is produced using :func:`.polymorphic_union`::
 
     from sqlalchemy.orm import polymorphic_union
 
-    pjoin = polymorphic_union({
-        'employee': employees_table,
-        'manager': managers_table,
-        'engineer': engineers_table
-    }, 'type', 'pjoin')
+    pjoin = polymorphic_union(
+        {
+            "employee": employees_table,
+            "manager": managers_table,
+            "engineer": engineers_table,
+        },
+        "type",
+        "pjoin",
+    )
 
 With the above :class:`_schema.Table` objects, the mappings can be produced using "semi-classical" style,
 where we use Declarative in conjunction with the ``__table__`` argument;
@@ -838,22 +887,26 @@ the :paramref:`.mapper.with_polymorphic` parameter::
     class Employee(Base):
         __table__ = employees_table
         __mapper_args__ = {
-            'polymorphic_on': pjoin.c.type,
-            'with_polymorphic': ('*', pjoin),
-            'polymorphic_identity': 'employee'
+            "polymorphic_on": pjoin.c.type,
+            "with_polymorphic": ("*", pjoin),
+            "polymorphic_identity": "employee",
         }
 
+
     class Engineer(Employee):
         __table__ = engineers_table
         __mapper_args__ = {
-            'polymorphic_identity': 'engineer',
-            'concrete': True}
+            "polymorphic_identity": "engineer",
+            "concrete": True,
+        }
+
 
     class Manager(Employee):
         __table__ = managers_table
         __mapper_args__ = {
-            'polymorphic_identity': 'manager',
-            'concrete': True}
+            "polymorphic_identity": "manager",
+            "concrete": True,
+        }
 
 Alternatively, the same :class:`_schema.Table` objects can be used in
 fully "classical" style, without using Declarative at all.
@@ -864,16 +917,19 @@ A constructor similar to that supplied by Declarative is illustrated::
             for k in kw:
                 setattr(self, k, kw[k])
 
+
     class Manager(Employee):
         pass
 
+
     class Engineer(Employee):
         pass
 
+
     employee_mapper = mapper_registry.map_imperatively(
         Employee,
         pjoin,
-        with_polymorphic=('*', pjoin),
+        with_polymorphic=("*", pjoin),
         polymorphic_on=pjoin.c.type,
     )
     manager_mapper = mapper_registry.map_imperatively(
@@ -881,18 +937,16 @@ A constructor similar to that supplied by Declarative is illustrated::
         managers_table,
         inherits=employee_mapper,
         concrete=True,
-        polymorphic_identity='manager',
+        polymorphic_identity="manager",
     )
     engineer_mapper = mapper_registry.map_imperatively(
         Engineer,
         engineers_table,
         inherits=employee_mapper,
         concrete=True,
-        polymorphic_identity='engineer',
+        polymorphic_identity="engineer",
     )
 
-
-
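 As a usage sketch (a configured ``Session`` is assumed; this is illustrative
 rather than part of the mapping above), querying the base class draws upon the
 ``pjoin`` UNION so that rows from all three tables come back as instances of
 the appropriate class::
 
     for employee in session.query(Employee):
         # each row is an Employee, Manager or Engineer instance, selected
         # via the "type" discriminator column of the polymorphic union
         print(type(employee).__name__, employee.name)
 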
 The "abstract" example can also be mapped using "semi-classical" or "classical"
 style.  The difference is that instead of applying the "polymorphic union"
 to the :paramref:`.mapper.with_polymorphic` parameter, we apply it directly
@@ -901,30 +955,40 @@ mapping is illustrated below::
 
     from sqlalchemy.orm import polymorphic_union
 
-    pjoin = polymorphic_union({
-        'manager': managers_table,
-        'engineer': engineers_table
-    }, 'type', 'pjoin')
+    pjoin = polymorphic_union(
+        {
+            "manager": managers_table,
+            "engineer": engineers_table,
+        },
+        "type",
+        "pjoin",
+    )
+
 
     class Employee(Base):
         __table__ = pjoin
         __mapper_args__ = {
-            'polymorphic_on': pjoin.c.type,
-            'with_polymorphic': '*',
-            'polymorphic_identity': 'employee'
+            "polymorphic_on": pjoin.c.type,
+            "with_polymorphic": "*",
+            "polymorphic_identity": "employee",
         }
 
+
     class Engineer(Employee):
         __table__ = engineers_table
         __mapper_args__ = {
-            'polymorphic_identity': 'engineer',
-            'concrete': True}
+            "polymorphic_identity": "engineer",
+            "concrete": True,
+        }
+
 
     class Manager(Employee):
         __table__ = managers_table
         __mapper_args__ = {
-            'polymorphic_identity': 'manager',
-            'concrete': True}
+            "polymorphic_identity": "manager",
+            "concrete": True,
+        }
+
 
 Above, we use :func:`.polymorphic_union` in the same manner as before, except
 that we omit the ``employee`` table.
@@ -955,47 +1019,47 @@ such a configuration is as follows::
 
 
     class Company(Base):
-        __tablename__ = 'company'
+        __tablename__ = "company"
         id = Column(Integer, primary_key=True)
         name = Column(String(50))
         employees = relationship("Employee")
 
 
     class Employee(ConcreteBase, Base):
-        __tablename__ = 'employee'
+        __tablename__ = "employee"
         id = Column(Integer, primary_key=True)
         name = Column(String(50))
-        company_id = Column(ForeignKey('company.id'))
+        company_id = Column(ForeignKey("company.id"))
 
         __mapper_args__ = {
-            'polymorphic_identity': 'employee',
-            'concrete': True
+            "polymorphic_identity": "employee",
+            "concrete": True,
         }
 
 
     class Manager(Employee):
-        __tablename__ = 'manager'
+        __tablename__ = "manager"
         id = Column(Integer, primary_key=True)
         name = Column(String(50))
         manager_data = Column(String(40))
-        company_id = Column(ForeignKey('company.id'))
+        company_id = Column(ForeignKey("company.id"))
 
         __mapper_args__ = {
-            'polymorphic_identity': 'manager',
-            'concrete': True
+            "polymorphic_identity": "manager",
+            "concrete": True,
         }
 
 
     class Engineer(Employee):
-        __tablename__ = 'engineer'
+        __tablename__ = "engineer"
         id = Column(Integer, primary_key=True)
         name = Column(String(50))
         engineer_info = Column(String(40))
-        company_id = Column(ForeignKey('company.id'))
+        company_id = Column(ForeignKey("company.id"))
 
         __mapper_args__ = {
-            'polymorphic_identity': 'engineer',
-            'concrete': True
+            "polymorphic_identity": "engineer",
+            "concrete": True,
         }
 
 The next complexity with concrete inheritance and relationships involves
@@ -1015,50 +1079,50 @@ each of the relationships::
 
 
     class Company(Base):
-        __tablename__ = 'company'
+        __tablename__ = "company"
         id = Column(Integer, primary_key=True)
         name = Column(String(50))
         employees = relationship("Employee", back_populates="company")
 
 
     class Employee(ConcreteBase, Base):
-        __tablename__ = 'employee'
+        __tablename__ = "employee"
         id = Column(Integer, primary_key=True)
         name = Column(String(50))
-        company_id = Column(ForeignKey('company.id'))
+        company_id = Column(ForeignKey("company.id"))
         company = relationship("Company", back_populates="employees")
 
         __mapper_args__ = {
-            'polymorphic_identity': 'employee',
-            'concrete': True
+            "polymorphic_identity": "employee",
+            "concrete": True,
         }
 
 
     class Manager(Employee):
-        __tablename__ = 'manager'
+        __tablename__ = "manager"
         id = Column(Integer, primary_key=True)
         name = Column(String(50))
         manager_data = Column(String(40))
-        company_id = Column(ForeignKey('company.id'))
+        company_id = Column(ForeignKey("company.id"))
         company = relationship("Company", back_populates="employees")
 
         __mapper_args__ = {
-            'polymorphic_identity': 'manager',
-            'concrete': True
+            "polymorphic_identity": "manager",
+            "concrete": True,
         }
 
 
     class Engineer(Employee):
-        __tablename__ = 'engineer'
+        __tablename__ = "engineer"
         id = Column(Integer, primary_key=True)
         name = Column(String(50))
         engineer_info = Column(String(40))
-        company_id = Column(ForeignKey('company.id'))
+        company_id = Column(ForeignKey("company.id"))
         company = relationship("Company", back_populates="employees")
 
         __mapper_args__ = {
-            'polymorphic_identity': 'engineer',
-            'concrete': True
+            "polymorphic_identity": "engineer",
+            "concrete": True,
         }
 
 The above limitation is related to the current implementation, including