implement write-only collections, typing for dynamic
author    Mike Bayer <mike_mp@zzzcomputing.com>
          Mon, 26 Sep 2022 18:38:44 +0000 (14:38 -0400)
committer mike bayer <mike_mp@zzzcomputing.com>
          Thu, 6 Oct 2022 00:36:25 +0000 (00:36 +0000)
For 2.0, we provide a truly "larger than memory collection"
implementation, a write-only collection that will never
under any circumstances implicitly load the entire
collection, even during flush.

This is essentially a much more "strict" version
of the "dynamic" loader, which in fact has many
scenarios in which it loads the full backing collection
into memory, mostly defeating its purpose.

Typing constructs are added that support
both the new feature WriteOnlyMapping and the
legacy feature DynamicMapping.  These have been
integrated with "annotation based mapping" so that
relationship() uses these annotations to configure
the loader strategy as well.
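
A short hedged sketch of the annotation style (names here
are illustrative only; full runnable examples are in the
docs added below):

    class Account(Base):
        __tablename__ = "account"

        id: Mapped[int] = mapped_column(primary_key=True)

        # write-only collection; never loaded implicitly
        transactions: WriteOnlyMapped["Transaction"] = relationship()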

additional changes:

* the docs triggered a conflict in hybrid's
  "transformers" section; this section is hard-coded
  to Query using a pattern that doesn't seem to have
  any use and isn't part of the current select()
  interface, so this section is just removed

* As the docs for WriteOnlyMapping are very long,
  collections.rst is now broken into two pages.

Fixes: #6229
Fixes: #7123
Change-Id: I6929f3da6e441cad92285e7309030a9bac4e429d

36 files changed:
doc/build/changelog/migration_20.rst
doc/build/changelog/whatsnew_20.rst
doc/build/core/connections.rst
doc/build/orm/collection_api.rst [new file with mode: 0644]
doc/build/orm/collections.rst
doc/build/orm/extensions/asyncio.rst
doc/build/orm/large_collections.rst [new file with mode: 0644]
doc/build/orm/queryguide/relationships.rst
doc/build/orm/relationships.rst
lib/sqlalchemy/ext/hybrid.py
lib/sqlalchemy/orm/__init__.py
lib/sqlalchemy/orm/_orm_constructors.py
lib/sqlalchemy/orm/attributes.py
lib/sqlalchemy/orm/base.py
lib/sqlalchemy/orm/bulk_persistence.py
lib/sqlalchemy/orm/decl_base.py
lib/sqlalchemy/orm/descriptor_props.py
lib/sqlalchemy/orm/dynamic.py
lib/sqlalchemy/orm/evaluator.py
lib/sqlalchemy/orm/interfaces.py
lib/sqlalchemy/orm/properties.py
lib/sqlalchemy/orm/query.py
lib/sqlalchemy/orm/relationships.py
lib/sqlalchemy/orm/util.py
lib/sqlalchemy/orm/writeonly.py [new file with mode: 0644]
lib/sqlalchemy/testing/assertsql.py
lib/sqlalchemy/testing/entities.py
lib/sqlalchemy/util/typing.py
test/base/test_tutorials.py
test/ext/mypy/plain_files/dynamic_rel.py [new file with mode: 0644]
test/ext/mypy/plain_files/write_only.py [new file with mode: 0644]
test/orm/declarative/test_tm_future_annotations.py
test/orm/declarative/test_typed_mapping.py
test/orm/dml/test_bulk_statements.py
test/orm/dml/test_update_delete_where.py
test/orm/test_dynamic.py

index 210fce9af6f442c35561f657c611563e698bdd7a..799c5421ba8e06932691e3fe3b82c582fa31e69c 100644 (file)
@@ -1992,21 +1992,45 @@ and should be preferred.
 
 .. _migration_20_dynamic_loaders:
 
-Making use of "dynamic" relationship loads without using Query
+"Dynamic" relationship loaders superseded by "Write Only"
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
 **Synopsis**
 
 The ``lazy="dynamic"`` relationship loader strategy, discussed at
 :ref:`dynamic_relationship`, makes use of the :class:`_query.Query` object
-which is legacy in 2.0.
-
+which is legacy in 2.0. The "dynamic" relationship is not directly compatible
+with asyncio without workarounds, and additionally it does not fulfill its
+original purpose of preventing iteration of large collections, since several
+of its behaviors cause this iteration to occur implicitly.
+
+A new loader strategy known as ``lazy="write_only"`` is introduced, which
+through the :class:`_orm.WriteOnlyCollection` collection class
+provides a very strict "no implicit iteration" API and additionally integrates
+with 2.0 style statement execution, supporting asyncio as well as
+direct integrations with the new :ref:`ORM-enabled Bulk DML <change_8360>`
+featureset.
+
+At the same time, ``lazy="dynamic"`` remains **fully supported** in version
+2.0; applications can delay migrating this particular pattern until they
+are fully on the 2.0 series.
 
 **Migration to 2.0**
 
-This pattern is still under adjustment for SQLAlchemy 2.0, and it is expected
-that new APIs will be introduced.    In the interim, there are two ways
-to achieve 2.0 style querying that's in terms of a specific relationship:
+The new "write only" feature is only available in SQLAlchemy 2.0, and is
+not part of 1.4.  At the same time, the ``lazy="dynamic"`` loader strategy
+remains fully supported in version 2.0, and even includes new pep-484
+and annotated mapping support.
+
+Therefore the best strategy for migrating from "dynamic" is to **wait until
+the application is fully running on 2.0**, then migrate directly from
+:class:`.AppenderQuery`, which is the collection type used by the "dynamic"
+strategy, to :class:`.WriteOnlyCollection`, which is the collection type
+used by the "write_only" strategy.
+
+However, some techniques are available to use ``lazy="dynamic"`` under 1.4 in
+a more "2.0" style. There are two ways to achieve 2.0-style querying in terms
+of a specific relationship:
 
 * Make use of the :attr:`_orm.Query.statement` attribute on an existing
   ``lazy="dynamic"`` relationship.   We can use methods like
@@ -2043,10 +2067,25 @@ to achieve 2.0 style querying that's in terms of a specific relationship:
 The original idea was that the :func:`_orm.with_parent` function should be
 sufficient, however continuing to make use of special attributes on the
 relationship itself remains appealing, and there's no reason a 2.0 style
-construct can't be made to work here as well.  There will likely be a new
-loader strategy name that sets up an API similar to the example above that
-uses the ``.statement`` attribute, such as
-``jack.posts.select().where(Post.headline == 'headline')``.
+construct can't be made to work here as well.
+
+The new "write_only" loader strategy provides a new kind of collection which
+does not support implicit iteration or item access.  Instead, reading the
+contents of the collection is performed by calling upon its ``.select()``
+method to help construct an appropriate SELECT statement.  The collection
+also includes methods ``.insert()``, ``.update()``, ``.delete()``
+which may be used to emit bulk DML statements for the items in the collection.
+In a manner similar to that of the "dynamic" feature, there are also methods
+``.add()``, ``.add_all()`` and ``.remove()`` which queue individual members
+for addition or removal using the unit of work process.  An introduction to the
+new feature is at :ref:`change_7123`.
+
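+As a brief, hedged sketch (``User.posts`` here is assumed to be configured
+with ``lazy="write_only"``; the names are illustrative only), usage may look
+like::
+
+    jack = session.get(User, 5)
+
+    # read the collection contents via an explicit SELECT
+    posts = session.scalars(
+        jack.posts.select().where(Post.headline == "headline")
+    ).all()
+
+    # queue a new item for INSERT at flush time
+    jack.posts.add(Post(headline="new post"))
+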
+.. seealso::
+
+    :ref:`change_7123`
+
+    :ref:`write_only_relationship`
+
 
 .. _migration_20_session_autocommit:
 
index 023513e467b4dd958e3c9a352098366650a9c9aa..718ea93c02d764a041e05c45fcbfa1ff6d7cf43d 100644 (file)
@@ -986,6 +986,8 @@ get all drivers to this state:
     :ref:`engine_insertmanyvalues` - Documentation and background on the
     new feature as well as how to configure it
 
+.. _change_8360:
+
 ORM-enabled Insert, Upsert, Update and Delete Statements, with ORM RETURNING
 -----------------------------------------------------------------------------
 
@@ -1152,6 +1154,143 @@ Listed tickets for new ORM DML with RETURNING features:
 * given ORM insert, try to carry the bulk methods along, re: inheritance -
   :ticket:`8360`
 
+.. _change_7123:
+
+New "Write Only" relationship strategy supersedes "dynamic"
+-----------------------------------------------------------
+
+The ``lazy="dynamic"`` loader strategy becomes legacy, in that it is hardcoded
+to make use of the legacy :class:`_orm.Query`. This loader strategy is not
+compatible with asyncio, and additionally has many behaviors that implicitly
+iterate its contents, which defeat the original purpose of the "dynamic"
+relationship as being for very large collections that should not be implicitly
+fully loaded into memory at any time.
+
+The "dynamic" strategy is now superseded by a new strategy
+``lazy="write_only"``.  Configuration of "write only" may be achieved using
+the :paramref:`_orm.relationship.lazy` parameter of :func:`_orm.relationship`,
+or when using :ref:`type annotated mappings <whatsnew_20_orm_declarative_typing>`,
+indicating the :class:`.WriteOnlyMapped` annotation as the mapping style::
+
+    from sqlalchemy.orm import WriteOnlyMapped
+
+
+    class Base(DeclarativeBase):
+        pass
+
+
+    class Account(Base):
+        __tablename__ = "account"
+        id: Mapped[int] = mapped_column(primary_key=True)
+        identifier: Mapped[str]
+        account_transactions: WriteOnlyMapped["AccountTransaction"] = relationship(
+            cascade="all, delete-orphan",
+            passive_deletes=True,
+            order_by="AccountTransaction.timestamp",
+        )
+
+
+    class AccountTransaction(Base):
+        __tablename__ = "account_transaction"
+        id: Mapped[int] = mapped_column(primary_key=True)
+        account_id: Mapped[int] = mapped_column(
+            ForeignKey("account.id", ondelete="cascade")
+        )
+        description: Mapped[str]
+        amount: Mapped[Decimal]
+        timestamp: Mapped[datetime] = mapped_column(default=func.now())
+
+The write-only-mapped collection resembles ``lazy="dynamic"`` in that
+the collection may be assigned up front, and also has methods such as
+:meth:`_orm.WriteOnlyCollection.add` and :meth:`_orm.WriteOnlyCollection.remove`
+to modify the collection on an individual item basis::
+
+    new_account = Account(
+        identifier="account_01",
+        account_transactions=[
+            AccountTransaction(description="initial deposit", amount=Decimal("500.00")),
+            AccountTransaction(description="transfer", amount=Decimal("1000.00")),
+            AccountTransaction(description="withdrawal", amount=Decimal("-29.50")),
+        ],
+    )
+
+    new_account.account_transactions.add(
+        AccountTransaction(description="transfer", amount=Decimal("2000.00"))
+    )
+
+The bigger difference is on the database loading side, where the collection
+has no ability to load objects from the database directly; instead,
+SQL construction methods such as :meth:`_orm.WriteOnlyCollection.select` are used to
+produce SQL constructs such as :class:`_sql.Select` which are then executed
+using :term:`2.0 style` to load the desired objects in an explicit way::
+
+    account_transactions = session.scalars(
+        existing_account.account_transactions.select()
+        .where(AccountTransaction.amount < 0)
+        .limit(10)
+    ).all()
+
+The :class:`_orm.WriteOnlyCollection` also integrates with the new
+:ref:`ORM bulk dml <change_8360>` features, including support for bulk INSERT
+and UPDATE/DELETE with WHERE criteria, all including RETURNING support as
+well.   See the complete documentation at :ref:`write_only_relationship`.
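+
+As a hedged sketch of how this may look (the bulk parameter format shown
+here is an assumption modeled on the ORM bulk INSERT examples)::
+
+    # bulk INSERT of new rows into the collection
+    session.execute(
+        existing_account.account_transactions.insert(),
+        [
+            {"description": "payment", "amount": Decimal("-100.00")},
+            {"description": "deposit", "amount": Decimal("300.00")},
+        ],
+    )
+
+    # bulk DELETE with WHERE criteria, scoped to the collection
+    session.execute(
+        existing_account.account_transactions.delete().where(
+            AccountTransaction.amount == 0
+        )
+    )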
+
+.. seealso::
+
+    :ref:`write_only_relationship`
+
+New pep-484 / type annotated mapping support for Dynamic Relationships
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Even though "dynamic" relationships are legacy in 2.0, as these patterns
+are expected to have a long lifespan,
+:ref:`type annotated mapping <whatsnew_20_orm_declarative_typing>` support
+is now added for "dynamic" relationships in the same way that it's available
+for the new ``lazy="write_only"`` approach, using the :class:`_orm.DynamicMapped`
+annotation::
+
+    from sqlalchemy.orm import DynamicMapped
+
+
+    class Base(DeclarativeBase):
+        pass
+
+
+    class Account(Base):
+        __tablename__ = "account"
+        id: Mapped[int] = mapped_column(primary_key=True)
+        identifier: Mapped[str]
+        account_transactions: DynamicMapped["AccountTransaction"] = relationship(
+            cascade="all, delete-orphan",
+            passive_deletes=True,
+            order_by="AccountTransaction.timestamp",
+        )
+
+
+    class AccountTransaction(Base):
+        __tablename__ = "account_transaction"
+        id: Mapped[int] = mapped_column(primary_key=True)
+        account_id: Mapped[int] = mapped_column(
+            ForeignKey("account.id", ondelete="cascade")
+        )
+        description: Mapped[str]
+        amount: Mapped[Decimal]
+        timestamp: Mapped[datetime] = mapped_column(default=func.now())
+
+The above mapping will provide an ``Account.account_transactions`` collection
+that is typed as returning the :class:`_orm.AppenderQuery` collection type,
+including its element type, e.g. ``AppenderQuery[AccountTransaction]``.  This
+then allows iteration and queries to yield objects which are typed
+as ``AccountTransaction``.
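+
+For example, a hedged sketch reusing the mapping above::
+
+    account = session.get(Account, 1)
+    assert account is not None
+
+    # elements are typed as AccountTransaction
+    for tx in account.account_transactions.filter(
+        AccountTransaction.amount > Decimal("0")
+    ):
+        print(tx.description)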
+
+.. seealso::
+
+    :ref:`dynamic_relationship`
+
+
+:ticket:`7123`
+
+
 .. _change_7311:
 
 Installation is now fully pep-517 enabled
index bd99cd4c2e9142a408d879c2bf87cf05e25ba880..d092c70406c8b2603d3b6eea0f48878266006262 100644 (file)
@@ -2257,6 +2257,7 @@ the need for separate installation.   Use the ``register()`` function as follows
 
     from sqlalchemy.dialects import registry
 
+
     registry.register("mysql.foodialect", "myapp.dialect", "MyMySQLDialect")
 
 The above will respond to ``create_engine("mysql+foodialect://")`` and load the
diff --git a/doc/build/orm/collection_api.rst b/doc/build/orm/collection_api.rst
new file mode 100644 (file)
index 0000000..8f830f4
--- /dev/null
@@ -0,0 +1,654 @@
+.. highlight:: python
+
+.. _custom_collections_toplevel:
+
+.. currentmodule:: sqlalchemy.orm
+
+========================================
+Collection Customization and API Details
+========================================
+
+The :func:`_orm.relationship` function defines a linkage between two classes.
+When the linkage defines a one-to-many or many-to-many relationship, it's
+represented as a Python collection when objects are loaded and manipulated.
+This section presents additional information about collection configuration
+and techniques.
+
+
+.. currentmodule:: sqlalchemy.orm.collections
+
+.. _custom_collections:
+
+Customizing Collection Access
+-----------------------------
+
+Mapping a one-to-many or many-to-many relationship results in a collection of
+values accessible through an attribute on the parent instance.   The two
+common collection types for these are ``list`` and ``set``; in
+:ref:`Declarative <orm_declarative_styles_toplevel>` mappings that use
+:class:`_orm.Mapped`, the collection type is established by using it within
+the :class:`_orm.Mapped` container, as demonstrated in the ``Parent.children`` collection
+below, where ``list`` is used::
+
+    from sqlalchemy import ForeignKey
+
+    from sqlalchemy.orm import DeclarativeBase
+    from sqlalchemy.orm import Mapped
+    from sqlalchemy.orm import mapped_column
+    from sqlalchemy.orm import relationship
+
+
+    class Base(DeclarativeBase):
+        pass
+
+
+    class Parent(Base):
+        __tablename__ = "parent"
+
+        parent_id: Mapped[int] = mapped_column(primary_key=True)
+
+        # use a list
+        children: Mapped[list["Child"]] = relationship()
+
+
+    class Child(Base):
+        __tablename__ = "child"
+
+        child_id: Mapped[int] = mapped_column(primary_key=True)
+        parent_id: Mapped[int] = mapped_column(ForeignKey("parent.parent_id"))
+
+Or for a ``set``, illustrated in the same
+``Parent.children`` collection::
+
+    from sqlalchemy import ForeignKey
+
+    from sqlalchemy.orm import DeclarativeBase
+    from sqlalchemy.orm import Mapped
+    from sqlalchemy.orm import mapped_column
+    from sqlalchemy.orm import relationship
+
+
+    class Base(DeclarativeBase):
+        pass
+
+
+    class Parent(Base):
+        __tablename__ = "parent"
+
+        parent_id: Mapped[int] = mapped_column(primary_key=True)
+
+        # use a set
+        children: Mapped[set["Child"]] = relationship()
+
+
+    class Child(Base):
+        __tablename__ = "child"
+
+        child_id: Mapped[int] = mapped_column(primary_key=True)
+        parent_id: Mapped[int] = mapped_column(ForeignKey("parent.parent_id"))
+
+.. note::  If using Python 3.7 or 3.8, annotations for collections need
+   to use ``typing.List`` or ``typing.Set``, e.g. ``Mapped[List["Child"]]`` or
+   ``Mapped[Set["Child"]]``; the ``list`` and ``set`` Python built-ins
+   don't yet support generic annotation in these Python versions, such as::
+
+       from typing import List
+
+
+       class Parent(Base):
+           __tablename__ = "parent"
+
+           parent_id: Mapped[int] = mapped_column(primary_key=True)
+
+           # use a List, Python 3.8 and earlier
+           children: Mapped[List["Child"]] = relationship()
+
+When using mappings without the :class:`_orm.Mapped` annotation, such as when
+using :ref:`imperative mappings <orm_imperative_mapping>` or untyped
+Python code, as well as in a few special cases, the collection class for a
+:func:`_orm.relationship` can always be specified directly using the
+:paramref:`_orm.relationship.collection_class` parameter::
+
+    # non-annotated mapping
+
+
+    class Parent(Base):
+        __tablename__ = "parent"
+
+        parent_id = mapped_column(Integer, primary_key=True)
+
+        children = relationship("Child", collection_class=set)
+
+
+    class Child(Base):
+        __tablename__ = "child"
+
+        child_id = mapped_column(Integer, primary_key=True)
+        parent_id = mapped_column(ForeignKey("parent.parent_id"))
+
+In the absence of :paramref:`_orm.relationship.collection_class`
+or :class:`_orm.Mapped`, the default collection type is ``list``.
+
+Beyond the ``list`` and ``set`` builtins, there is also support for two
+varieties of dictionary, described below at :ref:`orm_dictionary_collection`.
+Any arbitrary mutable sequence type can also be set up as the target
+collection, with some additional configuration steps; this is described in the
+section :ref:`orm_custom_collection`.
+
+
+.. _orm_dictionary_collection:
+
+Dictionary Collections
+~~~~~~~~~~~~~~~~~~~~~~
+
+A little extra detail is needed when using a dictionary as a collection.
+This is because objects are always loaded from the database as lists, and a key-generation
+strategy must be available to populate the dictionary correctly.  The
+:func:`.attribute_mapped_collection` function is by far the most common way
+to achieve a simple dictionary collection.  It produces a dictionary class that will apply a particular attribute
+of the mapped class as a key.   Below we map an ``Item`` class containing
+a dictionary of ``Note`` items keyed to the ``Note.keyword`` attribute.
+When using :func:`.attribute_mapped_collection`, the :class:`_orm.Mapped`
+annotation may be typed using the :class:`_orm.MappedCollection`
+type, however the :paramref:`_orm.relationship.collection_class` parameter
+is required in this case so that the :func:`.attribute_mapped_collection`
+may be appropriately parametrized::
+
+    from typing import Optional
+
+    from sqlalchemy import ForeignKey
+    from sqlalchemy.orm import attribute_mapped_collection
+    from sqlalchemy.orm import DeclarativeBase
+    from sqlalchemy.orm import Mapped
+    from sqlalchemy.orm import mapped_column
+    from sqlalchemy.orm import relationship
+    from sqlalchemy.orm import MappedCollection
+
+
+    class Base(DeclarativeBase):
+        pass
+
+
+    class Item(Base):
+        __tablename__ = "item"
+
+        id: Mapped[int] = mapped_column(primary_key=True)
+
+        notes: Mapped[MappedCollection[str, "Note"]] = relationship(
+            collection_class=attribute_mapped_collection("keyword"),
+            cascade="all, delete-orphan",
+        )
+
+
+    class Note(Base):
+        __tablename__ = "note"
+
+        id: Mapped[int] = mapped_column(primary_key=True)
+        item_id: Mapped[int] = mapped_column(ForeignKey("item.id"))
+        keyword: Mapped[str]
+        text: Mapped[Optional[str]]
+
+        def __init__(self, keyword: str, text: str):
+            self.keyword = keyword
+            self.text = text
+
+``Item.notes`` is then a dictionary::
+
+    >>> item = Item()
+    >>> item.notes["a"] = Note("a", "atext")
+    >>> item.notes
+    {'a': <__main__.Note object at 0x2eaaf0>}
+
+:func:`.attribute_mapped_collection` will ensure that
+the ``.keyword`` attribute of each ``Note`` complies with the key in the
+dictionary.   For example, when assigning to ``Item.notes``, the dictionary
+key we supply must match that of the actual ``Note`` object::
+
+    item = Item()
+    item.notes = {
+        "a": Note("a", "atext"),
+        "b": Note("b", "btext"),
+    }
+
+The attribute which :func:`.attribute_mapped_collection` uses as a key
+does not need to be mapped at all!  Using a regular Python ``@property`` allows virtually
+any detail or combination of details about the object to be used as the key, as
+below when we establish it as a tuple of ``Note.keyword`` and the first ten letters
+of the ``Note.text`` field::
+
+    class Item(Base):
+        __tablename__ = "item"
+
+        id: Mapped[int] = mapped_column(primary_key=True)
+
+        notes: Mapped[MappedCollection[str, "Note"]] = relationship(
+            collection_class=attribute_mapped_collection("note_key"),
+            back_populates="item",
+            cascade="all, delete-orphan",
+        )
+
+
+    class Note(Base):
+        __tablename__ = "note"
+
+        id: Mapped[int] = mapped_column(primary_key=True)
+        item_id: Mapped[int] = mapped_column(ForeignKey("item.id"))
+        keyword: Mapped[str]
+        text: Mapped[str]
+
+        item: Mapped["Item"] = relationship()
+
+        @property
+        def note_key(self):
+            return (self.keyword, self.text[0:10])
+
+        def __init__(self, keyword: str, text: str):
+            self.keyword = keyword
+            self.text = text
+
+Above we added a ``Note.item`` relationship, with a bi-directional
+:paramref:`_orm.relationship.back_populates` configuration.
+When we assign to this reverse relationship, the ``Note``
+is added to the ``Item.notes`` dictionary and the key is generated for us automatically::
+
+    >>> item = Item()
+    >>> n1 = Note("a", "atext")
+    >>> n1.item = item
+    >>> item.notes
+    {('a', 'atext'): <__main__.Note object at 0x2eaaf0>}
+
+Other built-in dictionary types include :func:`.column_mapped_collection`,
+which is almost like :func:`.attribute_mapped_collection` except it is given
+the :class:`_schema.Column` object directly::
+
+    from sqlalchemy.orm import column_mapped_collection
+
+
+    class Item(Base):
+        __tablename__ = "item"
+
+        id: Mapped[int] = mapped_column(primary_key=True)
+
+        notes: Mapped[MappedCollection[str, "Note"]] = relationship(
+            collection_class=column_mapped_collection(Note.__table__.c.keyword),
+            cascade="all, delete-orphan",
+        )
+
+as well as :func:`.mapped_collection` which is passed any callable function.
+Note that it's usually easier to use :func:`.attribute_mapped_collection` along
+with a ``@property`` as mentioned earlier::
+
+    from sqlalchemy.orm import mapped_collection
+
+
+    class Item(Base):
+        __tablename__ = "item"
+
+        id: Mapped[int] = mapped_column(primary_key=True)
+
+        notes: Mapped[MappedCollection[str, "Note"]] = relationship(
+            collection_class=mapped_collection(lambda note: note.text[0:10]),
+            cascade="all, delete-orphan",
+        )
+
+Dictionary mappings are often combined with the "Association Proxy" extension to produce
+streamlined dictionary views.  See :ref:`proxying_dictionaries` and :ref:`composite_association_proxy`
+for examples.
+
+.. _key_collections_mutations:
+
+Dealing with Key Mutations and back-populating for Dictionary collections
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+When using :func:`.attribute_mapped_collection`, the "key" for the dictionary
+is taken from an attribute on the target object.   **Changes to this key
+are not tracked**.  This means that the key must be assigned before the object
+is first used in the collection; if the key changes afterwards, the collection
+will not be mutated.
+A typical example where this might be an issue is when relying upon backrefs
+to populate an attribute mapped collection.  Given the following::
+
+    class A(Base):
+        __tablename__ = "a"
+
+        id: Mapped[int] = mapped_column(primary_key=True)
+
+        bs: Mapped[MappedCollection[str, "B"]] = relationship(
+            collection_class=attribute_mapped_collection("data"),
+            back_populates="a",
+        )
+
+
+    class B(Base):
+        __tablename__ = "b"
+
+        id: Mapped[int] = mapped_column(primary_key=True)
+        a_id: Mapped[int] = mapped_column(ForeignKey("a.id"))
+        data: Mapped[str]
+
+        a: Mapped["A"] = relationship(back_populates="bs")
+
+Above, if we create a ``B()`` that refers to a specific ``A()``, the back
+populates will then add the ``B()`` to the ``A.bs`` collection; however,
+if the value of ``B.data`` is not set yet, the key will be ``None``::
+
+    >>> a1 = A()
+    >>> b1 = B(a=a1)
+    >>> a1.bs
+    {None: <test3.B object at 0x7f7b1023ef70>}
+
+
+Setting ``b1.data`` after the fact does not update the collection::
+
+    >>> b1.data = "the key"
+    >>> a1.bs
+    {None: <test3.B object at 0x7f7b1023ef70>}
+
+
+This can also be seen if one attempts to set up ``B()`` in the constructor.
+The order of arguments changes the result::
+
+    >>> B(a=a1, data="the key")
+    <test3.B object at 0x7f7b10114280>
+    >>> a1.bs
+    {None: <test3.B object at 0x7f7b10114280>}
+
+vs::
+
+    >>> B(data="the key", a=a1)
+    <test3.B object at 0x7f7b10114340>
+    >>> a1.bs
+    {'the key': <test3.B object at 0x7f7b10114340>}
+
+If backrefs are being used in this way, ensure that attributes are populated
+in the correct order using an ``__init__`` method.
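+
+For example, a hedged sketch of such an ``__init__`` (hypothetical; it
+assigns the key attribute before the relationship)::
+
+    class B(Base):
+        # ... mapping as above ...
+
+        def __init__(self, data: str, a: Optional["A"] = None):
+            # assign the key attribute first, so that the backref
+            # event sees a populated key when adding to A.bs
+            self.data = data
+            if a is not None:
+                self.a = a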
+
+An event handler such as the following may also be used to track changes in the
+collection::
+
+    from sqlalchemy import event
+    from sqlalchemy.orm import attributes
+
+
+    @event.listens_for(B.data, "set")
+    def set_item(obj, value, previous, initiator):
+        if obj.a is not None:
+            previous = None if previous == attributes.NO_VALUE else previous
+            obj.a.bs[value] = obj
+            obj.a.bs.pop(previous)
+
+.. autofunction:: attribute_mapped_collection
+
+.. autofunction:: column_mapped_collection
+
+.. autofunction:: mapped_collection
+
+.. _orm_custom_collection:
+
+Custom Collection Implementations
+---------------------------------
+
+You can use your own types for collections as well.  In simple cases,
+inheriting from ``list`` or ``set`` and adding custom behavior is all that's needed.
+In other cases, special decorators are needed to tell SQLAlchemy more detail
+about how the collection operates.
+
+.. topic:: Do I need a custom collection implementation?
+
+   In most cases not at all!   The most common use case for a "custom" collection
+   is one that validates or marshals incoming values into a new form, such as
+   a string that becomes a class instance, or one which goes a
+   step beyond and represents the data internally in some fashion, presenting
+   a "view" of that data in a different form on the outside.
+
+   For the first use case, the :func:`_orm.validates` decorator is by far
+   the simplest way to intercept incoming values in all cases for the purposes
+   of validation and simple marshaling.  See :ref:`simple_validators`
+   for an example of this.
+
+   For the second use case, the :ref:`associationproxy_toplevel` extension is a
+   well-tested, widely used system that provides a read/write "view" of a
+   collection in terms of some attribute present on the target object. As the
+   target attribute can be a ``@property`` that returns virtually anything, a
+   wide array of "alternative" views of a collection can be constructed with
+   just a few functions. This approach leaves the underlying mapped collection
+   unaffected and avoids the need to carefully tailor collection behavior on a
+   method-by-method basis.
+
+   Customized collections are useful when the collection needs to
+   have special behaviors upon access or mutation operations that can't
+   otherwise be modeled externally to the collection.   They can of course
+   be combined with the above two approaches.
+
+Collections in SQLAlchemy are transparently *instrumented*. Instrumentation
+means that normal operations on the collection are tracked and result in
+changes being written to the database at flush time. Additionally, collection
+operations can fire *events* which indicate some secondary operation must take
+place. Examples of a secondary operation include saving the child item in the
+parent's :class:`~sqlalchemy.orm.session.Session` (i.e. the ``save-update``
+cascade), as well as synchronizing the state of a bi-directional relationship
+(i.e. a :func:`.backref`).
+
+The collections package understands the basic interface of lists, sets and
+dicts and will automatically apply instrumentation to those built-in types and
+their subclasses. Object-derived types that implement a basic collection
+interface are detected and instrumented via duck-typing:
+
+.. sourcecode:: python+sql
+
+    class ListLike:
+        def __init__(self):
+            self.data = []
+
+        def append(self, item):
+            self.data.append(item)
+
+        def remove(self, item):
+            self.data.remove(item)
+
+        def extend(self, items):
+            self.data.extend(items)
+
+        def __iter__(self):
+            return iter(self.data)
+
+        def foo(self):
+            return "foo"
+
+``append``, ``remove``, and ``extend`` are known members of ``list``, and will
+be instrumented automatically. ``__iter__`` is not a mutator method and won't
+be instrumented, and ``foo`` won't be either.
+
+Duck-typing (i.e. guesswork) isn't rock-solid, of course, so you can be
+explicit about the interface you are implementing by providing an
+``__emulates__`` class attribute::
+
+    class SetLike:
+        __emulates__ = set
+
+        def __init__(self):
+            self.data = set()
+
+        def append(self, item):
+            self.data.add(item)
+
+        def remove(self, item):
+            self.data.remove(item)
+
+        def __iter__(self):
+            return iter(self.data)
+
+This class looks similar to a Python ``list`` (i.e. "list-like") as it has an
+``append`` method, but the ``__emulates__`` attribute forces it to be treated
+as a ``set``. ``remove`` is known to be part of the set interface and will be
+instrumented.
+
+But this class won't work quite yet: a little glue is needed to adapt it for
+use by SQLAlchemy. The ORM needs to know which methods to use to append, remove
+and iterate over members of the collection. When using a type like ``list`` or
+``set``, the appropriate methods are well-known and used automatically when
+present.  However the class above, which only roughly resembles a ``set``, does not
+provide the expected ``add`` method, so we must indicate to the ORM the
+method that will take its place, in this case
+using the ``@collection.appender`` decorator; this is illustrated in the
+next section.
+
+Annotating Custom Collections via Decorators
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Decorators can be used to tag the individual methods the ORM needs to manage
+collections. Use them when your class doesn't quite meet the regular interface
+for its container type, or when you otherwise would like to use a different method to
+get the job done.
+
+.. sourcecode:: python
+
+    from sqlalchemy.orm.collections import collection
+
+
+    class SetLike:
+        __emulates__ = set
+
+        def __init__(self):
+            self.data = set()
+
+        @collection.appender
+        def append(self, item):
+            self.data.add(item)
+
+        def remove(self, item):
+            self.data.remove(item)
+
+        def __iter__(self):
+            return iter(self.data)
+
+And that's all that's needed to complete the example. SQLAlchemy will add
+instances via the ``append`` method. ``remove`` and ``__iter__`` are the
+default methods for sets and will be used for removing and iteration. Default
+methods can be changed as well:
+
+.. sourcecode:: python+sql
+
+    from sqlalchemy.orm.collections import collection
+
+
+    class MyList(list):
+        @collection.remover
+        def zark(self, item):
+            # do something special...
+            ...
+
+        @collection.iterator
+        def hey_use_this_instead_for_iteration(self):
+            ...
+
+There is no requirement to be "list-like" or "set-like" at all. Collection classes
+can be any shape, so long as they have the append, remove and iterate
+interface marked for SQLAlchemy's use. Append and remove methods will be
+called with a mapped entity as the single argument, and iterator methods are
+called with no arguments and must return an iterator.
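+
+For example, a minimal sketch of an arbitrarily-shaped collection (the
+``Bag`` class here is hypothetical)::
+
+    from sqlalchemy.orm.collections import collection
+
+
+    class Bag:
+        """Neither list- nor set-like; fully decorator-driven."""
+
+        def __init__(self):
+            self._data = {}
+
+        @collection.appender
+        def put(self, item):
+            self._data[id(item)] = item
+
+        @collection.remover
+        def take(self, item):
+            del self._data[id(item)]
+
+        @collection.iterator
+        def contents(self):
+            return iter(self._data.values())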
+
+.. autoclass:: collection
+    :members:
+
+.. _dictionary_collections:
+
+Custom Dictionary-Based Collections
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The :class:`.MappedCollection` class can be used as
+a base class for your custom types or as a mix-in to quickly add ``dict``
+collection support to other classes. It uses a keying function to delegate to
+``__setitem__`` and ``__delitem__``:
+
+.. sourcecode:: python+sql
+
+    from sqlalchemy.orm.collections import MappedCollection
+
+
+    class MyNodeMap(MappedCollection):
+        """Holds 'Node' objects, keyed by the 'name' attribute."""
+
+        def __init__(self, *args, **kw):
+            super().__init__(keyfunc=lambda node: node.name)
+            dict.__init__(self, *args, **kw)
+
+When subclassing :class:`.MappedCollection`, user-defined versions
+of ``__setitem__()`` or ``__delitem__()`` should be decorated
+with :meth:`.collection.internally_instrumented`, **if** they call down
+to those same methods on :class:`.MappedCollection`.  This is because the methods
+on :class:`.MappedCollection` are already instrumented - calling them
+from within an already instrumented call can cause events to be fired off
+repeatedly, or inappropriately, leading to internal state corruption in
+rare cases::
+
+    from sqlalchemy.orm.collections import MappedCollection, collection
+
+
+    class MyMappedCollection(MappedCollection):
+        """Use @internally_instrumented when your methods
+        call down to already-instrumented methods.
+
+        """
+
+        @collection.internally_instrumented
+        def __setitem__(self, key, value, _sa_initiator=None):
+            # do something with key, value
+            super().__setitem__(key, value, _sa_initiator)
+
+        @collection.internally_instrumented
+        def __delitem__(self, key, _sa_initiator=None):
+            # do something with key
+            super().__delitem__(key, _sa_initiator)
+
+The ORM understands the ``dict`` interface just like lists and sets, and will
+automatically instrument all "dict-like" methods if you choose to subclass
+``dict`` or provide dict-like collection behavior in a duck-typed class. You
+must decorate appender and remover methods, however; there are no compatible
+methods in the basic dictionary interface for SQLAlchemy to use by default.
+Iteration will go through ``values()`` unless otherwise decorated.
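+
+As a minimal sketch (assuming the collected objects have a ``name``
+attribute; this mirrors the ``internally_instrumented`` pattern shown
+above)::
+
+    from sqlalchemy.orm.collections import collection
+
+
+    class NodeMap(dict):
+        """dict subclass; dict-like methods are instrumented
+        automatically, but appender/remover must be decorated."""
+
+        @collection.appender
+        @collection.internally_instrumented
+        def _append(self, node, _sa_initiator=None):
+            # delegate to the already-instrumented __setitem__
+            self.__setitem__(node.name, node, _sa_initiator)
+
+        @collection.remover
+        @collection.internally_instrumented
+        def _remove(self, node, _sa_initiator=None):
+            self.__delitem__(node.name, _sa_initiator)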
+
+.. autoclass:: sqlalchemy.orm.MappedCollection
+   :members:
+
+Instrumentation and Custom Types
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Many custom types and existing library classes can be used as an entity
+collection type as-is without further ado. However, it is important to note
+that the instrumentation process will modify the type, adding decorators
+around methods automatically.
+
+The decorations are lightweight and no-op outside of relationships, but they
+do add unneeded overhead when triggered elsewhere. When using a library class
+as a collection, it can be good practice to use the "trivial subclass" trick
+to restrict the decorations to just your usage in relationships. For example:
+
+.. sourcecode:: python+sql
+
+    class MyAwesomeList(some.great.library.AwesomeList):
+        pass
+
+
+    # ... relationship(..., collection_class=MyAwesomeList)
+
+The ORM uses this approach for built-ins, quietly substituting a trivial
+subclass when a ``list``, ``set`` or ``dict`` is used directly.
+
+Collection Internals
+--------------------
+
+Various internal methods.
+
+.. autofunction:: bulk_replace
+
+.. autodata:: collection_adapter
+
+.. autoclass:: CollectionAdapter
+
+.. autoclass:: InstrumentedDict
+
+.. autoclass:: InstrumentedList
+
+.. autoclass:: InstrumentedSet
+
+.. autofunction:: prepare_instrumentation
index 226870d67b20254b2c4fa5f18af7bff807d54b9e..43cbef23a9e256397c3a4a9f2e42bb1a7ffc4578 100644 (file)
-.. _collections_toplevel:
-
-.. currentmodule:: sqlalchemy.orm
+:orphan:
 
 =======================================
 Collection Configuration and Techniques
 =======================================
 
-The :func:`_orm.relationship` function defines a linkage between two classes.
-When the linkage defines a one-to-many or many-to-many relationship, it's
-represented as a Python collection when objects are loaded and manipulated.
-This section presents additional information about collection configuration
-and techniques.
-
-.. _largecollections:
-
-Working with Large Collections
-==============================
-
-The default behavior of :func:`_orm.relationship` is to fully load
-the collection of items in, as according to the loading strategy of the
-relationship. Additionally, the :class:`.Session` by default only knows how to delete
-objects which are actually present within the session. When a parent instance
-is marked for deletion and flushed, the :class:`.Session` loads its full list of child
-items in so that they may either be deleted as well, or have their foreign key
-value set to null; this is to avoid constraint violations. For large
-collections of child items, there are several strategies to bypass full
-loading of child items both at load time as well as deletion time.
-
-.. _dynamic_relationship:
-
-Dynamic Relationship Loaders
-----------------------------
-
-.. note:: SQLAlchemy 2.0 will have a slightly altered pattern for "dynamic"
-   loaders that does not rely upon the :class:`_orm.Query` object, which
-   will be legacy in 2.0.   For current migration strategies,
-   see :ref:`migration_20_dynamic_loaders`.
-
-.. note:: This loader is in the general case not compatible with the :ref:`asyncio_toplevel` extension.
-   It can be used with some limitations, as indicated in :ref:`Asyncio dynamic guidelines <dynamic_asyncio>`.
-
-A :func:`_orm.relationship` which corresponds to a large collection can be
-configured so that it returns a legacy :class:`_orm.Query` object when
-accessed, which allows filtering of the relationship on criteria. The class is
-a special class :class:`_orm.AppenderQuery` returned in place of a collection
-when accessed. Filtering criterion may be applied as well as limits and
-offsets, either explicitly or via array slices::
-
-    class User(Base):
-        __tablename__ = "user"
-
-        posts = relationship(Post, lazy="dynamic")
-
-
-    jack = session.get(User, id)
-
-    # filter Jack's blog posts
-    posts = jack.posts.filter(Post.headline == "this is a post")
-
-    # apply array slices
-    posts = jack.posts[5:20]
-
-The dynamic relationship supports limited write operations, via the
-:meth:`_orm.AppenderQuery.append` and :meth:`_orm.AppenderQuery.remove` methods::
-
-    oldpost = jack.posts.filter(Post.headline == "old post").one()
-    jack.posts.remove(oldpost)
-
-    jack.posts.append(Post("new post"))
-
-Since the read side of the dynamic relationship always queries the
-database, changes to the underlying collection will not be visible
-until the data has been flushed.  However, as long as "autoflush" is
-enabled on the :class:`.Session` in use, this will occur
-automatically each time the collection is about to emit a
-query.
-
-To place a dynamic relationship on a backref, use the :func:`_orm.backref`
-function in conjunction with ``lazy='dynamic'``::
-
-    class Post(Base):
-        __table__ = posts_table
-
-        user = relationship(User, backref=backref("posts", lazy="dynamic"))
-
-Note that eager/lazy loading options cannot be used in conjunction dynamic relationships at this time.
-
-.. autoclass:: sqlalchemy.orm.AppenderQuery
-    :members:
-
-.. note::
-
-   The :func:`_orm.dynamic_loader` function is essentially the same
-   as :func:`_orm.relationship` with the ``lazy='dynamic'`` argument specified.
-
-.. warning::
-
-   The "dynamic" loader applies to **collections only**.   It is not valid
-   to use "dynamic" loaders with many-to-one, one-to-one, or uselist=False
-   relationships.   Newer versions of SQLAlchemy emit warnings or exceptions
-   in these cases.
-
-.. _collections_noload_raiseload:
-
-Setting Noload, RaiseLoad
--------------------------
-
-A "noload" relationship never loads from the database, even when
-accessed.   It is configured using ``lazy='noload'``::
-
-    class MyClass(Base):
-        __tablename__ = "some_table"
-
-        children = relationship(MyOtherClass, lazy="noload")
-
-Above, the ``children`` collection is fully writeable, and changes to it will
-be persisted to the database as well as locally available for reading at the
-time they are added. However when instances of ``MyClass`` are freshly loaded
-from the database, the ``children`` collection stays empty.   The noload
-strategy is also available on a query option basis using the
-:func:`_orm.noload` loader option.
-
-Alternatively, a "raise"-loaded relationship will raise an
-:exc:`~sqlalchemy.exc.InvalidRequestError` where the attribute would normally
-emit a lazy load::
-
-    class MyClass(Base):
-        __tablename__ = "some_table"
-
-        children = relationship(MyOtherClass, lazy="raise")
-
-Above, attribute access on the ``children`` collection will raise an exception
-if it was not previously eagerloaded.  This includes read access but for
-collections will also affect write access, as collections can't be mutated
-without first loading them.  The rationale for this is to ensure that an
-application is not emitting any unexpected lazy loads within a certain context.
-Rather than having to read through SQL logs to determine that all necessary
-attributes were eager loaded, the "raise" strategy will cause unloaded
-attributes to raise immediately if accessed.  The raise strategy is
-also available on a query option basis using the :func:`_orm.raiseload`
-loader option.
-
-.. versionadded:: 1.1 added the "raise" loader strategy.
-
-.. seealso::
-
-    :ref:`prevent_lazy_with_raiseload`
-
-Using Passive Deletes
----------------------
-
-See :ref:`passive_deletes` for this section.
-
-
-.. currentmodule:: sqlalchemy.orm.collections
-
-.. _custom_collections:
-
-Customizing Collection Access
-=============================
-
-Mapping a one-to-many or many-to-many relationship results in a collection of
-values accessible through an attribute on the parent instance.   The two
-common collection types for these are ``list`` and ``set``, which in
-:ref:`Declarative <orm_declarative_styles_toplevel>` mappings that use
-:class:`_orm.Mapped` is established by using the collection type within
-the :class:`_orm.Mapped` container, as demonstrated in the ``Parent.children`` collection
-below where ``list`` is used::
-
-    from sqlalchemy import ForeignKey
-
-    from sqlalchemy.orm import DeclarativeBase
-    from sqlalchemy.orm import Mapped
-    from sqlalchemy.orm import mapped_column
-    from sqlalchemy.orm import relationship
-
-
-    class Base(DeclarativeBase):
-        pass
-
-
-    class Parent(Base):
-        __tablename__ = "parent"
-
-        parent_id: Mapped[int] = mapped_column(primary_key=True)
-
-        # use a list
-        children: Mapped[list["Child"]] = relationship()
-
-
-    class Child(Base):
-        __tablename__ = "child"
-
-        child_id: Mapped[int] = mapped_column(primary_key=True)
-        parent_id: Mapped[int] = mapped_column(ForeignKey("parent.id"))
-
-Or for a ``set``, illustrated in the same
-``Parent.children`` collection::
-
-    from sqlalchemy import ForeignKey
-
-    from sqlalchemy.orm import DeclarativeBase
-    from sqlalchemy.orm import Mapped
-    from sqlalchemy.orm import mapped_column
-    from sqlalchemy.orm import relationship
-
-
-    class Base(DeclarativeBase):
-        pass
-
-
-    class Parent(Base):
-        __tablename__ = "parent"
-
-        parent_id: Mapped[int] = mapped_column(primary_key=True)
-
-        # use a set
-        children: Mapped[set["Child"]] = relationship()
-
-
-    class Child(Base):
-        __tablename__ = "child"
-
-        child_id: Mapped[int] = mapped_column(primary_key=True)
-        parent_id: Mapped[int] = mapped_column(ForeignKey("parent.id"))
-
-.. note::  If using Python 3.7 or 3.8, annotations for collections need
-   to use ``typing.List`` or ``typing.Set``, e.g. ``Mapped[list["Child"]]`` or
-   ``Mapped[set["Child"]]``; the ``list`` and ``set`` Python built-ins
-   don't yet support generic annotation in these Python versions, such as::
-
-       from typing import List
-
-
-       class Parent(Base):
-           __tablename__ = "parent"
-
-           parent_id: Mapped[int] = mapped_column(primary_key=True)
-
-           # use a List, Python 3.8 and earlier
-           children: Mapped[List["Child"]] = relationship()
-
-When using mappings without the :class:`_orm.Mapped` annotation, such as when
-using :ref:`imperative mappings <orm_imperative_mapping>` or untyped
-Python code, as well as in a few special cases, the collection class for a
-:func:`_orm.relationship` can always be specified directly using the
-:paramref:`_orm.relationship.collection_class` parameter::
-
-    # non-annotated mapping
-
-
-    class Parent(Base):
-        __tablename__ = "parent"
-
-        parent_id = mapped_column(Integer, primary_key=True)
-
-        children = relationship("Child", collection_class=set)
-
-
-    class Child(Base):
-        __tablename__ = "child"
-
-        child_id = mapped_column(Integer, primary_key=True)
-        parent_id = mapped_column(ForeignKey("parent.id"))
-
-In the absence of :paramref:`_orm.relationship.collection_class`
-or :class:`_orm.Mapped`, the default collection type is ``list``.
-
-Beyond ``list`` and ``set`` builtins, there is also support for two varities of
-dictionary, described below at :ref:`orm_dictionary_collection`. There is also
-support for any arbitrary mutable sequence type can be set up as the target
-collection, with some additional configuration steps; this is described in the
-section :ref:`orm_custom_collection`.
-
-
-.. _orm_dictionary_collection:
-
-Dictionary Collections
-----------------------
-
-A little extra detail is needed when using a dictionary as a collection.
-This because objects are always loaded from the database as lists, and a key-generation
-strategy must be available to populate the dictionary correctly.  The
-:func:`.attribute_mapped_collection` function is by far the most common way
-to achieve a simple dictionary collection.  It produces a dictionary class that will apply a particular attribute
-of the mapped class as a key.   Below we map an ``Item`` class containing
-a dictionary of ``Note`` items keyed to the ``Note.keyword`` attribute.
-When using :func:`.attribute_mapped_collection`, the :class:`_orm.Mapped`
-annotation may be typed using the :class:`_orm.MappedCollection`
-type, however the :paramref:`_orm.relationship.collection_class` parameter
-is required in this case so that the :func:`.attribute_mapped_collection`
-may be appropriately parametrized::
-
-    from typing import Optional
-
-    from sqlalchemy import ForeignKey
-    from sqlalchemy.orm import attribute_mapped_collection
-    from sqlalchemy.orm import DeclarativeBase
-    from sqlalchemy.orm import Mapped
-    from sqlalchemy.orm import mapped_column
-    from sqlalchemy.orm import relationship
-    from sqlalchemy.orm import MappedCollection
-
-
-    class Base(DeclarativeBase):
-        pass
-
-
-    class Item(Base):
-        __tablename__ = "item"
-
-        id: Mapped[int] = mapped_column(primary_key=True)
-
-        notes: Mapped[MappedCollection[str, "Note"]] = relationship(
-            collection_class=attribute_mapped_collection("keyword"),
-            cascade="all, delete-orphan",
-        )
-
-
-    class Note(Base):
-        __tablename__ = "note"
-
-        id: Mapped[int] = mapped_column(primary_key=True)
-        item_id: Mapped[int] = mapped_column(ForeignKey("item.id"))
-        keyword: Mapped[str]
-        text: Mapped[Optional[str]]
-
-        def __init__(self, keyword: str, text: str):
-            self.keyword = keyword
-            self.text = text
-
-``Item.notes`` is then a dictionary::
-
-    >>> item = Item()
-    >>> item.notes["a"] = Note("a", "atext")
-    >>> item.notes.items()
-    {'a': <__main__.Note object at 0x2eaaf0>}
-
-:func:`.attribute_mapped_collection` will ensure that
-the ``.keyword`` attribute of each ``Note`` complies with the key in the
-dictionary.   Such as, when assigning to ``Item.notes``, the dictionary
-key we supply must match that of the actual ``Note`` object::
-
-    item = Item()
-    item.notes = {
-        "a": Note("a", "atext"),
-        "b": Note("b", "btext"),
-    }
-
-The attribute which :func:`.attribute_mapped_collection` uses as a key
-does not need to be mapped at all!  Using a regular Python ``@property`` allows virtually
-any detail or combination of details about the object to be used as the key, as
-below when we establish it as a tuple of ``Note.keyword`` and the first ten letters
-of the ``Note.text`` field::
-
-    class Item(Base):
-        __tablename__ = "item"
-
-        id: Mapped[int] = mapped_column(primary_key=True)
-
-        notes: Mapped[MappedCollection[str, "Note"]] = relationship(
-            collection_class=attribute_mapped_collection("note_key"),
-            back_populates="item",
-            cascade="all, delete-orphan",
-        )
-
-
-    class Note(Base):
-        __tablename__ = "note"
-
-        id: Mapped[int] = mapped_column(primary_key=True)
-        item_id: Mapped[int] = mapped_column(ForeignKey("item.id"))
-        keyword: Mapped[str]
-        text: Mapped[str]
-
-        item: Mapped["Item"] = relationship()
-
-        @property
-        def note_key(self):
-            return (self.keyword, self.text[0:10])
-
-        def __init__(self, keyword: str, text: str):
-            self.keyword = keyword
-            self.text = text
-
-Above we added a ``Note.item`` relationship, with a bi-directional
-:paramref:`_orm.relationship.back_populates` configuration.
-Assigning to this reverse relationship, the ``Note``
-is added to the ``Item.notes`` dictionary and the key is generated for us automatically::
-
-    >>> item = Item()
-    >>> n1 = Note("a", "atext")
-    >>> n1.item = item
-    >>> item.notes
-    {('a', 'atext'): <__main__.Note object at 0x2eaaf0>}
-
-Other built-in dictionary types include :func:`.column_mapped_collection`,
-which is almost like :func:`.attribute_mapped_collection` except given the :class:`_schema.Column`
-object directly::
-
-    from sqlalchemy.orm import column_mapped_collection
-
-
-    class Item(Base):
-        __tablename__ = "item"
-
-        id: Mapped[int] = mapped_column(primary_key=True)
-
-        notes: Mapped[MappedCollection[str, "Note"]] = relationship(
-            collection_class=column_mapped_collection(Note.__table__.c.keyword),
-            cascade="all, delete-orphan",
-        )
-
-as well as :func:`.mapped_collection` which is passed any callable function.
-Note that it's usually easier to use :func:`.attribute_mapped_collection` along
-with a ``@property`` as mentioned earlier::
-
-    from sqlalchemy.orm import mapped_collection
-
-
-    class Item(Base):
-        __tablename__ = "item"
-
-        id: Mapped[int] = mapped_column(primary_key=True)
-
-        notes: Mapped[MappedCollection[str, "Note"]] = relationship(
-            collection_class=mapped_collection(lambda note: note.text[0:10]),
-            cascade="all, delete-orphan",
-        )
-
-Dictionary mappings are often combined with the "Association Proxy" extension to produce
-streamlined dictionary views.  See :ref:`proxying_dictionaries` and :ref:`composite_association_proxy`
-for examples.
-
-.. _key_collections_mutations:
-
-Dealing with Key Mutations and back-populating for Dictionary collections
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-When using :func:`.attribute_mapped_collection`, the "key" for the dictionary
-is taken from an attribute on the target object.   **Changes to this key
-are not tracked**.  This means that the key must be assigned towards when
-it is first used, and if the key changes, the collection will not be mutated.
-A typical example where this might be an issue is when relying upon backrefs
-to populate an attribute mapped collection.  Given the following::
-
-    class A(Base):
-        __tablename__ = "a"
-
-        id: Mapped[int] = mapped_column(primary_key=True)
-
-        bs: Mapped[MappedCollection[str, "B"]] = relationship(
-            collection_class=attribute_mapped_collection("data"),
-            back_populates="a",
-        )
-
-
-    class B(Base):
-        __tablename__ = "b"
-
-        id: Mapped[int] = mapped_column(primary_key=True)
-        a_id: Mapped[int] = mapped_column(ForeignKey("a.id"))
-        data: Mapped[str]
-
-        a: Mapped["A"] = relationship(back_populates="bs")
-
-Above, if we create a ``B()`` that refers to a specific ``A()``, the back
-populates will then add the ``B()`` to the ``A.bs`` collection, however
-if the value of ``B.data`` is not set yet, the key will be ``None``::
-
-    >>> a1 = A()
-    >>> b1 = B(a=a1)
-    >>> a1.bs
-    {None: <test3.B object at 0x7f7b1023ef70>}
-
-
-Setting ``b1.data`` after the fact does not update the collection::
-
-    >>> b1.data = "the key"
-    >>> a1.bs
-    {None: <test3.B object at 0x7f7b1023ef70>}
-
-
-This can also be seen if one attempts to set up ``B()`` in the constructor.
-The order of arguments changes the result::
-
-    >>> B(a=a1, data="the key")
-    <test3.B object at 0x7f7b10114280>
-    >>> a1.bs
-    {None: <test3.B object at 0x7f7b10114280>}
-
-vs::
-
-    >>> B(data="the key", a=a1)
-    <test3.B object at 0x7f7b10114340>
-    >>> a1.bs
-    {'the key': <test3.B object at 0x7f7b10114340>}
-
-If backrefs are being used in this way, ensure that attributes are populated
-in the correct order using an ``__init__`` method.
-
-An event handler such as the following may also be used to track changes in the
-collection as well::
-
-    from sqlalchemy import event
-    from sqlalchemy.orm import attributes
-
-
-    @event.listens_for(B.data, "set")
-    def set_item(obj, value, previous, initiator):
-        if obj.a is not None:
-            previous = None if previous == attributes.NO_VALUE else previous
-            obj.a.bs[value] = obj
-            obj.a.bs.pop(previous)
-
-.. autofunction:: attribute_mapped_collection
-
-.. autofunction:: column_mapped_collection
-
-.. autofunction:: mapped_collection
-
-.. _orm_custom_collection:
-
-Custom Collection Implementations
-=================================
-
-You can use your own types for collections as well.  In simple cases,
-inheriting from ``list`` or ``set``, adding custom behavior, is all that's needed.
-In other cases, special decorators are needed to tell SQLAlchemy more detail
-about how the collection operates.
-
-.. topic:: Do I need a custom collection implementation?
-
-   In most cases not at all!   The most common use cases for a "custom" collection
-   is one that validates or marshals incoming values into a new form, such as
-   a string that becomes a class instance, or one which goes a
-   step beyond and represents the data internally in some fashion, presenting
-   a "view" of that data on the outside of a different form.
-
-   For the first use case, the :func:`_orm.validates` decorator is by far
-   the simplest way to intercept incoming values in all cases for the purposes
-   of validation and simple marshaling.  See :ref:`simple_validators`
-   for an example of this.
-
-   For the second use case, the :ref:`associationproxy_toplevel` extension is a
-   well-tested, widely used system that provides a read/write "view" of a
-   collection in terms of some attribute present on the target object. As the
-   target attribute can be a ``@property`` that returns virtually anything, a
-   wide array of "alternative" views of a collection can be constructed with
-   just a few functions. This approach leaves the underlying mapped collection
-   unaffected and avoids the need to carefully tailor collection behavior on a
-   method-by-method basis.
-
-   Customized collections are useful when the collection needs to
-   have special behaviors upon access or mutation operations that can't
-   otherwise be modeled externally to the collection.   They can of course
-   be combined with the above two approaches.
-
-Collections in SQLAlchemy are transparently *instrumented*. Instrumentation
-means that normal operations on the collection are tracked and result in
-changes being written to the database at flush time. Additionally, collection
-operations can fire *events* which indicate some secondary operation must take
-place. Examples of a secondary operation include saving the child item in the
-parent's :class:`~sqlalchemy.orm.session.Session` (i.e. the ``save-update``
-cascade), as well as synchronizing the state of a bi-directional relationship
-(i.e. a :func:`.backref`).
-
-The collections package understands the basic interface of lists, sets and
-dicts and will automatically apply instrumentation to those built-in types and
-their subclasses. Object-derived types that implement a basic collection
-interface are detected and instrumented via duck-typing:
-
-.. sourcecode:: python+sql
-
-    class ListLike:
-        def __init__(self):
-            self.data = []
-
-        def append(self, item):
-            self.data.append(item)
-
-        def remove(self, item):
-            self.data.remove(item)
-
-        def extend(self, items):
-            self.data.extend(items)
-
-        def __iter__(self):
-            return iter(self.data)
-
-        def foo(self):
-            return "foo"
-
-``append``, ``remove``, and ``extend`` are known list-like methods, and will
-be instrumented automatically. ``__iter__`` is not a mutator method and won't
-be instrumented, and ``foo`` won't be either.
-
-Duck-typing (i.e. guesswork) isn't rock-solid, of course, so you can be
-explicit about the interface you are implementing by providing an
-``__emulates__`` class attribute::
-
-    class SetLike:
-        __emulates__ = set
-
-        def __init__(self):
-            self.data = set()
-
-        def append(self, item):
-            self.data.add(item)
-
-        def remove(self, item):
-            self.data.remove(item)
-
-        def __iter__(self):
-            return iter(self.data)
-
-This class looks list-like because of ``append``, but ``__emulates__`` forces
-it to be treated as set-like. ``remove`` is known to be part of the set interface and will
-be instrumented.
-
-But this class won't work quite yet: a little glue is needed to adapt it for
-use by SQLAlchemy. The ORM needs to know which methods to use to append,
-remove and iterate over members of the collection. When using a type like
-``list`` or ``set``, the appropriate methods are well-known and used
-automatically when present. This set-like class does not provide the expected
-``add`` method, so we must supply an explicit mapping for the ORM via a
-decorator.
-
-Annotating Custom Collections via Decorators
---------------------------------------------
-
-Decorators can be used to tag the individual methods the ORM needs to manage
-collections. Use them when your class doesn't quite meet the regular interface
-for its container type, or when you otherwise would like to use a different method to
-get the job done.
-
-.. sourcecode:: python
-
-    from sqlalchemy.orm.collections import collection
-
-
-    class SetLike:
-        __emulates__ = set
-
-        def __init__(self):
-            self.data = set()
-
-        @collection.appender
-        def append(self, item):
-            self.data.add(item)
-
-        def remove(self, item):
-            self.data.remove(item)
-
-        def __iter__(self):
-            return iter(self.data)
-
-And that's all that's needed to complete the example. SQLAlchemy will add
-instances via the ``append`` method. ``remove`` and ``__iter__`` are the
-default methods for sets and will be used for removing and iteration. Default
-methods can be changed as well:
-
-.. sourcecode:: python+sql
-
-    from sqlalchemy.orm.collections import collection
-
-
-    class MyList(list):
-        @collection.remover
-        def zark(self, item):
-            # do something special...
-            ...
-
-        @collection.iterator
-        def hey_use_this_instead_for_iteration(self):
-            ...
-
-There is no requirement to be list-, or set-like at all. Collection classes
-can be any shape, so long as they have the append, remove and iterate
-interface marked for SQLAlchemy's use. Append and remove methods will be
-called with a mapped entity as the single argument, and iterator methods are
-called with no arguments and must return an iterator.
-
-.. autoclass:: collection
-    :members:
-
-.. _dictionary_collections:
-
-Custom Dictionary-Based Collections
------------------------------------
-
-The :class:`.MappedCollection` class can be used as
-a base class for your custom types or as a mix-in to quickly add ``dict``
-collection support to other classes. It uses a keying function to delegate to
-``__setitem__`` and ``__delitem__``:
-
-.. sourcecode:: python+sql
-
-    from sqlalchemy.util import OrderedDict
-    from sqlalchemy.orm.collections import MappedCollection
-
-
-    class NodeMap(OrderedDict, MappedCollection):
-        """Holds 'Node' objects, keyed by the 'name' attribute with insert order maintained."""
-
-        def __init__(self, *args, **kw):
-            MappedCollection.__init__(self, keyfunc=lambda node: node.name)
-            OrderedDict.__init__(self, *args, **kw)
-
-When subclassing :class:`.MappedCollection`, user-defined versions
-of ``__setitem__()`` or ``__delitem__()`` should be decorated
-with :meth:`.collection.internally_instrumented`, **if** they call down
-to those same methods on :class:`.MappedCollection`.  This is because the methods
-on :class:`.MappedCollection` are already instrumented - calling them
-from within an already instrumented call can cause events to be fired off
-repeatedly, or inappropriately, leading to internal state corruption in
-rare cases::
-
-    from sqlalchemy.orm.collections import MappedCollection, collection
-
-
-    class MyMappedCollection(MappedCollection):
-        """Use @internally_instrumented when your methods
-        call down to already-instrumented methods.
-
-        """
-
-        @collection.internally_instrumented
-        def __setitem__(self, key, value, _sa_initiator=None):
-            # do something with key, value
-            super(MyMappedCollection, self).__setitem__(key, value, _sa_initiator)
-
-        @collection.internally_instrumented
-        def __delitem__(self, key, _sa_initiator=None):
-            # do something with key
-            super(MyMappedCollection, self).__delitem__(key, _sa_initiator)
-
-The ORM understands the ``dict`` interface just like lists and sets, and will
-automatically instrument all dict-like methods if you choose to subclass
-``dict`` or provide dict-like collection behavior in a duck-typed class. You
-must decorate appender and remover methods, however; there are no compatible
-methods in the basic dictionary interface for SQLAlchemy to use by default.
-Iteration will go through ``itervalues()`` unless otherwise decorated.
-
-.. autoclass:: sqlalchemy.orm.MappedCollection
-   :members:
-
-Instrumentation and Custom Types
---------------------------------
-
-Many custom types and existing library classes can be used as an entity
-collection type as-is without further ado. However, it is important to note
-that the instrumentation process will modify the type, adding decorators
-around methods automatically.
-
-The decorations are lightweight and no-op outside of relationships, but they
-do add unneeded overhead when triggered elsewhere. When using a library class
-as a collection, it can be good practice to use the "trivial subclass" trick
-to restrict the decorations to just your usage in relationships. For example:
-
-.. sourcecode:: python+sql
-
-    class MyAwesomeList(some.great.library.AwesomeList):
-        pass
-
-
-    # ... relationship(..., collection_class=MyAwesomeList)
-
-The ORM uses this approach for built-ins, quietly substituting a trivial
-subclass when a ``list``, ``set`` or ``dict`` is used directly.
-
-Collection Internals
-====================
-
-Various internal methods.
-
-.. autofunction:: bulk_replace
-
-.. autodata:: collection_adapter
-
-.. autoclass:: CollectionAdapter
-
-.. autoclass:: InstrumentedDict
+This page has been broken into two separate pages:
 
-.. autoclass:: InstrumentedList
+:doc:`large_collections`
 
-.. autoclass:: InstrumentedSet
+:doc:`collection_api`
 
-.. autofunction:: prepare_instrumentation
index dac14fd892bd1c854eec65f0e764b95eb0f0b221..f43c0b53e1ca71c369225ba45f1cffa4edb9bfae 100644 (file)
@@ -356,6 +356,10 @@ Other guidelines include:
       stmt = user.addresses.statement.where(Address.email_address.startswith("patrick"))
       addresses_filter = (await session.scalars(stmt)).all()
 
+  The :ref:`write only <write_only_relationship>` technique, introduced in
+  version 2.0 of SQLAlchemy, is fully compatible with asyncio and should be
+  preferred.
+
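+  A sketch of the equivalent operation against a write-only collection,
+  assuming ``User.addresses`` were declared with :class:`_orm.WriteOnlyMapped`::
+
+      stmt = user.addresses.select().where(Address.email_address.startswith("patrick"))
+      addresses_filter = (await session.scalars(stmt)).all()
+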
   .. seealso::
 
     :ref:`migration_20_dynamic_loaders` - notes on migration to 2.0 style
diff --git a/doc/build/orm/large_collections.rst b/doc/build/orm/large_collections.rst
new file mode 100644 (file)
index 0000000..4d9f96e
--- /dev/null
@@ -0,0 +1,677 @@
+.. highlight:: pycon+sql
+.. doctest-enable
+
+.. currentmodule:: sqlalchemy.orm
+
+.. _largecollections:
+
+Working with Large Collections
+==============================
+
+The default behavior of :func:`_orm.relationship` is to fully load
+the contents of collections into memory, based on a configured
+:ref:`loader strategy <orm_queryguide_relationship_loaders>` that controls
+when and how these contents are loaded from the database.  Related collections
+may be loaded into memory not just when they are accessed or eagerly loaded,
+but in most cases also when the collection itself is mutated, as well as
+when the owning object is to be deleted by the unit of work system.
+
+When a related collection is potentially very large, it may not be feasible
+for such a collection to be populated into memory under any circumstances,
+as the operation may consume excessive time, network and memory
+resources.
+
+This section includes API features intended to allow :func:`_orm.relationship`
+to be used with large collections while maintaining adequate performance.
+
+
+.. _write_only_relationship:
+
+Write Only Relationships
+------------------------
+
+The **write only** loader strategy is the primary means of configuring a
+:func:`_orm.relationship` that will remain writeable, but will not load
+its contents into memory.  A write-only ORM configuration in modern
+type-annotated Declarative form is illustrated below::
+
+    >>> from decimal import Decimal
+    >>> from datetime import datetime
+
+    >>> from sqlalchemy import ForeignKey
+    >>> from sqlalchemy import func
+    >>> from sqlalchemy.orm import DeclarativeBase
+    >>> from sqlalchemy.orm import Mapped
+    >>> from sqlalchemy.orm import mapped_column
+    >>> from sqlalchemy.orm import relationship
+    >>> from sqlalchemy.orm import Session
+    >>> from sqlalchemy.orm import WriteOnlyMapped
+
+    >>> class Base(DeclarativeBase):
+    ...     pass
+
+    >>> class Account(Base):
+    ...     __tablename__ = "account"
+    ...     id: Mapped[int] = mapped_column(primary_key=True)
+    ...     identifier: Mapped[str]
+    ...
+    ...     account_transactions: WriteOnlyMapped["AccountTransaction"] = relationship(
+    ...         cascade="all, delete-orphan",
+    ...         passive_deletes=True,
+    ...         order_by="AccountTransaction.timestamp",
+    ...     )
+    ...
+    ...     def __repr__(self):
+    ...         return f"Account(identifier={self.identifier!r})"
+
+    >>> class AccountTransaction(Base):
+    ...     __tablename__ = "account_transaction"
+    ...     id: Mapped[int] = mapped_column(primary_key=True)
+    ...     account_id: Mapped[int] = mapped_column(
+    ...         ForeignKey("account.id", ondelete="cascade")
+    ...     )
+    ...     description: Mapped[str]
+    ...     amount: Mapped[Decimal]
+    ...     timestamp: Mapped[datetime] = mapped_column(default=func.now())
+    ...
+    ...     def __repr__(self):
+    ...         return (
+    ...             f"AccountTransaction(amount={self.amount:.2f}, "
+    ...             f"timestamp={self.timestamp.isoformat()!r})"
+    ...         )
+    ...
+    ...     __mapper_args__ = {"eager_defaults": True}
+
+
+.. setup code not for display
+
+    >>> from sqlalchemy import create_engine
+    >>> from sqlalchemy import event
+    >>> engine = create_engine("sqlite://", echo=True)
+    >>> @event.listens_for(engine, "connect")
+    ... def set_sqlite_pragma(dbapi_connection, connection_record):
+    ...     cursor = dbapi_connection.cursor()
+    ...     cursor.execute("PRAGMA foreign_keys=ON")
+    ...     cursor.close()
+
+    >>> Base.metadata.create_all(engine)
+    BEGIN...
+
+
+Above, the ``account_transactions`` relationship is configured not using the
+ordinary :class:`.Mapped` annotation, but instead
+using the :class:`.WriteOnlyMapped` type annotation, which at runtime will
+assign the :ref:`loader strategy <orm_queryguide_relationship_loaders>` of
+``lazy="write_only"`` to the target :func:`_orm.relationship`.
+The :class:`.WriteOnlyMapped` annotation is an
+alternative form of the :class:`_orm.Mapped` annotation which indicates the use
+of the :class:`_orm.WriteOnlyCollection` collection type on instances of the
+object.
+
+The above :func:`_orm.relationship` configuration also includes several
+elements that are specific to what action to take when ``Account`` objects
+are deleted, as well as when ``AccountTransaction`` objects are removed from the
+``account_transactions`` collection.  These elements are:
+
+* ``passive_deletes=True`` - allows the :term:`unit of work` to forego having
+  to load the collection when ``Account`` is deleted; see
+  :ref:`passive_deletes`.
+* ``ondelete="cascade"`` configured on the :class:`.ForeignKey` constraint.
+  This is also detailed at :ref:`passive_deletes`.
+* ``cascade="all, delete-orphan"`` - instructs the :term:`unit of work` to
+  delete ``AccountTransaction`` objects when they are removed from the
+  collection.  See :ref:`cascade_delete_orphan` in the :ref:`unitofwork_cascades`
+  document.
+
+.. versionadded:: 2.0  Added "Write only" relationship loaders.
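+
+As noted above, the :class:`.WriteOnlyMapped` annotation assigns
+``lazy="write_only"`` at runtime; for mappings that don't use the annotation
+form, the same loader strategy may be selected by passing the ``lazy``
+parameter to :func:`_orm.relationship` directly.  A minimal sketch of this
+alternative spelling:
+
+.. sourcecode:: python
+
+    class Account(Base):
+        __tablename__ = "account"
+
+        id: Mapped[int] = mapped_column(primary_key=True)
+        identifier: Mapped[str]
+
+        # equivalent configuration stated with the lazy parameter;
+        # the attribute still produces a WriteOnlyCollection at runtime
+        account_transactions = relationship(
+            "AccountTransaction", lazy="write_only"
+        )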
+
+
+Creating and Persisting New Write Only Collections
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The write-only collection allows for direct assignment of the collection
+as a whole **only** for :term:`transient` or :term:`pending` objects.
+With our above mapping, this indicates we can create a new ``Account``
+object with a sequence of ``AccountTransaction`` objects to be added
+to a :class:`_orm.Session`.   Any Python iterable may be used as the
+initial source of objects; below, we use a Python ``list``::
+
+    >>> new_account = Account(
+    ...     identifier="account_01",
+    ...     account_transactions=[
+    ...         AccountTransaction(description="initial deposit", amount=Decimal("500.00")),
+    ...         AccountTransaction(description="transfer", amount=Decimal("1000.00")),
+    ...         AccountTransaction(description="withdrawal", amount=Decimal("-29.50")),
+    ...     ],
+    ... )
+
+    >>> with Session(engine) as session:
+    ...     session.add(new_account)
+    ...     session.commit()
+    {opensql}BEGIN (implicit)
+    INSERT INTO account (identifier) VALUES (?)
+    [...] ('account_01',)
+    INSERT INTO account_transaction (account_id, description, amount, timestamp) VALUES
+    (?, ?, ?, CURRENT_TIMESTAMP), (?, ?, ?, CURRENT_TIMESTAMP), (?, ?, ?, CURRENT_TIMESTAMP)
+    RETURNING id, timestamp
+    [...] (1, 'initial deposit', 500.0, 1, 'transfer', 1000.0, 1, 'withdrawal', -29.5)
+    COMMIT
+
+Once an object is database-persisted (i.e. in the :term:`persistent` or
+:term:`detached` state), the collection has the ability to be extended with new
+items as well as the ability for individual items to be removed. However, the
+collection may **no longer be re-assigned with a full replacement collection**,
+as such an operation requires that the previous collection is fully
+loaded into memory in order to reconcile the old entries with the new ones::
+
+    >>> new_account.account_transactions = [
+    ...     AccountTransaction(description="some transaction", amount=Decimal("10.00"))
+    ... ]
+    Traceback (most recent call last):
+    ...
+    sqlalchemy.exc.InvalidRequestError: Collection "Account.account_transactions" does not
+    support implicit iteration; collection replacement operations can't be used
+
+Adding New Items to an Existing Collection
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+For write-only collections of persistent objects,
+modifications to the collection using :term:`unit of work` processes may proceed
+only by using the :meth:`.WriteOnlyCollection.add`,
+:meth:`.WriteOnlyCollection.add_all` and :meth:`.WriteOnlyCollection.remove`
+methods::
+
+    >>> from sqlalchemy import select
+    >>> session = Session(engine, expire_on_commit=False)
+    >>> existing_account = session.scalar(select(Account).filter_by(identifier="account_01"))
+    {opensql}BEGIN (implicit)
+    SELECT account.id, account.identifier
+    FROM account
+    WHERE account.identifier = ?
+    [...] ('account_01',)
+    {stop}
+    >>> existing_account.account_transactions.add_all(
+    ...     [
+    ...         AccountTransaction(description="paycheck", amount=Decimal("2000.00")),
+    ...         AccountTransaction(description="rent", amount=Decimal("-800.00")),
+    ...     ]
+    ... )
+    >>> session.commit()
+    {opensql}INSERT INTO account_transaction (account_id, description, amount, timestamp)
+    VALUES (?, ?, ?, CURRENT_TIMESTAMP), (?, ?, ?, CURRENT_TIMESTAMP)
+    RETURNING id, timestamp
+    [...] (1, 'paycheck', 2000.0, 1, 'rent', -800.0)
+    COMMIT
+
+The items added above are held in a pending queue within the
+:class:`_orm.Session` until the next flush, at which point they are INSERTed
+into the database, assuming the added objects were previously :term:`transient`.
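+
+The same pattern applies when adding a single item with
+:meth:`.WriteOnlyCollection.add`; a condensed sketch, assuming the
+``existing_account`` object and ``session`` from above:
+
+.. sourcecode:: python
+
+    existing_account.account_transactions.add(
+        AccountTransaction(description="fee", amount=Decimal("-1.50"))
+    )
+    # no INSERT has been emitted yet; the new object waits in the
+    # pending queue until the Session next flushes
+    session.commit()  # flush occurs here, emitting the INSERT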
+
+Querying Items
+~~~~~~~~~~~~~~
+
+The :class:`_orm.WriteOnlyCollection` does not at any point store a reference
+to the current contents of the collection, nor does it have any behavior where
+it would directly emit a SELECT to the database in order to load them; the
+overriding assumption is that the collection may contain many thousands or
+millions of rows, and should never be fully loaded into memory as a side effect
+of any other operation.
+
+Instead, the :class:`_orm.WriteOnlyCollection` includes SQL-generating helpers
+such as :meth:`_orm.WriteOnlyCollection.select`, which will generate
+a :class:`.Select` construct pre-configured with the correct WHERE / FROM
+criteria for the current parent row, which can then be further modified in
+order to SELECT any range of rows desired, as well as invoked using features
+like :ref:`server side cursors <orm_queryguide_yield_per>` for processes that
+wish to iterate through the full collection in a memory-efficient manner.
+
+The statement generated is illustrated below. Note it also includes ORDER BY
+criteria, indicated in the example mapping by the
+:paramref:`_orm.relationship.order_by` parameter of :func:`_orm.relationship`;
+this criteria would be omitted if the parameter were not configured::
+
+    >>> print(existing_account.account_transactions.select())
+    {opensql}SELECT account_transaction.id, account_transaction.account_id, account_transaction.description,
+    account_transaction.amount, account_transaction.timestamp
+    FROM account_transaction
+    WHERE :param_1 = account_transaction.account_id ORDER BY account_transaction.timestamp
+
+We may use this :class:`.Select` construct along with the :class:`_orm.Session`
+in order to query for ``AccountTransaction`` objects, most easily using the
+:meth:`_orm.Session.scalars` method that will return a :class:`.Result` that
+yields ORM objects directly. It's typical, though not required, that the
+:class:`.Select` would be modified further to limit the records returned; in
+the example below, additional WHERE criteria to load only "debit" account
+transactions is added, along with "LIMIT 10" to retrieve only the first ten
+rows::
+
+    >>> account_transactions = session.scalars(
+    ...     existing_account.account_transactions.select()
+    ...     .where(AccountTransaction.amount < 0)
+    ...     .limit(10)
+    ... ).all()
+    {opensql}BEGIN (implicit)
+    SELECT account_transaction.id, account_transaction.account_id, account_transaction.description,
+    account_transaction.amount, account_transaction.timestamp
+    FROM account_transaction
+    WHERE ? = account_transaction.account_id AND account_transaction.amount < ?
+    ORDER BY account_transaction.timestamp  LIMIT ? OFFSET ?
+    [...] (1, 0, 10, 0)
+    {stop}>>> print(account_transactions)
+    [AccountTransaction(amount=-29.50, timestamp='...'), AccountTransaction(amount=-800.00, timestamp='...')]
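+
+For the memory-efficient full iteration mentioned earlier, the same statement
+may be combined with the ``yield_per`` execution option so that rows are
+delivered in batches rather than all at once; a sketch using a hypothetical
+batch size of 100:
+
+.. sourcecode:: python
+
+    stmt = existing_account.account_transactions.select()
+
+    # stream rows in batches of 100, avoiding loading the full
+    # collection into memory at once
+    for tx in session.scalars(stmt, execution_options={"yield_per": 100}):
+        ...  # process each AccountTransaction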
+
+
+Removing Items
+~~~~~~~~~~~~~~
+
+Individual items that are loaded in the :term:`persistent`
+state against the current :class:`_orm.Session` may be marked for removal
+from the collection using the :meth:`.WriteOnlyCollection.remove` method.
+The flush process will implicitly consider the object to be already part
+of the collection when the operation proceeds.   The example below
+illustrates removal of an individual ``AccountTransaction`` item,
+which per :ref:`cascade <unitofwork_cascades>` settings results in a
+DELETE of that row::
+
+  >>> existing_transaction = account_transactions[0]
+  >>> existing_account.account_transactions.remove(existing_transaction)
+  >>> session.commit()
+  {opensql}DELETE FROM account_transaction WHERE account_transaction.id = ?
+  [...] (3,)
+  COMMIT
+
+As with any ORM-mapped collection, object removal may proceed either to
+de-associate the object from the collection while leaving the object present in
+the database, or may issue a DELETE for its row, based on the
+:ref:`cascade_delete_orphan` configuration of the :func:`_orm.relationship`.
+
+Collection removal without deletion involves setting foreign key columns to
+NULL for a :ref:`one-to-many <relationship_patterns_o2m>` relationship, or
+deleting the corresponding association row for a
+:ref:`many-to-many <relationships_many_to_many>` relationship.
+
+
+
+Bulk INSERT of New Items
+~~~~~~~~~~~~~~~~~~~~~~~~
+
+The :class:`.WriteOnlyCollection` can generate DML constructs such as
+:class:`_dml.Insert` objects, which may be used in an ORM context to
+produce bulk insert behavior.  See the section
+:ref:`orm_queryguide_bulk_insert` for an overview of ORM bulk inserts.
+
+One to Many Collections
+^^^^^^^^^^^^^^^^^^^^^^^
+
+For a **regular one to many collection only**, the :meth:`.WriteOnlyCollection.insert`
+method will produce an :class:`_dml.Insert` construct which is pre-established with
+VALUES criteria corresponding to the parent object.  As this VALUES criteria
+is entirely against the related table, the statement can be used to
+INSERT new rows that will at the same time become new records in the
+related collection::
+
+  >>> session.execute(
+  ...     existing_account.account_transactions.insert(),
+  ...     [
+  ...         {"description": "transaction 1", "amount": Decimal("47.50")},
+  ...         {"description": "transaction 2", "amount": Decimal("-501.25")},
+  ...         {"description": "transaction 3", "amount": Decimal("1800.00")},
+  ...         {"description": "transaction 4", "amount": Decimal("-300.00")},
+  ...     ],
+  ... )
+  {opensql}BEGIN (implicit)
+  INSERT INTO account_transaction (account_id, description, amount, timestamp) VALUES (?, ?, ?, CURRENT_TIMESTAMP)
+  [...] [(1, 'transaction 1', 47.5), (1, 'transaction 2', -501.25), (1, 'transaction 3', 1800.0), (1, 'transaction 4', -300.0)]
+  <...>
+  {stop}
+  >>> session.commit()
+  COMMIT
+
+.. seealso::
+
+    :ref:`orm_queryguide_bulk_insert` - in the :ref:`queryguide_toplevel`
+
+    :ref:`relationship_patterns_o2m` - at :ref:`relationship_patterns`
+
+
+Many to Many Collections
+^^^^^^^^^^^^^^^^^^^^^^^^
+
+For a **many to many collection**, the relationship between two classes
+involves a third table that is configured using the
+:paramref:`_orm.relationship.secondary` parameter of :class:`_orm.relationship`.
+To bulk insert rows into a collection of this type using
+:class:`.WriteOnlyCollection`, the new records may be bulk-inserted separately
+first, retrieved using RETURNING, and those records then passed to the
+:meth:`.WriteOnlyCollection.add_all` method where the unit of work process
+will proceed to persist them as part of the collection.
+
+Supposing a class ``BankAudit`` referred to many ``AccountTransaction``
+records using a many-to-many table::
+
+    >>> from sqlalchemy import Table, Column
+    >>> audit_to_transaction = Table(
+    ...     "audit_transaction",
+    ...     Base.metadata,
+    ...     Column("audit_id", ForeignKey("audit.id", ondelete="CASCADE"), primary_key=True),
+    ...     Column(
+    ...         "transaction_id",
+    ...         ForeignKey("account_transaction.id", ondelete="CASCADE"),
+    ...         primary_key=True,
+    ...     ),
+    ... )
+    >>> class BankAudit(Base):
+    ...     __tablename__ = "audit"
+    ...     id: Mapped[int] = mapped_column(primary_key=True)
+    ...     account_transactions: WriteOnlyMapped["AccountTransaction"] = relationship(
+    ...         secondary=audit_to_transaction, passive_deletes=True
+    ...     )
+
+.. setup code not for display
+
+    >>> Base.metadata.create_all(engine)
+    BEGIN...
+
+To illustrate the two operations, we add more ``AccountTransaction`` objects
+using bulk insert, which we retrieve using RETURNING by adding
+``returning(AccountTransaction)`` to the bulk INSERT statement (note that
+we could just as easily use existing ``AccountTransaction`` objects as well)::
+
+  >>> new_transactions = session.scalars(
+  ...     existing_account.account_transactions.insert().returning(AccountTransaction),
+  ...     [
+  ...         {"description": "odd trans 1", "amount": Decimal("50000.00")},
+  ...         {"description": "odd trans 2", "amount": Decimal("25000.00")},
+  ...         {"description": "odd trans 3", "amount": Decimal("45.00")},
+  ...     ],
+  ... ).all()
+  {opensql}BEGIN (implicit)
+  INSERT INTO account_transaction (account_id, description, amount, timestamp) VALUES
+  (?, ?, ?, CURRENT_TIMESTAMP), (?, ?, ?, CURRENT_TIMESTAMP), (?, ?, ?, CURRENT_TIMESTAMP)
+  RETURNING id, account_id, description, amount, timestamp
+  [...] (1, 'odd trans 1', 50000.0, 1, 'odd trans 2', 25000.0, 1, 'odd trans 3', 45.0)
+  {stop}
+
+With a list of ``AccountTransaction`` objects ready, the
+:meth:`_orm.WriteOnlyCollection.add_all` method is used to associate many rows
+at once with a new ``BankAudit`` object::
+
+  >>> bank_audit = BankAudit()
+  >>> session.add(bank_audit)
+  >>> bank_audit.account_transactions.add_all(new_transactions)
+  >>> session.commit()
+  {opensql}INSERT INTO audit DEFAULT VALUES
+  [...] ()
+  INSERT INTO audit_transaction (audit_id, transaction_id) VALUES (?, ?)
+  [...] [(1, 10), (1, 11), (1, 12)]
+  COMMIT
+
+.. seealso::
+
+    :ref:`orm_queryguide_bulk_insert` - in the :ref:`queryguide_toplevel`
+
+    :ref:`relationships_many_to_many` - at :ref:`relationship_patterns`
+
+
+Bulk UPDATE and DELETE of Items
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+In a similar way in which :class:`.WriteOnlyCollection` can generate
+:class:`.Select` constructs with WHERE criteria pre-established, it can
+also generate :class:`.Update` and :class:`.Delete` constructs with that
+same WHERE criteria, to allow criteria-oriented UPDATE and DELETE statements
+against the elements in a large collection.
+
+One to Many Collections
+^^^^^^^^^^^^^^^^^^^^^^^
+
+As is the case with INSERT, this feature is most straightforward with **one
+to many collections**.
+
+In the example below, the :meth:`.WriteOnlyCollection.update` method is used
+to generate an UPDATE statement that is emitted against the elements
+in the collection, locating rows where the "amount" is equal to ``-800`` and
+adding the amount of ``200`` to them::
+
+  >>> session.execute(
+  ...     existing_account.account_transactions.update()
+  ...     .values(amount=AccountTransaction.amount + 200)
+  ...     .where(AccountTransaction.amount == -800),
+  ... )
+  {opensql}BEGIN (implicit)
+  UPDATE account_transaction SET amount=(account_transaction.amount + ?)
+  WHERE ? = account_transaction.account_id AND account_transaction.amount = ?
+  [...] (200, 1, -800)
+  {stop}<...>
+
+In a similar way, :meth:`.WriteOnlyCollection.delete` will produce a
+DELETE statement that is invoked in the same way::
+
+  >>> session.execute(
+  ...     existing_account.account_transactions.delete().where(
+  ...         AccountTransaction.amount.between(0, 30)
+  ...     ),
+  ... )
+  {opensql}DELETE FROM account_transaction WHERE ? = account_transaction.account_id
+  AND account_transaction.amount BETWEEN ? AND ? RETURNING id
+  [...] (1, 0, 30)
+  <...>
+  {stop}
+
+Many to Many Collections
+^^^^^^^^^^^^^^^^^^^^^^^^
+
+.. tip::
+
+    The techniques here involve multi-table UPDATE expressions, which are
+    slightly more advanced.
+
+For bulk UPDATE and DELETE of **many to many collections**, in order for
+an UPDATE or DELETE statement to relate to the primary key of the
+parent object, the association table must be explicitly part of the
+UPDATE/DELETE statement, which requires
+either that the backend includes support for non-standard SQL syntaxes,
+or extra explicit steps when constructing the UPDATE or DELETE statement.
+
+For backends that support multi-table versions of UPDATE, the
+:meth:`.WriteOnlyCollection.update` method should work without extra steps
+for a many-to-many collection, as in the example below where an UPDATE
+is emitted against ``AccountTransaction`` objects in terms of the
+many-to-many ``BankAudit.account_transactions`` collection::
+
+    >>> session.execute(
+    ...     bank_audit.account_transactions.update().values(
+    ...         description=AccountTransaction.description + " (audited)"
+    ...     )
+    ... )
+    {opensql}UPDATE account_transaction SET description=(account_transaction.description || ?)
+    FROM audit_transaction WHERE ? = audit_transaction.audit_id
+    AND account_transaction.id = audit_transaction.transaction_id RETURNING id
+    [...] (' (audited)', 1)
+    {stop}<...>
+
+The above statement automatically makes use of "UPDATE..FROM" syntax,
+supported by SQLite and others, to name the additional ``audit_transaction``
+table in the WHERE clause.
+
+To UPDATE or DELETE a many-to-many collection where multi-table syntax is
+not available, the many-to-many criteria may be moved into a SELECT that,
+for example, may be combined with IN to match rows.
+The :class:`.WriteOnlyCollection` still helps us here, as we use the
+:meth:`.WriteOnlyCollection.select` method to generate this SELECT for
+us, making use of the :meth:`_sql.Select.with_only_columns` method to
+produce a :term:`scalar subquery`::
+
+    >>> from sqlalchemy import update
+    >>> subq = bank_audit.account_transactions.select().with_only_columns(AccountTransaction.id)
+    >>> session.execute(
+    ...     update(AccountTransaction)
+    ...     .values(description=AccountTransaction.description + " (audited)")
+    ...     .where(AccountTransaction.id.in_(subq))
+    ... )
+    {opensql}UPDATE account_transaction SET description=(account_transaction.description || ?)
+    WHERE account_transaction.id IN (SELECT account_transaction.id
+    FROM audit_transaction
+    WHERE ? = audit_transaction.audit_id AND account_transaction.id = audit_transaction.transaction_id)
+    RETURNING id
+    [...] (' (audited)', 1)
+    <...>
+
+Write Only Collections - API Documentation
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+
+.. autoclass:: sqlalchemy.orm.WriteOnlyCollection
+    :members:
+    :inherited-members:
+
+.. autoclass:: sqlalchemy.orm.WriteOnlyMapped
+    :members:
+
+.. highlight:: python
+.. doctest-disable
+
+.. _dynamic_relationship:
+
+Dynamic Relationship Loaders
+----------------------------
+
+.. legacy::  The "dynamic" lazy loader strategy is the legacy form of what is
+   now the "write_only" strategy described in the section
+   :ref:`write_only_relationship`.
+
+   The "dynamic" strategy produces a legacy :class:`_orm.Query` object from the
+   related collection. However, a major drawback of "dynamic" relationships is
+   that there are several cases where the collection will fully iterate, some
+   of which are non-obvious, which can only be prevented with careful
+   programming and testing on a case-by-case basis. Therefore, for truly large
+   collection management, the :class:`_orm.WriteOnlyCollection` should be
+   preferred.
+
+   The dynamic loader is also not compatible with the :ref:`asyncio_toplevel`
+   extension. It can be used with some limitations, as indicated in
+   :ref:`Asyncio dynamic guidelines <dynamic_asyncio>`, but again the
+   :class:`_orm.WriteOnlyCollection`, which is fully compatible with asyncio,
+   should be preferred.
+
+The dynamic relationship strategy allows configuration of a
+:func:`_orm.relationship` which when accessed on an instance will return a
+legacy :class:`_orm.Query` object in place of the collection. The
+:class:`_orm.Query` can then be modified further so that the database
+collection may be iterated based on filtering criteria. The returned
+:class:`_orm.Query` object is an instance of :class:`_orm.AppenderQuery`, which
+combines the loading and iteration behavior of :class:`_orm.Query` along with
+rudimentary collection mutation methods such as
+:meth:`_orm.AppenderQuery.append` and :meth:`_orm.AppenderQuery.remove`.
+
+The "dynamic" loader strategy may be configured with
+type-annotated Declarative form using the :class:`_orm.DynamicMapped`
+annotation class::
+
+    from sqlalchemy.orm import DynamicMapped
+
+
+    class User(Base):
+        __tablename__ = "user"
+
+        id: Mapped[int] = mapped_column(primary_key=True)
+        posts: DynamicMapped[Post] = relationship()
+
+Above, the ``User.posts`` collection on an individual ``User`` object
+will return the :class:`_orm.AppenderQuery` object, which is a subclass
+of :class:`_orm.Query` that also supports basic collection mutation
+operations::
+
+
+    jack = session.get(User, id)
+
+    # filter Jack's blog posts
+    posts = jack.posts.filter(Post.headline == "this is a post")
+
+    # apply array slices
+    posts = jack.posts[5:20]
+
+The dynamic relationship supports limited write operations, via the
+:meth:`_orm.AppenderQuery.append` and :meth:`_orm.AppenderQuery.remove` methods::
+
+    oldpost = jack.posts.filter(Post.headline == "old post").one()
+    jack.posts.remove(oldpost)
+
+    jack.posts.append(Post("new post"))
+
+Since the read side of the dynamic relationship always queries the
+database, changes to the underlying collection will not be visible
+until the data has been flushed.  However, as long as "autoflush" is
+enabled on the :class:`.Session` in use, this will occur
+automatically each time the collection is about to emit a
+query.
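+
+A condensed sketch of this behavior, assuming the ``User`` / ``Post`` mapping
+above with "autoflush" left at its default of ``True``:
+
+.. sourcecode:: python
+
+    jack.posts.append(Post(headline="another post"))
+
+    # iterating the dynamic collection emits a SELECT; autoflush first
+    # flushes the pending Post, so it appears in the results
+    for post in jack.posts:
+        print(post.headline)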
+
+
+Dynamic Relationship Loaders - API
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. autoclass:: sqlalchemy.orm.AppenderQuery
+    :members:
+    :inherited-members: Query
+
+.. autoclass:: sqlalchemy.orm.DynamicMapped
+    :members:
+
+.. _collections_raiseload:
+
+Setting RaiseLoad
+-----------------
+
+A "raise"-loaded relationship will raise an
+:exc:`~sqlalchemy.exc.InvalidRequestError` where the attribute would normally
+emit a lazy load::
+
+    class MyClass(Base):
+        __tablename__ = "some_table"
+
+        # ...
+
+        children: Mapped[list[MyRelatedClass]] = relationship(lazy="raise")
+
+Above, attribute access on the ``children`` collection will raise an exception
+if it was not previously populated.  This includes read access as well as
+write access, since collections can't be mutated without first being
+loaded.  The rationale for this is to ensure that an
+application is not emitting any unexpected lazy loads within a certain context.
+Rather than having to read through SQL logs to determine that all necessary
+attributes were eager loaded, the "raise" strategy will cause unloaded
+attributes to raise immediately if accessed.  The raise strategy is
+also available on a query option basis using the :func:`_orm.raiseload`
+loader option.
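+
+A sketch of that per-query form, applied to the ``MyClass`` mapping above:
+
+.. sourcecode:: python
+
+    from sqlalchemy import select
+    from sqlalchemy.orm import raiseload
+
+    # applies "raise" behavior to MyClass.children for this query only;
+    # accessing the collection on the returned objects raises
+    # InvalidRequestError instead of lazy loading
+    stmt = select(MyClass).options(raiseload(MyClass.children))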
+
+.. seealso::
+
+    :ref:`prevent_lazy_with_raiseload`
+
+Using Passive Deletes
+---------------------
+
+An important aspect of collection management in SQLAlchemy is that when an
+object that refers to a collection is deleted, SQLAlchemy needs to consider the
+objects that are inside this collection. Those objects will need to be
+de-associated from the parent, which for a one-to-many collection would mean
+that foreign key columns are set to NULL, or based on
+:ref:`cascade <unitofwork_cascades>` settings, may instead mean that a
+DELETE is emitted for these rows.
+
+The :term:`unit of work` process only considers objects on a row-by-row basis,
+meaning a DELETE operation implies that all rows within a collection must be
+fully loaded into memory inside the flush process. This is not feasible for
+large collections, so we instead seek to rely upon the database's own
+capability to update or delete the rows automatically using foreign key ON
+DELETE rules, instructing the unit of work to forego actually needing to load
+these rows in order to handle them. The unit of work can be instructed to work
+in this manner by configuring :paramref:`_orm.relationship.passive_deletes` on
+the :func:`_orm.relationship` construct; the foreign key constraints in use
+must also be correctly configured.
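+
+A minimal sketch of the two settings working together, using hypothetical
+``Parent`` and ``Child`` classes:
+
+.. sourcecode:: python
+
+    class Parent(Base):
+        __tablename__ = "parent"
+
+        id: Mapped[int] = mapped_column(primary_key=True)
+
+        # the unit of work skips loading this collection when a Parent is
+        # deleted; the database's ON DELETE rule handles the child rows
+        children: WriteOnlyMapped["Child"] = relationship(
+            cascade="all, delete-orphan", passive_deletes=True
+        )
+
+
+    class Child(Base):
+        __tablename__ = "child"
+
+        id: Mapped[int] = mapped_column(primary_key=True)
+        parent_id: Mapped[int] = mapped_column(
+            ForeignKey("parent.id", ondelete="CASCADE")
+        )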
+
+For further detail on a complete "passive delete" configuration, see the
+section :ref:`passive_deletes`.
+
+
+
index 4b37365322c834401da3a25fb21920f31dc1bb2b..4525a38d6c928e6c24f3fbf8c81f7b10e547ce51 100644 (file)
@@ -44,6 +44,9 @@ or scalar references at once.
 that the attribute is empty and is just never loaded, or that it raises
 an error when it is accessed, in order to guard against unwanted lazy loads.
 
+Summary of Relationship Loading Styles
+--------------------------------------
+
 The primary forms of relationship loading are:
 
 * **lazy loading** - available via ``lazy='select'`` or the :func:`.lazyload`
@@ -62,28 +65,40 @@ The primary forms of relationship loading are:
   so that related rows are loaded in the same result set.   Joined eager loading
   is detailed at :ref:`joined_eager_loading`.
 
-* **subquery loading** - available via ``lazy='subquery'`` or the :func:`.subqueryload`
-  option, this form of loading emits a second SELECT statement which re-states the
-  original query embedded inside of a subquery, then JOINs that subquery to the
-  related table to be loaded to load all members of related collections / scalar
-  references at once.  Subquery eager loading is detailed at :ref:`subquery_eager_loading`.
-
 * **raise loading** - available via ``lazy='raise'``, ``lazy='raise_on_sql'``,
   or the :func:`.raiseload` option, this form of loading is triggered at the
   same time a lazy load would normally occur, except it raises an ORM exception
   in order to guard against the application making unwanted lazy loads.
   An introduction to raise loading is at :ref:`prevent_lazy_with_raiseload`.
 
-* **no loading** - available via ``lazy='noload'``, or the :func:`.noload`
-  option; this loading style turns the attribute into an empty attribute
-  (``None`` or ``[]``) that will never load or have any loading effect. This
-  seldom-used strategy behaves somewhat like an eager loader when objects are
-  loaded in that an empty attribute or collection is placed, but for expired
-  objects relies upon the default value of the attribute being returned on
-  access; the net effect is the same except for whether or not the attribute
-  name appears in the :attr:`.InstanceState.unloaded` collection.   ``noload``
-  may be useful for implementing a "write-only" attribute but this usage is not
-  currently tested or formally supported.
+* **subquery loading** - available via ``lazy='subquery'`` or the :func:`.subqueryload`
+  option, this form of loading emits a second SELECT statement which re-states the
+  original query embedded inside of a subquery, then JOINs that subquery to the
+  related table to be loaded to load all members of related collections / scalar
+  references at once.  Subquery eager loading is detailed at :ref:`subquery_eager_loading`.
+
+* **write only loading** - available via ``lazy='write_only'``, or by
+  annotating the left side of the :class:`_orm.Relationship` object using the
+  :class:`_orm.WriteOnlyMapped` annotation.   This collection-only
+  loader style produces an alternative attribute instrumentation that never
+  implicitly loads records from the database, instead only allowing
+  :meth:`.WriteOnlyCollection.add`,
+  :meth:`.WriteOnlyCollection.add_all` and :meth:`.WriteOnlyCollection.remove`
+  methods.  Querying the collection is performed by invoking a SELECT statement
+  which is constructed using the :meth:`.WriteOnlyCollection.select`
+  method.    Write only loading is discussed at :ref:`write_only_relationship`.
+
+* **dynamic loading** - available via ``lazy='dynamic'``, or by
+  annotating the left side of the :class:`_orm.Relationship` object using the
+  :class:`_orm.DynamicMapped` annotation. This is a legacy collection-only
+  loader style which produces a :class:`_orm.Query` object when the collection
+  is accessed, allowing custom SQL to be emitted against the collection's
+  contents. However, dynamic loaders will implicitly iterate the underlying
+  collection in various circumstances, which makes them less useful for managing
+  truly large collections. Dynamic loaders are superseded by
+  :ref:`"write only" <write_only_relationship>` collections, which will prevent
+  the underlying collection from being implicitly loaded under any
+  circumstances. Dynamic loaders are discussed at :ref:`dynamic_relationship`.
 
 
 .. _relationship_lazy_option:
index 0c12ba1a4b3db071c35df40744200bbb03885e8e..8388320939ec57a052d6579e87d0d85c4bcc3ab5 100644 (file)
@@ -15,7 +15,8 @@ of its usage.   For an introduction to relationships, start with the
     basic_relationships
     self_referential
     join_conditions
-    collections
+    large_collections
+    collection_api
     relationship_persistence
     backref
     relationship_api
index cfc6bd73b026807b26f70d8cc9b01459c830dfb3..de9ab52be0d831ee4ae27fc603a91501620c1328 100644 (file)
@@ -657,155 +657,6 @@ measurement, currencies and encrypted passwords.
     <https://techspot.zzzeek.org/2011/10/29/value-agnostic-types-part-ii/>`_ -
     on the techspot.zzzeek.org blog
 
-.. _hybrid_transformers:
-
-Building Transformers
-----------------------
-
-A *transformer* is an object which can receive a :class:`_query.Query`
-object and
-return a new one.   The :class:`_query.Query` object includes a method
-:meth:`.with_transformation` that returns a new :class:`_query.Query`
-transformed by
-the given function.
-
-We can combine this with the :class:`.Comparator` class to produce one type
-of recipe which can both set up the FROM clause of a query as well as assign
-filtering criterion.
-
-Consider a mapped class ``Node``, which assembles using adjacency list into a
-hierarchical tree pattern::
-
-    from sqlalchemy import Column, Integer, ForeignKey
-    from sqlalchemy.orm import relationship
-    from sqlalchemy.ext.declarative import declarative_base
-    Base = declarative_base()
-
-    class Node(Base):
-        __tablename__ = 'node'
-        id = Column(Integer, primary_key=True)
-        parent_id = Column(Integer, ForeignKey('node.id'))
-        parent = relationship("Node", remote_side=id)
-
-Suppose we wanted to add an accessor ``grandparent``.  This would return the
-``parent`` of ``Node.parent``.  When we have an instance of ``Node``, this is
-simple::
-
-    from sqlalchemy.ext.hybrid import hybrid_property
-
-    class Node(Base):
-        # ...
-
-        @hybrid_property
-        def grandparent(self):
-            return self.parent.parent
-
-For the expression, things are not so clear.   We'd need to construct a
-:class:`_query.Query` where we :meth:`_query.Query.join` twice along
-``Node.parent`` to get to the ``grandparent``.   We can instead return a
-transforming callable that we'll combine with the :class:`.Comparator` class to
-receive any :class:`_query.Query` object, and return a new one that's joined to
-the ``Node.parent`` attribute and filtered based on the given criterion::
-
-    from sqlalchemy.ext.hybrid import Comparator
-
-    class GrandparentTransformer(Comparator):
-        def operate(self, op, other, **kwargs):
-            def transform(q):
-                cls = self.__clause_element__()
-                parent_alias = aliased(cls)
-                return q.join(parent_alias, cls.parent).filter(
-                    op(parent_alias.parent, other, **kwargs)
-                )
-
-            return transform
-
-    Base = declarative_base()
-
-    class Node(Base):
-        __tablename__ = 'node'
-        id = Column(Integer, primary_key=True)
-        parent_id = Column(Integer, ForeignKey('node.id'))
-        parent = relationship("Node", remote_side=id)
-
-        @hybrid_property
-        def grandparent(self):
-            return self.parent.parent
-
-        @grandparent.comparator
-        def grandparent(cls):
-            return GrandparentTransformer(cls)
-
-The ``GrandparentTransformer`` overrides the core :meth:`.Operators.operate`
-method at the base of the :class:`.Comparator` hierarchy to return a query-
-transforming callable, which then runs the given comparison operation in a
-particular context. Such as, in the example above, the ``operate`` method is
-called, given the :attr:`.Operators.eq` callable as well as the right side of
-the comparison ``Node(id=5)``.  A function ``transform`` is then returned which
-will transform a :class:`_query.Query` first to join to ``Node.parent``,
-then to
-compare ``parent_alias`` using :attr:`.Operators.eq` against the left and right
-sides, passing into :meth:`_query.Query.filter`:
-
-.. sourcecode:: pycon+sql
-
-    >>> from sqlalchemy.orm import Session
-    >>> session = Session()
-    {sql}>>> session.query(Node).\
-    ...        with_transformation(Node.grandparent==Node(id=5)).\
-    ...        all()
-    SELECT node.id AS node_id, node.parent_id AS node_parent_id
-    FROM node JOIN node AS node_1 ON node_1.id = node.parent_id
-    WHERE :param_1 = node_1.parent_id
-    {stop}
-
-We can modify the pattern to be more verbose but flexible by separating the
-"join" step from the "filter" step.  The tricky part here is ensuring that
-successive instances of ``GrandparentTransformer`` use the same
-:class:`.AliasedClass` object against ``Node``.  Below we use a simple
-memoizing approach that associates a ``GrandparentTransformer`` with each
-class::
-
-    class Node(Base):
-
-        # ...
-
-        @grandparent.comparator
-        def grandparent(cls):
-            # memoize a GrandparentTransformer
-            # per class
-            if '_gp' not in cls.__dict__:
-                cls._gp = GrandparentTransformer(cls)
-            return cls._gp
-
-    class GrandparentTransformer(Comparator):
-
-        def __init__(self, cls):
-            self.parent_alias = aliased(cls)
-
-        @property
-        def join(self):
-            def go(q):
-                return q.join(self.parent_alias, Node.parent)
-            return go
-
-        def operate(self, op, other, **kwargs):
-            return op(self.parent_alias.parent, other, **kwargs)
-
-.. sourcecode:: pycon+sql
-
-    {sql}>>> session.query(Node).\
-    ...            with_transformation(Node.grandparent.join).\
-    ...            filter(Node.grandparent==Node(id=5))
-    SELECT node.id AS node_id, node.parent_id AS node_parent_id
-    FROM node JOIN node AS node_1 ON node_1.id = node.parent_id
-    WHERE :param_1 = node_1.parent_id
-    {stop}
-
-The "transformer" pattern is an experimental pattern that starts to make usage
-of some functional programming paradigms. While it's only recommended for
-advanced and/or patient developers, there's probably a whole lot of amazing
-things it can be used for.
 
 """  # noqa
 
index 6bfda6e2e3e15288268b8224e2ea57932a8a9175..8523e520b996f2467152021cba8ab8951eba3b69 100644 (file)
@@ -42,12 +42,14 @@ from .attributes import AttributeEventToken as AttributeEventToken
 from .attributes import InstrumentedAttribute as InstrumentedAttribute
 from .attributes import QueryableAttribute as QueryableAttribute
 from .base import class_mapper as class_mapper
+from .base import DynamicMapped as DynamicMapped
 from .base import InspectionAttrExtensionType as InspectionAttrExtensionType
 from .base import LoaderCallableStatus as LoaderCallableStatus
 from .base import Mapped as Mapped
 from .base import NotExtension as NotExtension
 from .base import ORMDescriptor as ORMDescriptor
 from .base import PassiveFlag as PassiveFlag
+from .base import WriteOnlyMapped as WriteOnlyMapped
 from .context import FromStatement as FromStatement
 from .context import QueryContext as QueryContext
 from .decl_api import add_mapped_attribute as add_mapped_attribute
@@ -147,6 +149,7 @@ from .util import object_mapper as object_mapper
 from .util import polymorphic_union as polymorphic_union
 from .util import was_deleted as was_deleted
 from .util import with_parent as with_parent
+from .writeonly import WriteOnlyCollection as WriteOnlyCollection
 from .. import util as _sa_util
 
 
index 0b4861af3b556b2a11697adfc6b9e9f4bc9f7fd0..38d9844380aa904c64e646de8ab4e2415e727f9c 100644 (file)
@@ -1172,11 +1172,10 @@ def relationship(
         issues a JOIN to the immediate parent object, specifying primary
         key identifiers using an IN clause.
 
-        .. versionadded:: 1.2
-
-      * ``noload`` - no loading should occur at any time.  This is to
-        support "write-only" attributes, or attributes which are
-        populated in some manner specific to the application.
+      * ``noload`` - no loading should occur at any time.  The related
+        collection will remain empty.   The ``noload`` strategy is not
+        recommended for general use.  For a general-purpose "never load"
+        approach, see :ref:`write_only_relationship`.
 
       * ``raise`` - lazy loading is disallowed; accessing
         the attribute, if its value were not already loaded via eager
@@ -1184,8 +1183,6 @@ def relationship(
         This strategy can be used when objects are to be detached from
         their attached :class:`.Session` after they are loaded.
 
-        .. versionadded:: 1.1
-
       * ``raise_on_sql`` - lazy loading that emits SQL is disallowed;
         accessing the attribute, if its value were not already loaded via
         eager loading, will raise an
@@ -1198,11 +1195,51 @@ def relationship(
 
         .. versionadded:: 1.1
 
+      * ``write_only`` - the attribute will be configured with a special
+        "virtual collection" that may receive
+        :meth:`_orm.WriteOnlyCollection.add` and
+        :meth:`_orm.WriteOnlyCollection.remove` commands to add or remove
+        individual objects, but will not under any circumstances load or
+        iterate the full set of objects from the database directly. Instead,
+        methods such as :meth:`_orm.WriteOnlyCollection.select`,
+        :meth:`_orm.WriteOnlyCollection.insert`,
+        :meth:`_orm.WriteOnlyCollection.update` and
+        :meth:`_orm.WriteOnlyCollection.delete` are provided which generate SQL
+        constructs that may be used to load and modify rows in bulk. Used for
+        large collections that are never appropriate to load at once into
+        memory.
+
+        The ``write_only`` loader style is configured automatically when
+        the :class:`_orm.WriteOnlyMapped` annotation is provided on the
+        left hand side within a Declarative mapping.  See the section
+        :ref:`write_only_relationship` for examples.
+
+        .. versionadded:: 2.0
+
+        .. seealso::
+
+            :ref:`write_only_relationship` - in the :ref:`queryguide_toplevel`
+
       * ``dynamic`` - the attribute will return a pre-configured
         :class:`_query.Query` object for all read
         operations, onto which further filtering operations can be
-        applied before iterating the results.  See
-        the section :ref:`dynamic_relationship` for more details.
+        applied before iterating the results.
+
+        The ``dynamic`` loader style is configured automatically when
+        the :class:`_orm.DynamicMapped` annotation is provided on the
+        left hand side within a Declarative mapping.  See the section
+        :ref:`dynamic_relationship` for examples.
+
+        .. legacy::  The "dynamic" lazy loader strategy is the legacy form of
+           what is now the "write_only" strategy described in the section
+           :ref:`write_only_relationship`.
+
+        .. seealso::
+
+            :ref:`dynamic_relationship` - in the :ref:`queryguide_toplevel`
+
+            :ref:`write_only_relationship` - a more generally useful approach
+            for large collections that should not fully load into memory
 
       * True - a synonym for 'select'
 
@@ -1212,12 +1249,9 @@ def relationship(
 
       .. seealso::
 
-        :doc:`/orm/loading_relationships` - Full documentation on
-        relationship loader configuration.
-
-        :ref:`dynamic_relationship` - detail on the ``dynamic`` option.
+        :ref:`orm_queryguide_relationship_loaders` - Full documentation on
+        relationship loader configuration in the :ref:`queryguide_toplevel`.
 
-        :ref:`collections_noload_raiseload` - notes on "noload" and "raise"
 
     :param load_on_pending=False:
       Indicates loading behavior for transient or pending parent objects.
index fcc016f549c2951ea03fc390778749bffffd88d2..854bad986a9b58e39dbb41d9f9e62eeb04de47ff 100644 (file)
@@ -94,11 +94,11 @@ if TYPE_CHECKING:
     from ._typing import _O
     from .collections import _AdaptedCollectionProtocol
     from .collections import CollectionAdapter
-    from .dynamic import DynamicAttributeImpl
     from .interfaces import MapperProperty
     from .relationships import RelationshipProperty
     from .state import InstanceState
     from .util import AliasedInsp
+    from .writeonly import WriteOnlyAttributeImpl
     from ..event.base import _Dispatch
     from ..sql._typing import _ColumnExpressionArgument
     from ..sql._typing import _DMLColumnArgument
@@ -2581,9 +2581,9 @@ def register_attribute_impl(
     impl: AttributeImpl
 
     if impl_class:
-        # TODO: this appears to be the DynamicAttributeImpl constructor
-        # which is hardcoded
-        impl = cast("Type[DynamicAttributeImpl]", impl_class)(
+        # TODO: this appears to be the WriteOnlyAttributeImpl /
+        # DynamicAttributeImpl constructor which is hardcoded
+        impl = cast("Type[WriteOnlyAttributeImpl]", impl_class)(
             class_, key, typecallable, dispatch, **kw
         )
     elif uselist:
@@ -2672,7 +2672,9 @@ def init_state_collection(
         attr._dispose_previous_collection(state, old, old_collection, False)
 
     user_data = attr._default_value(state, dict_)
-    adapter: CollectionAdapter = attr.get_collection(state, dict_, user_data)
+    adapter: CollectionAdapter = attr.get_collection(
+        state, dict_, user_data, passive=PassiveFlag.PASSIVE_NO_FETCH
+    )
     adapter._reset_empty()
 
     return adapter
index d3814abd57b3e19f0c2913e27a1e965fd1173532..20a683d8ca91bdc3c970a8364739119d2f300347 100644 (file)
@@ -42,11 +42,13 @@ if typing.TYPE_CHECKING:
     from ._typing import _ExternalEntityType
     from ._typing import _InternalEntityType
     from .attributes import InstrumentedAttribute
+    from .dynamic import AppenderQuery
     from .instrumentation import ClassManager
     from .interfaces import PropComparator
     from .mapper import Mapper
     from .state import InstanceState
     from .util import AliasedClass
+    from .writeonly import WriteOnlyCollection
     from ..sql._typing import _ColumnExpressionArgument
     from ..sql._typing import _InfoType
     from ..sql.elements import ColumnElement
@@ -726,7 +728,23 @@ class ORMDescriptor(Generic[_T], TypingOnly):
             ...
 
 
-class Mapped(ORMDescriptor[_T], roles.TypedColumnsClauseRole[_T], TypingOnly):
+class _MappedAnnotationBase(Generic[_T], TypingOnly):
+    """common class for Mapped and similar ORM container classes.
+
+    these are classes that can appear on the left side of an ORM declarative
+    mapping, containing a mapped class or in some cases a collection
+    surrounding a mapped class.
+
+    """
+
+    __slots__ = ()
+
+
+class Mapped(
+    ORMDescriptor[_T],
+    roles.TypedColumnsClauseRole[_T],
+    _MappedAnnotationBase[_T],
+):
     """Represent an ORM mapped attribute on a mapped class.
 
     This class represents the complete descriptor interface for any class
@@ -811,3 +829,91 @@ class _DeclarativeMapped(Mapped[_T], _MappedAttribute[_T]):
     """
 
     __slots__ = ()
+
+
+class DynamicMapped(_MappedAnnotationBase[_T]):
+    """Represent the ORM mapped attribute type for a "dynamic" relationship.
+
+    The :class:`_orm.DynamicMapped` type annotation may be used in an
+    :ref:`Annotated Declarative Table <orm_declarative_mapped_column>` mapping
+    to indicate that the ``lazy="dynamic"`` loader strategy should be used
+    for a particular :func:`_orm.relationship`.
+
+    .. legacy::  The "dynamic" lazy loader strategy is the legacy form of what
+       is now the "write_only" strategy described in the section
+       :ref:`write_only_relationship`.
+
+    E.g.::
+
+        class User(Base):
+            __tablename__ = "user"
+            id: Mapped[int] = mapped_column(primary_key=True)
+            addresses: DynamicMapped[Address] = relationship(
+                cascade="all,delete-orphan"
+            )
+
+    See the section :ref:`dynamic_relationship` for background.
+
+    .. versionadded:: 2.0
+
+    .. seealso::
+
+        :ref:`dynamic_relationship` - complete background
+
+        :class:`.WriteOnlyMapped` - fully 2.0 style version
+
+    """
+
+    __slots__ = ()
+
+    if TYPE_CHECKING:
+
+        def __get__(
+            self, instance: Optional[object], owner: Any
+        ) -> AppenderQuery[_T]:
+            ...
+
+        def __set__(self, instance: Any, value: typing.Collection[_T]) -> None:
+            ...
+
+
+class WriteOnlyMapped(_MappedAnnotationBase[_T]):
+    """Represent the ORM mapped attribute type for a "write only" relationship.
+
+    The :class:`_orm.WriteOnlyMapped` type annotation may be used in an
+    :ref:`Annotated Declarative Table <orm_declarative_mapped_column>` mapping
+    to indicate that the ``lazy="write_only"`` loader strategy should be used
+    for a particular :func:`_orm.relationship`.
+
+    E.g.::
+
+        class User(Base):
+            __tablename__ = "user"
+            id: Mapped[int] = mapped_column(primary_key=True)
+            addresses: WriteOnlyMapped[Address] = relationship(
+                cascade="all,delete-orphan"
+            )
+
+    See the section :ref:`write_only_relationship` for background.
+
+    .. versionadded:: 2.0
+
+    .. seealso::
+
+        :ref:`write_only_relationship` - complete background
+
+        :class:`.DynamicMapped` - includes legacy :class:`_orm.Query` support
+
+    """
+
+    __slots__ = ()
+
+    if TYPE_CHECKING:
+
+        def __get__(
+            self, instance: Optional[object], owner: Any
+        ) -> WriteOnlyCollection[_T]:
+            ...
+
+        def __set__(self, instance: Any, value: typing.Collection[_T]) -> None:
+            ...
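
As a hedged, end-to-end sketch of the new annotation type defined above
(assuming the 2.0 declarative API and an in-memory SQLite database; all
mapping names are illustrative)::

    from sqlalchemy import ForeignKey, create_engine
    from sqlalchemy.orm import (
        DeclarativeBase,
        Mapped,
        Session,
        WriteOnlyMapped,
        mapped_column,
        relationship,
    )


    class Base(DeclarativeBase):
        pass


    class User(Base):
        __tablename__ = "user"
        id: Mapped[int] = mapped_column(primary_key=True)
        addresses: WriteOnlyMapped["Address"] = relationship()


    class Address(Base):
        __tablename__ = "address"
        id: Mapped[int] = mapped_column(primary_key=True)
        user_id: Mapped[int] = mapped_column(ForeignKey("user.id"))
        email_address: Mapped[str] = mapped_column(default="")


    engine = create_engine("sqlite://")
    Base.metadata.create_all(engine)

    with Session(engine) as session:
        user = User(id=1)
        session.add(user)
        session.commit()

        # mutations are buffered as events; the collection never loads
        user.addresses.add(Address(id=1))
        session.commit()

        # reads require an explicit SELECT; plain iteration raises TypeError
        for address in session.scalars(user.addresses.select()):
            print(address.id)
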
index 3ed34a57a4d7967c2480b9e9047748a34fda96b1..b407fcdca1feb05081153edf77f03c0c6bc903c4 100644 (file)
@@ -591,6 +591,7 @@ class BulkUDCompileState(ORMDMLState):
             "_sa_orm_update_options",
             {
                 "synchronize_session",
+                "autoflush",
                 "is_delete_using",
                 "is_update_from",
                 "dml_strategy",
@@ -1079,6 +1080,7 @@ class BulkORMInsert(ORMDMLState, InsertDMLState):
         _render_nulls: bool = False
         _return_defaults: bool = False
         _subject_mapper: Optional[Mapper[Any]] = None
+        _autoflush: bool = True
 
     select_statement: Optional[FromStatement] = None
 
@@ -1098,7 +1100,7 @@ class BulkORMInsert(ORMDMLState, InsertDMLState):
             execution_options,
         ) = BulkORMInsert.default_insert_options.from_execution_options(
             "_sa_orm_insert_options",
-            {"dml_strategy"},
+            {"dml_strategy", "autoflush"},
             execution_options,
             statement._execution_options,
         )
@@ -1142,6 +1144,9 @@ class BulkORMInsert(ORMDMLState, InsertDMLState):
                     context._orm_load_exec_options
                 )
 
+        if insert_options._autoflush:
+            session._autoflush()
+
         statement = statement._annotate(
             {"dml_strategy": insert_options._dml_strategy}
         )
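
The ``autoflush`` option threaded through above applies per execution; a
hedged sketch, reusing the ``User`` mapping, ``engine`` and ``Session`` from
the runnable write-only sketch shown earlier::

    from sqlalchemy import insert

    with Session(engine) as session:
        session.add(User(id=99))  # pending state that would normally flush
        # ORM bulk INSERT with autoflush disabled for this execution only
        session.execute(
            insert(User),
            [{"id": 2}, {"id": 3}],
            execution_options={"autoflush": False},
        )
        session.commit()
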
index a383e92ca1227f9e1cffae78281bb72c67a30a17..eed04025dc7f94beaa67d5486584b2ed84da8aa8 100644 (file)
@@ -420,7 +420,7 @@ class _ClassScanMapperConfig(_MapperConfig):
 
     registry: _RegistryType
     clsdict_view: _ClassDict
-    collected_annotations: Dict[str, Tuple[Any, Any, bool]]
+    collected_annotations: Dict[str, Tuple[Any, Any, Any, bool]]
     collected_attributes: Dict[str, Any]
     local_table: Optional[FromClause]
     persist_selectable: Optional[FromClause]
@@ -997,6 +997,7 @@ class _ClassScanMapperConfig(_MapperConfig):
                 (key, mapped_anno if mapped_anno else raw_anno)
                 for key, (
                     raw_anno,
+                    mapped_container,
                     mapped_anno,
                     is_dc,
                 ) in self.collected_annotations.items()
@@ -1075,7 +1076,7 @@ class _ClassScanMapperConfig(_MapperConfig):
             is_dataclass_field = False
 
         is_dataclass_field = False
-        extracted_mapped_annotation = _extract_mapped_subtype(
+        extracted = _extract_mapped_subtype(
             raw_annotation,
             self.cls,
             name,
@@ -1086,10 +1087,13 @@ class _ClassScanMapperConfig(_MapperConfig):
             and not is_dataclass,  # self.allow_dataclass_fields,
         )
 
-        if extracted_mapped_annotation is None:
+        if extracted is None:
             # ClassVar can come out here
             return attr_value
-        elif attr_value is None:
+
+        extracted_mapped_annotation, mapped_container = extracted
+
+        if attr_value is None:
             for elem in typing_get_args(extracted_mapped_annotation):
                 # look in Annotated[...] for an ORM construct,
                 # such as Annotated[int, mapped_column(primary_key=True)]
@@ -1098,6 +1102,7 @@ class _ClassScanMapperConfig(_MapperConfig):
 
         self.collected_annotations[name] = (
             raw_annotation,
+            mapped_container,
             extracted_mapped_annotation,
             is_dataclass,
         )
@@ -1252,13 +1257,17 @@ class _ClassScanMapperConfig(_MapperConfig):
                 if isinstance(value, _IntrospectsAnnotations):
                     (
                         annotation,
+                        mapped_container,
                         extracted_mapped_annotation,
                         is_dataclass,
-                    ) = self.collected_annotations.get(k, (None, None, False))
+                    ) = self.collected_annotations.get(
+                        k, (None, None, None, False)
+                    )
                     value.declarative_scan(
                         self.registry,
                         cls,
                         k,
+                        mapped_container,
                         annotation,
                         extracted_mapped_annotation,
                         is_dataclass,
index 35b12b2ede76a4b0505d6a2b5c1e6dce090fa86e..a15cd86f4328a071aee8a862e14d954a8d50c994 100644 (file)
@@ -333,6 +333,7 @@ class CompositeProperty(
         registry: _RegistryType,
         cls: Type[Any],
         key: str,
+        mapped_container: Optional[Type[Mapped[Any]]],
         annotation: Optional[_AnnotationScanType],
         extracted_mapped_annotation: Optional[_AnnotationScanType],
         is_dataclass_field: bool,
index 8cc4c6c0423be1970c9970a3e3b80d270da921ae..be31af1e9fd1b45b36e89f98c40e2b72ca6b1693 100644 (file)
 Dynamic collections act like Query() objects for read operations and support
 basic add/delete mutation.
 
+.. legacy:: The "dynamic" loader is a legacy feature, superseded by the
+   "write_only" loader.
+
+
 """
 
 from __future__ import annotations
 
 from typing import Any
-from typing import Optional
-from typing import overload
+from typing import Iterable
+from typing import Iterator
 from typing import TYPE_CHECKING
-from typing import Union
+from typing import TypeVar
 
 from . import attributes
 from . import exc as orm_exc
-from . import interfaces
 from . import relationships
-from . import strategies
 from . import util as orm_util
-from .base import object_mapper
-from .base import PassiveFlag
 from .query import Query
 from .session import object_session
-from .. import exc
-from .. import log
+from .writeonly import AbstractCollectionWriter
+from .writeonly import WriteOnlyAttributeImpl
+from .writeonly import WriteOnlyHistory
+from .writeonly import WriteOnlyLoader
 from .. import util
 from ..engine import result
-from ..util.typing import Literal
 
 if TYPE_CHECKING:
-    from ._typing import _InstanceDict
-    from .attributes import _AdaptedCollectionProtocol
-    from .attributes import AttributeEventToken
-    from .attributes import CollectionAdapter
-    from .base import LoaderCallableStatus
-    from .state import InstanceState
+    from .session import Session
 
 
-@log.class_logger
-@relationships.RelationshipProperty.strategy_for(lazy="dynamic")
-class DynaLoader(strategies.AbstractRelationshipLoader, log.Identified):
-    def init_class_attribute(self, mapper):
-        self.is_class_level = True
-        if not self.uselist:
-            raise exc.InvalidRequestError(
-                "On relationship %s, 'dynamic' loaders cannot be used with "
-                "many-to-one/one-to-one relationships and/or "
-                "uselist=False." % self.parent_property
-            )
-        elif self.parent_property.direction not in (
-            interfaces.ONETOMANY,
-            interfaces.MANYTOMANY,
-        ):
-            util.warn(
-                "On relationship %s, 'dynamic' loaders cannot be used with "
-                "many-to-one/one-to-one relationships and/or "
-                "uselist=False.  This warning will be an exception in a "
-                "future release." % self.parent_property
-            )
+_T = TypeVar("_T", bound=Any)
 
-        strategies._register_attribute(
-            self.parent_property,
-            mapper,
-            useobject=True,
-            impl_class=DynamicAttributeImpl,
-            target_mapper=self.parent_property.mapper,
-            order_by=self.parent_property.order_by,
-            query_class=self.parent_property.query_class,
-        )
+
+class DynamicCollectionHistory(WriteOnlyHistory):
+    def __init__(self, attr, state, passive, apply_to=None):
+        if apply_to:
+            coll = AppenderQuery(attr, state).autoflush(False)
+            self.unchanged_items = util.OrderedIdentitySet(coll)
+            self.added_items = apply_to.added_items
+            self.deleted_items = apply_to.deleted_items
+            self._reconcile_collection = True
+        else:
+            self.deleted_items = util.OrderedIdentitySet()
+            self.added_items = util.OrderedIdentitySet()
+            self.unchanged_items = util.OrderedIdentitySet()
+            self._reconcile_collection = False
 
 
-class DynamicAttributeImpl(
-    attributes.HasCollectionAdapter, attributes.AttributeImpl
-):
-    uses_objects = True
-    default_accepts_scalar_loader = False
-    supports_population = False
-    collection = False
-    dynamic = True
-    order_by = ()
+class DynamicAttributeImpl(WriteOnlyAttributeImpl):
+    _supports_dynamic_iteration = True
+    collection_history_cls = DynamicCollectionHistory
 
     def __init__(
         self,
@@ -101,8 +76,8 @@ class DynamicAttributeImpl(
         query_class=None,
         **kw,
     ):
-        super(DynamicAttributeImpl, self).__init__(
-            class_, key, typecallable, dispatch, **kw
+        attributes.AttributeImpl.__init__(
+            self, class_, key, typecallable, dispatch, **kw
         )
         self.target_mapper = target_mapper
         if order_by:
@@ -114,261 +89,27 @@ class DynamicAttributeImpl(
         else:
             self.query_class = mixin_user_query(query_class)
 
-    def get(self, state, dict_, passive=attributes.PASSIVE_OFF):
-        if not passive & attributes.SQL_OK:
-            return self._get_collection_history(
-                state, attributes.PASSIVE_NO_INITIALIZE
-            ).added_items
-        else:
-            return self.query_class(self, state)
-
-    @overload
-    def get_collection(
-        self,
-        state: InstanceState[Any],
-        dict_: _InstanceDict,
-        user_data: Literal[None] = ...,
-        passive: Literal[PassiveFlag.PASSIVE_OFF] = ...,
-    ) -> CollectionAdapter:
-        ...
-
-    @overload
-    def get_collection(
-        self,
-        state: InstanceState[Any],
-        dict_: _InstanceDict,
-        user_data: _AdaptedCollectionProtocol = ...,
-        passive: PassiveFlag = ...,
-    ) -> CollectionAdapter:
-        ...
-
-    @overload
-    def get_collection(
-        self,
-        state: InstanceState[Any],
-        dict_: _InstanceDict,
-        user_data: Optional[_AdaptedCollectionProtocol] = ...,
-        passive: PassiveFlag = ...,
-    ) -> Union[
-        Literal[LoaderCallableStatus.PASSIVE_NO_RESULT], CollectionAdapter
-    ]:
-        ...
-
-    def get_collection(
-        self,
-        state: InstanceState[Any],
-        dict_: _InstanceDict,
-        user_data: Optional[_AdaptedCollectionProtocol] = None,
-        passive: PassiveFlag = PassiveFlag.PASSIVE_OFF,
-    ) -> Union[
-        Literal[LoaderCallableStatus.PASSIVE_NO_RESULT], CollectionAdapter
-    ]:
-        if not passive & attributes.SQL_OK:
-            data = self._get_collection_history(state, passive).added_items
-        else:
-            history = self._get_collection_history(state, passive)
-            data = history.added_plus_unchanged
-        return DynamicCollectionAdapter(data)
-
-    @util.memoized_property
-    def _append_token(self):
-        return attributes.AttributeEventToken(self, attributes.OP_APPEND)
-
-    @util.memoized_property
-    def _remove_token(self):
-        return attributes.AttributeEventToken(self, attributes.OP_REMOVE)
-
-    def fire_append_event(
-        self, state, dict_, value, initiator, collection_history=None
-    ):
-        if collection_history is None:
-            collection_history = self._modified_event(state, dict_)
-
-        collection_history.add_added(value)
-
-        for fn in self.dispatch.append:
-            value = fn(state, value, initiator or self._append_token)
-
-        if self.trackparent and value is not None:
-            self.sethasparent(attributes.instance_state(value), state, True)
-
-    def fire_remove_event(
-        self, state, dict_, value, initiator, collection_history=None
-    ):
-        if collection_history is None:
-            collection_history = self._modified_event(state, dict_)
-
-        collection_history.add_removed(value)
-
-        if self.trackparent and value is not None:
-            self.sethasparent(attributes.instance_state(value), state, False)
-
-        for fn in self.dispatch.remove:
-            fn(state, value, initiator or self._remove_token)
-
-    def _modified_event(self, state, dict_):
-
-        if self.key not in state.committed_state:
-            state.committed_state[self.key] = CollectionHistory(self, state)
-
-        state._modified_event(dict_, self, attributes.NEVER_SET)
-
-        # this is a hack to allow the fixtures.ComparableEntity fixture
-        # to work
-        dict_[self.key] = True
-        return state.committed_state[self.key]
-
-    def set(
-        self,
-        state: InstanceState[Any],
-        dict_: _InstanceDict,
-        value: Any,
-        initiator: Optional[AttributeEventToken] = None,
-        passive: PassiveFlag = PassiveFlag.PASSIVE_OFF,
-        check_old: Any = None,
-        pop: bool = False,
-        _adapt: bool = True,
-    ) -> None:
-        if initiator and initiator.parent_token is self.parent_token:
-            return
-
-        if pop and value is None:
-            return
-
-        iterable = value
-        new_values = list(iterable)
-        if state.has_identity:
-            old_collection = util.IdentitySet(self.get(state, dict_))
-
-        collection_history = self._modified_event(state, dict_)
-        if not state.has_identity:
-            old_collection = collection_history.added_items
-        else:
-            old_collection = old_collection.union(
-                collection_history.added_items
-            )
-
-        idset = util.IdentitySet
-        constants = old_collection.intersection(new_values)
-        additions = idset(new_values).difference(constants)
-        removals = old_collection.difference(constants)
-
-        for member in new_values:
-            if member in additions:
-                self.fire_append_event(
-                    state,
-                    dict_,
-                    member,
-                    None,
-                    collection_history=collection_history,
-                )
-
-        for member in removals:
-            self.fire_remove_event(
-                state,
-                dict_,
-                member,
-                None,
-                collection_history=collection_history,
-            )
-
-    def delete(self, *args, **kwargs):
-        raise NotImplementedError()
-
-    def set_committed_value(self, state, dict_, value):
-        raise NotImplementedError(
-            "Dynamic attributes don't support " "collection population."
-        )
-
-    def get_history(self, state, dict_, passive=attributes.PASSIVE_OFF):
-        c = self._get_collection_history(state, passive)
-        return c.as_history()
-
-    def get_all_pending(
-        self, state, dict_, passive=attributes.PASSIVE_NO_INITIALIZE
-    ):
-        c = self._get_collection_history(state, passive)
-        return [(attributes.instance_state(x), x) for x in c.all_items]
-
-    def _get_collection_history(self, state, passive=attributes.PASSIVE_OFF):
-        if self.key in state.committed_state:
-            c = state.committed_state[self.key]
-        else:
-            c = CollectionHistory(self, state)
 
-        if state.has_identity and (passive & attributes.INIT_OK):
-            return CollectionHistory(self, state, apply_to=c)
-        else:
-            return c
-
-    def append(
-        self, state, dict_, value, initiator, passive=attributes.PASSIVE_OFF
-    ):
-        if initiator is not self:
-            self.fire_append_event(state, dict_, value, initiator)
-
-    def remove(
-        self, state, dict_, value, initiator, passive=attributes.PASSIVE_OFF
-    ):
-        if initiator is not self:
-            self.fire_remove_event(state, dict_, value, initiator)
-
-    def pop(
-        self, state, dict_, value, initiator, passive=attributes.PASSIVE_OFF
-    ):
-        self.remove(state, dict_, value, initiator, passive=passive)
-
-
-class DynamicCollectionAdapter:
-    """simplified CollectionAdapter for internal API consistency"""
-
-    def __init__(self, data):
-        self.data = data
-
-    def __iter__(self):
-        return iter(self.data)
+@relationships.RelationshipProperty.strategy_for(lazy="dynamic")
+class DynaLoader(WriteOnlyLoader):
+    impl_class = DynamicAttributeImpl
 
-    def _reset_empty(self):
-        pass
 
-    def __len__(self):
-        return len(self.data)
+class AppenderMixin(AbstractCollectionWriter[_T]):
+    """A mixin that expects to be mixing in a Query class with
+    AbstractAppender.
 
-    def __bool__(self):
-        return True
 
+    """
 
-class AppenderMixin:
     query_class = None
 
     def __init__(self, attr, state):
-        super(AppenderMixin, self).__init__(attr.target_mapper, None)
-        self.instance = instance = state.obj()
-        self.attr = attr
-
-        mapper = object_mapper(instance)
-        prop = mapper._props[self.attr.key]
-
-        if prop.secondary is not None:
-            # this is a hack right now.  The Query only knows how to
-            # make subsequent joins() without a given left-hand side
-            # from self._from_obj[0].  We need to ensure prop.secondary
-            # is in the FROM.  So we purposely put the mapper selectable
-            # in _from_obj[0] to ensure a user-defined join() later on
-            # doesn't fail, and secondary is then in _from_obj[1].
-
-            # note also, we are using the official ORM-annotated selectable
-            # from __clause_element__(), see #7868
-            self._from_obj = (prop.mapper.__clause_element__(), prop.secondary)
-
-        self._where_criteria = (
-            prop._with_parent(instance, alias_secondary=False),
-        )
-
-        if self.attr.order_by:
-            self._order_by_clauses = self.attr.order_by
+        Query.__init__(self, attr.target_mapper, None)
+        super().__init__(attr, state)
 
-    def session(self):
+    @property
+    def session(self) -> Session:
         sess = object_session(self.instance)
         if (
             sess is not None
@@ -382,7 +123,9 @@ class AppenderMixin:
         else:
             return sess
 
-    session = property(session, lambda s, x: None)
+    @session.setter
+    def session(self, session: Session) -> None:
+        self.sess = session
 
     def _iter(self):
         sess = self.session
@@ -407,7 +150,12 @@ class AppenderMixin:
         else:
             return self._generate(sess)._iter()
 
-    def __getitem__(self, index):
+    if TYPE_CHECKING:
+
+        def __iter__(self) -> Iterator[_T]:
+            ...
+
+    def __getitem__(self, index: Any) -> _T:
         sess = self.session
         if sess is None:
             return self.attr._get_collection_history(
@@ -417,7 +165,7 @@ class AppenderMixin:
         else:
             return self._generate(sess).__getitem__(index)
 
-    def count(self):
+    def count(self) -> int:
         sess = self.session
         if sess is None:
             return len(
@@ -455,91 +203,74 @@ class AppenderMixin:
 
         return query
 
-    def extend(self, iterator):
-        for item in iterator:
-            self.attr.append(
-                attributes.instance_state(self.instance),
-                attributes.instance_dict(self.instance),
-                item,
-                None,
-            )
+    def add_all(self, iterator: Iterable[_T]) -> None:
+        """Add an iterable of items to this :class:`_orm.AppenderQuery`.
 
-    def append(self, item):
-        self.attr.append(
-            attributes.instance_state(self.instance),
-            attributes.instance_dict(self.instance),
-            item,
-            None,
-        )
+        The given items will be persisted to the database in terms of
+        the parent instance's collection on the next flush.
 
-    def remove(self, item):
-        self.attr.remove(
-            attributes.instance_state(self.instance),
-            attributes.instance_dict(self.instance),
-            item,
-            None,
-        )
+        This method is provided to assist in delivering forwards-compatibility
+        with the :class:`_orm.WriteOnlyCollection` collection class.
 
+        .. versionadded:: 2.0
 
-class AppenderQuery(AppenderMixin, Query):
-    """A dynamic query that supports basic collection storage operations."""
+        """
+        self._add_all_impl(iterator)
 
+    def add(self, item: _T) -> None:
+        """Add an item to this :class:`_orm.AppenderQuery`.
 
-def mixin_user_query(cls):
-    """Return a new class with AppenderQuery functionality layered over."""
-    name = "Appender" + cls.__name__
-    return type(name, (AppenderMixin, cls), {"query_class": cls})
+        The given item will be persisted to the database in terms of
+        the parent instance's collection on the next flush.
 
+        This method is provided to assist in delivering forwards-compatibility
+        with the :class:`_orm.WriteOnlyCollection` collection class.
 
-class CollectionHistory:
-    """Overrides AttributeHistory to receive append/remove events directly."""
+        .. versionadded:: 2.0
 
-    def __init__(self, attr, state, apply_to=None):
-        if apply_to:
-            coll = AppenderQuery(attr, state).autoflush(False)
-            self.unchanged_items = util.OrderedIdentitySet(coll)
-            self.added_items = apply_to.added_items
-            self.deleted_items = apply_to.deleted_items
-            self._reconcile_collection = True
-        else:
-            self.deleted_items = util.OrderedIdentitySet()
-            self.added_items = util.OrderedIdentitySet()
-            self.unchanged_items = util.OrderedIdentitySet()
-            self._reconcile_collection = False
+        """
+        self._add_all_impl([item])
 
-    @property
-    def added_plus_unchanged(self):
-        return list(self.added_items.union(self.unchanged_items))
+    def extend(self, iterator: Iterable[_T]) -> None:
+        """Add an iterable of items to this :class:`_orm.AppenderQuery`.
 
-    @property
-    def all_items(self):
-        return list(
-            self.added_items.union(self.unchanged_items).union(
-                self.deleted_items
-            )
-        )
+        The given items will be persisted to the database in terms of
+        the parent instance's collection on the next flush.
 
-    def as_history(self):
-        if self._reconcile_collection:
-            added = self.added_items.difference(self.unchanged_items)
-            deleted = self.deleted_items.intersection(self.unchanged_items)
-            unchanged = self.unchanged_items.difference(deleted)
-        else:
-            added, unchanged, deleted = (
-                self.added_items,
-                self.unchanged_items,
-                self.deleted_items,
-            )
-        return attributes.History(list(added), list(unchanged), list(deleted))
+        """
+        self._add_all_impl(iterator)
 
-    def indexed(self, index):
-        return list(self.added_items)[index]
+    def append(self, item: _T) -> None:
+        """Append an item to this :class:`_orm.AppenderQuery`.
 
-    def add_added(self, value):
-        self.added_items.add(value)
+        The given item will be persisted to the database in terms of
+        the parent instance's collection on the next flush.
 
-    def add_removed(self, value):
-        if value in self.added_items:
-            self.added_items.remove(value)
-        else:
-            self.deleted_items.add(value)
+        """
+        self._add_all_impl([item])
+
+    def remove(self, item: _T) -> None:
+        """Remove an item from this :class:`_orm.AppenderQuery`.
+
+        The given item will be removed from the parent instance's collection on
+        the next flush.
+
+        """
+        self._remove_impl(item)
+
+
+class AppenderQuery(AppenderMixin[_T], Query[_T]):
+    """A dynamic query that supports basic collection storage operations.
+
+    Methods on :class:`.AppenderQuery` include all methods of
+    :class:`_orm.Query`, plus additional methods used for collection
+    persistence.
+
+
+    """
+
+
+def mixin_user_query(cls):
+    """Return a new class with AppenderQuery functionality layered over."""
+    name = "Appender" + cls.__name__
+    return type(name, (AppenderMixin, cls), {"query_class": cls})
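
A separate, self-contained hedged sketch of the forwards-compatible methods
added above: code written against the legacy dynamic loader can adopt the same
``add()`` / ``add_all()`` spelling as ``WriteOnlyCollection`` (mapping names
are illustrative)::

    from sqlalchemy import ForeignKey, create_engine
    from sqlalchemy.orm import (
        DeclarativeBase,
        DynamicMapped,
        Mapped,
        Session,
        mapped_column,
        relationship,
    )


    class Base(DeclarativeBase):
        pass


    class User(Base):
        __tablename__ = "user"
        id: Mapped[int] = mapped_column(primary_key=True)
        addresses: DynamicMapped["Address"] = relationship()


    class Address(Base):
        __tablename__ = "address"
        id: Mapped[int] = mapped_column(primary_key=True)
        user_id: Mapped[int] = mapped_column(ForeignKey("user.id"))


    engine = create_engine("sqlite://")
    Base.metadata.create_all(engine)

    with Session(engine) as session:
        user = User(id=1)
        session.add(user)
        session.commit()

        # new spellings, equivalent to the legacy append() / extend()
        user.addresses.add(Address(id=1))
        user.addresses.add_all([Address(id=2), Address(id=3)])
        session.commit()

        # Query-style read access remains available on the dynamic loader
        print(user.addresses.count())  # 3
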
index 5af14cc004909a1a1cedc3eaa7e840650270d262..3c0e62ef53d9cc90658ff6dcf3f8ec4138d3fc57 100644 (file)
@@ -9,6 +9,7 @@
 
 from __future__ import annotations
 
+from . import exc as orm_exc
 from .base import LoaderCallableStatus
 from .base import PassiveFlag
 from .. import exc
@@ -81,7 +82,14 @@ class EvaluatorCompiler:
                     "Can't evaluate criteria against "
                     f"alternate class {parentmapper.class_}"
                 )
-            key = parentmapper._columntoproperty[clause].key
+
+            try:
+                key = parentmapper._columntoproperty[clause].key
+            except orm_exc.UnmappedColumnError as err:
+                raise UnevaluatableError(
+                    f"Cannot evaluate expression: {err}"
+                ) from err
+
             impl = parentmapper.class_manager[key].impl
 
             if impl is not None:
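
A hedged sketch of the failure mode this conversion guards (all names are
illustrative): criteria that refer to a table column deliberately excluded
from the mapping cannot be evaluated in memory, and now surface as
``UnevaluatableError``, which the ``synchronize_session="auto"`` machinery
can handle by falling back to the "fetch" strategy, rather than escaping as
an ``UnmappedColumnError``::

    from sqlalchemy import Column, Integer, String, Table, update
    from sqlalchemy.orm import DeclarativeBase


    class Base(DeclarativeBase):
        pass


    user_table = Table(
        "user",
        Base.metadata,
        Column("id", Integer, primary_key=True),
        Column("status", String),
    )


    class User(Base):
        __table__ = user_table
        __mapper_args__ = {"exclude_properties": ["status"]}


    # "status" has no mapped attribute, so in-memory evaluation of this
    # criteria raises UnevaluatableError instead of UnmappedColumnError
    stmt = update(User).where(user_table.c.status == "closed").values(id=2)
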
index b3fbe6ba7c9e7c7824372c7010035b32be33e19d..9903c5f4a42d6814759b4b862b112a04e37ecfef 100644 (file)
@@ -77,6 +77,7 @@ if typing.TYPE_CHECKING:
     from ._typing import _InternalEntityType
     from ._typing import _ORMAdapterProto
     from .attributes import InstrumentedAttribute
+    from .base import Mapped
     from .context import _MapperEntity
     from .context import ORMCompileState
     from .context import QueryContext
@@ -157,6 +158,7 @@ class _IntrospectsAnnotations:
         registry: RegistryType,
         cls: Type[Any],
         key: str,
+        mapped_container: Optional[Type[Mapped[Any]]],
         annotation: Optional[_AnnotationScanType],
         extracted_mapped_annotation: Optional[_AnnotationScanType],
         is_dataclass_field: bool,
index 1f2e9706b2562f1edf321a8ea9b1c8ff0e57003d..841f29d15a4031039e36981636afbc4194da6807 100644 (file)
@@ -65,6 +65,7 @@ if TYPE_CHECKING:
     from ._typing import _InstanceDict
     from ._typing import _ORMColumnExprArgument
     from ._typing import _RegistryType
+    from .base import Mapped
     from .mapper import Mapper
     from .session import Session
     from .state import _InstallLoaderCallableProto
@@ -196,6 +197,7 @@ class ColumnProperty(
         registry: _RegistryType,
         cls: Type[Any],
         key: str,
+        mapped_container: Optional[Type[Mapped[Any]]],
         annotation: Optional[_AnnotationScanType],
         extracted_mapped_annotation: Optional[_AnnotationScanType],
         is_dataclass_field: bool,
@@ -634,6 +636,7 @@ class MappedColumn(
         registry: _RegistryType,
         cls: Type[Any],
         key: str,
+        mapped_container: Optional[Type[Mapped[Any]]],
         annotation: Optional[_AnnotationScanType],
         extracted_mapped_annotation: Optional[_AnnotationScanType],
         is_dataclass_field: bool,
index 30b0f41cf5ce635d3f4d5d40c0ec891574173a80..86137e00e61824524426e66daa66c53f507ab199 100644 (file)
@@ -1638,7 +1638,7 @@ class Query(
             q = q.with_transformation(filter_something(x==5))
 
         This allows ad-hoc recipes to be created for :class:`_query.Query`
-        objects.  See the example at :ref:`hybrid_transformers`.
+        objects.
 
         """
         return fn(self)
index c215623e214691b53016c39a5693ad1b69cef654..bae381961cf1fd2f4d1294a50288bf1d02f86439 100644 (file)
@@ -47,9 +47,11 @@ from ._typing import is_has_collection_adapter
 from .base import _DeclarativeMapped
 from .base import _is_mapped_class
 from .base import class_mapper
+from .base import DynamicMapped
 from .base import LoaderCallableStatus
 from .base import PassiveFlag
 from .base import state_str
+from .base import WriteOnlyMapped
 from .interfaces import _AttributeOptions
 from .interfaces import _IntrospectsAnnotations
 from .interfaces import MANYTOMANY
@@ -94,6 +96,7 @@ if typing.TYPE_CHECKING:
     from ._typing import _InternalEntityType
     from ._typing import _O
     from ._typing import _RegistryType
+    from .base import Mapped
     from .clsregistry import _class_resolver
     from .clsregistry import _ModNS
     from .dependency import DependencyProcessor
@@ -144,6 +147,7 @@ _LazyLoadArgumentType = Literal[
     "raise_on_sql",
     "noload",
     "immediate",
+    "write_only",
     "dynamic",
     True,
     False,
@@ -1708,6 +1712,7 @@ class RelationshipProperty(
         registry: _RegistryType,
         cls: Type[Any],
         key: str,
+        mapped_container: Optional[Type[Mapped[Any]]],
         annotation: Optional[_AnnotationScanType],
         extracted_mapped_annotation: Optional[_AnnotationScanType],
         is_dataclass_field: bool,
@@ -1723,13 +1728,27 @@ class RelationshipProperty(
 
         argument = extracted_mapped_annotation
 
+        is_write_only = mapped_container is not None and issubclass(
+            mapped_container, WriteOnlyMapped
+        )
+        if is_write_only:
+            self.lazy = "write_only"
+            self.strategy_key = (("lazy", self.lazy),)
+
+        is_dynamic = mapped_container is not None and issubclass(
+            mapped_container, DynamicMapped
+        )
+        if is_dynamic:
+            self.lazy = "dynamic"
+            self.strategy_key = (("lazy", self.lazy),)
+
         if hasattr(argument, "__origin__"):
 
             collection_class = argument.__origin__  # type: ignore
             if issubclass(collection_class, abc.Collection):
                 if self.collection_class is None:
                     self.collection_class = collection_class
-            else:
+            elif not is_write_only and not is_dynamic:
                 self.uselist = False
 
             if argument.__args__:  # type: ignore
@@ -1754,7 +1773,11 @@ class RelationshipProperty(
             # we don't allow the collection class to be a
             # __forward_arg__ right now, so if we see a forward arg here,
             # we know there was no collection class either
-            if self.collection_class is None:
+            if (
+                self.collection_class is None
+                and not is_write_only
+                and not is_dynamic
+            ):
                 self.uselist = False
 
         self.argument = argument
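
A quick hedged check of what the scan above enables, reusing the write-only
``User`` / ``Address`` mapping from the runnable sketch earlier: the container
annotation alone determines the configured strategy::

    rel = User.__mapper__.relationships["addresses"]
    assert rel.lazy == "write_only"
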
@@ -3344,8 +3367,14 @@ class _ColInAnnotations:
         return self.name in c._annotations
 
 
-class Relationship(RelationshipProperty[_T], _DeclarativeMapped[_T]):
-    """Declarative front-end for the :class:`.RelationshipProperty` class.
+class Relationship(  # type: ignore
+    RelationshipProperty[_T],
+    _DeclarativeMapped[_T],
+    WriteOnlyMapped[_T],  # not compatible with Mapped[_T]
+    DynamicMapped[_T],  # not compatible with Mapped[_T]
+):
+    """Describes an object property that holds a single item or list
+    of items that correspond to a related database table.
 
     Public constructor is the :func:`_orm.relationship` function.
 
index b8c2f6e9e57a2651a31647837ba43018b1cb44dd..0f16df9c8858c485d562f5c3eea3b3aaef173cc5 100644 (file)
@@ -37,6 +37,7 @@ from ._typing import insp_is_aliased_class
 from ._typing import insp_is_mapper
 from ._typing import prop_is_relationship
 from .base import _class_to_mapper as _class_to_mapper
+from .base import _MappedAnnotationBase
 from .base import _never_set as _never_set  # noqa: F401
 from .base import _none_set as _none_set  # noqa: F401
 from .base import attribute_str as attribute_str  # noqa: F401
@@ -76,7 +77,7 @@ from ..sql.elements import KeyedColumnElement
 from ..sql.selectable import FromClause
 from ..util.langhelpers import MemoizedSlots
 from ..util.typing import de_stringify_annotation
-from ..util.typing import is_origin_of
+from ..util.typing import is_origin_of_cls
 from ..util.typing import Literal
 
 if typing.TYPE_CHECKING:
@@ -1994,7 +1995,7 @@ def _is_mapped_annotation(
     except NameError:
         return False
     else:
-        return is_origin_of(annotated, "Mapped", module="sqlalchemy.orm")
+        return is_origin_of_cls(annotated, _MappedAnnotationBase)
 
 
 def _cleanup_mapped_str_annotation(annotation: str) -> str:
@@ -2006,7 +2007,7 @@ def _cleanup_mapped_str_annotation(annotation: str) -> str:
     inner: Optional[Match[str]]
 
     mm = re.match(r"^(.+?)\[(.+)\]$", annotation)
-    if mm and mm.group(1) == "Mapped":
+    if mm and mm.group(1) in ("Mapped", "WriteOnlyMapped", "DynamicMapped"):
         stack = []
         inner = mm
         while True:
@@ -2038,7 +2039,7 @@ def _extract_mapped_subtype(
     is_dataclass_field: bool,
     expect_mapped: bool = True,
     raiseerr: bool = True,
-) -> Optional[Union[type, str]]:
+) -> Optional[Tuple[Union[type, str], Optional[type]]]:
     """given an annotation, figure out if it's ``Mapped[something]`` and if
     so, return the ``something`` part.
 
@@ -2071,10 +2072,10 @@ def _extract_mapped_subtype(
         annotated = raw_annotation  # type: ignore
 
     if is_dataclass_field:
-        return annotated
+        return annotated, None
     else:
-        if not hasattr(annotated, "__origin__") or not is_origin_of(
-            annotated, "Mapped", module="sqlalchemy.orm"
+        if not hasattr(annotated, "__origin__") or not is_origin_of_cls(
+            annotated, _MappedAnnotationBase
         ):
             anno_name = (
                 getattr(annotated, "__name__", None)
@@ -2118,11 +2119,11 @@ def _extract_mapped_subtype(
                     )
 
             else:
-                return annotated
+                return annotated, None
 
         if len(annotated.__args__) != 1:  # type: ignore
             raise sa_exc.ArgumentError(
                 "Expected sub-type for Mapped[] annotation"
             )
 
-        return annotated.__args__[0]  # type: ignore
+        return annotated.__args__[0], annotated.__origin__  # type: ignore
diff --git a/lib/sqlalchemy/orm/writeonly.py b/lib/sqlalchemy/orm/writeonly.py
new file mode 100644 (file)
index 0000000..5814cef
--- /dev/null
@@ -0,0 +1,619 @@
+# orm/writeonly.py
+# Copyright (C) 2005-2022 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+# mypy: ignore-errors
+
+
+"""Write-only collection API.
+
+This is an alternate mapped attribute style that only supports single-item
+collection mutation operations.   To read the collection, a select()
+object must be executed each time.
+
+.. versionadded:: 2.0
+
+
+"""
+
+from __future__ import annotations
+
+from typing import Any
+from typing import Generic
+from typing import Iterable
+from typing import NoReturn
+from typing import Optional
+from typing import overload
+from typing import Tuple
+from typing import TYPE_CHECKING
+from typing import TypeVar
+from typing import Union
+
+from sqlalchemy.sql import bindparam
+from . import attributes
+from . import interfaces
+from . import relationships
+from . import strategies
+from .base import object_mapper
+from .base import PassiveFlag
+from .relationships import RelationshipDirection
+from .. import exc
+from .. import inspect
+from .. import log
+from .. import util
+from ..sql import delete
+from ..sql import insert
+from ..sql import select
+from ..sql import update
+from ..sql.dml import Delete
+from ..sql.dml import Insert
+from ..sql.dml import Update
+from ..util.typing import Literal
+
+if TYPE_CHECKING:
+    from ._typing import _InstanceDict
+    from .attributes import _AdaptedCollectionProtocol
+    from .attributes import AttributeEventToken
+    from .attributes import CollectionAdapter
+    from .base import LoaderCallableStatus
+    from .state import InstanceState
+    from ..sql.selectable import Select
+
+
+_T = TypeVar("_T", bound=Any)
+
+
+class WriteOnlyHistory:
+    """Overrides AttributeHistory to receive append/remove events directly."""
+
+    def __init__(self, attr, state, passive, apply_to=None):
+        if apply_to:
+            if passive & PassiveFlag.SQL_OK:
+                raise exc.InvalidRequestError(
+                    f"Attribute {attr} can't load the existing state from the "
+                    "database for this operation; full iteration is not "
+                    "permitted.  If this is a delete operation, configure "
+                    f"passive_deletes=True on the {attr} relationship in "
+                    "order to resolve this error."
+                )
+
+            self.unchanged_items = apply_to.unchanged_items
+            self.added_items = apply_to.added_items
+            self.deleted_items = apply_to.deleted_items
+            self._reconcile_collection = apply_to._reconcile_collection
+        else:
+            self.deleted_items = util.OrderedIdentitySet()
+            self.added_items = util.OrderedIdentitySet()
+            self.unchanged_items = util.OrderedIdentitySet()
+            self._reconcile_collection = False
+
+    @property
+    def added_plus_unchanged(self):
+        return list(self.added_items.union(self.unchanged_items))
+
+    @property
+    def all_items(self):
+        return list(
+            self.added_items.union(self.unchanged_items).union(
+                self.deleted_items
+            )
+        )
+
+    def as_history(self):
+        if self._reconcile_collection:
+            added = self.added_items.difference(self.unchanged_items)
+            deleted = self.deleted_items.intersection(self.unchanged_items)
+            unchanged = self.unchanged_items.difference(deleted)
+        else:
+            added, unchanged, deleted = (
+                self.added_items,
+                self.unchanged_items,
+                self.deleted_items,
+            )
+        return attributes.History(list(added), list(unchanged), list(deleted))
+
+    def indexed(self, index):
+        return list(self.added_items)[index]
+
+    def add_added(self, value):
+        self.added_items.add(value)
+
+    def add_removed(self, value):
+        if value in self.added_items:
+            self.added_items.remove(value)
+        else:
+            self.deleted_items.add(value)
+
+
+class WriteOnlyAttributeImpl(
+    attributes.HasCollectionAdapter, attributes.AttributeImpl
+):
+    uses_objects = True
+    default_accepts_scalar_loader = False
+    supports_population = False
+    _supports_dynamic_iteration = False
+    collection = False
+    dynamic = True
+    order_by = ()
+    collection_history_cls = WriteOnlyHistory
+
+    def __init__(
+        self,
+        class_,
+        key,
+        typecallable,
+        dispatch,
+        target_mapper,
+        order_by,
+        **kw,
+    ):
+        super().__init__(class_, key, typecallable, dispatch, **kw)
+        self.target_mapper = target_mapper
+        self.query_class = WriteOnlyCollection
+        if order_by:
+            self.order_by = tuple(order_by)
+
+    def get(self, state, dict_, passive=attributes.PASSIVE_OFF):
+        if not passive & attributes.SQL_OK:
+            return self._get_collection_history(
+                state, attributes.PASSIVE_NO_INITIALIZE
+            ).added_items
+        else:
+            return self.query_class(self, state)
+
+    @overload
+    def get_collection(
+        self,
+        state: InstanceState[Any],
+        dict_: _InstanceDict,
+        user_data: Literal[None] = ...,
+        passive: Literal[PassiveFlag.PASSIVE_OFF] = ...,
+    ) -> CollectionAdapter:
+        ...
+
+    @overload
+    def get_collection(
+        self,
+        state: InstanceState[Any],
+        dict_: _InstanceDict,
+        user_data: _AdaptedCollectionProtocol = ...,
+        passive: PassiveFlag = ...,
+    ) -> CollectionAdapter:
+        ...
+
+    @overload
+    def get_collection(
+        self,
+        state: InstanceState[Any],
+        dict_: _InstanceDict,
+        user_data: Optional[_AdaptedCollectionProtocol] = ...,
+        passive: PassiveFlag = ...,
+    ) -> Union[
+        Literal[LoaderCallableStatus.PASSIVE_NO_RESULT], CollectionAdapter
+    ]:
+        ...
+
+    def get_collection(
+        self,
+        state: InstanceState[Any],
+        dict_: _InstanceDict,
+        user_data: Optional[_AdaptedCollectionProtocol] = None,
+        passive: PassiveFlag = PassiveFlag.PASSIVE_OFF,
+    ) -> Union[
+        Literal[LoaderCallableStatus.PASSIVE_NO_RESULT], CollectionAdapter
+    ]:
+        if not passive & attributes.SQL_OK:
+            data = self._get_collection_history(state, passive).added_items
+        else:
+            history = self._get_collection_history(state, passive)
+            data = history.added_plus_unchanged
+        return DynamicCollectionAdapter(data)  # type: ignore
+
+    @util.memoized_property
+    def _append_token(self):
+        return attributes.AttributeEventToken(self, attributes.OP_APPEND)
+
+    @util.memoized_property
+    def _remove_token(self):
+        return attributes.AttributeEventToken(self, attributes.OP_REMOVE)
+
+    def fire_append_event(
+        self, state, dict_, value, initiator, collection_history=None
+    ):
+        if collection_history is None:
+            collection_history = self._modified_event(state, dict_)
+
+        collection_history.add_added(value)
+
+        for fn in self.dispatch.append:
+            value = fn(state, value, initiator or self._append_token)
+
+        if self.trackparent and value is not None:
+            self.sethasparent(attributes.instance_state(value), state, True)
+
+    def fire_remove_event(
+        self, state, dict_, value, initiator, collection_history=None
+    ):
+        if collection_history is None:
+            collection_history = self._modified_event(state, dict_)
+
+        collection_history.add_removed(value)
+
+        if self.trackparent and value is not None:
+            self.sethasparent(attributes.instance_state(value), state, False)
+
+        for fn in self.dispatch.remove:
+            fn(state, value, initiator or self._remove_token)
+
+    def _modified_event(self, state, dict_):
+
+        if self.key not in state.committed_state:
+            state.committed_state[self.key] = self.collection_history_cls(
+                self, state, PassiveFlag.PASSIVE_NO_FETCH
+            )
+
+        state._modified_event(dict_, self, attributes.NEVER_SET)
+
+        # this is a hack to allow the fixtures.ComparableEntity fixture
+        # to work
+        dict_[self.key] = True
+        return state.committed_state[self.key]
+
+    def set(
+        self,
+        state: InstanceState[Any],
+        dict_: _InstanceDict,
+        value: Any,
+        initiator: Optional[AttributeEventToken] = None,
+        passive: PassiveFlag = PassiveFlag.PASSIVE_OFF,
+        check_old: Any = None,
+        pop: bool = False,
+        _adapt: bool = True,
+    ) -> None:
+        if initiator and initiator.parent_token is self.parent_token:
+            return
+
+        if pop and value is None:
+            return
+
+        iterable = value
+        new_values = list(iterable)
+        if state.has_identity:
+            if not self._supports_dynamic_iteration:
+                raise exc.InvalidRequestError(
+                    f'Collection "{self}" does not support implicit '
+                    "iteration; collection replacement operations "
+                    "can't be used"
+                )
+            old_collection = util.IdentitySet(
+                self.get(state, dict_, passive=passive)
+            )
+
+        collection_history = self._modified_event(state, dict_)
+        if not state.has_identity:
+            old_collection = collection_history.added_items
+        else:
+            old_collection = old_collection.union(
+                collection_history.added_items
+            )
+
+        constants = old_collection.intersection(new_values)
+        additions = util.IdentitySet(new_values).difference(constants)
+        removals = old_collection.difference(constants)
+
+        for member in new_values:
+            if member in additions:
+                self.fire_append_event(
+                    state,
+                    dict_,
+                    member,
+                    None,
+                    collection_history=collection_history,
+                )
+
+        for member in removals:
+            self.fire_remove_event(
+                state,
+                dict_,
+                member,
+                None,
+                collection_history=collection_history,
+            )
+
+    def delete(self, *args, **kwargs):
+        raise NotImplementedError()
+
+    def set_committed_value(self, state, dict_, value):
+        raise NotImplementedError(
+            "Dynamic attributes don't support collection population."
+        )
+
+    def get_history(self, state, dict_, passive=attributes.PASSIVE_NO_FETCH):
+        c = self._get_collection_history(state, passive)
+        return c.as_history()
+
+    def get_all_pending(
+        self, state, dict_, passive=attributes.PASSIVE_NO_INITIALIZE
+    ):
+        c = self._get_collection_history(state, passive)
+        return [(attributes.instance_state(x), x) for x in c.all_items]
+
+    def _get_collection_history(self, state, passive):
+        if self.key in state.committed_state:
+            c = state.committed_state[self.key]
+        else:
+            c = self.collection_history_cls(
+                self, state, PassiveFlag.PASSIVE_NO_FETCH
+            )
+
+        if state.has_identity and (passive & attributes.INIT_OK):
+            return self.collection_history_cls(
+                self, state, passive, apply_to=c
+            )
+        else:
+            return c
+
+    def append(
+        self,
+        state,
+        dict_,
+        value,
+        initiator,
+        passive=attributes.PASSIVE_NO_FETCH,
+    ):
+        if initiator is not self:
+            self.fire_append_event(state, dict_, value, initiator)
+
+    def remove(
+        self,
+        state,
+        dict_,
+        value,
+        initiator,
+        passive=attributes.PASSIVE_NO_FETCH,
+    ):
+        if initiator is not self:
+            self.fire_remove_event(state, dict_, value, initiator)
+
+    def pop(
+        self,
+        state,
+        dict_,
+        value,
+        initiator,
+        passive=attributes.PASSIVE_NO_FETCH,
+    ):
+        self.remove(state, dict_, value, initiator, passive=passive)
+
+
+@log.class_logger
+@relationships.RelationshipProperty.strategy_for(lazy="write_only")
+class WriteOnlyLoader(strategies.AbstractRelationshipLoader, log.Identified):
+    impl_class = WriteOnlyAttributeImpl
+
+    def init_class_attribute(self, mapper):
+        self.is_class_level = True
+        if not self.uselist or self.parent_property.direction not in (
+            interfaces.ONETOMANY,
+            interfaces.MANYTOMANY,
+        ):
+            raise exc.InvalidRequestError(
+                "On relationship %s, 'dynamic' loaders cannot be used with "
+                "many-to-one/one-to-one relationships and/or "
+                "uselist=False." % self.parent_property
+            )
+
+        strategies._register_attribute(
+            self.parent_property,
+            mapper,
+            useobject=True,
+            impl_class=self.impl_class,
+            target_mapper=self.parent_property.mapper,
+            order_by=self.parent_property.order_by,
+            query_class=self.parent_property.query_class,
+        )
+
+
+class DynamicCollectionAdapter:
+    """simplified CollectionAdapter for internal API consistency"""
+
+    def __init__(self, data):
+        self.data = data
+
+    def __iter__(self):
+        return iter(self.data)
+
+    def _reset_empty(self):
+        pass
+
+    def __len__(self):
+        return len(self.data)
+
+    def __bool__(self):
+        return True
+
+
+class AbstractCollectionWriter(Generic[_T]):
+    """Virtual collection which includes append/remove methods that synchronize
+    into the attribute event system.
+
+    """
+
+    if not TYPE_CHECKING:
+        __slots__ = ()
+
+    def __init__(self, attr, state):
+
+        self.instance = instance = state.obj()
+        self.attr = attr
+
+        mapper = object_mapper(instance)
+        prop = mapper._props[self.attr.key]
+
+        if prop.secondary is not None:
+            # this is a hack right now.  The Query only knows how to
+            # make subsequent joins() without a given left-hand side
+            # from self._from_obj[0].  We need to ensure prop.secondary
+            # is in the FROM.  So we purposely put the mapper selectable
+            # in _from_obj[0] to ensure a user-defined join() later on
+            # doesn't fail, and secondary is then in _from_obj[1].
+
+            # note also, we are using the official ORM-annotated selectable
+            # from __clause_element__(), see #7868
+            self._from_obj = (prop.mapper.__clause_element__(), prop.secondary)
+        else:
+            self._from_obj = ()
+
+        self._where_criteria = (
+            prop._with_parent(instance, alias_secondary=False),
+        )
+
+        if self.attr.order_by:
+            self._order_by_clauses = self.attr.order_by
+        else:
+            self._order_by_clauses = ()
+
+    def _add_all_impl(self, iterator: Iterable[_T]) -> None:
+        for item in iterator:
+            self.attr.append(
+                attributes.instance_state(self.instance),
+                attributes.instance_dict(self.instance),
+                item,
+                None,
+            )
+
+    def _remove_impl(self, item: _T) -> None:
+        self.attr.remove(
+            attributes.instance_state(self.instance),
+            attributes.instance_dict(self.instance),
+            item,
+            None,
+        )
+
+
+class WriteOnlyCollection(AbstractCollectionWriter[_T]):
+    """Write-only collection which can synchronize changes into the
+    attribute event system.
+
+    The :class:`.WriteOnlyCollection` is used in a mapping by
+    using the ``"write_only"`` lazy loading strategy with
+    :func:`_orm.relationship`.     For background on this configuration,
+    see :ref:`write_only_relationship`.
+
+    .. versionadded:: 2.0
+
+    .. seealso::
+
+        :ref:`write_only_relationship`
+
+    """
+
+    __slots__ = (
+        "instance",
+        "attr",
+        "_where_criteria",
+        "_from_obj",
+        "_order_by_clauses",
+    )
+
+    def __iter__(self) -> NoReturn:
+        raise TypeError(
+            "WriteOnly collections don't support iteration in-place; "
+            "to query for collection items, use the select() method to "
+            "produce a SQL statement and execute it with session.scalars()."
+        )
+
+    def select(self) -> Select[Tuple[_T]]:
+        """Produce a :class:`_sql.Select` construct that represents the
+        rows within this instance-local :class:`_orm.WriteOnlyCollection`.
+
+        """
+        stmt = select(self.attr.target_mapper).where(*self._where_criteria)
+        if self._from_obj:
+            stmt = stmt.select_from(*self._from_obj)
+        if self._order_by_clauses:
+            stmt = stmt.order_by(*self._order_by_clauses)
+        return stmt
+
+    def insert(self) -> Insert[_T]:
+        """For one-to-many collections, produce a :class:`_dml.Insert` which
+        will insert new rows in terms of this instance-local
+        :class:`_orm.WriteOnlyCollection`.
+
+        This construct is only supported for a :class:`_orm.Relationship`
+        that does **not** include the :paramref:`_orm.relationship.secondary`
+        parameter.  For relationships that refer to a many-to-many table,
+        use ordinary bulk insert techniques to produce new objects, then
+        use :meth:`_orm.AbstractCollectionWriter.add_all` to associate them
+        with the collection.
+
+
+        """
+
+        state = inspect(self.instance)
+        mapper = state.mapper
+        prop = mapper._props[self.attr.key]
+
+        if prop.direction is not RelationshipDirection.ONETOMANY:
+            raise exc.InvalidRequestError(
+                "Write only bulk INSERT only supported for one-to-many "
+                "collections; for many-to-many, use a separate bulk "
+                "INSERT along with add_all()."
+            )
+
+        dict_ = {}
+
+        for l, r in prop.synchronize_pairs:
+            fn = prop._get_attr_w_warn_on_none(
+                mapper,
+                state,
+                state.dict,
+                l,
+            )
+
+            dict_[r.key] = bindparam(None, callable_=fn)
+
+        return insert(self.attr.target_mapper).values(**dict_)
+
+    def update(self) -> Update[_T]:
+        """Produce a :class:`_dml.Update` which will refer to rows in terms
+        of this instance-local :class:`_orm.WriteOnlyCollection`.
+
+        """
+        return update(self.attr.target_mapper).where(*self._where_criteria)
+
+    def delete(self) -> Delete[_T]:
+        """Produce a :class:`_dml.Delete` which will refer to rows in terms
+        of this instance-local :class:`_orm.WriteOnlyCollection`.
+
+        """
+        return delete(self.attr.target_mapper).where(*self._where_criteria)
+
+    def add_all(self, iterator: Iterable[_T]) -> None:
+        """Add an iterable of items to this :class:`_orm.WriteOnlyCollection`.
+
+        The given items will be persisted to the database in terms of
+        the parent instance's collection on the next flush.
+
+        """
+        self._add_all_impl(iterator)
+
+    def add(self, item: _T) -> None:
+        """Add an item to this :class:`_orm.WriteOnlyCollection`.
+
+        The given item will be persisted to the database in terms of
+        the parent instance's collection on the next flush.
+
+        """
+        self._add_all_impl([item])
+
+    def remove(self, item: _T) -> None:
+        """Remove an item from this :class:`_orm.WriteOnlyCollection`.
+
+        The given item will be removed from the parent instance's collection on
+        the next flush.
+
+        """
+        self._remove_impl(item)
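
To make the new API concrete, here is a minimal usage sketch of the
WriteOnlyCollection methods added above.  The User/Address mapping, the
SQLite URL, and the email values are illustrative only; the
session.execute(collection.insert(), [...]) executemany form follows the
pattern exercised in the new large_collections.rst doctests:

    from sqlalchemy import ForeignKey, create_engine
    from sqlalchemy.orm import (
        DeclarativeBase,
        Mapped,
        Session,
        WriteOnlyMapped,
        mapped_column,
        relationship,
    )


    class Base(DeclarativeBase):
        pass


    class User(Base):
        __tablename__ = "user_account"
        id: Mapped[int] = mapped_column(primary_key=True)
        addresses: WriteOnlyMapped["Address"] = relationship()


    class Address(Base):
        __tablename__ = "address"
        id: Mapped[int] = mapped_column(primary_key=True)
        user_id: Mapped[int] = mapped_column(ForeignKey("user_account.id"))
        email_address: Mapped[str]


    engine = create_engine("sqlite://")
    Base.metadata.create_all(engine)

    with Session(engine) as session:
        u1 = User()
        # add()/add_all() only queue attribute events; nothing is loaded
        u1.addresses.add_all(
            [
                Address(email_address="a@x.example"),
                Address(email_address="b@x.example"),
            ]
        )
        session.add(u1)
        session.commit()

        # reads are always an explicit SELECT scoped to this instance
        stmt = u1.addresses.select().order_by(Address.email_address)
        for address in session.scalars(stmt):
            print(address.email_address)

        # bulk DML in terms of the collection; insert() is one-to-many only
        session.execute(
            u1.addresses.insert(), [{"email_address": "c@x.example"}]
        )
        session.execute(
            u1.addresses.delete().where(
                Address.email_address == "a@x.example"
            )
        )
        session.commit()
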
index b3a71dbffc2b7717a60f40c9e0017dc191155431..2fda1e9cbe57ea5954dc2ab4d10bdca2d8578714 100644 (file)
@@ -371,6 +371,10 @@ class EachOf(AssertRule):
         self.rules = list(rules)
 
     def process_statement(self, execute_observed):
+        if not self.rules:
+            self.is_consumed = True
+            self.consume_statement = False
+
         while self.rules:
             rule = self.rules[0]
             rule.process_statement(execute_observed)
index cf31388350204d783432db444de8a50d623d292a..15352c8c816cefa56496cf362eea3e9286b3759f 100644 (file)
@@ -11,6 +11,7 @@ from __future__ import annotations
 
 import sqlalchemy as sa
 from .. import exc as sa_exc
+from ..orm.writeonly import WriteOnlyCollection
 
 _repr_stack = set()
 
@@ -82,8 +83,12 @@ class ComparableMixin:
             for attr in list(a.__dict__):
                 if attr.startswith("_"):
                     continue
+
                 value = getattr(a, attr)
 
+                if isinstance(value, WriteOnlyCollection):
+                    continue
+
                 try:
                     # handle lazy loader errors
                     battr = getattr(b, attr)
index a0d59a6305b25f761ecc66b81da45ea8230a5414..85ef4bb455222911f53cbdab106610f3e125cac8 100644 (file)
@@ -216,6 +216,19 @@ def is_union(type_: Any) -> bool:
     return is_origin_of(type_, "Union")
 
 
+def is_origin_of_cls(
+    type_: Any, class_obj: Union[Tuple[Type[Any], ...], Type[Any]]
+) -> bool:
+    """return True if the given type has an __origin__ that shares a base
+    with the given class"""
+
+    origin = typing_get_origin(type_)
+    if origin is None:
+        return False
+
+    return isinstance(origin, type) and issubclass(origin, class_obj)
+
+
 def is_origin_of(
     type_: Any, *names: str, module: Optional[str] = None
 ) -> bool:
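
A quick illustration of the helper's behavior; note that
sqlalchemy.util.typing is a private module, so importing it directly as
shown is for demonstration only:

    from typing import List

    from sqlalchemy.util.typing import is_origin_of_cls

    # List[int] has list as its __origin__, and list subclasses list
    assert is_origin_of_cls(List[int], list)

    # a plain class has no __origin__, so the check returns False
    assert not is_origin_of_cls(int, list)

    # class_obj may also be a tuple of candidate base classes
    assert is_origin_of_cls(List[int], (dict, list))

Presumably this is what allows the annotation-based mapping logic to
recognize parametrized constructs such as WriteOnlyMapped[Address] and
DynamicMapped[Address] by their origin class.
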
index 0d0ed2bd933cf5440d0b4bd9c5d71f5a06ce195b..9c2fb2d72bf602c5446b40f150ac32ab601a9c0b 100644 (file)
@@ -171,6 +171,11 @@ class DocTest(fixtures.TestBase):
             "orm/queryguide/dml.rst",
         )
 
+    def test_orm_large_collections(self):
+        self._run_doctest(
+            "orm/large_collections.rst",
+        )
+
     def test_orm_queryguide_columns(self):
         self._run_doctest(
             "orm/queryguide/columns.rst",
diff --git a/test/ext/mypy/plain_files/dynamic_rel.py b/test/ext/mypy/plain_files/dynamic_rel.py
new file mode 100644 (file)
index 0000000..78bf15f
--- /dev/null
@@ -0,0 +1,86 @@
+from __future__ import annotations
+
+import typing
+
+from sqlalchemy import ForeignKey
+from sqlalchemy.orm import DeclarativeBase
+from sqlalchemy.orm import DynamicMapped
+from sqlalchemy.orm import Mapped
+from sqlalchemy.orm import mapped_column
+from sqlalchemy.orm import relationship
+from sqlalchemy.orm import Session
+
+
+class Base(DeclarativeBase):
+    pass
+
+
+class Address(Base):
+    __tablename__ = "address"
+    id: Mapped[int] = mapped_column(primary_key=True)
+    user_id: Mapped[int] = mapped_column(ForeignKey("user.id"))
+    email_address: Mapped[str]
+
+
+class User(Base):
+    __tablename__ = "user"
+    id: Mapped[int] = mapped_column(primary_key=True)
+    addresses: DynamicMapped[Address] = relationship(
+        cascade="all,delete-orphan"
+    )
+
+
+with Session() as session:
+    u = User()
+    session.add(u)
+    session.commit()
+
+    if typing.TYPE_CHECKING:
+
+        # EXPECTED_TYPE: AppenderQuery[Address]
+        reveal_type(u.addresses)
+
+    count = u.addresses.count()
+    if typing.TYPE_CHECKING:
+
+        # EXPECTED_TYPE: int
+        reveal_type(count)
+
+    address = u.addresses.filter(Address.email_address.like("xyz")).one()
+
+    if typing.TYPE_CHECKING:
+
+        # EXPECTED_TYPE: Address
+        reveal_type(address)
+
+    u.addresses.append(Address())
+    u.addresses.extend([Address(), Address()])
+
+    current_addresses = list(u.addresses)
+
+    if typing.TYPE_CHECKING:
+
+        # EXPECTED_TYPE: list[Address]
+        reveal_type(current_addresses)
+
+    # can assign plain list
+    u.addresses = []
+
+    # or any other iterable, such as a set
+    u.addresses = set()
+
+    if typing.TYPE_CHECKING:
+        # still an AppenderQuery
+        # EXPECTED_TYPE: AppenderQuery[Address]
+        reveal_type(u.addresses)
+
+    u.addresses = set([Address(), Address()])
+
+    if typing.TYPE_CHECKING:
+        # still an AppenderQuery
+        # EXPECTED_TYPE: AppenderQuery[Address]
+        reveal_type(u.addresses)
+
+    u.addresses.append(Address())
+
+    session.commit()
diff --git a/test/ext/mypy/plain_files/write_only.py b/test/ext/mypy/plain_files/write_only.py
new file mode 100644 (file)
index 0000000..672630e
--- /dev/null
@@ -0,0 +1,59 @@
+from __future__ import annotations
+
+import typing
+
+from sqlalchemy import ForeignKey
+from sqlalchemy.orm import DeclarativeBase
+from sqlalchemy.orm import Mapped
+from sqlalchemy.orm import mapped_column
+from sqlalchemy.orm import relationship
+from sqlalchemy.orm import Session
+from sqlalchemy.orm import WriteOnlyMapped
+
+
+class Base(DeclarativeBase):
+    pass
+
+
+class Address(Base):
+    __tablename__ = "address"
+    id: Mapped[int] = mapped_column(primary_key=True)
+    user_id: Mapped[int] = mapped_column(ForeignKey("user.id"))
+    email_address: Mapped[str]
+
+
+class User(Base):
+    __tablename__ = "user"
+    id: Mapped[int] = mapped_column(primary_key=True)
+    addresses: WriteOnlyMapped[Address] = relationship()
+
+
+with Session() as session:
+    u = User()
+    session.add(u)
+    session.commit()
+
+    if typing.TYPE_CHECKING:
+
+        # EXPECTED_TYPE: WriteOnlyCollection[Address]
+        reveal_type(u.addresses)
+
+    address = session.scalars(
+        u.addresses.select().filter(Address.email_address.like("xyz"))
+    ).one()
+
+    if typing.TYPE_CHECKING:
+
+        # EXPECTED_TYPE: Address
+        reveal_type(address)
+
+    u.addresses.add(Address())
+    u.addresses.add_all([Address(), Address()])
+
+    # this should emit an error, because __iter__ is NoReturn;
+    # however, typing tools don't currently flag it
+    current_addresses = list(u.addresses)
+
+    u.addresses.add(Address())
+
+    session.commit()
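
As a runtime counterpart to the typing caveat noted in the test above,
iterating the collection fails immediately rather than loading rows.  A
sketch, assuming the same ``u`` object from the test:

    try:
        list(u.addresses)
    except TypeError as err:
        # "WriteOnly collections don't support iteration in-place; ..."
        print(err)
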
index 76ee464fad04b676f7a7bea5f5e11d8479967828..a63378c26df455b7ef473698eada29628819af9a 100644 (file)
@@ -14,10 +14,12 @@ from sqlalchemy import Numeric
 from sqlalchemy import Table
 from sqlalchemy.orm import attribute_mapped_collection
 from sqlalchemy.orm import DeclarativeBase
+from sqlalchemy.orm import DynamicMapped
 from sqlalchemy.orm import Mapped
 from sqlalchemy.orm import mapped_column
 from sqlalchemy.orm import MappedCollection
 from sqlalchemy.orm import relationship
+from sqlalchemy.orm import WriteOnlyMapped
 from sqlalchemy.testing import expect_raises_message
 from sqlalchemy.testing import is_
 from sqlalchemy.testing import is_false
@@ -25,6 +27,9 @@ from sqlalchemy.testing import is_true
 from sqlalchemy.util import compat
 from .test_typed_mapping import MappedColumnTest as _MappedColumnTest
 from .test_typed_mapping import RelationshipLHSTest as _RelationshipLHSTest
+from .test_typed_mapping import (
+    WriteOnlyRelationshipTest as _WriteOnlyRelationshipTest,
+)
 
 """runs the annotation-sensitive tests from test_typed_mappings while
 having ``from __future__ import annotations`` in effect.
@@ -288,3 +293,35 @@ class RelationshipLHSTest(_RelationshipLHSTest):
         a1.bs.set(b1)
 
         is_(a1.bs["foo"], b1)
+
+
+class WriteOnlyRelationshipTest(_WriteOnlyRelationshipTest):
+    def test_dynamic(self, decl_base):
+        class A(decl_base):
+            __tablename__ = "a"
+            id: Mapped[int] = mapped_column(primary_key=True)
+            bs: DynamicMapped[B] = relationship()
+
+        class B(decl_base):
+            __tablename__ = "b"
+            id: Mapped[int] = mapped_column(primary_key=True)
+            a_id: Mapped[int] = mapped_column(
+                ForeignKey("a.id", ondelete="cascade")
+            )
+
+        self._assertions(A, B, "dynamic")
+
+    def test_write_only(self, decl_base):
+        class A(decl_base):
+            __tablename__ = "a"
+            id: Mapped[int] = mapped_column(primary_key=True)
+            bs: WriteOnlyMapped[B] = relationship()  # noqa: F821
+
+        class B(decl_base):
+            __tablename__ = "b"
+            id: Mapped[int] = mapped_column(primary_key=True)
+            a_id: Mapped[int] = mapped_column(
+                ForeignKey("a.id", ondelete="cascade")
+            )
+
+        self._assertions(A, B, "write_only")
index 99c57e6ebe5e49456bcb4271fa2b098d47efd824..5c5b481db14ee63627bd532cbffee5b0558138d2 100644 (file)
@@ -36,10 +36,12 @@ from sqlalchemy.orm import declarative_base
 from sqlalchemy.orm import DeclarativeBase
 from sqlalchemy.orm import declared_attr
 from sqlalchemy.orm import deferred
+from sqlalchemy.orm import DynamicMapped
 from sqlalchemy.orm import Mapped
 from sqlalchemy.orm import mapped_column
 from sqlalchemy.orm import relationship
 from sqlalchemy.orm import undefer
+from sqlalchemy.orm import WriteOnlyMapped
 from sqlalchemy.orm.collections import attribute_mapped_collection
 from sqlalchemy.orm.collections import MappedCollection
 from sqlalchemy.schema import CreateTable
@@ -1858,3 +1860,41 @@ class AllYourFavoriteHitsTest(fixtures.TestBase, testing.AssertsCompiledSQL):
             "(person JOIN engineer ON person.person_id = engineer.person_id) "
             "ON company.company_id = person.company_id",
         )
+
+
+class WriteOnlyRelationshipTest(fixtures.TestBase):
+    def _assertions(self, A, B, lazy):
+        is_(A.bs.property.mapper, B.__mapper__)
+
+        is_true(A.bs.property.uselist)
+        eq_(A.bs.property.lazy, lazy)
+
+    def test_dynamic(self, decl_base):
+        class B(decl_base):
+            __tablename__ = "b"
+            id: Mapped[int] = mapped_column(primary_key=True)
+            a_id: Mapped[int] = mapped_column(
+                ForeignKey("a.id", ondelete="cascade")
+            )
+
+        class A(decl_base):
+            __tablename__ = "a"
+            id: Mapped[int] = mapped_column(primary_key=True)
+            bs: DynamicMapped[B] = relationship()
+
+        self._assertions(A, B, "dynamic")
+
+    def test_write_only(self, decl_base):
+        class B(decl_base):
+            __tablename__ = "b"
+            id: Mapped[int] = mapped_column(primary_key=True)
+            a_id: Mapped[int] = mapped_column(
+                ForeignKey("a.id", ondelete="cascade")
+            )
+
+        class A(decl_base):
+            __tablename__ = "a"
+            id: Mapped[int] = mapped_column(primary_key=True)
+            bs: WriteOnlyMapped[B] = relationship()
+
+        self._assertions(A, B, "write_only")
index 0cca9e6f5f9b54b67e88c6ad55b3131ce7baf623..557b5e9da4a417daefcb60c23b02f5ae8f105fa5 100644 (file)
@@ -104,6 +104,35 @@ class BulkDMLReturningInhTest:
         s.execute(insert(A).values(type="a", data="d", xcol=10))
         eq_(s.scalars(select(A.x)).all(), [10])
 
+    @testing.combinations("default", "session_disable", "opt_disable")
+    def test_autoflush(self, autoflush_option):
+        A = self.classes.A
+
+        s = fixture_session()
+
+        a1 = A(data="x1")
+        s.add(a1)
+
+        if autoflush_option == "default":
+            s.execute(insert(A).values(type="a", data="x2"))
+            assert inspect(a1).persistent
+            eq_(s.scalars(select(A.data).order_by(A.id)).all(), ["x1", "x2"])
+        elif autoflush_option == "session_disable":
+            with s.no_autoflush:
+                s.execute(insert(A).values(type="a", data="x2"))
+                assert inspect(a1).pending
+                eq_(s.scalars(select(A.data).order_by(A.id)).all(), ["x2"])
+        elif autoflush_option == "opt_disable":
+            s.execute(
+                insert(A).values(type="a", data="x2"),
+                execution_options={"autoflush": False},
+            )
+            assert inspect(a1).pending
+            with s.no_autoflush:
+                eq_(s.scalars(select(A.data).order_by(A.id)).all(), ["x2"])
+        else:
+            assert False
+
     @testing.combinations(True, False, argnames="use_returning")
     def test_heterogeneous_keys(self, use_returning):
         A, B = self.classes("A", "B")
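
The three combinations above pin down autoflush behavior for ORM-enabled
bulk INSERT: it flushes by default, and can be opted out per statement or
per block.  Shown in isolation, assuming A is a mapped class with these
columns and s is an active Session, as in the test:

    from sqlalchemy import insert

    # per-statement: skip autoflush for just this execution
    s.execute(
        insert(A).values(type="a", data="x2"),
        execution_options={"autoflush": False},
    )

    # block-scoped: suspend autoflush for everything inside
    with s.no_autoflush:
        s.execute(insert(A).values(type="a", data="x2"))
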
index 836feb6595a8bb818c5cdd2664cb88754994d443..3250cb3f92464d0dddc6f0aff23602cf692b505e 100644 (file)
@@ -127,6 +127,49 @@ class UpdateDeleteTest(fixtures.MappedTest):
             },
         )
 
+    @testing.combinations("default", "session_disable", "opt_disable")
+    def test_autoflush(self, autoflush_option):
+        User = self.classes.User
+
+        s = fixture_session()
+
+        u1 = User(id=5, name="x1")
+        s.add(u1)
+
+        assert_stmt = (
+            select(User.name)
+            .where(User.name.startswith("x"))
+            .order_by(User.id)
+        )
+        if autoflush_option == "default":
+            s.execute(update(User).values(age=5))
+            assert inspect(u1).persistent
+            eq_(
+                s.scalars(assert_stmt).all(),
+                ["x1"],
+            )
+        elif autoflush_option == "session_disable":
+            with s.no_autoflush:
+                s.execute(update(User).values(age=5))
+                assert inspect(u1).pending
+                eq_(
+                    s.scalars(assert_stmt).all(),
+                    [],
+                )
+        elif autoflush_option == "opt_disable":
+            s.execute(
+                update(User).values(age=5),
+                execution_options={"autoflush": False},
+            )
+            assert inspect(u1).pending
+            with s.no_autoflush:
+                eq_(
+                    s.scalars(assert_stmt).all(),
+                    [],
+                )
+        else:
+            assert False
+
     def test_update_dont_use_col_key(self):
         User = self.classes.User
 
index d004998c8071a578681cc2887043c6ffec64bdd3..505594e7c66079a81670b0ecfa18b721d4bca773 100644 (file)
@@ -4,6 +4,7 @@ from sqlalchemy import desc
 from sqlalchemy import exc
 from sqlalchemy import ForeignKey
 from sqlalchemy import func
+from sqlalchemy import Identity
 from sqlalchemy import inspect
 from sqlalchemy import Integer
 from sqlalchemy import select
@@ -13,24 +14,31 @@ from sqlalchemy.orm import attributes
 from sqlalchemy.orm import backref
 from sqlalchemy.orm import configure_mappers
 from sqlalchemy.orm import exc as orm_exc
+from sqlalchemy.orm import Mapped
+from sqlalchemy.orm import mapped_column
 from sqlalchemy.orm import noload
+from sqlalchemy.orm import PassiveFlag
 from sqlalchemy.orm import Query
 from sqlalchemy.orm import relationship
+from sqlalchemy.orm import WriteOnlyMapped
 from sqlalchemy.orm.session import make_transient_to_detached
 from sqlalchemy.testing import assert_raises
 from sqlalchemy.testing import assert_raises_message
-from sqlalchemy.testing import assert_warns_message
 from sqlalchemy.testing import AssertsCompiledSQL
 from sqlalchemy.testing import eq_
 from sqlalchemy.testing import expect_raises_message
 from sqlalchemy.testing import is_
 from sqlalchemy.testing.assertsql import CompiledSQL
+from sqlalchemy.testing.assertsql import Conditional
 from sqlalchemy.testing.fixtures import fixture_session
 from test.orm import _fixtures
 
 
 class _DynamicFixture:
-    def _user_address_fixture(self, addresses_args={}):
+    lazy = "dynamic"
+
+    @testing.fixture
+    def user_address_fixture(self):
         users, Address, addresses, User = (
             self.tables.users,
             self.classes.Address,
@@ -38,40 +46,51 @@ class _DynamicFixture:
             self.classes.User,
         )
 
-        self.mapper_registry.map_imperatively(
-            User,
-            users,
-            properties={
-                "addresses": relationship(
-                    Address, lazy="dynamic", **addresses_args
-                )
-            },
-        )
-        self.mapper_registry.map_imperatively(Address, addresses)
-        return User, Address
+        def _user_address_fixture(addresses_args={}):
+            self.mapper_registry.map_imperatively(
+                User,
+                users,
+                properties={
+                    "addresses": relationship(
+                        Address, lazy=self.lazy, **addresses_args
+                    )
+                },
+            )
+            self.mapper_registry.map_imperatively(Address, addresses)
+            return User, Address
+
+        yield _user_address_fixture
+
+    @testing.fixture
+    def order_item_fixture(self):
+        def _order_item_fixture(items_args={}):
+            items, Order, orders, order_items, Item = (
+                self.tables.items,
+                self.classes.Order,
+                self.tables.orders,
+                self.tables.order_items,
+                self.classes.Item,
+            )
 
-    def _order_item_fixture(self, items_args={}):
-        items, Order, orders, order_items, Item = (
-            self.tables.items,
-            self.classes.Order,
-            self.tables.orders,
-            self.tables.order_items,
-            self.classes.Item,
-        )
+            self.mapper_registry.map_imperatively(
+                Order,
+                orders,
+                properties={
+                    "items": relationship(
+                        Item,
+                        secondary=order_items,
+                        lazy=self.lazy,
+                        **items_args,
+                    )
+                },
+            )
+            self.mapper_registry.map_imperatively(Item, items)
+            return Order, Item
 
-        self.mapper_registry.map_imperatively(
-            Order,
-            orders,
-            properties={
-                "items": relationship(
-                    Item, secondary=order_items, lazy="dynamic", **items_args
-                )
-            },
-        )
-        self.mapper_registry.map_imperatively(Item, items)
-        return Order, Item
+        yield _order_item_fixture
 
-    def _user_order_item_fixture(self):
+    @testing.fixture
+    def user_order_item_fixture(self):
         (
             users,
             Keyword,
@@ -96,43 +115,58 @@ class _DynamicFixture:
             self.tables.orders,
         )
 
-        self.mapper_registry.map_imperatively(
-            User,
-            users,
-            properties={
-                "orders": relationship(
-                    Order, order_by=orders.c.id, lazy="dynamic"
-                )
-            },
-        )
-        self.mapper_registry.map_imperatively(
-            Order,
-            orders,
-            properties={
-                "items": relationship(
-                    Item, secondary=order_items, order_by=items.c.id
-                ),
-            },
-        )
-        self.mapper_registry.map_imperatively(
-            Item,
-            items,
-            properties={
-                "keywords": relationship(
-                    Keyword, secondary=item_keywords
-                )  # m2m
-            },
+        def _user_order_item_fixture():
+            self.mapper_registry.map_imperatively(
+                User,
+                users,
+                properties={
+                    "orders": relationship(
+                        Order, order_by=orders.c.id, lazy=self.lazy
+                    )
+                },
+            )
+            self.mapper_registry.map_imperatively(
+                Order,
+                orders,
+                properties={
+                    "items": relationship(
+                        Item, secondary=order_items, order_by=items.c.id
+                    ),
+                },
+            )
+            self.mapper_registry.map_imperatively(
+                Item,
+                items,
+                properties={
+                    "keywords": relationship(
+                        Keyword, secondary=item_keywords
+                    )  # m2m
+                },
+            )
+            self.mapper_registry.map_imperatively(Keyword, keywords)
+
+            return User, Order, Item, Keyword
+
+        yield _user_order_item_fixture
+
+    def _expect_no_iteration(self):
+        return expect_raises_message(
+            exc.InvalidRequestError,
+            'Collection "User.addresses" does not support implicit '
+            "iteration",
         )
-        self.mapper_registry.map_imperatively(Keyword, keywords)
 
-        return User, Order, Item, Keyword
+
+class _WriteOnlyFixture(_DynamicFixture):
+    lazy = "write_only"
 
 
 class DynamicTest(_DynamicFixture, _fixtures.FixtureTest, AssertsCompiledSQL):
     __dialect__ = "default"
 
-    def test_basic(self):
-        User, Address = self._user_address_fixture()
+    def test_basic(self, user_address_fixture):
+        User, Address = user_address_fixture()
+
         sess = fixture_session()
         q = sess.query(User)
 
@@ -157,8 +191,9 @@ class DynamicTest(_DynamicFixture, _fixtures.FixtureTest, AssertsCompiledSQL):
             q.filter_by(id=7).all(),
         )
 
-    def test_slice_access(self):
-        User, Address = self._user_address_fixture()
+    def test_slice_access(self, user_address_fixture):
+        User, Address = user_address_fixture()
+
         sess = fixture_session()
         u1 = sess.get(User, 8)
 
@@ -167,8 +202,9 @@ class DynamicTest(_DynamicFixture, _fixtures.FixtureTest, AssertsCompiledSQL):
         eq_(u1.addresses[0], Address(id=2))
         eq_(u1.addresses[0:2], [Address(id=2), Address(id=3)])
 
-    def test_negative_slice_access_raises(self):
-        User, Address = self._user_address_fixture()
+    def test_negative_slice_access_raises(self, user_address_fixture):
+        User, Address = user_address_fixture()
+
         sess = fixture_session(future=True)
         u1 = sess.get(User, 8)
 
@@ -196,11 +232,12 @@ class DynamicTest(_DynamicFixture, _fixtures.FixtureTest, AssertsCompiledSQL):
         ):
             u1.addresses[:-2]
 
-    def test_statement(self):
+    def test_statement(self, user_address_fixture):
         """test that the .statement accessor returns the actual statement that
         would render, without any _clones called."""
 
-        User, Address = self._user_address_fixture()
+        User, Address = user_address_fixture()
+
         sess = fixture_session()
         q = sess.query(User)
 
@@ -213,12 +250,12 @@ class DynamicTest(_DynamicFixture, _fixtures.FixtureTest, AssertsCompiledSQL):
             use_default_dialect=True,
         )
 
-    def test_query_class_custom_method(self):
+    def test_query_class_custom_method(self, user_address_fixture):
         class MyClass(Query):
             def my_filter(self, arg):
                 return self.filter(Address.email_address == arg)
 
-        User, Address = self._user_address_fixture(
+        User, Address = user_address_fixture(
             addresses_args=dict(query_class=MyClass)
         )
 
@@ -238,10 +275,11 @@ class DynamicTest(_DynamicFixture, _fixtures.FixtureTest, AssertsCompiledSQL):
             use_default_dialect=True,
         )
 
-    def test_detached_raise(self):
+    def test_detached_raise(self, user_address_fixture):
         """so filtering on a detached dynamic list raises an error..."""
 
-        User, Address = self._user_address_fixture()
+        User, Address = user_address_fixture()
+
         sess = fixture_session()
         u = sess.get(User, 8)
         sess.expunge(u)
@@ -251,14 +289,15 @@ class DynamicTest(_DynamicFixture, _fixtures.FixtureTest, AssertsCompiledSQL):
             email_address="e",
         )
 
-    def test_detached_all_empty_list(self):
+    def test_detached_all_empty_list(self, user_address_fixture):
         """test #6426 - but you can call .all() on it and you get an empty
         list.   This is legacy stuff, as this should be raising
         DetachedInstanceError.
 
         """
 
-        User, Address = self._user_address_fixture()
+        User, Address = user_address_fixture()
+
         sess = fixture_session()
         u = sess.get(User, 8)
         sess.expunge(u)
@@ -273,17 +312,16 @@ class DynamicTest(_DynamicFixture, _fixtures.FixtureTest, AssertsCompiledSQL):
         ):
             eq_(list(u.addresses), [])
 
-    def test_transient_all_empty_list(self):
-        User, Address = self._user_address_fixture()
+    def test_transient_all_empty_list(self, user_address_fixture):
+        User, Address = user_address_fixture()
+
         u1 = User()
         eq_(u1.addresses.all(), [])
 
         eq_(list(u1.addresses), [])
 
-    def test_no_uselist_false(self):
-        User, Address = self._user_address_fixture(
-            addresses_args={"uselist": False}
-        )
+    def test_no_uselist_false(self, user_address_fixture):
+        User, Address = user_address_fixture(addresses_args={"uselist": False})
         assert_raises_message(
             exc.InvalidRequestError,
             "On relationship User.addresses, 'dynamic' loaders cannot be "
@@ -292,52 +330,38 @@ class DynamicTest(_DynamicFixture, _fixtures.FixtureTest, AssertsCompiledSQL):
             configure_mappers,
         )
 
-    def test_no_m2o(self):
+    @testing.combinations(False, True, None, argnames="uselist")
+    def test_no_m2o(self, uselist):
         users, Address, addresses, User = (
             self.tables.users,
             self.classes.Address,
             self.tables.addresses,
             self.classes.User,
         )
+
+        if uselist in (True, False):
+            kw = {"uselist": uselist}
+        else:
+            kw = {}
+
         self.mapper_registry.map_imperatively(
             Address,
             addresses,
-            properties={"user": relationship(User, lazy="dynamic")},
+            properties={"user": relationship(User, lazy="dynamic", **kw)},
         )
         self.mapper_registry.map_imperatively(User, users)
-        assert_raises_message(
+
+        with expect_raises_message(
             exc.InvalidRequestError,
             "On relationship Address.user, 'dynamic' loaders cannot be "
             "used with many-to-one/one-to-one relationships and/or "
             "uselist=False.",
-            configure_mappers,
-        )
+        ):
+            configure_mappers()
 
-    def test_no_m2o_w_uselist(self):
-        users, Address, addresses, User = (
-            self.tables.users,
-            self.classes.Address,
-            self.tables.addresses,
-            self.classes.User,
-        )
-        self.mapper_registry.map_imperatively(
-            Address,
-            addresses,
-            properties={
-                "user": relationship(User, uselist=True, lazy="dynamic")
-            },
-        )
-        self.mapper_registry.map_imperatively(User, users)
-        assert_warns_message(
-            exc.SAWarning,
-            "On relationship Address.user, 'dynamic' loaders cannot be "
-            "used with many-to-one/one-to-one relationships and/or "
-            "uselist=False.",
-            configure_mappers,
-        )
+    def test_order_by(self, user_address_fixture):
+        User, Address = user_address_fixture()
 
-    def test_order_by(self):
-        User, Address = self._user_address_fixture()
         sess = fixture_session()
         u = sess.get(User, 8)
         eq_(
@@ -350,9 +374,11 @@ class DynamicTest(_DynamicFixture, _fixtures.FixtureTest, AssertsCompiledSQL):
         )
 
     @testing.requires.dupe_order_by_ok
-    def test_order_by_composition_uses_immutable_tuple(self):
+    def test_order_by_composition_uses_immutable_tuple(
+        self, user_address_fixture
+    ):
         addresses = self.tables.addresses
-        User, Address = self._user_address_fixture(
+        User, Address = user_address_fixture(
             addresses_args={"order_by": addresses.c.email_address.desc()}
         )
 
@@ -384,9 +410,9 @@ class DynamicTest(_DynamicFixture, _fixtures.FixtureTest, AssertsCompiledSQL):
             ]
         )
 
-    def test_configured_order_by(self):
+    def test_configured_order_by(self, user_address_fixture):
         addresses = self.tables.addresses
-        User, Address = self._user_address_fixture(
+        User, Address = user_address_fixture(
             addresses_args={"order_by": addresses.c.email_address.desc()}
         )
 
@@ -423,8 +449,9 @@ class DynamicTest(_DynamicFixture, _fixtures.FixtureTest, AssertsCompiledSQL):
             ),
         )
 
-    def test_count(self):
-        User, Address = self._user_address_fixture()
+    def test_count(self, user_address_fixture):
+        User, Address = user_address_fixture()
+
         sess = fixture_session()
         u = sess.query(User).first()
         eq_(u.addresses.count(), 1)
@@ -459,8 +486,9 @@ class DynamicTest(_DynamicFixture, _fixtures.FixtureTest, AssertsCompiledSQL):
         u = sess.get(User, 7)
         assert ad not in u.addresses
 
-    def test_no_count(self):
-        User, Address = self._user_address_fixture()
+    def test_no_count(self, user_address_fixture):
+        User, Address = user_address_fixture()
+
         sess = fixture_session()
         q = sess.query(User)
 
@@ -482,8 +510,9 @@ class DynamicTest(_DynamicFixture, _fixtures.FixtureTest, AssertsCompiledSQL):
 
         self.assert_sql_count(testing.db, go, 2)
 
-    def test_no_populate(self):
-        User, Address = self._user_address_fixture()
+    def test_no_populate(self, user_address_fixture):
+        User, Address = user_address_fixture()
+
         u1 = User()
         assert_raises_message(
             NotImplementedError,
@@ -494,17 +523,14 @@ class DynamicTest(_DynamicFixture, _fixtures.FixtureTest, AssertsCompiledSQL):
             [],
         )
 
-    @testing.combinations(
-        ("star",),
-        ("attronly",),
-    )
-    def test_noload_issue(self, type_):
+    @testing.combinations(("star",), ("attronly",), argnames="type_")
+    def test_noload_issue(self, type_, user_address_fixture):
         """test #6420.   a noload that hits the dynamic loader
         should have no effect.
 
         """
 
-        User, Address = self._user_address_fixture()
+        User, Address = user_address_fixture()
 
         s = fixture_session()
 
@@ -524,27 +550,21 @@ class DynamicTest(_DynamicFixture, _fixtures.FixtureTest, AssertsCompiledSQL):
         # noload doesn't affect a dynamic loader, because it has no state
         eq_(list(u1.addresses), [Address(id=1)])
 
-    def test_m2m(self):
-        Order, Item = self._order_item_fixture(
+    def test_m2m(self, order_item_fixture):
+        Order, Item = order_item_fixture(
             items_args={"backref": backref("orders", lazy="dynamic")}
         )
 
         sess = fixture_session()
         o1 = Order(id=15, description="order 10")
         i1 = Item(id=10, description="item 8")
-        o1.items.append(i1)
+        o1.items.add(i1)
         sess.add(o1)
         sess.flush()
 
         assert o1 in i1.orders.all()
         assert i1 in o1.items.all()
 
-    @testing.exclude(
-        "mysql",
-        "between",
-        ((5, 1, 49), (5, 1, 52)),
-        "https://bugs.launchpad.net/ubuntu/+source/mysql-5.1/+bug/706988",
-    )
     def test_association_nonaliased(self):
         items, Order, orders, order_items, Item = (
             self.tables.items,
@@ -561,8 +581,8 @@ class DynamicTest(_DynamicFixture, _fixtures.FixtureTest, AssertsCompiledSQL):
                 "items": relationship(
                     Item,
                     secondary=order_items,
-                    lazy="dynamic",
                     order_by=order_items.c.item_id,
+                    lazy="dynamic",
                 )
             },
         )
@@ -626,11 +646,10 @@ class DynamicTest(_DynamicFixture, _fixtures.FixtureTest, AssertsCompiledSQL):
             use_default_dialect=True,
         )
 
-    def test_secondary_as_join_complex_entity(self, registry):
+    def test_secondary_as_join_complex_entity(self, decl_base):
         """integration test for #7868"""
-        Base = registry.generate_base()
 
-        class GrandParent(Base):
+        class GrandParent(decl_base):
             __tablename__ = "grandparent"
             id = Column(Integer, primary_key=True)
 
@@ -638,14 +657,14 @@ class DynamicTest(_DynamicFixture, _fixtures.FixtureTest, AssertsCompiledSQL):
                 "Child", secondary="parent", lazy="dynamic", viewonly=True
             )
 
-        class Parent(Base):
+        class Parent(decl_base):
             __tablename__ = "parent"
             id = Column(Integer, primary_key=True)
             grand_parent_id = Column(
                 Integer, ForeignKey("grandparent.id"), nullable=False
             )
 
-        class Child(Base):
+        class Child(decl_base):
             __tablename__ = "child"
             id = Column(Integer, primary_key=True)
             type = Column(String)
@@ -735,138 +754,364 @@ class DynamicTest(_DynamicFixture, _fixtures.FixtureTest, AssertsCompiledSQL):
             use_default_dialect=True,
         )
 
-    @testing.combinations(
-        # lambda
-    )
-    def test_join_syntaxes(self, expr):
-        User, Order, Item, Keyword = self._user_order_item_fixture()
+    def test_transient_count(self, user_address_fixture):
+        User, Address = user_address_fixture()
 
-    def test_transient_count(self):
-        User, Address = self._user_address_fixture()
         u1 = User()
-        u1.addresses.append(Address())
+        u1.addresses.add(Address())
         eq_(u1.addresses.count(), 1)
 
-    def test_transient_access(self):
-        User, Address = self._user_address_fixture()
+    def test_transient_access(self, user_address_fixture):
+        User, Address = user_address_fixture()
+
         u1 = User()
-        u1.addresses.append(Address())
+        u1.addresses.add(Address())
         eq_(u1.addresses[0], Address())
 
 
-class UOWTest(
-    _DynamicFixture, _fixtures.FixtureTest, testing.AssertsExecutionResults
+class WriteOnlyTest(
+    _WriteOnlyFixture, _fixtures.FixtureTest, AssertsCompiledSQL
 ):
+    __dialect__ = "default"
 
-    run_inserts = None
+    @testing.combinations(("star",), ("attronly",), argnames="type_")
+    def test_noload_issue(self, type_, user_address_fixture):
+        """test #6420.   a noload that hits the dynamic loader
+        should have no effect.
 
-    def test_persistence(self):
-        addresses = self.tables.addresses
-        User, Address = self._user_address_fixture()
+        """
+
+        User, Address = user_address_fixture()
+
+        s = fixture_session()
+
+        if type_ == "star":
+            u1 = s.query(User).filter_by(id=7).options(noload("*")).first()
+            assert "name" not in u1.__dict__["name"]
+        elif type_ == "attronly":
+            u1 = (
+                s.query(User)
+                .filter_by(id=7)
+                .options(noload(User.addresses))
+                .first()
+            )
+
+            eq_(u1.__dict__["name"], "jack")
+
+    def test_iteration_error(self, user_address_fixture):
+        User, Address = user_address_fixture()
 
         sess = fixture_session()
-        u1 = User(name="jack")
-        a1 = Address(email_address="foo")
-        sess.add_all([u1, a1])
-        sess.flush()
+        u = sess.get(User, 8)
+
+        with expect_raises_message(
+            TypeError,
+            "WriteOnly collections don't support iteration in-place; to "
+            "query for collection items",
+        ):
+            list(u.addresses)
+
+    def test_order_by(self, user_address_fixture):
+        User, Address = user_address_fixture()
 
+        sess = fixture_session()
+        u = sess.get(User, 8)
         eq_(
-            sess.connection().scalar(
-                select(func.count(cast(1, Integer))).where(
-                    addresses.c.user_id != None
+            list(
+                sess.scalars(
+                    u.addresses.select().order_by(desc(Address.email_address))
                 )
-            ),  # noqa
-            0,
+            ),
+            [
+                Address(email_address="ed@wood.com"),
+                Address(email_address="ed@lala.com"),
+                Address(email_address="ed@bettyboop.com"),
+            ],
+        )
+
+    def test_configured_order_by(self, user_address_fixture):
+        addresses = self.tables.addresses
+        User, Address = user_address_fixture(
+            addresses_args={"order_by": addresses.c.email_address.desc()}
         )
-        u1 = sess.get(User, u1.id)
-        u1.addresses.append(a1)
-        sess.flush()
 
+        sess = fixture_session()
+        u = sess.get(User, 8)
         eq_(
-            sess.connection()
-            .execute(
-                select(addresses).where(addresses.c.user_id != None)  # noqa
-            )
-            .fetchall(),
-            [(a1.id, u1.id, "foo")],
+            list(sess.scalars(u.addresses.select())),
+            [
+                Address(email_address="ed@wood.com"),
+                Address(email_address="ed@lala.com"),
+                Address(email_address="ed@bettyboop.com"),
+            ],
         )
 
-        u1.addresses.remove(a1)
-        sess.flush()
+        # test cancellation of None, replacement with something else
         eq_(
-            sess.connection().scalar(
-                select(func.count(cast(1, Integer))).where(
-                    addresses.c.user_id != None
+            list(
+                sess.scalars(
+                    u.addresses.select()
+                    .order_by(None)
+                    .order_by(Address.email_address)
                 )
-            ),  # noqa
-            0,
+            ),
+            [
+                Address(email_address="ed@bettyboop.com"),
+                Address(email_address="ed@lala.com"),
+                Address(email_address="ed@wood.com"),
+            ],
         )
 
-        u1.addresses.append(a1)
-        sess.flush()
+        # test cancellation of None, replacement with nothing
         eq_(
-            sess.connection()
-            .execute(
-                select(addresses).where(addresses.c.user_id != None)  # noqa
-            )
-            .fetchall(),
-            [(a1.id, u1.id, "foo")],
+            set(sess.scalars(u.addresses.select().order_by(None))),
+            set(
+                [
+                    Address(email_address="ed@bettyboop.com"),
+                    Address(email_address="ed@lala.com"),
+                    Address(email_address="ed@wood.com"),
+                ]
+            ),
         )
 
-        a2 = Address(email_address="bar")
-        u1.addresses.remove(a1)
-        u1.addresses.append(a2)
-        sess.flush()
-        eq_(
-            sess.connection()
-            .execute(
-                select(addresses).where(addresses.c.user_id != None)  # noqa
-            )
-            .fetchall(),
-            [(a2.id, u1.id, "bar")],
+    def test_secondary_as_join(self):
+        # test [ticket:4349]
+        User, users = self.classes.User, self.tables.users
+        items, orders, order_items, Item = (
+            self.tables.items,
+            self.tables.orders,
+            self.tables.order_items,
+            self.classes.Item,
         )
 
-    def test_merge(self):
-        addresses = self.tables.addresses
-        User, Address = self._user_address_fixture(
-            addresses_args={"order_by": addresses.c.email_address}
+        self.mapper_registry.map_imperatively(
+            User,
+            users,
+            properties={
+                "items": relationship(
+                    Item, secondary=order_items.join(orders), lazy="write_only"
+                )
+            },
         )
-        sess = fixture_session(autoflush=False)
-        u1 = User(name="jack")
-        a1 = Address(email_address="a1")
-        a2 = Address(email_address="a2")
-        a3 = Address(email_address="a3")
+        item_mapper = self.mapper_registry.map_imperatively(Item, items)
 
-        u1.addresses.append(a2)
-        u1.addresses.append(a3)
+        sess = fixture_session()
 
-        sess.add_all([u1, a1])
-        sess.flush()
+        u1 = sess.query(User).first()
 
-        u1 = User(id=u1.id, name="jack")
-        u1.addresses.append(a1)
-        u1.addresses.append(a3)
-        u1 = sess.merge(u1)
-        eq_(attributes.get_history(u1, "addresses"), ([a1], [a3], [a2]))
+        dyn = u1.items.select()
 
-        sess.flush()
+        # test for #7868
+        eq_(dyn._from_obj[0]._annotations["parententity"], item_mapper)
 
-        eq_(list(u1.addresses), [a1, a3])
+        self.assert_compile(
+            u1.items.select(),
+            "SELECT items.id, "
+            "items.description "
+            "FROM items, order_items JOIN orders "
+            "ON orders.id = order_items.order_id "
+            "WHERE :param_1 = orders.user_id "
+            "AND items.id = order_items.item_id",
+            use_default_dialect=True,
+        )
 
-    def test_hasattr(self):
-        User, Address = self._user_address_fixture()
+    def test_secondary_as_join_complex_entity(self, decl_base):
+        """integration test for #7868"""
 
-        u1 = User(name="jack")
+        class GrandParent(decl_base):
+            __tablename__ = "grandparent"
+            id = Column(Integer, primary_key=True)
 
-        assert "addresses" not in u1.__dict__
-        u1.addresses = [Address(email_address="test")]
-        assert "addresses" in u1.__dict__
+            grand_children = relationship(
+                "Child", secondary="parent", viewonly=True, lazy="write_only"
+            )
 
-    def test_collection_set(self):
-        addresses = self.tables.addresses
-        User, Address = self._user_address_fixture(
-            addresses_args={"order_by": addresses.c.email_address}
-        )
+        class Parent(decl_base):
+            __tablename__ = "parent"
+            id = Column(Integer, primary_key=True)
+            grand_parent_id = Column(
+                Integer, ForeignKey("grandparent.id"), nullable=False
+            )
+
+        class Child(decl_base):
+            __tablename__ = "child"
+            id = Column(Integer, primary_key=True)
+            type = Column(String)
+            parent_id = Column(
+                Integer, ForeignKey("parent.id"), nullable=False
+            )
+
+            __mapper_args__ = {
+                "polymorphic_on": type,
+                "polymorphic_identity": "unknown",
+                "with_polymorphic": "*",
+            }
+
+        class SubChild(Child):
+            __tablename__ = "subchild"
+            id = Column(Integer, ForeignKey("child.id"), primary_key=True)
+
+            __mapper_args__ = {
+                "polymorphic_identity": "sub",
+            }
+
+        gp = GrandParent(id=1)
+        make_transient_to_detached(gp)
+        self.assert_compile(
+            gp.grand_children.select().filter_by(id=1),
+            "SELECT child.id, child.type, "
+            "child.parent_id, subchild.id AS id_1 "
+            "FROM child LEFT OUTER JOIN subchild "
+            "ON child.id = subchild.id, parent "
+            "WHERE :param_1 = parent.grand_parent_id "
+            "AND parent.id = child.parent_id AND child.id = :id_2",
+            {"id_2": 1},
+        )
+
+    def test_secondary_doesnt_interfere_w_join_to_fromlist(self):
+        # tests that the "secondary" being added to the FROM
+        # as part of [ticket:4349] does not prevent a subsequent join to
+        # an entity that does not provide any "left side".  Query right now
+        # does not know how to join() like this unambiguously if _from_obj is
+        # more than one element long.
+        Order, orders = self.classes.Order, self.tables.orders
+
+        items, order_items, Item = (
+            self.tables.items,
+            self.tables.order_items,
+            self.classes.Item,
+        )
+        item_keywords = self.tables.item_keywords
+
+        class ItemKeyword:
+            pass
+
+        self.mapper_registry.map_imperatively(
+            Order,
+            orders,
+            properties={
+                "items": relationship(
+                    Item, secondary=order_items, lazy="write_only"
+                )
+            },
+        )
+        self.mapper_registry.map_imperatively(
+            ItemKeyword,
+            item_keywords,
+            primary_key=[item_keywords.c.item_id, item_keywords.c.keyword_id],
+        )
+        self.mapper_registry.map_imperatively(
+            Item,
+            items,
+            properties={"item_keywords": relationship(ItemKeyword)},
+        )
+
+        sess = fixture_session()
+        order = sess.query(Order).first()
+
+        self.assert_compile(
+            order.items.select().join(ItemKeyword),
+            "SELECT items.id, "
+            "items.description "
+            "FROM items "
+            "JOIN item_keywords ON items.id = item_keywords.item_id, "
+            "order_items "
+            "WHERE :param_1 = order_items.order_id "
+            "AND items.id = order_items.item_id",
+            use_default_dialect=True,
+        )
+
+
+class _UOWTests:
+    run_inserts = None
+
+    def _list_collection(self, collection):
+        if self.lazy == "dynamic":
+            return list(collection)
+
+        sess = inspect(collection.instance).session
+        return sess.scalars(collection.select()).all()
+
+    def test_persistence(self, user_address_fixture):
+        addresses = self.tables.addresses
+        User, Address = user_address_fixture()
+
+        sess = fixture_session()
+        u1 = User(name="jack")
+        a1 = Address(email_address="foo")
+        sess.add_all([u1, a1])
+        sess.flush()
+
+        eq_(
+            sess.connection().scalar(
+                select(func.count(cast(1, Integer))).where(
+                    addresses.c.user_id != None
+                )
+            ),  # noqa
+            0,
+        )
+        u1 = sess.get(User, u1.id)
+        u1.addresses.add(a1)
+        sess.flush()
+
+        eq_(
+            sess.connection()
+            .execute(
+                select(addresses).where(addresses.c.user_id != None)  # noqa
+            )
+            .fetchall(),
+            [(a1.id, u1.id, "foo")],
+        )
+
+        u1.addresses.remove(a1)
+        sess.flush()
+        eq_(
+            sess.connection().scalar(
+                select(func.count(cast(1, Integer))).where(
+                    addresses.c.user_id != None
+                )
+            ),  # noqa
+            0,
+        )
+
+        u1.addresses.add(a1)
+        sess.flush()
+        eq_(
+            sess.connection()
+            .execute(
+                select(addresses).where(addresses.c.user_id != None)  # noqa
+            )
+            .fetchall(),
+            [(a1.id, u1.id, "foo")],
+        )
+
+        a2 = Address(email_address="bar")
+        u1.addresses.remove(a1)
+        u1.addresses.add(a2)
+        sess.flush()
+        eq_(
+            sess.connection()
+            .execute(
+                select(addresses).where(addresses.c.user_id != None)  # noqa
+            )
+            .fetchall(),
+            [(a2.id, u1.id, "bar")],
+        )
+
+    def test_hasattr(self, user_address_fixture):
+        User, Address = user_address_fixture()
+
+        u1 = User(name="jack")
+
+        assert "addresses" not in u1.__dict__
+        u1.addresses = [Address(email_address="test")]
+        assert "addresses" in u1.__dict__
+
+    def test_collection_set(self, user_address_fixture):
+        addresses = self.tables.addresses
+        User, Address = user_address_fixture(
+            addresses_args={"order_by": addresses.c.email_address}
+        )
         sess = fixture_session(
             autoflush=True,
         )
@@ -878,7 +1123,13 @@ class UOWTest(
 
         sess.add(u1)
         u1.addresses = [a1, a3]
-        eq_(list(u1.addresses), [a1, a3])
+        eq_(self._list_collection(u1.addresses), [a1, a3])
+
+        if User.addresses.property.lazy == "write_only":
+            with self._expect_no_iteration():
+                u1.addresses = [a1, a2, a4]
+            return
+
         u1.addresses = [a1, a2, a4]
         eq_(list(u1.addresses), [a1, a2, a4])
         u1.addresses = [a2, a3]
@@ -886,10 +1137,10 @@ class UOWTest(
         u1.addresses = []
         eq_(list(u1.addresses), [])
 
-    def test_noload_append(self):
+    def test_noload_add(self, user_address_fixture):
         # test that a load of User.addresses is not emitted
-        # when flushing an append
-        User, Address = self._user_address_fixture()
+        # when flushing an add
+        User, Address = user_address_fixture()
 
         sess = fixture_session()
         u1 = User(name="jack", addresses=[Address(email_address="a1")])
@@ -899,7 +1150,7 @@ class UOWTest(
         u1_id = u1.id
         sess.expire_all()
 
-        u1.addresses.append(Address(email_address="a2"))
+        u1.addresses.add(Address(email_address="a2"))
 
         self.assert_sql_execution(
             testing.db,
@@ -916,15 +1167,15 @@ class UOWTest(
             ),
         )
 
-    def test_noload_remove(self):
+    def test_noload_remove(self, user_address_fixture):
         # test that a load of User.addresses is not emitted
         # when flushing a remove
-        User, Address = self._user_address_fixture()
+        User, Address = user_address_fixture()
 
         sess = fixture_session()
         u1 = User(name="jack", addresses=[Address(email_address="a1")])
         a2 = Address(email_address="a2")
-        u1.addresses.append(a2)
+        u1.addresses.add(a2)
         sess.add(u1)
         sess.commit()
 
@@ -955,100 +1206,35 @@ class UOWTest(
             ),
         )
 
-    def test_rollback(self):
-        User, Address = self._user_address_fixture()
+    def test_rollback(self, user_address_fixture):
+        User, Address = user_address_fixture()
+
         sess = fixture_session(expire_on_commit=False, autoflush=True)
         u1 = User(name="jack")
-        u1.addresses.append(Address(email_address="lala@hoho.com"))
+        u1.addresses.add(Address(email_address="lala@hoho.com"))
         sess.add(u1)
         sess.flush()
         sess.commit()
-        u1.addresses.append(Address(email_address="foo@bar.com"))
+        u1.addresses.add(Address(email_address="foo@bar.com"))
+
+        if self.lazy == "dynamic":
+            stmt = u1.addresses.statement
+        else:
+            stmt = u1.addresses.select()
+
         eq_(
-            u1.addresses.order_by(Address.id).all(),
+            sess.scalars(stmt.order_by(Address.id)).all(),
             [
                 Address(email_address="lala@hoho.com"),
                 Address(email_address="foo@bar.com"),
             ],
         )
         sess.rollback()
-        eq_(u1.addresses.all(), [Address(email_address="lala@hoho.com")])
-
-    def _test_delete_cascade(self, expected):
-        addresses = self.tables.addresses
-        User, Address = self._user_address_fixture(
-            addresses_args={
-                "order_by": addresses.c.id,
-                "backref": "user",
-                "cascade": "save-update" if expected else "all, delete",
-            }
-        )
-
-        sess = fixture_session(
-            autoflush=True,
-        )
-        u = User(name="ed")
-        u.addresses.extend(
-            [Address(email_address=letter) for letter in "abcdef"]
-        )
-        sess.add(u)
-        sess.commit()
-        eq_(
-            sess.connection()
-            .execute(
-                select(func.count("*")).where(addresses.c.user_id == None)
-            )
-            .scalar(),  # noqa
-            0,
-        )
         eq_(
-            sess.connection()
-            .execute(
-                select(func.count("*")).where(addresses.c.user_id != None)
-            )
-            .scalar(),  # noqa
-            6,
+            sess.scalars(stmt).all(),
+            [Address(email_address="lala@hoho.com")],
         )
 
-        sess.delete(u)
-
-        sess.commit()
-
-        if expected:
-            eq_(
-                sess.connection()
-                .execute(
-                    select(func.count("*")).where(
-                        addresses.c.user_id == None
-                    )  # noqa
-                )
-                .scalar(),
-                6,
-            )
-            eq_(
-                sess.connection()
-                .execute(
-                    select(func.count("*")).where(
-                        addresses.c.user_id != None
-                    )  # noqa
-                )
-                .scalar(),
-                0,
-            )
-        else:
-            eq_(
-                sess.connection()
-                .execute(select(func.count("*")).select_from(addresses))
-                .scalar(),
-                0,
-            )
-
-    def test_delete_nocascade(self):
-        self._test_delete_cascade(True)
-
-    def test_delete_cascade(self):
-        self._test_delete_cascade(False)
-
     def test_self_referential(self):
         Node, nodes = self.classes.Node, self.tables.nodes
 
@@ -1070,9 +1256,9 @@ class UOWTest(
 
         eq_(n1.children.all(), [n2, n3])
 
-    def test_remove_orphans(self):
+    def test_remove_orphans(self, user_address_fixture):
         addresses = self.tables.addresses
-        User, Address = self._user_address_fixture(
+        User, Address = user_address_fixture(
             addresses_args={
                 "order_by": addresses.c.id,
                 "backref": "user",
@@ -1084,13 +1270,18 @@ class UOWTest(
             autoflush=True,
         )
         u = User(name="ed")
-        u.addresses.extend(
+        u.addresses.add_all(
             [Address(email_address=letter) for letter in "abcdef"]
         )
         sess.add(u)
 
-        for a in u.addresses.filter(
-            Address.email_address.in_(["c", "e", "f"])
+        if self.lazy == "dynamic":
+            stmt = u.addresses.statement
+        else:
+            stmt = u.addresses.select()
+
+        for a in sess.scalars(
+            stmt.filter(Address.email_address.in_(["c", "e", "f"]))
         ):
             u.addresses.remove(a)
 
@@ -1099,8 +1290,10 @@ class UOWTest(
             set(["a", "b", "d"]),
         )
 
-    def _backref_test(self, autoflush, saveuser):
-        User, Address = self._user_address_fixture(
+    @testing.combinations(True, False, argnames="autoflush")
+    @testing.combinations(True, False, argnames="saveuser")
+    def test_backref(self, autoflush, saveuser, user_address_fixture):
+        User, Address = user_address_fixture(
             addresses_args={"backref": "user"}
         )
         sess = fixture_session(
@@ -1123,40 +1316,28 @@ class UOWTest(
         assert u in sess
         assert a in sess
 
-        eq_(list(u.addresses), [a])
+        eq_(self._list_collection(u.addresses), [a])
 
         a.user = None
         if not autoflush:
-            eq_(list(u.addresses), [a])
+            eq_(self._list_collection(u.addresses), [a])
 
         if not autoflush:
             sess.flush()
-        eq_(list(u.addresses), [])
+        eq_(self._list_collection(u.addresses), [])
 
-    def test_backref_autoflush_saveuser(self):
-        self._backref_test(True, True)
-
-    def test_backref_autoflush_savead(self):
-        self._backref_test(True, False)
-
-    def test_backref_saveuser(self):
-        self._backref_test(False, True)
-
-    def test_backref_savead(self):
-        self._backref_test(False, False)
-
-    def test_backref_events(self):
-        User, Address = self._user_address_fixture(
+    def test_backref_events(self, user_address_fixture):
+        User, Address = user_address_fixture(
             addresses_args={"backref": "user"}
         )
 
         u1 = User()
         a1 = Address()
-        u1.addresses.append(a1)
+        u1.addresses.add(a1)
         is_(a1.user, u1)
 
-    def test_no_deref(self):
-        User, Address = self._user_address_fixture(
+    def test_no_deref(self, user_address_fixture):
+        User, Address = user_address_fixture(
             addresses_args={"backref": "user"}
         )
 
@@ -1174,54 +1355,573 @@ class UOWTest(
         def query1():
             session = fixture_session()
             user = session.query(User).first()
-            return user.addresses.all()
+
+            return self._list_collection(user.addresses)
 
         def query2():
             session = fixture_session()
-            return session.query(User).first().addresses.all()
+
+            return self._list_collection(session.query(User).first().addresses)
 
         def query3():
             session = fixture_session()
-            return session.query(User).first().addresses.all()
+
+            return self._list_collection(session.query(User).first().addresses)
 
         eq_(query1(), [Address(email_address="joe@joesdomain.example")])
         eq_(query2(), [Address(email_address="joe@joesdomain.example")])
         eq_(query3(), [Address(email_address="joe@joesdomain.example")])
 
 
-class HistoryTest(_DynamicFixture, _fixtures.FixtureTest):
+class DynamicUOWTest(
+    _DynamicFixture,
+    _UOWTests,
+    _fixtures.FixtureTest,
+    testing.AssertsExecutionResults,
+):
+
     run_inserts = None
 
-    def _transient_fixture(self, addresses_args={}):
-        User, Address = self._user_address_fixture(
-            addresses_args=addresses_args
+    @testing.combinations(
+        "empty", "persistent", "transient", argnames="merge_type"
+    )
+    def test_merge_persistent(self, merge_type, user_address_fixture):
+        addresses = self.tables.addresses
+        User, Address = user_address_fixture(
+            addresses_args={"order_by": addresses.c.email_address}
         )
+        sess = fixture_session(autoflush=False)
 
-        u1 = User()
-        a1 = Address()
-        return u1, a1
+        a1 = Address(email_address="a1")
+        a2 = Address(email_address="a2")
+        a3 = Address(email_address="a3")
+        u1 = User(name="jack", addresses=[a2, a3])
+
+        if merge_type == "transient":
+            # merge transient.  no collection iteration is implied by this.
+            u1 = sess.merge(u1)
+            sess.add(a1)
+        else:
+            sess.add_all([u1, a1])
+        sess.flush()
+
+        if merge_type == "persistent":
+            u1 = User(id=u1.id, name="jane", addresses=[a1, a3])
+
+            # for "dynamic", merge iterates the full list.  it has always
+            # worked this way, which is clearly not very useful for a
+            # "collection that's too large to load"; however we maintain
+            # the legacy behavior here
+            u1 = sess.merge(u1)
+            eq_(attributes.get_history(u1, "addresses"), ([a1], [a3], [a2]))
+
+            sess.flush()
+
+            if self.lazy == "dynamic":
+                stmt = u1.addresses.statement
+            else:
+                stmt = u1.addresses.select()
+            eq_(sess.scalars(stmt).all(), [a1, a3])
+
+        elif merge_type == "empty":
+            # merge while omitting the "too large to load" collection
+            # works fine.
+            u1 = User(id=u1.id, name="jane")
+            u1 = sess.merge(u1)
+
+            eq_(attributes.get_history(u1, "addresses"), ([], [a2, a3], []))
+
+            sess.flush()
 
-    def _persistent_fixture(self, autoflush=True, addresses_args={}):
-        User, Address = self._user_address_fixture(
-            addresses_args=addresses_args
+            if self.lazy == "dynamic":
+                stmt = u1.addresses.statement
+            else:
+                stmt = u1.addresses.select()
+
+            eq_(sess.scalars(stmt).all(), [a2, a3])
+
+    @testing.combinations(True, False, argnames="delete_cascade_configured")
+    def test_delete_cascade(
+        self, delete_cascade_configured, user_address_fixture
+    ):
+        addresses = self.tables.addresses
+        User, Address = user_address_fixture(
+            addresses_args={
+                "order_by": addresses.c.id,
+                "backref": "user",
+                "cascade": "save-update"
+                if not delete_cascade_configured
+                else "all, delete",
+            }
+        )
+
+        sess = fixture_session(
+            autoflush=True,
+        )
+        u = User(name="ed")
+        u.addresses.add_all(
+            [Address(email_address=letter) for letter in "abcdef"]
+        )
+        sess.add(u)
+        sess.commit()
+
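+        # count rows grouped by whether user_id IS NULL; one statement
+        # replaces the two separate COUNT queries used previously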
+        isnull_stmt = select(
+            addresses.c.user_id == None, func.count("*")
+        ).group_by(addresses.c.user_id == None)
+
+        eq_(
+            {isnull: count for isnull, count in sess.execute(isnull_stmt)},
+            {False: 6},
+        )
+
+        sess.delete(u)
+
+        sess.commit()
+
+        if not delete_cascade_configured:
+            eq_(
+                {isnull: count for isnull, count in sess.execute(isnull_stmt)},
+                {True: 6},
+            )
+        else:
+            eq_(
+                sess.connection()
+                .execute(select(func.count("*")).select_from(addresses))
+                .scalar(),
+                0,
+            )
+
+
+class WriteOnlyUOWTest(
+    _WriteOnlyFixture,
+    _UOWTests,
+    _fixtures.FixtureTest,
+    testing.AssertsExecutionResults,
+):
+    __backend__ = True
+
+    @testing.fixture
+    def passive_deletes_fixture(self, decl_base, connection):
+        """passive deletes fixture
+
+        this fixture is separate from the FixtureTest setup because we need
+        to produce the related Table with an ON DELETE rule (CASCADE or
+        SET NULL) on the foreign key.
+
+        """
+
+        def go(passive_deletes, cascade_deletes):
+            class A(decl_base):
+                __tablename__ = "a"
+                id: Mapped[int] = mapped_column(Identity(), primary_key=True)
+                data: Mapped[str]
+                bs: WriteOnlyMapped["B"] = relationship(  # noqa: F821
+                    passive_deletes=passive_deletes,
+                    cascade="all, delete-orphan"
+                    if cascade_deletes
+                    else "save-update, merge",
+                    order_by="B.id",
+                )
+
+            class B(decl_base):
+                __tablename__ = "b"
+                id: Mapped[int] = mapped_column(Identity(), primary_key=True)
+                a_id: Mapped[int] = mapped_column(
+                    ForeignKey(
+                        "a.id",
+                        ondelete="cascade" if cascade_deletes else "set null",
+                    ),
+                    nullable=not cascade_deletes,
+                )
+
+            decl_base.metadata.create_all(connection)
+            return A, B
+
+        yield go
+
+    @testing.combinations(
+        "empty", "persistent", "transient", argnames="merge_type"
+    )
+    def test_merge_persistent(self, merge_type, user_address_fixture):
+        addresses = self.tables.addresses
+        User, Address = user_address_fixture(
+            addresses_args={"order_by": addresses.c.email_address}
         )
+        sess = fixture_session(autoflush=False)
 
-        u1 = User(name="u1")
         a1 = Address(email_address="a1")
-        s = fixture_session(autoflush=autoflush)
-        s.add(u1)
-        s.flush()
-        return u1, a1, s
+        a2 = Address(email_address="a2")
+        a3 = Address(email_address="a3")
+        u1 = User(name="jack", addresses=[a2, a3])
 
-    def _persistent_m2m_fixture(self, autoflush=True, items_args={}):
-        Order, Item = self._order_item_fixture(items_args=items_args)
+        if merge_type == "transient":
+            # merge transient.  no collection iteration is implied by this.
+            u1 = sess.merge(u1)
+            sess.add(a1)
+        else:
+            sess.add_all([u1, a1])
+        sess.flush()
 
-        o1 = Order()
-        i1 = Item(description="i1")
-        s = fixture_session(autoflush=autoflush)
-        s.add(o1)
-        s.flush()
-        return o1, i1, s
+        if merge_type == "persistent":
+            u1 = User(id=u1.id, name="jane", addresses=[a1, a3])
+
+            # merging a populated list into a persistent object is not
+            # supported with write_only, since that would require
+            # iterating the existing collection
+            with self._expect_no_iteration():
+                u1 = sess.merge(u1)
+
+        elif merge_type == "empty":
+            # merge while omitting the "too large to load" collection
+            # works fine.
+            u1 = User(id=u1.id, name="jane")
+            u1 = sess.merge(u1)
+
+            eq_(
+                attributes.get_history(
+                    u1, "addresses", PassiveFlag.PASSIVE_NO_FETCH
+                ),
+                ([], [], []),
+            )
+
+            sess.flush()
+            eq_(sess.scalars(u1.addresses.select()).all(), [a2, a3])
+
+    def test_passive_deletes_required(self, user_address_fixture):
+        addresses = self.tables.addresses
+        User, Address = user_address_fixture(
+            addresses_args={
+                "order_by": addresses.c.id,
+                "backref": "user",
+                "cascade": "save-update",
+            }
+        )
+
+        sess = fixture_session(
+            autoflush=True,
+        )
+        u = User(
+            name="ed",
+            addresses=[Address(email_address=letter) for letter in "abcdef"],
+        )
+        sess.add(u)
+        sess.commit()
+
+        sess.delete(u)
+
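+        # deleting the user requires loading the full collection in order
+        # to set each user_id to NULL; the write_only loader refuses the
+        # implicit load and raises instead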
+        with expect_raises_message(
+            exc.InvalidRequestError,
+            "Attribute User.addresses can't load the existing state from the "
+            "database for this operation; full iteration is not permitted.",
+        ):
+            sess.commit()
+
+    @testing.combinations(True, False, argnames="cascade_deletes")
+    def test_passive_deletes_succeed(
+        self, passive_deletes_fixture, connection, cascade_deletes
+    ):
+        A, B = passive_deletes_fixture(True, cascade_deletes)
+
+        sess = fixture_session(bind=connection)
+
+        a1 = A(data="d1", bs=[B(), B(), B()])
+        sess.add(a1)
+        sess.commit()
+
+        sess.delete(a1)
+
+        sess.commit()
+
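+        # no SELECT of the collection is emitted; whether the B rows
+        # remain depends entirely on the database-side ON DELETE rule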
+        if testing.requires.foreign_keys.enabled and cascade_deletes:
+            eq_(sess.scalar(select(func.count()).select_from(B)), 0)
+        else:
+            eq_(sess.scalar(select(func.count()).select_from(B)), 3)
+
+    @testing.combinations(True, False, argnames="cascade_deletes")
+    def test_remove_orphans(
+        self, passive_deletes_fixture, connection, cascade_deletes
+    ):
+        A, B = passive_deletes_fixture(True, cascade_deletes)
+
+        sess = fixture_session(bind=connection)
+
+        b1, b2, b3 = B(), B(), B()
+        a1 = A(data="d1", bs=[b1, b2, b3])
+        sess.add(a1)
+        sess.commit()
+
+        eq_(sess.scalars(a1.bs.select()).all(), [b1, b2, b3])
+
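+        # remove() records the removal in history without loading the
+        # collection; flush de-associates or deletes just that row,
+        # depending on the configured cascade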
+        a1.bs.remove(b2)
+
+        sess.commit()
+
+        eq_(sess.scalars(a1.bs.select()).all(), [b1, b3])
+
+        if cascade_deletes:
+            eq_(sess.scalar(select(func.count()).select_from(B)), 2)
+        else:
+            eq_(sess.scalar(select(func.count()).select_from(B)), 3)
+
+
+class WriteOnlyBulkTest(
+    _WriteOnlyFixture,
+    _UOWTests,
+    _fixtures.FixtureTest,
+    testing.AssertsExecutionResults,
+):
+    run_inserts = None
+    __backend__ = True
+
+    @testing.requires.insert_executemany_returning
+    @testing.combinations(True, False, argnames="flush_user_first")
+    def test_bulk_insert(self, user_address_fixture, flush_user_first):
+        User, Address = user_address_fixture(
+            addresses_args={"backref": "user"}
+        )
+        sess = fixture_session()
+
+        u1 = User(name="x")
+        sess.add(u1)
+
+        # u1 is not persistent yet; autoflush won't happen until
+        # sess.scalars() actually runs.  the statement has to be
+        # created with a pending parameter, not an actual parameter
+        assert inspect(u1).pending
+
+        if flush_user_first:
+            sess.flush()
+
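+        # WriteOnlyCollection.insert() returns an Insert with the parent
+        # foreign key value already established; executed with a list of
+        # parameters plus RETURNING, it bulk-inserts the new rows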
+        with self.sql_execution_asserter() as asserter:
+            addresses = sess.scalars(
+                u1.addresses.insert().returning(Address),
+                [
+                    {"email_address": "e1"},
+                    {"email_address": "e2"},
+                    {"email_address": "e3"},
+                ],
+            ).all()
+
+        eq_(
+            addresses,
+            [
+                Address(user=User(name="x"), email_address="e1"),
+                Address(user=User(name="x"), email_address="e2"),
+                Address(user=User(name="x"), email_address="e3"),
+            ],
+        )
+
+        uid = u1.id
+
+        asserter.assert_(
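+            # the users INSERT is asserted only for the case where the
+            # parent row wasn't flushed ahead of time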
+            Conditional(
+                not flush_user_first,
+                [
+                    CompiledSQL(
+                        "INSERT INTO users (name) VALUES (:name)",
+                        [{"name": "x"}],
+                    )
+                ],
+                [],
+            ),
+            CompiledSQL(
+                "INSERT INTO addresses (user_id, email_address) "
+                "VALUES (:param_1, :email_address) "
+                "RETURNING addresses.id, addresses.user_id, "
+                "addresses.email_address",
+                [
+                    {"param_1": uid, "email_address": "e1"},
+                    {"param_1": uid, "email_address": "e2"},
+                    {"param_1": uid, "email_address": "e3"},
+                ],
+            ),
+        )
+
+    @testing.requires.update_returning
+    @testing.combinations(True, False, argnames="flush_user_first")
+    def test_bulk_update(self, user_address_fixture, flush_user_first):
+        User, Address = user_address_fixture(
+            addresses_args={"backref": "user"}
+        )
+        sess = fixture_session()
+
+        u1 = User(
+            name="x",
+            addresses=[
+                Address(email_address="e1"),
+                Address(email_address="e2"),
+                Address(email_address="e3"),
+            ],
+        )
+        sess.add(u1)
+
+        # u1 is not persistent yet; autoflush won't happen until
+        # sess.scalars() actually runs.  the statement has to be
+        # created with a pending parameter, not an actual parameter
+        assert inspect(u1).pending
+
+        if flush_user_first:
+            sess.flush()
+
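+        # WriteOnlyCollection.update() returns an Update whose WHERE
+        # criteria is pre-established against the parent row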
+        with self.sql_execution_asserter() as asserter:
+            addresses = sess.scalars(
+                u1.addresses.update()
+                .values(email_address=Address.email_address + "@foo.com")
+                .returning(Address),
+            ).all()
+
+        eq_(
+            addresses,
+            [
+                Address(user=User(name="x"), email_address="e1@foo.com"),
+                Address(user=User(name="x"), email_address="e2@foo.com"),
+                Address(user=User(name="x"), email_address="e3@foo.com"),
+            ],
+        )
+
+        uid = u1.id
+
+        asserter.assert_(
+            Conditional(
+                not flush_user_first,
+                [
+                    CompiledSQL(
+                        "INSERT INTO users (name) VALUES (:name)",
+                        [{"name": "x"}],
+                    ),
+                    CompiledSQL(
+                        "INSERT INTO addresses (user_id, email_address) "
+                        "VALUES (:user_id, :email_address) "
+                        "RETURNING addresses.id",
+                        [
+                            {"user_id": uid, "email_address": "e1"},
+                            {"user_id": uid, "email_address": "e2"},
+                            {"user_id": uid, "email_address": "e3"},
+                        ],
+                    ),
+                ],
+                [],
+            ),
+            CompiledSQL(
+                "UPDATE addresses SET email_address=(addresses.email_address "
+                "|| :email_address_1) WHERE :param_1 = addresses.user_id "
+                "RETURNING addresses.id, addresses.user_id, "
+                "addresses.email_address",
+                [{"email_address_1": "@foo.com", "param_1": uid}],
+            ),
+        )
+
+    @testing.requires.delete_returning
+    @testing.combinations(True, False, argnames="flush_user_first")
+    def test_bulk_delete(self, user_address_fixture, flush_user_first):
+        User, Address = user_address_fixture(
+            addresses_args={"backref": "user"}
+        )
+        sess = fixture_session()
+
+        u1 = User(
+            name="x",
+            addresses=[
+                Address(email_address="e1"),
+                Address(email_address="e2"),
+                Address(email_address="e3"),
+            ],
+        )
+        sess.add(u1)
+
+        # u1 is not persistent yet; autoflush won't happen until
+        # sess.scalars() actually runs.  the statement has to be
+        # created with a pending parameter, not an actual parameter
+        assert inspect(u1).pending
+
+        if flush_user_first:
+            sess.flush()
+
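+        # WriteOnlyCollection.delete() returns a DELETE constrained to
+        # the parent row; additional .where() criteria narrow it further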
+        with self.sql_execution_asserter() as asserter:
+            addresses = sess.scalars(
+                u1.addresses.delete()
+                .where(Address.email_address == "e2")
+                .returning(Address),
+            ).all()
+
+        eq_(
+            addresses,
+            [
+                Address(email_address="e2"),
+            ],
+        )
+
+        uid = u1.id
+
+        asserter.assert_(
+            Conditional(
+                not flush_user_first,
+                [
+                    CompiledSQL(
+                        "INSERT INTO users (name) VALUES (:name)",
+                        [{"name": "x"}],
+                    ),
+                    CompiledSQL(
+                        "INSERT INTO addresses (user_id, email_address) "
+                        "VALUES (:user_id, :email_address) "
+                        "RETURNING addresses.id",
+                        [
+                            {"user_id": uid, "email_address": "e1"},
+                            {"user_id": uid, "email_address": "e2"},
+                            {"user_id": uid, "email_address": "e3"},
+                        ],
+                    ),
+                ],
+                [],
+            ),
+            CompiledSQL(
+                "DELETE FROM addresses WHERE :param_1 = addresses.user_id "
+                "AND addresses.email_address = :email_address_1 "
+                "RETURNING addresses.id, addresses.user_id, "
+                "addresses.email_address",
+                [{"param_1": uid, "email_address_1": "e2"}],
+            ),
+        )
+
+
+class _HistoryTest:
+    @testing.fixture
+    def transient_fixture(self, user_address_fixture):
+        def _transient_fixture(addresses_args={}):
+            User, Address = user_address_fixture(addresses_args=addresses_args)
+
+            u1 = User()
+            a1 = Address()
+            return u1, a1
+
+        yield _transient_fixture
+
+    @testing.fixture
+    def persistent_fixture(self, user_address_fixture):
+        def _persistent_fixture(autoflush=True, addresses_args={}):
+            User, Address = user_address_fixture(addresses_args=addresses_args)
+
+            u1 = User(name="u1")
+            a1 = Address(email_address="a1")
+            s = fixture_session(autoflush=autoflush)
+            s.add(u1)
+            s.flush()
+            return u1, a1, s
+
+        yield _persistent_fixture
+
+    @testing.fixture
+    def persistent_m2m_fixture(self, order_item_fixture):
+        def _persistent_m2m_fixture(autoflush=True, items_args={}):
+            Order, Item = order_item_fixture(items_args=items_args)
+
+            o1 = Order()
+            i1 = Item(description="i1")
+            s = fixture_session(autoflush=autoflush)
+            s.add(o1)
+            s.flush()
+            return o1, i1, s
+
+        yield _persistent_m2m_fixture
 
     def _assert_history(self, obj, compare, compare_passive=None):
         if isinstance(obj, self.classes.User):
@@ -1234,7 +1934,25 @@ class HistoryTest(_DynamicFixture, _fixtures.FixtureTest):
         if sess:
             sess.autoflush = False
         try:
-            eq_(attributes.get_history(obj, attrname), compare)
+
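+            # for write_only, history must be retrieved with
+            # PASSIVE_NO_FETCH so that the unloaded collection is never
+            # iterated; the expected result may differ in that case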
+            if self.lazy == "write_only" and compare_passive is not None:
+                eq_(
+                    attributes.get_history(
+                        obj, attrname, PassiveFlag.PASSIVE_NO_FETCH
+                    ),
+                    compare_passive,
+                )
+            else:
+                eq_(
+                    attributes.get_history(
+                        obj,
+                        attrname,
+                        PassiveFlag.PASSIVE_NO_FETCH
+                        if self.lazy == "write_only"
+                        else PassiveFlag.PASSIVE_OFF,
+                    ),
+                    compare,
+                )
 
             if compare_passive is None:
                 compare_passive = compare
@@ -1249,28 +1967,28 @@ class HistoryTest(_DynamicFixture, _fixtures.FixtureTest):
             if sess:
                 sess.autoflush = True
 
-    def test_append_transient(self):
-        u1, a1 = self._transient_fixture()
-        u1.addresses.append(a1)
+    def test_add_transient(self, transient_fixture):
+        u1, a1 = transient_fixture()
+        u1.addresses.add(a1)
 
         self._assert_history(u1, ([a1], [], []))
 
-    def test_append_persistent(self):
-        u1, a1, s = self._persistent_fixture()
-        u1.addresses.append(a1)
+    def test_add_persistent(self, persistent_fixture):
+        u1, a1, s = persistent_fixture()
+        u1.addresses.add(a1)
 
         self._assert_history(u1, ([a1], [], []))
 
-    def test_remove_transient(self):
-        u1, a1 = self._transient_fixture()
-        u1.addresses.append(a1)
+    def test_remove_transient(self, transient_fixture):
+        u1, a1 = transient_fixture()
+        u1.addresses.add(a1)
         u1.addresses.remove(a1)
 
         self._assert_history(u1, ([], [], []))
 
-    def test_backref_pop_transient(self):
-        u1, a1 = self._transient_fixture(addresses_args={"backref": "user"})
-        u1.addresses.append(a1)
+    def test_backref_pop_transient(self, transient_fixture):
+        u1, a1 = transient_fixture(addresses_args={"backref": "user"})
+        u1.addresses.add(a1)
 
         self._assert_history(u1, ([a1], [], []))
 
@@ -1279,9 +1997,9 @@ class HistoryTest(_DynamicFixture, _fixtures.FixtureTest):
         # removed from added
         self._assert_history(u1, ([], [], []))
 
-    def test_remove_persistent(self):
-        u1, a1, s = self._persistent_fixture()
-        u1.addresses.append(a1)
+    def test_remove_persistent(self, persistent_fixture):
+        u1, a1, s = persistent_fixture()
+        u1.addresses.add(a1)
         s.flush()
         s.expire_all()
 
@@ -1289,11 +2007,13 @@ class HistoryTest(_DynamicFixture, _fixtures.FixtureTest):
 
         self._assert_history(u1, ([], [], [a1]))
 
-    def test_backref_pop_persistent_autoflush_o2m_active_hist(self):
-        u1, a1, s = self._persistent_fixture(
+    def test_backref_pop_persistent_autoflush_o2m_active_hist(
+        self, persistent_fixture
+    ):
+        u1, a1, s = persistent_fixture(
             addresses_args={"backref": backref("user", active_history=True)}
         )
-        u1.addresses.append(a1)
+        u1.addresses.add(a1)
         s.flush()
         s.expire_all()
 
@@ -1301,11 +2021,11 @@ class HistoryTest(_DynamicFixture, _fixtures.FixtureTest):
 
         self._assert_history(u1, ([], [], [a1]))
 
-    def test_backref_pop_persistent_autoflush_m2m(self):
-        o1, i1, s = self._persistent_m2m_fixture(
-            items_args={"backref": "orders"}
-        )
-        o1.items.append(i1)
+    def test_backref_pop_persistent_autoflush_m2m(
+        self, persistent_m2m_fixture
+    ):
+        o1, i1, s = persistent_m2m_fixture(items_args={"backref": "orders"})
+        o1.items.add(i1)
         s.flush()
         s.expire_all()
 
@@ -1313,11 +2033,11 @@ class HistoryTest(_DynamicFixture, _fixtures.FixtureTest):
 
         self._assert_history(o1, ([], [], [i1]))
 
-    def test_backref_pop_persistent_noflush_m2m(self):
-        o1, i1, s = self._persistent_m2m_fixture(
+    def test_backref_pop_persistent_noflush_m2m(self, persistent_m2m_fixture):
+        o1, i1, s = persistent_m2m_fixture(
             items_args={"backref": "orders"}, autoflush=False
         )
-        o1.items.append(i1)
+        o1.items.add(i1)
         s.flush()
         s.expire_all()
 
@@ -1325,27 +2045,27 @@ class HistoryTest(_DynamicFixture, _fixtures.FixtureTest):
 
         self._assert_history(o1, ([], [], [i1]))
 
-    def test_unchanged_persistent(self):
+    def test_unchanged_persistent(self, persistent_fixture):
         Address = self.classes.Address
 
-        u1, a1, s = self._persistent_fixture()
+        u1, a1, s = persistent_fixture()
         a2, a3 = Address(email_address="a2"), Address(email_address="a3")
 
-        u1.addresses.append(a1)
-        u1.addresses.append(a2)
+        u1.addresses.add(a1)
+        u1.addresses.add(a2)
         s.flush()
 
-        u1.addresses.append(a3)
+        u1.addresses.add(a3)
         u1.addresses.remove(a2)
 
         self._assert_history(
             u1, ([a3], [a1], [a2]), compare_passive=([a3], [], [a2])
         )
 
-    def test_replace_transient(self):
+    def test_replace_transient(self, transient_fixture):
         Address = self.classes.Address
 
-        u1, a1 = self._transient_fixture()
+        u1, a1 = transient_fixture()
         a2, a3, a4, a5 = (
             Address(email_address="a2"),
             Address(email_address="a3"),
@@ -1358,10 +2078,12 @@ class HistoryTest(_DynamicFixture, _fixtures.FixtureTest):
 
         self._assert_history(u1, ([a2, a3, a4, a5], [], []))
 
-    def test_replace_persistent_noflush(self):
+    @testing.combinations(True, False, argnames="autoflush")
+    def test_replace_persistent(self, autoflush, persistent_fixture):
+        User = self.classes.User
         Address = self.classes.Address
 
-        u1, a1, s = self._persistent_fixture(autoflush=False)
+        u1, a1, s = persistent_fixture(autoflush=autoflush)
         a2, a3, a4, a5 = (
             Address(email_address="a2"),
             Address(email_address="a3"),
@@ -1369,56 +2091,46 @@ class HistoryTest(_DynamicFixture, _fixtures.FixtureTest):
             Address(email_address="a5"),
         )
 
-        u1.addresses = [a1, a2]
-        u1.addresses = [a2, a3, a4, a5]
-
-        self._assert_history(u1, ([a2, a3, a4, a5], [], []))
-
-    def test_replace_persistent_autoflush(self):
-        Address = self.classes.Address
-
-        u1, a1, s = self._persistent_fixture(autoflush=True)
-        a2, a3, a4, a5 = (
-            Address(email_address="a2"),
-            Address(email_address="a3"),
-            Address(email_address="a4"),
-            Address(email_address="a5"),
-        )
+        if User.addresses.property.lazy == "write_only":
+            with self._expect_no_iteration():
+                u1.addresses = [a1, a2]
+            return
 
         u1.addresses = [a1, a2]
         u1.addresses = [a2, a3, a4, a5]
 
-        self._assert_history(
-            u1,
-            ([a3, a4, a5], [a2], [a1]),
-            compare_passive=([a3, a4, a5], [], [a1]),
-        )
+        if not autoflush:
+            self._assert_history(u1, ([a2, a3, a4, a5], [], []))
+        else:
+            self._assert_history(
+                u1,
+                ([a3, a4, a5], [a2], [a1]),
+                compare_passive=([a3, a4, a5], [], [a1]),
+            )
 
-    def test_persistent_but_readded_noflush(self):
-        u1, a1, s = self._persistent_fixture(autoflush=False)
-        u1.addresses.append(a1)
+    @testing.combinations(True, False, argnames="autoflush")
+    def test_persistent_but_readded(self, autoflush, persistent_fixture):
+        u1, a1, s = persistent_fixture(autoflush=autoflush)
+        u1.addresses.add(a1)
         s.flush()
 
-        u1.addresses.append(a1)
+        u1.addresses.add(a1)
 
         self._assert_history(
             u1, ([], [a1], []), compare_passive=([a1], [], [])
         )
 
-    def test_persistent_but_readded_autoflush(self):
-        u1, a1, s = self._persistent_fixture(autoflush=True)
-        u1.addresses.append(a1)
-        s.flush()
+    def test_missing_but_removed_noflush(self, persistent_fixture):
+        u1, a1, s = persistent_fixture(autoflush=False)
 
-        u1.addresses.append(a1)
+        u1.addresses.remove(a1)
 
-        self._assert_history(
-            u1, ([], [a1], []), compare_passive=([a1], [], [])
-        )
+        self._assert_history(u1, ([], [], []), compare_passive=([], [], [a1]))
 
-    def test_missing_but_removed_noflush(self):
-        u1, a1, s = self._persistent_fixture(autoflush=False)
 
-        u1.addresses.remove(a1)
+class DynamicHistoryTest(_DynamicFixture, _HistoryTest, _fixtures.FixtureTest):
+    run_inserts = None
 
-        self._assert_history(u1, ([], [], []), compare_passive=([], [], [a1]))
+
+class WriteOnlyHistoryTest(_WriteOnlyFixture, DynamicHistoryTest):
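+    # runs the full history suite with lazy="write_only"; _assert_history
+    # switches to PASSIVE_NO_FETCH for this variant so that no implicit
+    # loads occur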
+    pass