implement use_descriptor_defaults for dataclass defaults
author     Mike Bayer <mike_mp@zzzcomputing.com>
           Tue, 10 Dec 2024 15:59:25 +0000 (10:59 -0500)
committer  Mike Bayer <mike_mp@zzzcomputing.com>
           Wed, 19 Mar 2025 19:14:37 +0000 (15:14 -0400)
A significant behavioral change has been made to the behavior of the
:paramref:`_orm.mapped_column.default` and
:paramref:`_orm.relationship.default` parameters, when used with
SQLAlchemy's :ref:`orm_declarative_native_dataclasses` feature introduced
in 2.0.  The given value (assumed to be an immutable scalar value) is
no longer passed to the ``@dataclass`` API as a real default; instead, a
token that leaves the value unset in the object's ``__dict__`` is used, in
conjunction with a descriptor-level default.  This prevents an unset
default value from overriding a default that was actually set elsewhere,
such as in relationship / foreign key assignment patterns as well as in
:meth:`_orm.Session.merge` scenarios.   See the full writeup in the
:ref:`whatsnew_21_toplevel` document, which includes guidance on how to
re-enable the 2.0 version of the behavior if needed.

This adds a new implicit default field to ScalarAttributeImpl
so that we can have defaults that are not in the dictionary but
are instead passed through to the class-level descriptor, effectively
allowing custom defaults that are not used in INSERT or merge

Fixes: #12168
Change-Id: Ia327d18d6ec47c430e926ab7658e7b9f0666206e

17 files changed:
doc/build/changelog/migration_21.rst
doc/build/changelog/unreleased_21/12168.rst [new file with mode: 0644]
doc/build/faq/ormconfiguration.rst
lib/sqlalchemy/orm/_orm_constructors.py
lib/sqlalchemy/orm/attributes.py
lib/sqlalchemy/orm/base.py
lib/sqlalchemy/orm/decl_api.py
lib/sqlalchemy/orm/decl_base.py
lib/sqlalchemy/orm/descriptor_props.py
lib/sqlalchemy/orm/interfaces.py
lib/sqlalchemy/orm/properties.py
lib/sqlalchemy/orm/relationships.py
lib/sqlalchemy/orm/strategies.py
lib/sqlalchemy/orm/writeonly.py
lib/sqlalchemy/sql/schema.py
test/orm/declarative/test_dc_transforms.py
test/sql/test_metadata.py

diff --git a/doc/build/changelog/migration_21.rst b/doc/build/changelog/migration_21.rst
index 304f9a5d24944073a33fb8c8b42e08cb9aeb3831..5dcc9bea09efcdbcf4f63c505cab5ec2f3c3e319 100644 (file)
@@ -134,6 +134,184 @@ lambdas which do the same::
 
 :ticket:`10050`
 
+.. _change_12168:
+
+ORM Mapped Dataclasses no longer populate implicit ``default`` in ``__dict__``
+------------------------------------------------------------------------------
+
+This behavioral change addresses a widely reported issue with SQLAlchemy's
+:ref:`orm_declarative_native_dataclasses` feature that was introduced in 2.0.
+SQLAlchemy ORM has always featured a behavior where a particular attribute on
+an ORM mapped class behaves differently depending on whether it has an
+actively set value, including a value of ``None``, versus whether the
+attribute is not set at all.  When Declarative Dataclass Mapping was introduced, the
+:paramref:`_orm.mapped_column.default` parameter introduced a new capability
+which is to set up a dataclass-level default to be present in the generated
+``__init__`` method. This had the unfortunate side effect of breaking various
+popular workflows, the most prominent of which is creating an ORM object with
+the foreign key value in lieu of a many-to-one reference::
+
+    class Base(MappedAsDataclass, DeclarativeBase):
+        pass
+
+
+    class Parent(Base):
+        __tablename__ = "parent"
+
+        id: Mapped[int] = mapped_column(primary_key=True, init=False)
+
+        related_id: Mapped[int | None] = mapped_column(ForeignKey("child.id"), default=None)
+        related: Mapped[Child | None] = relationship(default=None)
+
+
+    class Child(Base):
+        __tablename__ = "child"
+
+        id: Mapped[int] = mapped_column(primary_key=True, init=False)
+
+In the above mapping, the ``__init__`` method generated for ``Parent``
+would look like this in Python code::
+
+    def __init__(self, related_id=None, related=None): ...
+
+This means that creating a new ``Parent`` with ``related_id`` only would populate
+both ``related_id`` and ``related`` in ``__dict__``::
+
+    # 2.0 behavior; will INSERT NULL for related_id due to the presence
+    # of related=None
+    >>> p1 = Parent(related_id=5)
+    >>> p1.__dict__
+    {'related_id': 5, 'related': None, '_sa_instance_state': ...}
+
+The ``None`` value for ``'related'`` means that SQLAlchemy treats the
+relationship as having been explicitly set to ``None``, favoring that setting
+over the given value for ``'related_id'``; the ``'related_id'`` value is
+discarded and ``NULL`` is inserted instead.
+
+In the new behavior, the ``__init__`` method instead looks like the example below,
+using a special constant ``DONT_SET`` indicating that a non-present value for
+``'related'`` should be ignored.  This allows the class to behave more closely to
+how SQLAlchemy ORM mapped classes traditionally operate::
+
+    def __init__(self, related_id=DONT_SET, related=DONT_SET): ...
+
+We then get a ``__dict__`` setup that will follow the expected behavior of
+omitting ``related`` from ``__dict__`` and later running an INSERT with
+``related_id=5``::
+
+    # 2.1 behavior; will INSERT 5 for related_id
+    >>> p1 = Parent(related_id=5)
+    >>> p1.__dict__
+    {'related_id': 5, '_sa_instance_state': ...}
+
+Dataclass defaults are delivered via descriptor instead of ``__dict__``
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The above behavior goes a step further: in order to honor default values
+that are something other than ``None``, the value of the
+dataclass-level default (i.e. set using any of the
+:paramref:`_orm.mapped_column.default`,
+:paramref:`_orm.column_property.default`, or :paramref:`_orm.deferred.default`
+parameters) is delivered at the
+Python :term:`descriptor` level, using mechanisms in SQLAlchemy's attribute
+system that normally return ``None`` for unpopulated columns.  Even though
+the default is not populated into ``__dict__``, it is still delivered when the
+attribute is accessed.  This behavior is based on what Python dataclasses
+itself does when a default is indicated for a field that also includes
+``init=False``.
+
+In the example below, an immutable default ``"default_status"``
+is applied to a column called ``status``::
+
+    class Base(MappedAsDataclass, DeclarativeBase):
+        pass
+
+
+    class SomeObject(Base):
+        __tablename__ = "parent"
+
+        id: Mapped[int] = mapped_column(primary_key=True, init=False)
+
+        status: Mapped[str] = mapped_column(default="default_status")
+
+In the above mapping, constructing ``SomeObject`` with no parameters will
+deliver no values inside of ``__dict__``, but will deliver the default
+value via descriptor::
+
+    # object is constructed with no value for ``status``
+    >>> s1 = SomeObject()
+
+    # the default value is not placed in ``__dict__``
+    >>> s1.__dict__
+    {'_sa_instance_state': ...}
+
+    # but the default value is delivered at the object level via descriptor
+    >>> s1.status
+    'default_status'
+
+    # the value still remains unpopulated in ``__dict__``
+    >>> s1.__dict__
+    {'_sa_instance_state': ...}
+
+The value passed
+as :paramref:`_orm.mapped_column.default` is also assigned, as was the
+case before, to the :paramref:`_schema.Column.default` parameter of the
+underlying :class:`_schema.Column`, where it takes
+effect as a Python-level default for INSERT statements.  So while ``__dict__``
+is never populated with the default value on the object, the INSERT
+still includes the value in the parameter set.  This essentially modifies
+the Declarative Dataclass Mapping system to work more like traditional
+ORM mapped classes, where a "default" means just that, a column-level
+default.
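+
+For example, under the mapping above, the Core-level column default carries
+the same value (a brief sketch following the ``SomeObject`` example)::
+
+    >>> SomeObject.__table__.c.status.default.arg
+    'default_status'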
+
+Dataclass defaults are accessible on objects even without init
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+As the new behavior makes use of descriptors in a similar way as Python
+dataclasses themselves do when ``init=False``, the new feature implements
+this behavior as well.  This is an all-new behavior where an ORM mapped
+class can deliver a default value for fields even if they are not part of
+the ``__init__()`` method at all.  In the mapping below, the ``status``
+field is configured with ``init=False``, meaning it's not part of the
+constructor at all::
+
+    class Base(MappedAsDataclass, DeclarativeBase):
+        pass
+
+
+    class SomeObject(Base):
+        __tablename__ = "parent"
+        id: Mapped[int] = mapped_column(primary_key=True, init=False)
+        status: Mapped[str] = mapped_column(default="default_status", init=False)
+
+When we construct ``SomeObject()`` with no arguments, the default is accessible
+on the instance, delivered via descriptor::
+
+    >>> so = SomeObject()
+    >>> so.status
+    'default_status'
+
+Related Changes
+^^^^^^^^^^^^^^^
+
+This change includes the following API changes:
+
+* The :paramref:`_orm.relationship.default` parameter, when present, accepts
+  only a value of ``None``, and may only be used when the relationship is
+  ultimately a many-to-one relationship or one that establishes
+  :paramref:`_orm.relationship.uselist` as ``False``.
+* The :paramref:`_orm.mapped_column.default` and :paramref:`_orm.mapped_column.insert_default`
+  parameters are mutually exclusive, and only one may be passed at a time.
+  The behavior of the two parameters is equivalent at the :class:`_schema.Column`
+  level, however at the Declarative Dataclass Mapping level, only
+  :paramref:`_orm.mapped_column.default` actually sets the dataclass-level
+  default with descriptor access; using :paramref:`_orm.mapped_column.insert_default`
+  will have the effect of the object attribute defaulting to ``None`` on the
+  instance until the INSERT takes place, in the same way it works on traditional
+  ORM mapped classes.  Brief examples of both changes follow below.
+
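+As a brief illustration of both changes, invalid combinations are rejected up
+front (a sketch; error messages abbreviated)::
+
+    >>> from sqlalchemy.orm import mapped_column, relationship
+
+    >>> relationship(default="not none")
+    Traceback (most recent call last):
+      ...
+    sqlalchemy.exc.ArgumentError: Only 'None' is accepted as dataclass
+    default for a relationship()
+
+    >>> mapped_column(default="x", insert_default="y")
+    Traceback (most recent call last):
+      ...
+    sqlalchemy.exc.ArgumentError: The 'default' and 'insert_default'
+    parameters of Column are mutually exclusive
+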
+:ticket:`12168`
+
+
 .. _change_11234:
 
 URL stringify and parse now supports URL escaping for the "database" portion
diff --git a/doc/build/changelog/unreleased_21/12168.rst b/doc/build/changelog/unreleased_21/12168.rst
new file mode 100644 (file)
index 0000000..6521733
--- /dev/null
@@ -0,0 +1,21 @@
+.. change::
+    :tags: bug, orm
+    :tickets: 12168
+
+    A significant behavioral change has been made to the behavior of the
+    :paramref:`_orm.mapped_column.default` and
+    :paramref:`_orm.relationship.default` parameters, when used with
+    SQLAlchemy's :ref:`orm_declarative_native_dataclasses` feature introduced
+    in 2.0.  The given value (assumed to be an immutable scalar value) is
+    no longer passed to the ``@dataclass`` API as a real default; instead, a
+    token that leaves the value unset in the object's ``__dict__`` is used, in
+    conjunction with a descriptor-level default.  This prevents an unset
+    default value from overriding a default that was actually set elsewhere,
+    such as in relationship / foreign key assignment patterns as well as in
+    :meth:`_orm.Session.merge` scenarios.   See the full writeup in the
+    :ref:`whatsnew_21_toplevel` document, which includes guidance on how to
+    re-enable the 2.0 version of the behavior if needed.
+
+    .. seealso::
+
+        :ref:`change_12168`
diff --git a/doc/build/faq/ormconfiguration.rst b/doc/build/faq/ormconfiguration.rst
index 9388789cc6afbd0e44d99ae39878970b5f3c72ee..53904f74091115f2b74210ace98b2ef090b4a225 100644 (file)
@@ -389,29 +389,48 @@ parameters are **synonymous**.
 Part Two - Using Dataclasses support with MappedAsDataclass
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
+.. versionchanged:: 2.1 The behavior of column-level defaults when using
+   dataclasses has changed to an approach that uses class-level descriptors
+   to provide the object-level default behavior, in conjunction with
+   Core-level column defaults to provide the correct INSERT behavior. See
+   :ref:`change_12168` for background.
+
 When you **are** using :class:`_orm.MappedAsDataclass`, that is, the specific form
 of mapping used at :ref:`orm_declarative_native_dataclasses`, the meaning of the
 :paramref:`_orm.mapped_column.default` keyword changes. We recognize that it's not
 ideal that this name changes its behavior; however, there was no alternative, as
 PEP-681 requires :paramref:`_orm.mapped_column.default` to take on this meaning.
 
-When dataclasses are used, the :paramref:`_orm.mapped_column.default` parameter must
-be used the way it's described at
-`Python Dataclasses <https://docs.python.org/3/library/dataclasses.html>`_ - it refers
-to a constant value like a string or a number, and **is applied to your object
-immediately when constructed**. It is also at the moment also applied to the
-:paramref:`_orm.mapped_column.default` parameter of :class:`_schema.Column` where
-it would be used in an ``INSERT`` statement automatically even if not present
-on the object. If you instead want to use a callable for your dataclass,
-which will be applied to the object when constructed, you would use
-:paramref:`_orm.mapped_column.default_factory`.
-
-To get access to the ``INSERT``-only behavior of :paramref:`_orm.mapped_column.default`
-that is described in part one above, you would use the
-:paramref:`_orm.mapped_column.insert_default` parameter instead.
-:paramref:`_orm.mapped_column.insert_default` when dataclasses are used continues
-to be a direct route to the Core-level "default" process where the parameter can
-be a static value or callable.
+When dataclasses are used, the :paramref:`_orm.mapped_column.default` parameter
+must be used the way it's described at `Python Dataclasses
+<https://docs.python.org/3/library/dataclasses.html>`_ - it refers to a
+constant value like a string or a number, and **is available on your object
+immediately when constructed**.  As of SQLAlchemy 2.1, the value is delivered
+using a descriptor if not otherwise set, without the value actually being
+placed in ``__dict__`` unless it was passed to the constructor explicitly.
+
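+For example, given a dataclass-mapped class with a column such as
+``status: Mapped[str] = mapped_column(default="pending")`` (a brief sketch;
+class and column names are illustrative only), the default is readable on a
+newly constructed object without being placed in ``__dict__``::
+
+    >>> obj = SomeObject()
+    >>> obj.status
+    'pending'
+    >>> "status" in obj.__dict__
+    False
+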
+The value used for :paramref:`_orm.mapped_column.default` is also applied to the
+:paramref:`_schema.Column.default` parameter of :class:`_schema.Column`.
+This is so that the value used as the dataclass default is also applied in
+an ORM INSERT statement for a mapped object where the value was not
+explicitly passed.  Using this parameter is **mutually exclusive** with the
+:paramref:`_schema.Column.insert_default` parameter, meaning that both cannot
+be used at the same time.
+
+The :paramref:`_orm.mapped_column.default` and
+:paramref:`_orm.mapped_column.insert_default` parameters may also be used
+(one or the other, not both) for a SQLAlchemy-mapped dataclass field that
+indicates ``init=False``, or for a dataclass that establishes ``init=False``
+overall.  In this usage, if :paramref:`_orm.mapped_column.default` is used,
+the default value will be available on the constructed object immediately as
+well as used within the INSERT statement.  If
+:paramref:`_orm.mapped_column.insert_default` is used, the constructed object
+will return ``None`` for the attribute value, but the default value will still
+be used for the INSERT statement.
+
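+For example (a brief sketch, assuming a ``MappedAsDataclass`` declarative base
+named ``Base``; class and column names are illustrative only)::
+
+    class SomeRow(Base):
+        __tablename__ = "some_row"
+
+        id: Mapped[int] = mapped_column(primary_key=True, init=False)
+
+        # readable on the object immediately, and also used for the INSERT
+        status: Mapped[str] = mapped_column(default="pending", init=False)
+
+        # None on the object until the INSERT applies the default
+        code: Mapped[str] = mapped_column(insert_default="XYZ", init=False)
+
+    row = SomeRow()
+    print(row.status)  # "pending"
+    print(row.code)  # None
+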
+To use a callable to generate defaults for the dataclass, where the generated
+value is applied to the object when constructed by being populated into
+``__dict__``, use :paramref:`_orm.mapped_column.default_factory` instead.
 
 .. list-table:: Summary Chart
    :header-rows: 1
@@ -421,7 +440,7 @@ be a static value or callable.
      - Works without dataclasses?
      - Accepts scalar?
      - Accepts callable?
-     - Populates object immediately?
+     - Available on object immediately?
    * - :paramref:`_orm.mapped_column.default`
      - ✔
      - ✔
@@ -429,7 +448,7 @@ be a static value or callable.
      - Only if no dataclasses
      - Only if dataclasses
    * - :paramref:`_orm.mapped_column.insert_default`
-     - ✔
+     - ✔ (only if no ``default``)
      - ✔
      - ✔
      - ✔
diff --git a/lib/sqlalchemy/orm/_orm_constructors.py b/lib/sqlalchemy/orm/_orm_constructors.py
index 63ba5cd79644478b8237bf3e2969b462fc54e08a..5dad065396094e7ea39edfbeb2ab3ec9f2192a66 100644 (file)
@@ -1814,6 +1814,17 @@ def relationship(
          automatically detected; if it is not detected, then the
          optimization is not supported.
 
+    :param default: Specific to :ref:`orm_declarative_native_dataclasses`,
+     specifies an immutable scalar default value for the relationship that
+     will behave as though it is the default value for the parameter in the
+     ``__init__()`` method.  This is only supported for a ``uselist=False``
+     relationship, that is many-to-one or one-to-one, and only supports the
+     scalar value ``None``, since no other immutable value is valid for such a
+     relationship.
+
+     .. versionchanged:: 2.1 the :paramref:`_orm.relationship.default`
+        parameter only supports a value of ``None``.
+
     :param init: Specific to :ref:`orm_declarative_native_dataclasses`,
      specifies if the mapped attribute should be part of the ``__init__()``
      method as generated by the dataclass process.
diff --git a/lib/sqlalchemy/orm/attributes.py b/lib/sqlalchemy/orm/attributes.py
index fc95401ca2b9f39a0e2d7eb2f1c594913241fc1f..1722de484859c03ce7f9a96aa419177b20de891c 100644 (file)
@@ -45,6 +45,7 @@ from .base import ATTR_EMPTY
 from .base import ATTR_WAS_SET
 from .base import CALLABLES_OK
 from .base import DEFERRED_HISTORY_LOAD
+from .base import DONT_SET
 from .base import INCLUDE_PENDING_MUTATIONS  # noqa
 from .base import INIT_OK
 from .base import instance_dict as instance_dict
@@ -1045,20 +1046,9 @@ class _AttributeImpl:
     def _default_value(
         self, state: InstanceState[Any], dict_: _InstanceDict
     ) -> Any:
-        """Produce an empty value for an uninitialized scalar attribute."""
-
-        assert self.key not in dict_, (
-            "_default_value should only be invoked for an "
-            "uninitialized or expired attribute"
-        )
+        """Produce an empty value for an uninitialized attribute."""
 
-        value = None
-        for fn in self.dispatch.init_scalar:
-            ret = fn(state, value, dict_)
-            if ret is not ATTR_EMPTY:
-                value = ret
-
-        return value
+        raise NotImplementedError()
 
     def get(
         self,
@@ -1211,15 +1201,38 @@ class _ScalarAttributeImpl(_AttributeImpl):
     collection = False
     dynamic = False
 
-    __slots__ = "_replace_token", "_append_token", "_remove_token"
+    __slots__ = (
+        "_default_scalar_value",
+        "_replace_token",
+        "_append_token",
+        "_remove_token",
+    )
 
-    def __init__(self, *arg, **kw):
+    def __init__(self, *arg, default_scalar_value=None, **kw):
         super().__init__(*arg, **kw)
+        self._default_scalar_value = default_scalar_value
         self._replace_token = self._append_token = AttributeEventToken(
             self, OP_REPLACE
         )
         self._remove_token = AttributeEventToken(self, OP_REMOVE)
 
+    def _default_value(
+        self, state: InstanceState[Any], dict_: _InstanceDict
+    ) -> Any:
+        """Produce an empty value for an uninitialized scalar attribute."""
+
+        assert self.key not in dict_, (
+            "_default_value should only be invoked for an "
+            "uninitialized or expired attribute"
+        )
+        value = self._default_scalar_value
+        for fn in self.dispatch.init_scalar:
+            ret = fn(state, value, dict_)
+            if ret is not ATTR_EMPTY:
+                value = ret
+
+        return value
+
     def delete(self, state: InstanceState[Any], dict_: _InstanceDict) -> None:
         if self.dispatch._active_history:
             old = self.get(state, dict_, PASSIVE_RETURN_NO_VALUE)
@@ -1268,6 +1281,9 @@ class _ScalarAttributeImpl(_AttributeImpl):
         check_old: Optional[object] = None,
         pop: bool = False,
     ) -> None:
+        # DONT_SET is the token used by the dataclass-generated __init__()
+        # as the stand-in default; receiving it means no value was passed,
+        # so leave the attribute unset in __dict__
+        if value is DONT_SET:
+            return
+
         if self.dispatch._active_history:
             old = self.get(state, dict_, PASSIVE_RETURN_NO_VALUE)
         else:
@@ -1434,6 +1450,9 @@ class _ScalarObjectAttributeImpl(_ScalarAttributeImpl):
     ) -> None:
         """Set a value on the given InstanceState."""
 
+        # as in _ScalarAttributeImpl.set(), ignore the DONT_SET token
+        if value is DONT_SET:
+            return
+
         if self.dispatch._active_history:
             old = self.get(
                 state,
diff --git a/lib/sqlalchemy/orm/base.py b/lib/sqlalchemy/orm/base.py
index 14a0eae6f7378d0ad2ce08d9451d0e9f41d1b5f2..aff2b23ae22ec366bb254c32c6112e93fbceba35 100644 (file)
@@ -97,6 +97,8 @@ class LoaderCallableStatus(Enum):
 
     """
 
+    DONT_SET = 5
+
 
 (
     PASSIVE_NO_RESULT,
@@ -104,6 +106,7 @@ class LoaderCallableStatus(Enum):
     ATTR_WAS_SET,
     ATTR_EMPTY,
     NO_VALUE,
+    DONT_SET,
 ) = tuple(LoaderCallableStatus)
 
 NEVER_SET = NO_VALUE
diff --git a/lib/sqlalchemy/orm/decl_api.py b/lib/sqlalchemy/orm/decl_api.py
index daafc83f143faac20c9c4b8200cbe85c1e1479e6..f3cec699b8de7f3c9427c0e5c634e9c06b8592e5 100644 (file)
@@ -81,8 +81,8 @@ from ..util.typing import Self
 if TYPE_CHECKING:
     from ._typing import _O
     from ._typing import _RegistryType
-    from .decl_base import _DataclassArguments
     from .instrumentation import ClassManager
+    from .interfaces import _DataclassArguments
     from .interfaces import MapperProperty
     from .state import InstanceState  # noqa
     from ..sql._typing import _TypeEngineArgument
@@ -594,7 +594,6 @@ class MappedAsDataclass(metaclass=DCTransformDeclarative):
             "kw_only": kw_only,
             "dataclass_callable": dataclass_callable,
         }
-
         current_transforms: _DataclassArguments
 
         if hasattr(cls, "_sa_apply_dc_transforms"):
@@ -1597,20 +1596,18 @@ class registry:
         """
 
         def decorate(cls: Type[_O]) -> Type[_O]:
-            setattr(
-                cls,
-                "_sa_apply_dc_transforms",
-                {
-                    "init": init,
-                    "repr": repr,
-                    "eq": eq,
-                    "order": order,
-                    "unsafe_hash": unsafe_hash,
-                    "match_args": match_args,
-                    "kw_only": kw_only,
-                    "dataclass_callable": dataclass_callable,
-                },
-            )
+            apply_dc_transforms: _DataclassArguments = {
+                "init": init,
+                "repr": repr,
+                "eq": eq,
+                "order": order,
+                "unsafe_hash": unsafe_hash,
+                "match_args": match_args,
+                "kw_only": kw_only,
+                "dataclass_callable": dataclass_callable,
+            }
+
+            setattr(cls, "_sa_apply_dc_transforms", apply_dc_transforms)
             _as_declarative(self, cls, cls.__dict__)
             return cls
 
diff --git a/lib/sqlalchemy/orm/decl_base.py b/lib/sqlalchemy/orm/decl_base.py
index fdd6b7eaeea10ae9f3002e69882c4a7a9150f722..020c8492579c6e7de2d3a58303dc0c5b51f737be 100644 (file)
@@ -27,7 +27,6 @@ from typing import Sequence
 from typing import Tuple
 from typing import Type
 from typing import TYPE_CHECKING
-from typing import TypedDict
 from typing import TypeVar
 from typing import Union
 import weakref
@@ -46,6 +45,7 @@ from .base import InspectionAttr
 from .descriptor_props import CompositeProperty
 from .descriptor_props import SynonymProperty
 from .interfaces import _AttributeOptions
+from .interfaces import _DataclassArguments
 from .interfaces import _DCAttributeOptions
 from .interfaces import _IntrospectsAnnotations
 from .interfaces import _MappedAttribute
@@ -115,17 +115,6 @@ class _DeclMappedClassProtocol(MappedClassProtocol[_O], Protocol):
     def __declare_last__(self) -> None: ...
 
 
-class _DataclassArguments(TypedDict):
-    init: Union[_NoArg, bool]
-    repr: Union[_NoArg, bool]
-    eq: Union[_NoArg, bool]
-    order: Union[_NoArg, bool]
-    unsafe_hash: Union[_NoArg, bool]
-    match_args: Union[_NoArg, bool]
-    kw_only: Union[_NoArg, bool]
-    dataclass_callable: Union[_NoArg, Callable[..., Type[Any]]]
-
-
 def _declared_mapping_info(
     cls: Type[Any],
 ) -> Optional[Union[_DeferredMapperConfig, Mapper[Any]]]:
@@ -1085,10 +1074,12 @@ class _ClassScanMapperConfig(_MapperConfig):
 
         field_list = [
             _AttributeOptions._get_arguments_for_make_dataclass(
+                self,
                 key,
                 anno,
                 mapped_container,
                 self.collected_attributes.get(key, _NoArg.NO_ARG),
+                dataclass_setup_arguments,
             )
             for key, anno, mapped_container in (
                 (
@@ -1121,7 +1112,6 @@ class _ClassScanMapperConfig(_MapperConfig):
                 )
             )
         ]
-
         if warn_for_non_dc_attrs:
             for (
                 originating_class,
@@ -1218,7 +1208,8 @@ class _ClassScanMapperConfig(_MapperConfig):
                 **{
                     k: v
                     for k, v in dataclass_setup_arguments.items()
-                    if v is not _NoArg.NO_ARG and k != "dataclass_callable"
+                    if v is not _NoArg.NO_ARG
+                    and k not in ("dataclass_callable",)
                 },
             )
         except (TypeError, ValueError) as ex:
diff --git a/lib/sqlalchemy/orm/descriptor_props.py b/lib/sqlalchemy/orm/descriptor_props.py
index 89124c4e439a827c5350ec346c10dbb39ff5db45..6842cd149a435fd71e6457db82820bec8af486d4 100644 (file)
@@ -34,6 +34,7 @@ import weakref
 from . import attributes
 from . import util as orm_util
 from .base import _DeclarativeMapped
+from .base import DONT_SET
 from .base import LoaderCallableStatus
 from .base import Mapped
 from .base import PassiveFlag
@@ -52,6 +53,7 @@ from .. import sql
 from .. import util
 from ..sql import expression
 from ..sql import operators
+from ..sql.base import _NoArg
 from ..sql.elements import BindParameter
 from ..util.typing import get_args
 from ..util.typing import is_fwd_ref
@@ -68,6 +70,7 @@ if typing.TYPE_CHECKING:
     from .attributes import QueryableAttribute
     from .context import _ORMCompileState
     from .decl_base import _ClassScanMapperConfig
+    from .interfaces import _DataclassArguments
     from .mapper import Mapper
     from .properties import ColumnProperty
     from .properties import MappedColumn
@@ -158,6 +161,7 @@ class DescriptorProperty(MapperProperty[_T]):
             doc=self.doc,
             original_property=self,
         )
+
         proxy_attr.impl = _ProxyImpl(self.key)
         mapper.class_manager.instrument_attribute(self.key, proxy_attr)
 
@@ -305,6 +309,9 @@ class CompositeProperty(
             return dict_.get(self.key, None)
 
         def fset(instance: Any, value: Any) -> None:
+            if value is LoaderCallableStatus.DONT_SET:
+                return
+
             dict_ = attributes.instance_dict(instance)
             state = attributes.instance_state(instance)
             attr = state.manager[self.key]
@@ -1022,6 +1029,39 @@ class SynonymProperty(DescriptorProperty[_T]):
         attr: QueryableAttribute[Any] = getattr(self.parent.class_, self.name)
         return attr.impl.get_history(state, dict_, passive=passive)
 
+    def _get_dataclass_setup_options(
+        self,
+        decl_scan: _ClassScanMapperConfig,
+        key: str,
+        dataclass_setup_arguments: _DataclassArguments,
+    ) -> _AttributeOptions:
+        dataclasses_default = self._attribute_options.dataclasses_default
+        if (
+            dataclasses_default is not _NoArg.NO_ARG
+            and not callable(dataclasses_default)
+            and not getattr(
+                decl_scan.cls, "_sa_disable_descriptor_defaults", False
+            )
+        ):
+            proxied = decl_scan.collected_attributes[self.name]
+            proxied_default = proxied._attribute_options.dataclasses_default
+            if proxied_default != dataclasses_default:
+                raise sa_exc.ArgumentError(
+                    f"Synonym {key!r} default argument "
+                    f"{dataclasses_default!r} must match the dataclasses "
+                    f"default value of proxied object {self.name!r}, "
+                    f"""currently {
+                        repr(proxied_default)
+                        if proxied_default is not _NoArg.NO_ARG
+                        else 'not set'}"""
+                )
+            self._default_scalar_value = dataclasses_default
+            return self._attribute_options._replace(
+                dataclasses_default=DONT_SET
+            )
+
+        return self._attribute_options
+
     @util.preload_module("sqlalchemy.orm.properties")
     def set_parent(self, parent: Mapper[Any], init: bool) -> None:
         properties = util.preloaded.orm_properties
diff --git a/lib/sqlalchemy/orm/interfaces.py b/lib/sqlalchemy/orm/interfaces.py
index 1cedd3910285230e04d0b1c729d4271f1a879c40..9045e09a7c8e1259b9715b628fd471e4f08da696 100644 (file)
@@ -44,6 +44,7 @@ from typing import Union
 from . import exc as orm_exc
 from . import path_registry
 from .base import _MappedAttribute as _MappedAttribute
+from .base import DONT_SET as DONT_SET  # noqa: F401
 from .base import EXT_CONTINUE as EXT_CONTINUE  # noqa: F401
 from .base import EXT_SKIP as EXT_SKIP  # noqa: F401
 from .base import EXT_STOP as EXT_STOP  # noqa: F401
@@ -193,6 +194,22 @@ class _IntrospectsAnnotations:
         )
 
 
+class _DataclassArguments(TypedDict):
+    """define arguments that can be passed to ORM Annotated Dataclass
+    class definitions.
+
+    """
+
+    init: Union[_NoArg, bool]
+    repr: Union[_NoArg, bool]
+    eq: Union[_NoArg, bool]
+    order: Union[_NoArg, bool]
+    unsafe_hash: Union[_NoArg, bool]
+    match_args: Union[_NoArg, bool]
+    kw_only: Union[_NoArg, bool]
+    dataclass_callable: Union[_NoArg, Callable[..., Type[Any]]]
+
+
 class _AttributeOptions(NamedTuple):
     """define Python-local attribute behavior options common to all
     :class:`.MapperProperty` objects.
@@ -211,7 +228,9 @@ class _AttributeOptions(NamedTuple):
     dataclasses_kw_only: Union[_NoArg, bool]
     dataclasses_hash: Union[_NoArg, bool, None]
 
-    def _as_dataclass_field(self, key: str) -> Any:
+    def _as_dataclass_field(
+        self, key: str, dataclass_setup_arguments: _DataclassArguments
+    ) -> Any:
         """Return a ``dataclasses.Field`` object given these arguments."""
 
         kw: Dict[str, Any] = {}
@@ -263,10 +282,12 @@ class _AttributeOptions(NamedTuple):
     @classmethod
     def _get_arguments_for_make_dataclass(
         cls,
+        decl_scan: _ClassScanMapperConfig,
         key: str,
         annotation: _AnnotationScanType,
         mapped_container: Optional[Any],
         elem: _T,
+        dataclass_setup_arguments: _DataclassArguments,
     ) -> Union[
         Tuple[str, _AnnotationScanType],
         Tuple[str, _AnnotationScanType, dataclasses.Field[Any]],
@@ -277,7 +298,12 @@ class _AttributeOptions(NamedTuple):
 
         """
         if isinstance(elem, _DCAttributeOptions):
-            dc_field = elem._attribute_options._as_dataclass_field(key)
+            attribute_options = elem._get_dataclass_setup_options(
+                decl_scan, key, dataclass_setup_arguments
+            )
+            dc_field = attribute_options._as_dataclass_field(
+                key, dataclass_setup_arguments
+            )
 
             return (key, annotation, dc_field)
         elif elem is not _NoArg.NO_ARG:
@@ -344,6 +370,44 @@ class _DCAttributeOptions:
 
     _has_dataclass_arguments: bool
 
+    def _get_dataclass_setup_options(
+        self,
+        decl_scan: _ClassScanMapperConfig,
+        key: str,
+        dataclass_setup_arguments: _DataclassArguments,
+    ) -> _AttributeOptions:
+        return self._attribute_options
+
+
+class _DataclassDefaultsDontSet(_DCAttributeOptions):
+    __slots__ = ()
+
+    _default_scalar_value: Any
+
+    def _get_dataclass_setup_options(
+        self,
+        decl_scan: _ClassScanMapperConfig,
+        key: str,
+        dataclass_setup_arguments: _DataclassArguments,
+    ) -> _AttributeOptions:
+
+        dataclasses_default = self._attribute_options.dataclasses_default
+        if (
+            dataclasses_default is not _NoArg.NO_ARG
+            and not callable(dataclasses_default)
+            and not getattr(
+                decl_scan.cls, "_sa_disable_descriptor_defaults", False
+            )
+        ):
+            self._default_scalar_value = (
+                self._attribute_options.dataclasses_default
+            )
+            return self._attribute_options._replace(
+                dataclasses_default=DONT_SET
+            )
+
+        return self._attribute_options
+
 
 class _MapsColumns(_DCAttributeOptions, _MappedAttribute[_T]):
     """interface for declarative-capable construct that delivers one or more
diff --git a/lib/sqlalchemy/orm/properties.py b/lib/sqlalchemy/orm/properties.py
index 2923ca6e4f5fc4ad9fa88099fe415f043cecb242..6e4f1cf8470c86c6782cff72799225db904e5592 100644 (file)
@@ -36,6 +36,7 @@ from .descriptor_props import CompositeProperty
 from .descriptor_props import ConcreteInheritedProperty
 from .descriptor_props import SynonymProperty
 from .interfaces import _AttributeOptions
+from .interfaces import _DataclassDefaultsDontSet
 from .interfaces import _DEFAULT_ATTRIBUTE_OPTIONS
 from .interfaces import _IntrospectsAnnotations
 from .interfaces import _MapsColumns
@@ -96,6 +97,7 @@ __all__ = [
 
 @log.class_logger
 class ColumnProperty(
+    _DataclassDefaultsDontSet,
     _MapsColumns[_T],
     StrategizedProperty[_T],
     _IntrospectsAnnotations,
@@ -130,6 +132,7 @@ class ColumnProperty(
         "comparator_factory",
         "active_history",
         "expire_on_flush",
+        "_default_scalar_value",
         "_creation_order",
         "_is_polymorphic_discriminator",
         "_mapped_by_synonym",
@@ -149,6 +152,7 @@ class ColumnProperty(
         raiseload: bool = False,
         comparator_factory: Optional[Type[PropComparator[_T]]] = None,
         active_history: bool = False,
+        default_scalar_value: Any = None,
         expire_on_flush: bool = True,
         info: Optional[_InfoType] = None,
         doc: Optional[str] = None,
@@ -173,6 +177,7 @@ class ColumnProperty(
             else self.__class__.Comparator
         )
         self.active_history = active_history
+        self._default_scalar_value = default_scalar_value
         self.expire_on_flush = expire_on_flush
 
         if info is not None:
@@ -324,6 +329,7 @@ class ColumnProperty(
             deferred=self.deferred,
             group=self.group,
             active_history=self.active_history,
+            default_scalar_value=self._default_scalar_value,
         )
 
     def merge(
@@ -505,6 +511,7 @@ class MappedSQLExpression(ColumnProperty[_T], _DeclarativeMapped[_T]):
 
 
 class MappedColumn(
+    _DataclassDefaultsDontSet,
     _IntrospectsAnnotations,
     _MapsColumns[_T],
     _DeclarativeMapped[_T],
@@ -534,6 +541,7 @@ class MappedColumn(
         "deferred_group",
         "deferred_raiseload",
         "active_history",
+        "_default_scalar_value",
         "_attribute_options",
         "_has_dataclass_arguments",
         "_use_existing_column",
@@ -564,12 +572,11 @@ class MappedColumn(
             )
         )
 
-        insert_default = kw.pop("insert_default", _NoArg.NO_ARG)
+        insert_default = kw.get("insert_default", _NoArg.NO_ARG)
         self._has_insert_default = insert_default is not _NoArg.NO_ARG
+        self._default_scalar_value = _NoArg.NO_ARG
 
-        if self._has_insert_default:
-            kw["default"] = insert_default
-        elif attr_opts.dataclasses_default is not _NoArg.NO_ARG:
+        if attr_opts.dataclasses_default is not _NoArg.NO_ARG:
             kw["default"] = attr_opts.dataclasses_default
 
         self.deferred_group = kw.pop("deferred_group", None)
@@ -578,7 +585,13 @@ class MappedColumn(
         self.active_history = kw.pop("active_history", False)
 
         self._sort_order = kw.pop("sort_order", _NoArg.NO_ARG)
+
+        # note that this populates "default" into the Column, so that if
+        # we are a dataclass and "default" is a dataclass default, it is still
+        # used as a Core-level default for the Column in addition to its
+        # dataclass role
         self.column = cast("Column[_T]", Column(*arg, **kw))
+
         self.foreign_keys = self.column.foreign_keys
         self._has_nullable = "nullable" in kw and kw.get("nullable") not in (
             None,
@@ -600,6 +613,7 @@ class MappedColumn(
         new._has_dataclass_arguments = self._has_dataclass_arguments
         new._use_existing_column = self._use_existing_column
         new._sort_order = self._sort_order
+        new._default_scalar_value = self._default_scalar_value
         util.set_creation_order(new)
         return new
 
@@ -615,7 +629,11 @@ class MappedColumn(
                 self.deferred_group or self.deferred_raiseload
             )
 
-        if effective_deferred or self.active_history:
+        if (
+            effective_deferred
+            or self.active_history
+            or self._default_scalar_value is not _NoArg.NO_ARG
+        ):
             return ColumnProperty(
                 self.column,
                 deferred=effective_deferred,
@@ -623,6 +641,11 @@ class MappedColumn(
                 raiseload=self.deferred_raiseload,
                 attribute_options=self._attribute_options,
                 active_history=self.active_history,
+                default_scalar_value=(
+                    self._default_scalar_value
+                    if self._default_scalar_value is not _NoArg.NO_ARG
+                    else None
+                ),
             )
         else:
             return None
@@ -774,13 +797,19 @@ class MappedColumn(
             use_args_from = None
 
         if use_args_from is not None:
+
             if (
-                not self._has_insert_default
-                and use_args_from.column.default is not None
+                self._has_insert_default
+                or self._attribute_options.dataclasses_default
+                is not _NoArg.NO_ARG
             ):
-                self.column.default = None
+                omit_defaults = True
+            else:
+                omit_defaults = False
 
-            use_args_from.column._merge(self.column)
+            use_args_from.column._merge(
+                self.column, omit_defaults=omit_defaults
+            )
             sqltype = self.column.type
 
             if (
diff --git a/lib/sqlalchemy/orm/relationships.py b/lib/sqlalchemy/orm/relationships.py
index 390ea7aee49f4be54600cfa08a259de6fc202b6f..3c46d26502af952e14858a772aafbe431c476432 100644 (file)
@@ -56,6 +56,7 @@ from .base import PassiveFlag
 from .base import state_str
 from .base import WriteOnlyMapped
 from .interfaces import _AttributeOptions
+from .interfaces import _DataclassDefaultsDontSet
 from .interfaces import _IntrospectsAnnotations
 from .interfaces import MANYTOMANY
 from .interfaces import MANYTOONE
@@ -81,6 +82,7 @@ from ..sql import visitors
 from ..sql._typing import _ColumnExpressionArgument
 from ..sql._typing import _HasClauseElement
 from ..sql.annotation import _safe_annotate
+from ..sql.base import _NoArg
 from ..sql.elements import ColumnClause
 from ..sql.elements import ColumnElement
 from ..sql.util import _deep_annotate
@@ -340,7 +342,10 @@ class _RelationshipArgs(NamedTuple):
 
 @log.class_logger
 class RelationshipProperty(
-    _IntrospectsAnnotations, StrategizedProperty[_T], log.Identified
+    _DataclassDefaultsDontSet,
+    _IntrospectsAnnotations,
+    StrategizedProperty[_T],
+    log.Identified,
 ):
     """Describes an object property that holds a single item or list
     of items that correspond to a related database table.
@@ -454,6 +459,15 @@ class RelationshipProperty(
             _StringRelationshipArg("back_populates", back_populates, None),
         )
 
+        if self._attribute_options.dataclasses_default not in (
+            _NoArg.NO_ARG,
+            None,
+        ):
+            raise sa_exc.ArgumentError(
+                "Only 'None' is accepted as dataclass "
+                "default for a relationship()"
+            )
+
         self.post_update = post_update
         self.viewonly = viewonly
         if viewonly:
@@ -2187,6 +2201,18 @@ class RelationshipProperty(
                 dependency._DependencyProcessor.from_relationship
             )(self)
 
+        if (
+            self.uselist
+            and self._attribute_options.dataclasses_default
+            is not _NoArg.NO_ARG
+        ):
+            raise sa_exc.ArgumentError(
+                f"On relationship {self}, the dataclass default for "
+                "relationship may only be set for "
+                "a relationship that references a scalar value, i.e. "
+                "many-to-one or explicitly uselist=False"
+            )
+
     @util.memoized_property
     def _use_get(self) -> bool:
         """memoize the 'use_get' attribute of this RelationshipLoader's
diff --git a/lib/sqlalchemy/orm/strategies.py b/lib/sqlalchemy/orm/strategies.py
index 8b89eb4523819cd5514750cb539db2eb7ddc17a5..44718689115c4df9dc5677100bd846da4676bae7 100644 (file)
@@ -77,6 +77,7 @@ def _register_attribute(
     proxy_property=None,
     active_history=False,
     impl_class=None,
+    default_scalar_value=None,
     **kw,
 ):
     listen_hooks = []
@@ -138,6 +139,7 @@ def _register_attribute(
                 typecallable=typecallable,
                 callable_=callable_,
                 active_history=active_history,
+                default_scalar_value=default_scalar_value,
                 impl_class=impl_class,
                 send_modified_events=not useobject or not prop.viewonly,
                 doc=prop.doc,
@@ -257,6 +259,7 @@ class _ColumnLoader(LoaderStrategy):
             useobject=False,
             compare_function=coltype.compare_values,
             active_history=active_history,
+            default_scalar_value=self.parent_property._default_scalar_value,
         )
 
     def create_row_processor(
@@ -370,6 +373,7 @@ class _ExpressionColumnLoader(_ColumnLoader):
             useobject=False,
             compare_function=self.columns[0].type.compare_values,
             accepts_scalar_loader=False,
+            default_scalar_value=self.parent_property._default_scalar_value,
         )
 
 
@@ -455,6 +459,7 @@ class _DeferredColumnLoader(LoaderStrategy):
             compare_function=self.columns[0].type.compare_values,
             callable_=self._load_for_state,
             load_on_unexpire=False,
+            default_scalar_value=self.parent_property._default_scalar_value,
         )
 
     def setup_query(
diff --git a/lib/sqlalchemy/orm/writeonly.py b/lib/sqlalchemy/orm/writeonly.py
index 809fdd2b0e115bfd52f2b1279a1f83d7635cbfac..9a0193e9fa44eae3f8391309f213c859c03ab0ff 100644 (file)
@@ -39,6 +39,7 @@ from . import attributes
 from . import interfaces
 from . import relationships
 from . import strategies
+from .base import ATTR_EMPTY
 from .base import NEVER_SET
 from .base import object_mapper
 from .base import PassiveFlag
@@ -389,6 +390,17 @@ class _WriteOnlyAttributeImpl(
         c = self._get_collection_history(state, passive)
         return [(attributes.instance_state(x), x) for x in c.all_items]
 
+    def _default_value(
+        self, state: InstanceState[Any], dict_: _InstanceDict
+    ) -> Any:
+        value = None
+        for fn in self.dispatch.init_scalar:
+            ret = fn(state, value, dict_)
+            if ret is not ATTR_EMPTY:
+                value = ret
+
+        return value
+
     def _get_collection_history(
         self, state: InstanceState[Any], passive: PassiveFlag
     ) -> WriteOnlyHistory[Any]:
diff --git a/lib/sqlalchemy/sql/schema.py b/lib/sqlalchemy/sql/schema.py
index 8edc75b9512ff1ac579d167bdb21fc550e8eb43f..77047f10b633de22708654b42e2c343531c6f843 100644 (file)
@@ -2113,6 +2113,11 @@ class Column(DialectKWArgs, SchemaItem, ColumnClause[_T]):
             self._set_type(self.type)
 
         if insert_default is not _NoArg.NO_ARG:
+            if default is not _NoArg.NO_ARG:
+                raise exc.ArgumentError(
+                    "The 'default' and 'insert_default' parameters "
+                    "of Column are mutually exclusive"
+                )
             resolved_default = insert_default
         elif default is not _NoArg.NO_ARG:
             resolved_default = default
@@ -2523,8 +2528,10 @@ class Column(DialectKWArgs, SchemaItem, ColumnClause[_T]):
 
         return self._schema_item_copy(c)
 
-    def _merge(self, other: Column[Any]) -> None:
-        """merge the elements of another column into this one.
+    def _merge(
+        self, other: Column[Any], *, omit_defaults: bool = False
+    ) -> None:
+        """merge the elements of this column onto "other"
 
         this is used by ORM pep-593 merge and will likely need a lot
         of fixes.
@@ -2565,7 +2572,11 @@ class Column(DialectKWArgs, SchemaItem, ColumnClause[_T]):
             other.nullable = self.nullable
             other._user_defined_nullable = self._user_defined_nullable
 
-        if self.default is not None and other.default is None:
+        if (
+            not omit_defaults
+            and self.default is not None
+            and other.default is None
+        ):
             new_default = self.default._copy()
             new_default._set_parent(other)
 
diff --git a/test/orm/declarative/test_dc_transforms.py b/test/orm/declarative/test_dc_transforms.py
index 51a74d5afc5284cb456ad3c1b9ee70dee33ddc07..004a119acde082fa64725c4b4fc9793c4283908f 100644 (file)
@@ -46,6 +46,7 @@ from sqlalchemy.orm import registry as _RegistryType
 from sqlalchemy.orm import relationship
 from sqlalchemy.orm import Session
 from sqlalchemy.orm import synonym
+from sqlalchemy.orm.attributes import LoaderCallableStatus
 from sqlalchemy.sql.base import _NoArg
 from sqlalchemy.testing import AssertsCompiledSQL
 from sqlalchemy.testing import eq_
@@ -80,7 +81,9 @@ class DCTransformsTest(AssertsCompiledSQL, fixtures.TestBase):
                 _mad_before = True
                 metadata = _md
                 type_annotation_map = {
-                    str: String().with_variant(String(50), "mysql", "mariadb")
+                    str: String().with_variant(
+                        String(50), "mysql", "mariadb", "oracle"
+                    )
                 }
 
         else:
@@ -89,7 +92,9 @@ class DCTransformsTest(AssertsCompiledSQL, fixtures.TestBase):
                 _mad_before = False
                 metadata = _md
                 type_annotation_map = {
-                    str: String().with_variant(String(50), "mysql", "mariadb")
+                    str: String().with_variant(
+                        String(50), "mysql", "mariadb", "oracle"
+                    )
                 }
 
         yield Base
@@ -129,7 +134,7 @@ class DCTransformsTest(AssertsCompiledSQL, fixtures.TestBase):
                 args=["self", "data", "x", "bs"],
                 varargs=None,
                 varkw=None,
-                defaults=(None, mock.ANY),
+                defaults=(LoaderCallableStatus.DONT_SET, mock.ANY),
                 kwonlyargs=[],
                 kwonlydefaults=None,
                 annotations={},
@@ -141,7 +146,7 @@ class DCTransformsTest(AssertsCompiledSQL, fixtures.TestBase):
                 args=["self", "data", "x"],
                 varargs=None,
                 varkw=None,
-                defaults=(None,),
+                defaults=(LoaderCallableStatus.DONT_SET,),
                 kwonlyargs=[],
                 kwonlydefaults=None,
                 annotations={},
@@ -274,7 +279,7 @@ class DCTransformsTest(AssertsCompiledSQL, fixtures.TestBase):
                 args=["self", "data", "x", "bs"],
                 varargs=None,
                 varkw=None,
-                defaults=(None, mock.ANY),
+                defaults=(LoaderCallableStatus.DONT_SET, mock.ANY),
                 kwonlyargs=[],
                 kwonlydefaults=None,
                 annotations={},
@@ -286,7 +291,7 @@ class DCTransformsTest(AssertsCompiledSQL, fixtures.TestBase):
                 args=["self", "data", "x"],
                 varargs=None,
                 varkw=None,
-                defaults=(None,),
+                defaults=(LoaderCallableStatus.DONT_SET,),
                 kwonlyargs=[],
                 kwonlydefaults=None,
                 annotations={},
@@ -377,7 +382,9 @@ class DCTransformsTest(AssertsCompiledSQL, fixtures.TestBase):
 
     def test_combine_args_from_pep593(self, decl_base: Type[DeclarativeBase]):
         """test that we can set up column-level defaults separate from
-        dataclass defaults
+        dataclass defaults with a pep593 setup; however the dataclass
+        defaults need to override the insert_defaults so that they
+        take effect on INSERT
 
         """
         intpk = Annotated[int, mapped_column(primary_key=True)]
@@ -396,9 +403,20 @@ class DCTransformsTest(AssertsCompiledSQL, fixtures.TestBase):
             # we need this case for dataclasses that can't derive things
             # from Annotated yet at the typing level
             id: Mapped[intpk] = mapped_column(init=False)
+            name_plain: Mapped[str30] = mapped_column()
+            name_no_init: Mapped[str30] = mapped_column(init=False)
             name_none: Mapped[Optional[str30]] = mapped_column(default=None)
+            name_insert_none: Mapped[Optional[str30]] = mapped_column(
+                insert_default=None, init=False
+            )
             name: Mapped[str30] = mapped_column(default="hi")
+            name_insert: Mapped[str30] = mapped_column(
+                insert_default="hi", init=False
+            )
             name2: Mapped[s_str30] = mapped_column(default="there")
+            name2_insert: Mapped[s_str30] = mapped_column(
+                insert_default="there", init=False
+            )
             addresses: Mapped[List["Address"]] = relationship(  # noqa: F821
                 back_populates="user", default_factory=list
             )
@@ -414,15 +432,34 @@ class DCTransformsTest(AssertsCompiledSQL, fixtures.TestBase):
             )
 
         is_true(User.__table__.c.id.primary_key)
-        is_true(User.__table__.c.name_none.default.arg.compare(func.foo()))
-        is_true(User.__table__.c.name.default.arg.compare(func.foo()))
+
+        # the default from the Annotated overrides mapped_cols that have
+        # nothing for default or insert default
+        is_true(User.__table__.c.name_plain.default.arg.compare(func.foo()))
+        is_true(User.__table__.c.name_no_init.default.arg.compare(func.foo()))
+
+        # mapped cols that have None for default or insert default, that
+        # default overrides
+        is_true(User.__table__.c.name_none.default is None)
+        is_true(User.__table__.c.name_insert_none.default is None)
+
+        # mapped cols that have a value for default or insert default, that
+        # default overrides
+        is_true(User.__table__.c.name.default.arg == "hi")
+        is_true(User.__table__.c.name2.default.arg == "there")
+        is_true(User.__table__.c.name_insert.default.arg == "hi")
+        is_true(User.__table__.c.name2_insert.default.arg == "there")
+
         eq_(User.__table__.c.name2.server_default.arg, "some server default")
 
         is_true(Address.__table__.c.user_id.references(User.__table__.c.id))
-        u1 = User()
+        u1 = User(name_plain="name")
         eq_(u1.name_none, None)
+        eq_(u1.name_insert_none, None)
         eq_(u1.name, "hi")
         eq_(u1.name2, "there")
+        eq_(u1.name_insert, None)
+        eq_(u1.name2_insert, None)
 
     def test_inheritance(self, dc_decl_base: Type[MappedAsDataclass]):
         class Person(dc_decl_base):
@@ -825,7 +862,7 @@ class DCTransformsTest(AssertsCompiledSQL, fixtures.TestBase):
         eq_(a.call_no_init, 20)
 
         fields = {f.name: f for f in dataclasses.fields(A)}
-        eq_(fields["def_init"].default, 42)
+        eq_(fields["def_init"].default, LoaderCallableStatus.DONT_SET)
         eq_(fields["call_init"].default_factory, c10)
         eq_(fields["def_no_init"].default, dataclasses.MISSING)
         ne_(fields["def_no_init"].default_factory, dataclasses.MISSING)
@@ -1459,14 +1496,12 @@ class DataclassArgsTest(fixtures.TestBase):
         else:
             return args, args
 
-    @testing.fixture(params=["mapped_column", "synonym", "deferred"])
+    @testing.fixture(params=["mapped_column", "deferred"])
     def mapped_expr_constructor(self, request):
         name = request.param
 
         if name == "mapped_column":
             yield mapped_column(default=7, init=True)
-        elif name == "synonym":
-            yield synonym("some_int", default=7, init=True)
         elif name == "deferred":
             yield deferred(Column(Integer), default=7, init=True)
 
@@ -1620,18 +1655,19 @@ class DataclassArgsTest(fixtures.TestBase):
         with expect_raises(TypeError):
             cls("Some data", 5)
 
-        # we run real "dataclasses" on the class.  so with init=False, it
-        # doesn't touch what was there, and the SQLA default constructor
-        # gets put on.
+        # behavior change in 2.1, even if init=False we set descriptor
+        # defaults
+
         a1 = cls(data="some data")
         eq_(a1.data, "some data")
-        eq_(a1.x, None)
+
+        eq_(a1.x, 7)
 
         a1 = cls()
         eq_(a1.data, None)
 
-        # no constructor, it sets None for x...ok
-        eq_(a1.x, None)
+        # but this breaks for synonyms
+        eq_(a1.x, 7)
 
     def _assert_match_args(self, cls, create, dc_arguments):
         if not dc_arguments["kw_only"]:
@@ -1836,14 +1872,14 @@ class DataclassArgsTest(fixtures.TestBase):
             kw = {
                 "init": False,
                 "repr": False,
-                "default": False,
+                "default": None,
                 "default_factory": list,
                 "compare": True,
                 "kw_only": False,
                 "hash": False,
             }
             exp = interfaces._AttributeOptions(
-                False, False, False, list, True, False, False
+                False, False, None, list, True, False, False
             )
         else:
             kw = {}
@@ -2181,3 +2217,456 @@ class ReadOnlyAttrTest(fixtures.TestBase, testing.AssertsCompiledSQL):
         m3 = MyClass(data="foo")
         m3.const = "some const"
         eq_(m2, m3)
+
+
+class UseDescriptorDefaultsTest(fixtures.TestBase, testing.AssertsCompiledSQL):
+    """tests related to #12168"""
+
+    __dialect__ = "default"
+
+    @testing.fixture(params=[True, False])
+    def dc_decl_base(self, request, metadata):
+        _md = metadata
+
+        udd = request.param
+
+        class Base(MappedAsDataclass, DeclarativeBase):
+            use_descriptor_defaults = udd
+
+            if not use_descriptor_defaults:
+                _sa_disable_descriptor_defaults = True
+
+            metadata = _md
+            type_annotation_map = {
+                str: String().with_variant(
+                    String(50), "mysql", "mariadb", "oracle"
+                )
+            }
+
+        yield Base
+        Base.registry.dispose()
+
+    def test_mapped_column_default(self, dc_decl_base):
+
+        class MyClass(dc_decl_base):
+            __tablename__ = "a"
+
+            id: Mapped[int] = mapped_column(primary_key=True, init=False)
+            data: Mapped[str] = mapped_column(default="my_default")
+
+        mc = MyClass()
+        eq_(mc.data, "my_default")
+
+        if not MyClass.use_descriptor_defaults:
+            eq_(mc.__dict__["data"], "my_default")
+        else:
+            assert "data" not in mc.__dict__
+
+        eq_(MyClass.__table__.c.data.default.arg, "my_default")
+
+    def test_mapped_column_default_and_insert_default(self, dc_decl_base):
+        with expect_raises_message(
+            exc.ArgumentError,
+            "The 'default' and 'insert_default' parameters of "
+            "Column are mutually exclusive",
+        ):
+            mapped_column(default="x", insert_default="y")
+
+    def test_relationship_only_none_default(self):
+        with expect_raises_message(
+            exc.ArgumentError,
+            r"Only 'None' is accepted as dataclass "
+            r"default for a relationship\(\)",
+        ):
+            relationship(default="not none")
+
+    @testing.variation("uselist_type", ["implicit", "m2o_explicit"])
+    def test_relationship_only_nouselist_none_default(
+        self, dc_decl_base, uselist_type
+    ):
+        with expect_raises_message(
+            exc.ArgumentError,
+            rf"On relationship {'A.bs' if uselist_type.implicit else 'B.a'}, "
+            "the dataclass default for relationship "
+            "may only be set for a relationship that references a scalar "
+            "value, i.e. many-to-one or explicitly uselist=False",
+        ):
+
+            class A(dc_decl_base):
+                __tablename__ = "a"
+
+                id: Mapped[int] = mapped_column(primary_key=True)
+                data: Mapped[str]
+
+                if uselist_type.implicit:
+                    bs: Mapped[List["B"]] = relationship("B", default=None)
+
+            class B(dc_decl_base):
+                __tablename__ = "b"
+                id: Mapped[int] = mapped_column(primary_key=True)
+                a_id: Mapped[int] = mapped_column(ForeignKey("a.id"))
+                data: Mapped[str]
+
+                if uselist_type.m2o_explicit:
+                    a: Mapped[List[A]] = relationship(
+                        "A", uselist=True, default=None
+                    )
+
+            dc_decl_base.registry.configure()
+
+    def test_constructor_repr(self, dc_decl_base):
+
+        class A(dc_decl_base):
+            __tablename__ = "a"
+
+            id: Mapped[int] = mapped_column(primary_key=True, init=False)
+            data: Mapped[str]
+
+            x: Mapped[Optional[int]] = mapped_column(default=None)
+
+            bs: Mapped[List["B"]] = relationship(  # noqa: F821
+                default_factory=list
+            )
+
+        class B(dc_decl_base):
+            __tablename__ = "b"
+
+            id: Mapped[int] = mapped_column(primary_key=True, init=False)
+            data: Mapped[str]
+            a_id: Mapped[Optional[int]] = mapped_column(
+                ForeignKey("a.id"), init=False
+            )
+            x: Mapped[Optional[int]] = mapped_column(default=None)
+
+        A.__qualname__ = "some_module.A"
+        B.__qualname__ = "some_module.B"
+
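+        # with descriptor defaults, the generated __init__ advertises the
+        # DONT_SET sentinel in place of the scalar default value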
+        eq_(
+            pyinspect.getfullargspec(A.__init__),
+            pyinspect.FullArgSpec(
+                args=["self", "data", "x", "bs"],
+                varargs=None,
+                varkw=None,
+                defaults=(
+                    (LoaderCallableStatus.DONT_SET, mock.ANY)
+                    if A.use_descriptor_defaults
+                    else (None, mock.ANY)
+                ),
+                kwonlyargs=[],
+                kwonlydefaults=None,
+                annotations={},
+            ),
+        )
+        eq_(
+            pyinspect.getfullargspec(B.__init__),
+            pyinspect.FullArgSpec(
+                args=["self", "data", "x"],
+                varargs=None,
+                varkw=None,
+                defaults=(
+                    (LoaderCallableStatus.DONT_SET,)
+                    if B.use_descriptor_defaults
+                    else (None,)
+                ),
+                kwonlyargs=[],
+                kwonlydefaults=None,
+                annotations={},
+            ),
+        )
+
+        a2 = A("10", x=5, bs=[B("data1"), B("data2", x=12)])
+        eq_(
+            repr(a2),
+            "some_module.A(id=None, data='10', x=5, "
+            "bs=[some_module.B(id=None, data='data1', a_id=None, x=None), "
+            "some_module.B(id=None, data='data2', a_id=None, x=12)])",
+        )
+
+        a3 = A("data")
+        eq_(repr(a3), "some_module.A(id=None, data='data', x=None, bs=[])")
+
+    def test_defaults_if_no_init_dc_level(
+        self, dc_decl_base: Type[MappedAsDataclass]
+    ):
+
+        class MyClass(dc_decl_base, init=False):
+            __tablename__ = "a"
+
+            id: Mapped[int] = mapped_column(primary_key=True, init=False)
+            data: Mapped[str] = mapped_column(default="default_status")
+
+        mc = MyClass()
+        if MyClass.use_descriptor_defaults:
+            # 2.1 behavior change: the default is honored even though the
+            # dataclass-generated __init__ is disabled
+            eq_(mc.data, "default_status")
+        else:
+            # legacy behavior: the default was not applied when init=False
+            eq_(mc.data, None)
+
+    def test_defaults_w_no_init_attr_level(
+        self, dc_decl_base: Type[MappedAsDataclass]
+    ):
+
+        class MyClass(dc_decl_base):
+            __tablename__ = "a"
+
+            id: Mapped[int] = mapped_column(primary_key=True, init=False)
+            data: Mapped[str] = mapped_column(
+                default="default_status", init=False
+            )
+
+        mc = MyClass()
+        eq_(mc.data, "default_status")
+
+        if MyClass.use_descriptor_defaults:
+            assert "data" not in mc.__dict__
+        else:
+            eq_(mc.__dict__["data"], "default_status")
+
+    @testing.variation("use_attr_init", [True, False])
+    def test_fk_set_scenario(self, dc_decl_base, use_attr_init):
+        if use_attr_init:
+            attr_init_kw = {}
+        else:
+            attr_init_kw = {"init": False}
+
+        class Parent(dc_decl_base):
+            __tablename__ = "parent"
+            id: Mapped[int] = mapped_column(
+                primary_key=True, autoincrement=False
+            )
+
+        class Child(dc_decl_base):
+            __tablename__ = "child"
+            id: Mapped[int] = mapped_column(primary_key=True)
+            parent_id: Mapped[Optional[int]] = mapped_column(
+                ForeignKey("parent.id"), default=None
+            )
+            parent: Mapped[Optional[Parent]] = relationship(
+                default=None, **attr_init_kw
+            )
+
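+        # exercises the FK-assignment pattern: an explicitly assigned
+        # parent_id should not be cancelled out by the relationship's
+        # dataclass default of None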
+        dc_decl_base.metadata.create_all(testing.db)
+
+        with Session(testing.db) as sess:
+            p1 = Parent(id=14)
+            sess.add(p1)
+            sess.flush()
+
+            # parent_id is set explicitly while parent is left at its
+            # default; with descriptor defaults the FK value is kept
+            c1 = Child(id=7, parent_id=14)
+            sess.add(c1)
+            sess.flush()
+
+            if Parent.use_descriptor_defaults:
+                assert c1.parent is p1
+            else:
+                assert c1.parent is None
+
+    @testing.variation("use_attr_init", [True, False])
+    def test_merge_scenario(self, dc_decl_base, use_attr_init):
+        if use_attr_init:
+            attr_init_kw = {}
+        else:
+            attr_init_kw = {"init": False}
+
+        class MyClass(dc_decl_base):
+            __tablename__ = "myclass"
+
+            id: Mapped[int] = mapped_column(
+                primary_key=True, autoincrement=False
+            )
+            name: Mapped[str]
+            status: Mapped[str] = mapped_column(
+                default="default_status", **attr_init_kw
+            )
+
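+        # exercises the Session.merge() scenario: an un-set dataclass
+        # default should not overwrite a value that was set explicitly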
+        dc_decl_base.metadata.create_all(testing.db)
+
+        with Session(testing.db) as sess:
+            if use_attr_init:
+                u1 = MyClass(id=1, name="x", status="custom_status")
+            else:
+                u1 = MyClass(id=1, name="x")
+                u1.status = "custom_status"
+            sess.add(u1)
+
+            sess.flush()
+
+            u2 = sess.merge(MyClass(id=1, name="y"))
+            is_(u2, u1)
+            eq_(u2.name, "y")
+
+            if MyClass.use_descriptor_defaults:
+                eq_(u2.status, "custom_status")
+            else:
+                # legacy behavior: merge saw the default placed in
+                # __dict__ and overwrote the persisted value
+                eq_(u2.status, "default_status")
+
+            if use_attr_init:
+                u3 = sess.merge(
+                    MyClass(id=1, name="z", status="default_status")
+                )
+            else:
+                mc = MyClass(id=1, name="z")
+                mc.status = "default_status"
+                u3 = sess.merge(mc)
+
+            is_(u3, u1)
+            eq_(u3.name, "z")
+
+            # the value was passed explicitly, so merge applies it in
+            # both modes
+            eq_(u3.status, "default_status")
+
+
+class SynonymDescriptorDefaultTest(AssertsCompiledSQL, fixtures.TestBase):
+    """test new behaviors for synonyms given dataclasses descriptor defaults
+    introduced in 2.1.  Related to #12168"""
+
+    __dialect__ = "default"
+
+    @testing.fixture(params=[True, False])
+    def dc_decl_base(self, request, metadata):
+        _md = metadata
+
+        udd = request.param
+
+        class Base(MappedAsDataclass, DeclarativeBase):
+            use_descriptor_defaults = udd
+
+            if not use_descriptor_defaults:
+                _sa_disable_descriptor_defaults = True
+
+            metadata = _md
+            type_annotation_map = {
+                str: String().with_variant(
+                    String(50), "mysql", "mariadb", "oracle"
+                )
+            }
+
+        yield Base
+        Base.registry.dispose()
+
+    def test_syn_matches_col_default(
+        self, dc_decl_base: Type[MappedAsDataclass]
+    ):
+        class A(dc_decl_base):
+            __tablename__ = "a"
+
+            id: Mapped[int] = mapped_column(primary_key=True, init=False)
+            some_int: Mapped[int] = mapped_column(default=7, init=False)
+            some_syn: Mapped[int] = synonym("some_int", default=7)
+
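+        # the synonym's dataclass default matches the proxied column's
+        # default, so both resolve to 7 when un-set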
+        a1 = A()
+        eq_(a1.some_syn, 7)
+        eq_(a1.some_int, 7)
+
+        a1 = A(some_syn=10)
+        eq_(a1.some_syn, 10)
+        eq_(a1.some_int, 10)
+
+    @testing.variation("some_int_init", [True, False])
+    def test_syn_does_not_match_col_default(
+        self, dc_decl_base: Type[MappedAsDataclass], some_int_init
+    ):
+        with (
+            expect_raises_message(
+                exc.ArgumentError,
+                "Synonym 'some_syn' default argument 10 must match the "
+                "dataclasses default value of proxied object 'some_int', "
+                "currently 7",
+            )
+            if dc_decl_base.use_descriptor_defaults
+            else contextlib.nullcontext()
+        ):
+
+            class A(dc_decl_base):
+                __tablename__ = "a"
+
+                id: Mapped[int] = mapped_column(primary_key=True, init=False)
+                some_int: Mapped[int] = mapped_column(
+                    default=7, init=bool(some_int_init)
+                )
+                some_syn: Mapped[int] = synonym("some_int", default=10)
+
+    @testing.variation("some_int_init", [True, False])
+    def test_syn_requires_col_default(
+        self, dc_decl_base: Type[MappedAsDataclass], some_int_init
+    ):
+        with (
+            expect_raises_message(
+                exc.ArgumentError,
+                "Synonym 'some_syn' default argument 10 must match the "
+                "dataclasses default value of proxied object 'some_int', "
+                "currently not set",
+            )
+            if dc_decl_base.use_descriptor_defaults
+            else contextlib.nullcontext()
+        ):
+
+            class A(dc_decl_base):
+                __tablename__ = "a"
+
+                id: Mapped[int] = mapped_column(primary_key=True, init=False)
+                some_int: Mapped[int] = mapped_column(init=bool(some_int_init))
+                some_syn: Mapped[int] = synonym("some_int", default=10)
+
+    @testing.variation("intermediary_init", [True, False])
+    @testing.variation("some_syn_2_first", [True, False])
+    def test_syn_matches_syn_default_one(
+        self,
+        intermediary_init,
+        some_syn_2_first,
+        dc_decl_base: Type[MappedAsDataclass],
+    ):
+        class A(dc_decl_base):
+            __tablename__ = "a"
+
+            id: Mapped[int] = mapped_column(primary_key=True, init=False)
+
+            if some_syn_2_first:
+                some_syn_2: Mapped[int] = synonym("some_syn", default=7)
+
+            some_int: Mapped[int] = mapped_column(default=7, init=False)
+            some_syn: Mapped[int] = synonym(
+                "some_int", default=7, init=bool(intermediary_init)
+            )
+
+            if not some_syn_2_first:
+                some_syn_2: Mapped[int] = synonym("some_syn", default=7)
+
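+        # a synonym chained to another synonym; all three attributes
+        # share the same default of 7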
+        a1 = A()
+        eq_(a1.some_syn_2, 7)
+        eq_(a1.some_syn, 7)
+        eq_(a1.some_int, 7)
+
+        a1 = A(some_syn_2=10)
+
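+        # legacy behavior depends on declaration order: a later dataclass
+        # default could overwrite the value passed for the synonym, while
+        # descriptor defaults keep the passed value in all cases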
+        if not A.use_descriptor_defaults:
+            if some_syn_2_first:
+                eq_(a1.some_syn_2, 7)
+                eq_(a1.some_syn, 7)
+                eq_(a1.some_int, 7)
+            else:
+                eq_(a1.some_syn_2, 10)
+                eq_(a1.some_syn, 10)
+                eq_(a1.some_int, 10)
+        else:
+            eq_(a1.some_syn_2, 10)
+            eq_(a1.some_syn, 10)
+            eq_(a1.some_int, 10)
+
+        # pass both some_syn and some_syn_2 to the constructor, which is
+        # not something that makes sense to do in practice.  the 2.1
+        # behavior is at least consistent; chaining multiple synonyms with
+        # more than one of them in init remains a bad idea
+        if intermediary_init:
+            a1 = A(some_syn_2=10, some_syn=12)
+            if some_syn_2_first:
+                eq_(a1.some_syn_2, 12)
+                eq_(a1.some_syn, 12)
+                eq_(a1.some_int, 12)
+            else:
+                eq_(a1.some_syn_2, 10)
+                eq_(a1.some_syn, 10)
+                eq_(a1.some_int, 10)
index b7a2dedbf1c260d0e8fbac4db143d2e34ba3e667..ac43b1bf6206cb919d42ad461fb5a8235dea565e 100644 (file)
@@ -4799,11 +4799,13 @@ class ColumnDefaultsTest(fixtures.TestBase):
         c = self._fixture(insert_default="y")
         assert c.default.arg == "y"
 
-    def test_column_insert_default_predecende_on_default(self):
-        c = self._fixture(insert_default="x", default="y")
-        assert c.default.arg == "x"
-        c = self._fixture(default="y", insert_default="x")
-        assert c.default.arg == "x"
+    def test_column_insert_default_mutually_exclusive(self):
+        with expect_raises_message(
+            exc.ArgumentError,
+            "The 'default' and 'insert_default' parameters of "
+            "Column are mutually exclusive",
+        ):
+            self._fixture(insert_default="x", default="y")
 
 
 class ColumnOptionsTest(fixtures.TestBase):