apply underscores to ORM class and def names
author Mike Bayer <mike_mp@zzzcomputing.com>
Mon, 18 Nov 2024 18:43:08 +0000 (13:43 -0500)
committer Mike Bayer <mike_mp@zzzcomputing.com>
Mon, 18 Nov 2024 19:56:07 +0000 (14:56 -0500)
The criteria used here are:

* The class or def should definitely not be used directly by
  a third party
* The class would never be the subject of an `isinstance()` check
* The class is not exposed as the type of a return value for a public
  function

A sweep through class and function names in the ORM renames many classes
and functions never intended to be public so that their names begin with
an underscore. This reduces ambiguity as to which APIs third party
applications and extensions are meant to target. Third parties are
encouraged to propose new public APIs in Discussions where needed to
replace those clarified as private.
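
As a rough illustration of how these criteria separate public names from
private ones, consider the following sketch (the names are hypothetical,
not taken from this patch):

    class Result:
        """Stays public: returned by a public function, and a plausible
        target of third-party isinstance() checks."""

    class _ResultCompileState:
        """Underscored: internal machinery that third parties should not
        construct, isinstance()-check, or receive from a public function."""

    def fetch() -> Result:
        """Public function; its return type must therefore stay public."""
        _ResultCompileState()  # internal detail, free to be renamed
        return Result()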

Fixes: #10497
Change-Id: I9900e759be8510e01bba2c25984b9f30dc1fa9c0
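
For third parties that had been importing one of the affected internals,
a guarded import can bridge the rename across releases while a public
replacement is discussed (illustrative only; relying on underscored names
remains unsupported):

    try:
        # SQLAlchemy 2.1 and later (this change)
        from sqlalchemy.orm.bulk_persistence import (
            _BulkUDCompileState as BulkUDCompileState,
        )
    except ImportError:
        # releases prior to this rename
        from sqlalchemy.orm.bulk_persistence import BulkUDCompileState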

49 files changed:
doc/build/changelog/unreleased_21/10497.rst [new file with mode: 0644]
lib/sqlalchemy/ext/horizontal_shard.py
lib/sqlalchemy/ext/hybrid.py
lib/sqlalchemy/ext/instrumentation.py
lib/sqlalchemy/orm/_typing.py
lib/sqlalchemy/orm/attributes.py
lib/sqlalchemy/orm/bulk_persistence.py
lib/sqlalchemy/orm/clsregistry.py
lib/sqlalchemy/orm/collections.py
lib/sqlalchemy/orm/context.py
lib/sqlalchemy/orm/decl_api.py
lib/sqlalchemy/orm/decl_base.py
lib/sqlalchemy/orm/dependency.py
lib/sqlalchemy/orm/descriptor_props.py
lib/sqlalchemy/orm/dynamic.py
lib/sqlalchemy/orm/identity.py
lib/sqlalchemy/orm/instrumentation.py
lib/sqlalchemy/orm/interfaces.py
lib/sqlalchemy/orm/loading.py
lib/sqlalchemy/orm/mapper.py
lib/sqlalchemy/orm/path_registry.py
lib/sqlalchemy/orm/persistence.py
lib/sqlalchemy/orm/properties.py
lib/sqlalchemy/orm/query.py
lib/sqlalchemy/orm/relationships.py
lib/sqlalchemy/orm/session.py
lib/sqlalchemy/orm/state.py
lib/sqlalchemy/orm/strategies.py
lib/sqlalchemy/orm/strategy_options.py
lib/sqlalchemy/orm/sync.py
lib/sqlalchemy/orm/unitofwork.py
lib/sqlalchemy/orm/util.py
lib/sqlalchemy/orm/writeonly.py
test/ext/test_extendedattr.py
test/orm/declarative/test_clsregistry.py
test/orm/declarative/test_tm_future_annotations_sync.py
test/orm/declarative/test_typed_mapping.py
test/orm/test_attributes.py
test/orm/test_bind.py
test/orm/test_collection.py
test/orm/test_deprecations.py
test/orm/test_expire.py
test/orm/test_froms.py
test/orm/test_instrumentation.py
test/orm/test_mapper.py
test/orm/test_rel_fn.py
test/orm/test_selectin_relations.py
test/orm/test_session.py
test/orm/test_sync.py

diff --git a/doc/build/changelog/unreleased_21/10497.rst b/doc/build/changelog/unreleased_21/10497.rst
new file mode 100644 (file)
index 0000000..f3e4a91
--- /dev/null
+++ b/doc/build/changelog/unreleased_21/10497.rst
@@ -0,0 +1,10 @@
+.. change::
+    :tags: change, orm
+    :tickets: 10497
+
+    A sweep through class and function names in the ORM renames many
+    classes and functions never intended to be public so that their
+    names begin with an underscore. This reduces ambiguity as to
+    which APIs third party applications and extensions are meant to
+    target. Third parties are encouraged to propose new public APIs
+    in Discussions where needed to replace those clarified as private.
diff --git a/lib/sqlalchemy/ext/horizontal_shard.py b/lib/sqlalchemy/ext/horizontal_shard.py
index 71fda2fb3954ac469cd74691f481fdd1717e8296..53a8f5ae7cdd103a134fc7721c8cc81bf95d158c 100644 (file)
@@ -62,7 +62,7 @@ if TYPE_CHECKING:
     from ..engine.result import Result
     from ..orm import LoaderCallableStatus
     from ..orm._typing import _O
-    from ..orm.bulk_persistence import BulkUDCompileState
+    from ..orm.bulk_persistence import _BulkUDCompileState
     from ..orm.context import QueryContext
     from ..orm.session import _EntityBindKey
     from ..orm.session import _SessionBind
@@ -433,8 +433,8 @@ def execute_and_instances(
         None,
         QueryContext.default_load_options,
         Type[QueryContext.default_load_options],
-        BulkUDCompileState.default_update_options,
-        Type[BulkUDCompileState.default_update_options],
+        _BulkUDCompileState.default_update_options,
+        Type[_BulkUDCompileState.default_update_options],
     ]
 
     if orm_context.is_select:
diff --git a/lib/sqlalchemy/ext/hybrid.py b/lib/sqlalchemy/ext/hybrid.py
index b61f24150288940b8c34429b7e54e51e32cfbf89..8de6128f20d2f977dd484e84fb09512301c03272 100644 (file)
@@ -1401,7 +1401,7 @@ class hybrid_property(interfaces.InspectionAttrInfo, ORMDescriptor[_T]):
     def _get_comparator(
         self, comparator: Any
     ) -> Callable[[Any], _HybridClassLevelAccessor[_T]]:
-        proxy_attr = attributes.create_proxied_attribute(self)
+        proxy_attr = attributes._create_proxied_attribute(self)
 
         def expr_comparator(
             owner: Type[object],
diff --git a/lib/sqlalchemy/ext/instrumentation.py b/lib/sqlalchemy/ext/instrumentation.py
index 5f3c71282b7b6c74369cc706848d4305cc8aba5d..0e58cda384e336094ca88162c291e9e8ba0d42d8 100644 (file)
@@ -275,7 +275,7 @@ class InstrumentationManager:
         delattr(class_, key)
 
     def instrument_collection_class(self, class_, key, collection_class):
-        return collections.prepare_instrumentation(collection_class)
+        return collections._prepare_instrumentation(collection_class)
 
     def get_instance_dict(self, class_, instance):
         return instance.__dict__
diff --git a/lib/sqlalchemy/orm/_typing.py b/lib/sqlalchemy/orm/_typing.py
index 95fbd9e7e250105cbbf0ac64f6a12ef61d0f733c..914515203a73af8a2ac9ee4ba4f0c82534100767 100644 (file)
@@ -30,9 +30,9 @@ from ..sql.elements import ColumnElement
 from ..util.typing import TypeGuard
 
 if TYPE_CHECKING:
-    from .attributes import AttributeImpl
-    from .attributes import CollectionAttributeImpl
-    from .attributes import HasCollectionAdapter
+    from .attributes import _AttributeImpl
+    from .attributes import _CollectionAttributeImpl
+    from .attributes import _HasCollectionAdapter
     from .attributes import QueryableAttribute
     from .base import PassiveFlag
     from .decl_api import registry as _registry_type
@@ -159,12 +159,12 @@ if TYPE_CHECKING:
     ) -> TypeGuard[RelationshipProperty[Any]]: ...
 
     def is_collection_impl(
-        impl: AttributeImpl,
-    ) -> TypeGuard[CollectionAttributeImpl]: ...
+        impl: _AttributeImpl,
+    ) -> TypeGuard[_CollectionAttributeImpl]: ...
 
     def is_has_collection_adapter(
-        impl: AttributeImpl,
-    ) -> TypeGuard[HasCollectionAdapter]: ...
+        impl: _AttributeImpl,
+    ) -> TypeGuard[_HasCollectionAdapter]: ...
 
 else:
     insp_is_mapper_property = operator.attrgetter("is_property")
diff --git a/lib/sqlalchemy/orm/attributes.py b/lib/sqlalchemy/orm/attributes.py
index 33cca564927e7d96e3ae001068ef74add5f132b6..de02141bda20b44f4733d8ffc17a61e3c654c948 100644 (file)
@@ -106,7 +106,7 @@ if TYPE_CHECKING:
     from .relationships import RelationshipProperty
     from .state import InstanceState
     from .util import AliasedInsp
-    from .writeonly import WriteOnlyAttributeImpl
+    from .writeonly import _WriteOnlyAttributeImpl
     from ..event.base import _Dispatch
     from ..sql._typing import _ColumnExpressionArgument
     from ..sql._typing import _DMLColumnArgument
@@ -184,7 +184,7 @@ class QueryableAttribute(
     class_: _ExternalEntityType[Any]
     key: str
     parententity: _InternalEntityType[Any]
-    impl: AttributeImpl
+    impl: _AttributeImpl
     comparator: interfaces.PropComparator[_T_co]
     _of_type: Optional[_InternalEntityType[Any]]
     _extra_criteria: Tuple[ColumnElement[bool], ...]
@@ -200,7 +200,7 @@ class QueryableAttribute(
         key: str,
         parententity: _InternalEntityType[_O],
         comparator: interfaces.PropComparator[_T_co],
-        impl: Optional[AttributeImpl] = None,
+        impl: Optional[_AttributeImpl] = None,
         of_type: Optional[_InternalEntityType[Any]] = None,
         extra_criteria: Tuple[ColumnElement[bool], ...] = (),
     ):
@@ -567,7 +567,7 @@ class InstrumentedAttribute(QueryableAttribute[_T_co]):
 
 
 @dataclasses.dataclass(frozen=True)
-class AdHocHasEntityNamespace(HasCacheKey):
+class _AdHocHasEntityNamespace(HasCacheKey):
     _traverse_internals: ClassVar[_TraverseInternalsType] = [
         ("_entity_namespace", InternalTraversal.dp_has_cache_key),
     ]
@@ -583,7 +583,7 @@ class AdHocHasEntityNamespace(HasCacheKey):
         return self._entity_namespace.entity_namespace
 
 
-def create_proxied_attribute(
+def _create_proxied_attribute(
     descriptor: Any,
 ) -> Callable[..., QueryableAttribute[Any]]:
     """Create an QueryableAttribute / user descriptor hybrid.
@@ -655,7 +655,7 @@ def create_proxied_attribute(
             else:
                 # used by hybrid attributes which try to remain
                 # agnostic of any ORM concepts like mappers
-                return AdHocHasEntityNamespace(self._parententity)
+                return _AdHocHasEntityNamespace(self._parententity)
 
         @property
         def property(self):
@@ -791,7 +791,7 @@ class AttributeEventToken:
 
     __slots__ = "impl", "op", "parent_token"
 
-    def __init__(self, attribute_impl: AttributeImpl, op: util.symbol):
+    def __init__(self, attribute_impl: _AttributeImpl, op: util.symbol):
         self.impl = attribute_impl
         self.op = op
         self.parent_token = self.impl.parent_token
@@ -815,7 +815,7 @@ AttributeEvent = AttributeEventToken  # legacy
 Event = AttributeEventToken  # legacy
 
 
-class AttributeImpl:
+class _AttributeImpl:
     """internal implementation for instrumented attributes."""
 
     collection: bool
@@ -1202,7 +1202,7 @@ class AttributeImpl:
         return value
 
 
-class ScalarAttributeImpl(AttributeImpl):
+class _ScalarAttributeImpl(_AttributeImpl):
     """represents a scalar value-holding InstrumentedAttribute."""
 
     default_accepts_scalar_loader = True
@@ -1305,7 +1305,7 @@ class ScalarAttributeImpl(AttributeImpl):
             fn(state, value, initiator or self._remove_token)
 
 
-class ScalarObjectAttributeImpl(ScalarAttributeImpl):
+class _ScalarObjectAttributeImpl(_ScalarAttributeImpl):
     """represents a scalar-holding InstrumentedAttribute,
     where the target object is also instrumented.
 
@@ -1516,7 +1516,7 @@ class ScalarObjectAttributeImpl(ScalarAttributeImpl):
         return value
 
 
-class HasCollectionAdapter:
+class _HasCollectionAdapter:
     __slots__ = ()
 
     collection: bool
@@ -1588,14 +1588,14 @@ class HasCollectionAdapter:
 if TYPE_CHECKING:
 
     def _is_collection_attribute_impl(
-        impl: AttributeImpl,
-    ) -> TypeGuard[CollectionAttributeImpl]: ...
+        impl: _AttributeImpl,
+    ) -> TypeGuard[_CollectionAttributeImpl]: ...
 
 else:
     _is_collection_attribute_impl = operator.attrgetter("collection")
 
 
-class CollectionAttributeImpl(HasCollectionAdapter, AttributeImpl):
+class _CollectionAttributeImpl(_HasCollectionAdapter, _AttributeImpl):
     """A collection-holding attribute that instruments changes in membership.
 
     Only handles collections of instrumented objects.
@@ -2093,7 +2093,7 @@ class CollectionAttributeImpl(HasCollectionAdapter, AttributeImpl):
         return user_data._sa_adapter
 
 
-def backref_listeners(
+def _backref_listeners(
     attribute: QueryableAttribute[Any], key: str, uselist: bool
 ) -> None:
     """Apply listeners to synchronize a two-way relationship."""
@@ -2395,7 +2395,7 @@ class History(NamedTuple):
     @classmethod
     def from_scalar_attribute(
         cls,
-        attribute: ScalarAttributeImpl,
+        attribute: _ScalarAttributeImpl,
         state: InstanceState[Any],
         current: Any,
     ) -> History:
@@ -2436,7 +2436,7 @@ class History(NamedTuple):
     @classmethod
     def from_object_attribute(
         cls,
-        attribute: ScalarObjectAttributeImpl,
+        attribute: _ScalarObjectAttributeImpl,
         state: InstanceState[Any],
         current: Any,
         original: Any = _NO_HISTORY,
@@ -2475,7 +2475,7 @@ class History(NamedTuple):
     @classmethod
     def from_collection(
         cls,
-        attribute: CollectionAttributeImpl,
+        attribute: _CollectionAttributeImpl,
         state: InstanceState[Any],
         current: Any,
     ) -> History:
@@ -2566,7 +2566,7 @@ def has_parent(
     return manager.has_parent(state, key, optimistic)
 
 
-def register_attribute(
+def _register_attribute(
     class_: Type[_O],
     key: str,
     *,
@@ -2575,20 +2575,20 @@ def register_attribute(
     doc: Optional[str] = None,
     **kw: Any,
 ) -> InstrumentedAttribute[_T]:
-    desc = register_descriptor(
+    desc = _register_descriptor(
         class_, key, comparator=comparator, parententity=parententity, doc=doc
     )
-    register_attribute_impl(class_, key, **kw)
+    _register_attribute_impl(class_, key, **kw)
     return desc
 
 
-def register_attribute_impl(
+def _register_attribute_impl(
     class_: Type[_O],
     key: str,
     uselist: bool = False,
     callable_: Optional[_LoaderCallable] = None,
     useobject: bool = False,
-    impl_class: Optional[Type[AttributeImpl]] = None,
+    impl_class: Optional[Type[_AttributeImpl]] = None,
     backref: Optional[str] = None,
     **kw: Any,
 ) -> QueryableAttribute[Any]:
@@ -2605,35 +2605,35 @@ def register_attribute_impl(
         "_Dispatch[QueryableAttribute[Any]]", manager[key].dispatch
     )  # noqa: E501
 
-    impl: AttributeImpl
+    impl: _AttributeImpl
 
     if impl_class:
         # TODO: this appears to be the WriteOnlyAttributeImpl /
         # DynamicAttributeImpl constructor which is hardcoded
-        impl = cast("Type[WriteOnlyAttributeImpl]", impl_class)(
+        impl = cast("Type[_WriteOnlyAttributeImpl]", impl_class)(
             class_, key, dispatch, **kw
         )
     elif uselist:
-        impl = CollectionAttributeImpl(
+        impl = _CollectionAttributeImpl(
             class_, key, callable_, dispatch, typecallable=typecallable, **kw
         )
     elif useobject:
-        impl = ScalarObjectAttributeImpl(
+        impl = _ScalarObjectAttributeImpl(
             class_, key, callable_, dispatch, **kw
         )
     else:
-        impl = ScalarAttributeImpl(class_, key, callable_, dispatch, **kw)
+        impl = _ScalarAttributeImpl(class_, key, callable_, dispatch, **kw)
 
     manager[key].impl = impl
 
     if backref:
-        backref_listeners(manager[key], backref, uselist)
+        _backref_listeners(manager[key], backref, uselist)
 
     manager.post_configure_attribute(key)
     return manager[key]
 
 
-def register_descriptor(
+def _register_descriptor(
     class_: Type[Any],
     key: str,
     *,
@@ -2653,7 +2653,7 @@ def register_descriptor(
     return descriptor
 
 
-def unregister_attribute(class_: Type[Any], key: str) -> None:
+def _unregister_attribute(class_: Type[Any], key: str) -> None:
     manager_of_class(class_).uninstrument_attribute(key)
 
 
@@ -2691,7 +2691,7 @@ def init_state_collection(
     attr = state.manager[key].impl
 
     if TYPE_CHECKING:
-        assert isinstance(attr, HasCollectionAdapter)
+        assert isinstance(attr, _HasCollectionAdapter)
 
     old = dict_.pop(key, None)  # discard old collection
     if old is not None:
diff --git a/lib/sqlalchemy/orm/bulk_persistence.py b/lib/sqlalchemy/orm/bulk_persistence.py
index 3c033be5850d4238a3065ea46dd30602ce512910..5acc42ef54d7a79993f6a8191e1a4a2574d1ebd7 100644 (file)
@@ -31,9 +31,9 @@ from . import exc as orm_exc
 from . import loading
 from . import persistence
 from .base import NO_VALUE
-from .context import AbstractORMCompileState
+from .context import _AbstractORMCompileState
+from .context import _ORMFromStatementCompileState
 from .context import FromStatement
-from .context import ORMFromStatementCompileState
 from .context import QueryContext
 from .. import exc as sa_exc
 from .. import util
@@ -386,9 +386,9 @@ def _expand_composites(mapper, mappings):
             populators[key](mapping)
 
 
-class ORMDMLState(AbstractORMCompileState):
+class _ORMDMLState(_AbstractORMCompileState):
     is_dml_returning = True
-    from_statement_ctx: Optional[ORMFromStatementCompileState] = None
+    from_statement_ctx: Optional[_ORMFromStatementCompileState] = None
 
     @classmethod
     def _get_orm_crud_kv_pairs(
@@ -560,7 +560,9 @@ class ORMDMLState(AbstractORMCompileState):
             fs = fs.options(*orm_level_statement._with_options)
             self.select_statement = fs
             self.from_statement_ctx = fsc = (
-                ORMFromStatementCompileState.create_for_statement(fs, compiler)
+                _ORMFromStatementCompileState.create_for_statement(
+                    fs, compiler
+                )
             )
             fsc.setup_dml_returning_compile_state(dml_mapper)
 
@@ -633,7 +635,7 @@ class ORMDMLState(AbstractORMCompileState):
             return result
 
 
-class BulkUDCompileState(ORMDMLState):
+class _BulkUDCompileState(_ORMDMLState):
     class default_update_options(Options):
         _dml_strategy: DMLStrategyArgument = "auto"
         _synchronize_session: SynchronizeSessionArgument = "auto"
@@ -674,7 +676,7 @@ class BulkUDCompileState(ORMDMLState):
         (
             update_options,
             execution_options,
-        ) = BulkUDCompileState.default_update_options.from_execution_options(
+        ) = _BulkUDCompileState.default_update_options.from_execution_options(
             "_sa_orm_update_options",
             {
                 "synchronize_session",
@@ -1152,7 +1154,7 @@ class BulkUDCompileState(ORMDMLState):
 
 
 @CompileState.plugin_for("orm", "insert")
-class BulkORMInsert(ORMDMLState, InsertDMLState):
+class _BulkORMInsert(_ORMDMLState, InsertDMLState):
     class default_insert_options(Options):
         _dml_strategy: DMLStrategyArgument = "auto"
         _render_nulls: bool = False
@@ -1176,7 +1178,7 @@ class BulkORMInsert(ORMDMLState, InsertDMLState):
         (
             insert_options,
             execution_options,
-        ) = BulkORMInsert.default_insert_options.from_execution_options(
+        ) = _BulkORMInsert.default_insert_options.from_execution_options(
             "_sa_orm_insert_options",
             {"dml_strategy", "autoflush", "populate_existing", "render_nulls"},
             execution_options,
@@ -1321,9 +1323,9 @@ class BulkORMInsert(ORMDMLState, InsertDMLState):
         )
 
     @classmethod
-    def create_for_statement(cls, statement, compiler, **kw) -> BulkORMInsert:
+    def create_for_statement(cls, statement, compiler, **kw) -> _BulkORMInsert:
         self = cast(
-            BulkORMInsert,
+            _BulkORMInsert,
             super().create_for_statement(statement, compiler, **kw),
         )
 
@@ -1412,7 +1414,7 @@ class BulkORMInsert(ORMDMLState, InsertDMLState):
 
 
 @CompileState.plugin_for("orm", "update")
-class BulkORMUpdate(BulkUDCompileState, UpdateDMLState):
+class _BulkORMUpdate(_BulkUDCompileState, UpdateDMLState):
     @classmethod
     def create_for_statement(cls, statement, compiler, **kw):
         self = cls.__new__(cls)
@@ -1899,7 +1901,7 @@ class BulkORMUpdate(BulkUDCompileState, UpdateDMLState):
 
 
 @CompileState.plugin_for("orm", "delete")
-class BulkORMDelete(BulkUDCompileState, DeleteDMLState):
+class _BulkORMDelete(_BulkUDCompileState, DeleteDMLState):
     @classmethod
     def create_for_statement(cls, statement, compiler, **kw):
         self = cls.__new__(cls)
diff --git a/lib/sqlalchemy/orm/clsregistry.py b/lib/sqlalchemy/orm/clsregistry.py
index dac94a36612c82513c5c5e4c9032d286f1647ea8..bab45480cb142caed9cb86ddfa1823e40d0df9f9 100644 (file)
@@ -52,16 +52,16 @@ if TYPE_CHECKING:
 
 _T = TypeVar("_T", bound=Any)
 
-_ClsRegistryType = MutableMapping[str, Union[type, "ClsRegistryToken"]]
+_ClsRegistryType = MutableMapping[str, Union[type, "_ClsRegistryToken"]]
 
 # strong references to registries which we place in
 # the _decl_class_registry, which is usually weak referencing.
 # the internal registries here link to classes with weakrefs and remove
 # themselves when all references to contained classes are removed.
-_registries: Set[ClsRegistryToken] = set()
+_registries: Set[_ClsRegistryToken] = set()
 
 
-def add_class(
+def _add_class(
     classname: str, cls: Type[_T], decl_class_registry: _ClsRegistryType
 ) -> None:
     """Add a class to the _decl_class_registry associated with the
@@ -115,7 +115,7 @@ def add_class(
                 raise
 
 
-def remove_class(
+def _remove_class(
     classname: str, cls: Type[Any], decl_class_registry: _ClsRegistryType
 ) -> None:
     if classname in decl_class_registry:
@@ -180,13 +180,13 @@ def _key_is_empty(
         return not test(thing)
 
 
-class ClsRegistryToken:
+class _ClsRegistryToken:
     """an object that can be in the registry._class_registry as a value."""
 
     __slots__ = ()
 
 
-class _MultipleClassMarker(ClsRegistryToken):
+class _MultipleClassMarker(_ClsRegistryToken):
     """refers to multiple classes of the same name
     within _decl_class_registry.
 
@@ -255,7 +255,7 @@ class _MultipleClassMarker(ClsRegistryToken):
         self.contents.add(weakref.ref(item, self._remove_item))
 
 
-class _ModuleMarker(ClsRegistryToken):
+class _ModuleMarker(_ClsRegistryToken):
     """Refers to a module name within
     _decl_class_registry.
 
@@ -282,7 +282,7 @@ class _ModuleMarker(ClsRegistryToken):
     def __contains__(self, name: str) -> bool:
         return name in self.contents
 
-    def __getitem__(self, name: str) -> ClsRegistryToken:
+    def __getitem__(self, name: str) -> _ClsRegistryToken:
         return self.contents[name]
 
     def _remove_item(self, name: str) -> None:
diff --git a/lib/sqlalchemy/orm/collections.py b/lib/sqlalchemy/orm/collections.py
index 956cbd651ac0a5a12086b16e6219fab2981d4b75..ace7542c12d23ec02cc0c58e03bf2f77469d64f1 100644 (file)
@@ -133,8 +133,8 @@ from ..sql.base import NO_ARG
 from ..util.compat import inspect_getfullargspec
 
 if typing.TYPE_CHECKING:
+    from .attributes import _CollectionAttributeImpl
     from .attributes import AttributeEventToken
-    from .attributes import CollectionAttributeImpl
     from .mapped_collection import attribute_keyed_dict
     from .mapped_collection import column_keyed_dict
     from .mapped_collection import keyfunc_mapping
@@ -476,7 +476,7 @@ class CollectionAdapter:
         "empty",
     )
 
-    attr: CollectionAttributeImpl
+    attr: _CollectionAttributeImpl
     _key: str
 
     # this is actually a weakref; see note in constructor
@@ -489,7 +489,7 @@ class CollectionAdapter:
 
     def __init__(
         self,
-        attr: CollectionAttributeImpl,
+        attr: _CollectionAttributeImpl,
         owner_state: InstanceState[Any],
         data: _AdaptedCollectionProtocol,
     ):
@@ -812,7 +812,7 @@ def bulk_replace(values, existing_adapter, new_adapter, initiator=None):
         existing_adapter._fire_remove_event_bulk(removals, initiator=initiator)
 
 
-def prepare_instrumentation(
+def _prepare_instrumentation(
     factory: Union[Type[Collection[Any]], _CollectionFactoryType],
 ) -> _CollectionFactoryType:
     """Prepare a callable for future use as a collection class factory.
diff --git a/lib/sqlalchemy/orm/context.py b/lib/sqlalchemy/orm/context.py
index f8d3711fc4f3b23e100dca2607b617015b812802..cdbb6ccec3d27a45cba29415cbc1db20c3f592d4 100644 (file)
@@ -75,7 +75,7 @@ from ..util.typing import Unpack
 if TYPE_CHECKING:
     from ._typing import _InternalEntityType
     from ._typing import OrmExecuteOptionsParameter
-    from .loading import PostLoad
+    from .loading import _PostLoad
     from .mapper import Mapper
     from .query import Query
     from .session import _BindArguments
@@ -132,8 +132,8 @@ class QueryContext:
     )
 
     runid: int
-    post_load_paths: Dict[PathRegistry, PostLoad]
-    compile_state: ORMCompileState
+    post_load_paths: Dict[PathRegistry, _PostLoad]
+    compile_state: _ORMCompileState
 
     class default_load_options(Options):
         _only_return_tuples = False
@@ -226,7 +226,7 @@ _orm_load_exec_options = util.immutabledict(
 )
 
 
-class AbstractORMCompileState(CompileState):
+class _AbstractORMCompileState(CompileState):
     is_dml_returning = False
 
     def _init_global_attributes(
@@ -274,7 +274,7 @@ class AbstractORMCompileState(CompileState):
         statement: Union[Select, FromStatement],
         compiler: Optional[SQLCompiler],
         **kw: Any,
-    ) -> AbstractORMCompileState:
+    ) -> _AbstractORMCompileState:
         """Create a context for a statement given a :class:`.Compiler`.
 
         This method is always invoked in the context of SQLCompiler.process().
@@ -334,7 +334,7 @@ class AbstractORMCompileState(CompileState):
         raise NotImplementedError()
 
 
-class AutoflushOnlyORMCompileState(AbstractORMCompileState):
+class _AutoflushOnlyORMCompileState(_AbstractORMCompileState):
     """ORM compile state that is a passthrough, except for autoflush."""
 
     @classmethod
@@ -379,7 +379,7 @@ class AutoflushOnlyORMCompileState(AbstractORMCompileState):
         return result
 
 
-class ORMCompileState(AbstractORMCompileState):
+class _ORMCompileState(_AbstractORMCompileState):
     class default_compile_options(CacheableOptions):
         _cache_key_traversal = [
             ("_use_legacy_query_style", InternalTraversal.dp_boolean),
@@ -453,7 +453,7 @@ class ORMCompileState(AbstractORMCompileState):
             statement: Union[Select, FromStatement],
             compiler: Optional[SQLCompiler],
             **kw: Any,
-        ) -> ORMCompileState: ...
+        ) -> _ORMCompileState: ...
 
     def _append_dedupe_col_collection(self, obj, col_collection):
         dedupe = self.dedupe_columns
@@ -663,7 +663,7 @@ class ORMCompileState(AbstractORMCompileState):
         )
 
 
-class DMLReturningColFilter:
+class _DMLReturningColFilter:
     """an adapter used for the DML RETURNING case.
 
     Has a subset of the interface used by
@@ -707,7 +707,7 @@ class DMLReturningColFilter:
 
 
 @sql.base.CompileState.plugin_for("orm", "orm_from_statement")
-class ORMFromStatementCompileState(ORMCompileState):
+class _ORMFromStatementCompileState(_ORMCompileState):
     _from_obj_alias = None
     _has_mapper_entities = False
 
@@ -729,7 +729,7 @@ class ORMFromStatementCompileState(ORMCompileState):
         statement_container: Union[Select, FromStatement],
         compiler: Optional[SQLCompiler],
         **kw: Any,
-    ) -> ORMFromStatementCompileState:
+    ) -> _ORMFromStatementCompileState:
         assert isinstance(statement_container, FromStatement)
 
         if compiler is not None and compiler.stack:
@@ -867,7 +867,7 @@ class ORMFromStatementCompileState(ORMCompileState):
         target_mapper = self.statement._propagate_attrs.get(
             "plugin_subject", None
         )
-        adapter = DMLReturningColFilter(target_mapper, dml_mapper)
+        adapter = _DMLReturningColFilter(target_mapper, dml_mapper)
 
         if self.compile_options._is_star and (len(self._entities) != 1):
             raise sa_exc.CompileError(
@@ -891,9 +891,9 @@ class FromStatement(GroupedElement, Generative, TypedReturnsRows[Unpack[_Ts]]):
 
     __visit_name__ = "orm_from_statement"
 
-    _compile_options = ORMFromStatementCompileState.default_compile_options
+    _compile_options = _ORMFromStatementCompileState.default_compile_options
 
-    _compile_state_factory = ORMFromStatementCompileState.create_for_statement
+    _compile_state_factory = _ORMFromStatementCompileState.create_for_statement
 
     _for_update_arg = None
 
@@ -969,7 +969,7 @@ class FromStatement(GroupedElement, Generative, TypedReturnsRows[Unpack[_Ts]]):
 
         """
         meth = cast(
-            ORMSelectCompileState, SelectState.get_plugin_class(self)
+            _ORMSelectCompileState, SelectState.get_plugin_class(self)
         ).get_column_descriptions
         return meth(self)
 
@@ -1000,14 +1000,14 @@ class FromStatement(GroupedElement, Generative, TypedReturnsRows[Unpack[_Ts]]):
 
 
 @sql.base.CompileState.plugin_for("orm", "compound_select")
-class CompoundSelectCompileState(
-    AutoflushOnlyORMCompileState, CompoundSelectState
+class _CompoundSelectCompileState(
+    _AutoflushOnlyORMCompileState, CompoundSelectState
 ):
     pass
 
 
 @sql.base.CompileState.plugin_for("orm", "select")
-class ORMSelectCompileState(ORMCompileState, SelectState):
+class _ORMSelectCompileState(_ORMCompileState, SelectState):
     _already_joined_edges = ()
 
     _memoized_entities = _EMPTY_DICT
@@ -1031,7 +1031,7 @@ class ORMSelectCompileState(ORMCompileState, SelectState):
         statement: Union[Select, FromStatement],
         compiler: Optional[SQLCompiler],
         **kw: Any,
-    ) -> ORMSelectCompileState:
+    ) -> _ORMSelectCompileState:
         """compiler hook, we arrive here from compiler.visit_select() only."""
 
         self = cls.__new__(cls)
@@ -2442,11 +2442,11 @@ class ORMSelectCompileState(ORMCompileState, SelectState):
 
 def _column_descriptions(
     query_or_select_stmt: Union[Query, Select, FromStatement],
-    compile_state: Optional[ORMSelectCompileState] = None,
+    compile_state: Optional[_ORMSelectCompileState] = None,
     legacy: bool = False,
 ) -> List[ORMColumnDescription]:
     if compile_state is None:
-        compile_state = ORMSelectCompileState._create_entities_collection(
+        compile_state = _ORMSelectCompileState._create_entities_collection(
             query_or_select_stmt, legacy=legacy
         )
     ctx = compile_state
@@ -2538,13 +2538,13 @@ class _QueryEntity:
     expr: Union[_InternalEntityType, ColumnElement[Any]]
     entity_zero: Optional[_InternalEntityType]
 
-    def setup_compile_state(self, compile_state: ORMCompileState) -> None:
+    def setup_compile_state(self, compile_state: _ORMCompileState) -> None:
         raise NotImplementedError()
 
     def setup_dml_returning_compile_state(
         self,
-        compile_state: ORMCompileState,
-        adapter: DMLReturningColFilter,
+        compile_state: _ORMCompileState,
+        adapter: _DMLReturningColFilter,
     ) -> None:
         raise NotImplementedError()
 
@@ -2745,8 +2745,8 @@ class _MapperEntity(_QueryEntity):
 
     def setup_dml_returning_compile_state(
         self,
-        compile_state: ORMCompileState,
-        adapter: DMLReturningColFilter,
+        compile_state: _ORMCompileState,
+        adapter: _DMLReturningColFilter,
     ) -> None:
         loading._setup_entity_query(
             compile_state,
@@ -2904,8 +2904,8 @@ class _BundleEntity(_QueryEntity):
 
     def setup_dml_returning_compile_state(
         self,
-        compile_state: ORMCompileState,
-        adapter: DMLReturningColFilter,
+        compile_state: _ORMCompileState,
+        adapter: _DMLReturningColFilter,
     ) -> None:
         return self.setup_compile_state(compile_state)
 
@@ -3094,8 +3094,8 @@ class _RawColumnEntity(_ColumnEntity):
 
     def setup_dml_returning_compile_state(
         self,
-        compile_state: ORMCompileState,
-        adapter: DMLReturningColFilter,
+        compile_state: _ORMCompileState,
+        adapter: _DMLReturningColFilter,
     ) -> None:
         return self.setup_compile_state(compile_state)
 
@@ -3211,8 +3211,8 @@ class _ORMColumnEntity(_ColumnEntity):
 
     def setup_dml_returning_compile_state(
         self,
-        compile_state: ORMCompileState,
-        adapter: DMLReturningColFilter,
+        compile_state: _ORMCompileState,
+        adapter: _DMLReturningColFilter,
     ) -> None:
         self._fetch_column = self.column
         column = adapter(self.column, False)
diff --git a/lib/sqlalchemy/orm/decl_api.py b/lib/sqlalchemy/orm/decl_api.py
index 3c26a17036af8aa5c1ba9fbdb2b653bce52592c5..421a8c675a7ce16ecb0c804b4dc81575794612a5 100644 (file)
@@ -1371,7 +1371,7 @@ class registry:
         self._non_primary_mappers[np_mapper] = True
 
     def _dispose_cls(self, cls: Type[_O]) -> None:
-        clsregistry.remove_class(cls.__name__, cls, self._class_registry)
+        clsregistry._remove_class(cls.__name__, cls, self._class_registry)
 
     def _add_manager(self, manager: ClassManager[Any]) -> None:
         self._managers[manager] = True
diff --git a/lib/sqlalchemy/orm/decl_base.py b/lib/sqlalchemy/orm/decl_base.py
index 8b42a32dfb08ce956c38bde5aefb201d7a6863e5..dbb52d3c3c01c2dd74c5e53397012a904c83547f 100644 (file)
@@ -382,7 +382,7 @@ class _ImperativeMapperConfig(_MapperConfig):
 
         with mapperlib._CONFIGURE_MUTEX:
             if not mapper_kw.get("non_primary", False):
-                clsregistry.add_class(
+                clsregistry._add_class(
                     self.classname, self.cls, registry._class_registry
                 )
 
@@ -563,7 +563,7 @@ class _ClassScanMapperConfig(_MapperConfig):
         self._setup_dataclasses_transforms()
 
         with mapperlib._CONFIGURE_MUTEX:
-            clsregistry.add_class(
+            clsregistry._add_class(
                 self.classname, self.cls, registry._class_registry
             )
 
diff --git a/lib/sqlalchemy/orm/dependency.py b/lib/sqlalchemy/orm/dependency.py
index 71c06fbeb195e53a01e275dac41de5aa6977ff26..9cd1d786cbc2d06500f6ca84ab102e93153ab908 100644 (file)
@@ -26,7 +26,7 @@ from .. import sql
 from .. import util
 
 
-class DependencyProcessor:
+class _DependencyProcessor:
     def __init__(self, prop):
         self.prop = prop
         self.cascade = prop.cascade
@@ -78,20 +78,20 @@ class DependencyProcessor:
         uow.register_preprocessor(self, True)
 
     def per_property_flush_actions(self, uow):
-        after_save = unitofwork.ProcessAll(uow, self, False, True)
-        before_delete = unitofwork.ProcessAll(uow, self, True, True)
+        after_save = unitofwork._ProcessAll(uow, self, False, True)
+        before_delete = unitofwork._ProcessAll(uow, self, True, True)
 
-        parent_saves = unitofwork.SaveUpdateAll(
+        parent_saves = unitofwork._SaveUpdateAll(
             uow, self.parent.primary_base_mapper
         )
-        child_saves = unitofwork.SaveUpdateAll(
+        child_saves = unitofwork._SaveUpdateAll(
             uow, self.mapper.primary_base_mapper
         )
 
-        parent_deletes = unitofwork.DeleteAll(
+        parent_deletes = unitofwork._DeleteAll(
             uow, self.parent.primary_base_mapper
         )
-        child_deletes = unitofwork.DeleteAll(
+        child_deletes = unitofwork._DeleteAll(
             uow, self.mapper.primary_base_mapper
         )
 
@@ -115,17 +115,17 @@ class DependencyProcessor:
         """
 
         child_base_mapper = self.mapper.primary_base_mapper
-        child_saves = unitofwork.SaveUpdateAll(uow, child_base_mapper)
-        child_deletes = unitofwork.DeleteAll(uow, child_base_mapper)
+        child_saves = unitofwork._SaveUpdateAll(uow, child_base_mapper)
+        child_deletes = unitofwork._DeleteAll(uow, child_base_mapper)
 
         # locate and disable the aggregate processors
         # for this dependency
 
         if isdelete:
-            before_delete = unitofwork.ProcessAll(uow, self, True, True)
+            before_delete = unitofwork._ProcessAll(uow, self, True, True)
             before_delete.disabled = True
         else:
-            after_save = unitofwork.ProcessAll(uow, self, False, True)
+            after_save = unitofwork._ProcessAll(uow, self, False, True)
             after_save.disabled = True
 
         # check if the "child" side is part of the cycle
@@ -146,14 +146,16 @@ class DependencyProcessor:
 
         # check if the "parent" side is part of the cycle
         if not isdelete:
-            parent_saves = unitofwork.SaveUpdateAll(
+            parent_saves = unitofwork._SaveUpdateAll(
                 uow, self.parent.base_mapper
             )
             parent_deletes = before_delete = None
             if parent_saves in uow.cycles:
                 parent_in_cycles = True
         else:
-            parent_deletes = unitofwork.DeleteAll(uow, self.parent.base_mapper)
+            parent_deletes = unitofwork._DeleteAll(
+                uow, self.parent.base_mapper
+            )
             parent_saves = after_save = None
             if parent_deletes in uow.cycles:
                 parent_in_cycles = True
@@ -178,13 +180,15 @@ class DependencyProcessor:
                 continue
 
             if isdelete:
-                before_delete = unitofwork.ProcessState(uow, self, True, state)
+                before_delete = unitofwork._ProcessState(
+                    uow, self, True, state
+                )
                 if parent_in_cycles:
-                    parent_deletes = unitofwork.DeleteState(uow, state)
+                    parent_deletes = unitofwork._DeleteState(uow, state)
             else:
-                after_save = unitofwork.ProcessState(uow, self, False, state)
+                after_save = unitofwork._ProcessState(uow, self, False, state)
                 if parent_in_cycles:
-                    parent_saves = unitofwork.SaveUpdateState(uow, state)
+                    parent_saves = unitofwork._SaveUpdateState(uow, state)
 
             if child_in_cycles:
                 child_actions = []
@@ -195,12 +199,12 @@ class DependencyProcessor:
                         (deleted, listonly) = uow.states[child_state]
                         if deleted:
                             child_action = (
-                                unitofwork.DeleteState(uow, child_state),
+                                unitofwork._DeleteState(uow, child_state),
                                 True,
                             )
                         else:
                             child_action = (
-                                unitofwork.SaveUpdateState(uow, child_state),
+                                unitofwork._SaveUpdateState(uow, child_state),
                                 False,
                             )
                     child_actions.append(child_action)
@@ -331,7 +335,7 @@ class DependencyProcessor:
         return "%s(%s)" % (self.__class__.__name__, self.prop)
 
 
-class OneToManyDP(DependencyProcessor):
+class _OneToManyDP(_DependencyProcessor):
     def per_property_dependencies(
         self,
         uow,
@@ -343,10 +347,10 @@ class OneToManyDP(DependencyProcessor):
         before_delete,
     ):
         if self.post_update:
-            child_post_updates = unitofwork.PostUpdateAll(
+            child_post_updates = unitofwork._PostUpdateAll(
                 uow, self.mapper.primary_base_mapper, False
             )
-            child_pre_updates = unitofwork.PostUpdateAll(
+            child_pre_updates = unitofwork._PostUpdateAll(
                 uow, self.mapper.primary_base_mapper, True
             )
 
@@ -385,10 +389,10 @@ class OneToManyDP(DependencyProcessor):
         childisdelete,
     ):
         if self.post_update:
-            child_post_updates = unitofwork.PostUpdateAll(
+            child_post_updates = unitofwork._PostUpdateAll(
                 uow, self.mapper.primary_base_mapper, False
             )
-            child_pre_updates = unitofwork.PostUpdateAll(
+            child_pre_updates = unitofwork._PostUpdateAll(
                 uow, self.mapper.primary_base_mapper, True
             )
 
@@ -622,9 +626,9 @@ class OneToManyDP(DependencyProcessor):
         ):
             return
         if clearkeys:
-            sync.clear(dest, self.mapper, self.prop.synchronize_pairs)
+            sync._clear(dest, self.mapper, self.prop.synchronize_pairs)
         else:
-            sync.populate(
+            sync._populate(
                 source,
                 self.parent,
                 dest,
@@ -635,16 +639,16 @@ class OneToManyDP(DependencyProcessor):
             )
 
     def _pks_changed(self, uowcommit, state):
-        return sync.source_modified(
+        return sync._source_modified(
             uowcommit, state, self.parent, self.prop.synchronize_pairs
         )
 
 
-class ManyToOneDP(DependencyProcessor):
+class _ManyToOneDP(_DependencyProcessor):
     def __init__(self, prop):
-        DependencyProcessor.__init__(self, prop)
+        _DependencyProcessor.__init__(self, prop)
         for mapper in self.mapper.self_and_descendants:
-            mapper._dependency_processors.append(DetectKeySwitch(prop))
+            mapper._dependency_processors.append(_DetectKeySwitch(prop))
 
     def per_property_dependencies(
         self,
@@ -657,10 +661,10 @@ class ManyToOneDP(DependencyProcessor):
         before_delete,
     ):
         if self.post_update:
-            parent_post_updates = unitofwork.PostUpdateAll(
+            parent_post_updates = unitofwork._PostUpdateAll(
                 uow, self.parent.primary_base_mapper, False
             )
-            parent_pre_updates = unitofwork.PostUpdateAll(
+            parent_pre_updates = unitofwork._PostUpdateAll(
                 uow, self.parent.primary_base_mapper, True
             )
 
@@ -698,7 +702,7 @@ class ManyToOneDP(DependencyProcessor):
     ):
         if self.post_update:
             if not isdelete:
-                parent_post_updates = unitofwork.PostUpdateAll(
+                parent_post_updates = unitofwork._PostUpdateAll(
                     uow, self.parent.primary_base_mapper, False
                 )
                 if childisdelete:
@@ -717,7 +721,7 @@ class ManyToOneDP(DependencyProcessor):
                         ]
                     )
             else:
-                parent_pre_updates = unitofwork.PostUpdateAll(
+                parent_pre_updates = unitofwork._PostUpdateAll(
                     uow, self.parent.primary_base_mapper, True
                 )
 
@@ -851,10 +855,10 @@ class ManyToOneDP(DependencyProcessor):
             return
 
         if clearkeys or child is None:
-            sync.clear(state, self.parent, self.prop.synchronize_pairs)
+            sync._clear(state, self.parent, self.prop.synchronize_pairs)
         else:
             self._verify_canload(child)
-            sync.populate(
+            sync._populate(
                 child,
                 self.mapper,
                 state,
@@ -865,7 +869,7 @@ class ManyToOneDP(DependencyProcessor):
             )
 
 
-class DetectKeySwitch(DependencyProcessor):
+class _DetectKeySwitch(_DependencyProcessor):
     """For many-to-one relationships with no one-to-many backref,
     searches for parents through the unit of work when a primary
     key has changed and updates them.
@@ -891,8 +895,8 @@ class DetectKeySwitch(DependencyProcessor):
         uow.register_preprocessor(self, False)
 
     def per_property_flush_actions(self, uow):
-        parent_saves = unitofwork.SaveUpdateAll(uow, self.parent.base_mapper)
-        after_save = unitofwork.ProcessAll(uow, self, False, False)
+        parent_saves = unitofwork._SaveUpdateAll(uow, self.parent.base_mapper)
+        after_save = unitofwork._ProcessAll(uow, self, False, False)
         uow.dependencies.update([(parent_saves, after_save)])
 
     def per_state_flush_actions(self, uow, states, isdelete):
@@ -966,7 +970,7 @@ class DetectKeySwitch(DependencyProcessor):
                         uowcommit.register_object(
                             state, False, self.passive_updates
                         )
-                        sync.populate(
+                        sync._populate(
                             related_state,
                             self.mapper,
                             state,
@@ -977,12 +981,12 @@ class DetectKeySwitch(DependencyProcessor):
                         )
 
     def _pks_changed(self, uowcommit, state):
-        return bool(state.key) and sync.source_modified(
+        return bool(state.key) and sync._source_modified(
             uowcommit, state, self.mapper, self.prop.synchronize_pairs
         )
 
 
-class ManyToManyDP(DependencyProcessor):
+class _ManyToManyDP(_DependencyProcessor):
     def per_property_dependencies(
         self,
         uow,
@@ -1174,14 +1178,14 @@ class ManyToManyDP(DependencyProcessor):
                 if need_cascade_pks:
                     for child in history.unchanged:
                         associationrow = {}
-                        sync.update(
+                        sync._update(
                             state,
                             self.parent,
                             associationrow,
                             "old_",
                             self.prop.synchronize_pairs,
                         )
-                        sync.update(
+                        sync._update(
                             child,
                             self.mapper,
                             associationrow,
@@ -1279,10 +1283,10 @@ class ManyToManyDP(DependencyProcessor):
                 )
             return False
 
-        sync.populate_dict(
+        sync._populate_dict(
             state, self.parent, associationrow, self.prop.synchronize_pairs
         )
-        sync.populate_dict(
+        sync._populate_dict(
             child,
             self.mapper,
             associationrow,
@@ -1292,13 +1296,13 @@ class ManyToManyDP(DependencyProcessor):
         return True
 
     def _pks_changed(self, uowcommit, state):
-        return sync.source_modified(
+        return sync._source_modified(
             uowcommit, state, self.parent, self.prop.synchronize_pairs
         )
 
 
 _direction_to_processor = {
-    ONETOMANY: OneToManyDP,
-    MANYTOONE: ManyToOneDP,
-    MANYTOMANY: ManyToManyDP,
+    ONETOMANY: _OneToManyDP,
+    MANYTOONE: _ManyToOneDP,
+    MANYTOMANY: _ManyToManyDP,
 }
diff --git a/lib/sqlalchemy/orm/descriptor_props.py b/lib/sqlalchemy/orm/descriptor_props.py
index b43824e2ef0001dc0e6feab7b2d13839bf7328bc..28d5981aaa55774d92d0eb3df396338c80e321a7 100644 (file)
@@ -66,7 +66,7 @@ if typing.TYPE_CHECKING:
     from .attributes import History
     from .attributes import InstrumentedAttribute
     from .attributes import QueryableAttribute
-    from .context import ORMCompileState
+    from .context import _ORMCompileState
     from .decl_base import _ClassScanMapperConfig
     from .mapper import Mapper
     from .properties import ColumnProperty
@@ -112,7 +112,7 @@ class DescriptorProperty(MapperProperty[_T]):
     def instrument_class(self, mapper: Mapper[Any]) -> None:
         prop = self
 
-        class _ProxyImpl(attributes.AttributeImpl):
+        class _ProxyImpl(attributes._AttributeImpl):
             accepts_scalar_loader = False
             load_on_unexpire = True
             collection = False
@@ -150,7 +150,7 @@ class DescriptorProperty(MapperProperty[_T]):
 
             self.descriptor = property(fget=fget, fset=fset, fdel=fdel)
 
-        proxy_attr = attributes.create_proxied_attribute(self.descriptor)(
+        proxy_attr = attributes._create_proxied_attribute(self.descriptor)(
             self.parent.class_,
             self.key,
             self.descriptor,
@@ -544,13 +544,13 @@ class CompositeProperty(
         """Establish events that populate/expire the composite attribute."""
 
         def load_handler(
-            state: InstanceState[Any], context: ORMCompileState
+            state: InstanceState[Any], context: _ORMCompileState
         ) -> None:
             _load_refresh_handler(state, context, None, is_refresh=False)
 
         def refresh_handler(
             state: InstanceState[Any],
-            context: ORMCompileState,
+            context: _ORMCompileState,
             to_load: Optional[Sequence[str]],
         ) -> None:
             # note this corresponds to sqlalchemy.ext.mutable load_attrs()
@@ -562,7 +562,7 @@ class CompositeProperty(
 
         def _load_refresh_handler(
             state: InstanceState[Any],
-            context: ORMCompileState,
+            context: _ORMCompileState,
             to_load: Optional[Sequence[str]],
             is_refresh: bool,
         ) -> None:
diff --git a/lib/sqlalchemy/orm/dynamic.py b/lib/sqlalchemy/orm/dynamic.py
index ad1b239c13c25eccefc32d42d38747d92f1dd4bd..74997c0b5c1fc03dce7bd97bff1af297c68d81d9 100644 (file)
@@ -37,10 +37,10 @@ from . import util as orm_util
 from .base import PassiveFlag
 from .query import Query
 from .session import object_session
-from .writeonly import AbstractCollectionWriter
-from .writeonly import WriteOnlyAttributeImpl
+from .writeonly import _AbstractCollectionWriter
+from .writeonly import _WriteOnlyAttributeImpl
+from .writeonly import _WriteOnlyLoader
 from .writeonly import WriteOnlyHistory
-from .writeonly import WriteOnlyLoader
 from .. import util
 from ..engine import result
 
@@ -61,7 +61,7 @@ _T = TypeVar("_T", bound=Any)
 class DynamicCollectionHistory(WriteOnlyHistory[_T]):
     def __init__(
         self,
-        attr: DynamicAttributeImpl,
+        attr: _DynamicAttributeImpl,
         state: InstanceState[_T],
         passive: PassiveFlag,
         apply_to: Optional[DynamicCollectionHistory[_T]] = None,
@@ -79,10 +79,10 @@ class DynamicCollectionHistory(WriteOnlyHistory[_T]):
             self._reconcile_collection = False
 
 
-class DynamicAttributeImpl(WriteOnlyAttributeImpl):
+class _DynamicAttributeImpl(_WriteOnlyAttributeImpl):
     _supports_dynamic_iteration = True
     collection_history_cls = DynamicCollectionHistory[Any]
-    query_class: Type[AppenderMixin[Any]]  # type: ignore[assignment]
+    query_class: Type[_AppenderMixin[Any]]  # type: ignore[assignment]
 
     def __init__(
         self,
@@ -91,10 +91,10 @@ class DynamicAttributeImpl(WriteOnlyAttributeImpl):
         dispatch: _Dispatch[QueryableAttribute[Any]],
         target_mapper: Mapper[_T],
         order_by: _RelationshipOrderByArg,
-        query_class: Optional[Type[AppenderMixin[_T]]] = None,
+        query_class: Optional[Type[_AppenderMixin[_T]]] = None,
         **kw: Any,
     ) -> None:
-        attributes.AttributeImpl.__init__(
+        attributes._AttributeImpl.__init__(
             self, class_, key, None, dispatch, **kw
         )
         self.target_mapper = target_mapper
@@ -102,18 +102,18 @@ class DynamicAttributeImpl(WriteOnlyAttributeImpl):
             self.order_by = tuple(order_by)
         if not query_class:
             self.query_class = AppenderQuery
-        elif AppenderMixin in query_class.mro():
+        elif _AppenderMixin in query_class.mro():
             self.query_class = query_class
         else:
             self.query_class = mixin_user_query(query_class)
 
 
 @relationships.RelationshipProperty.strategy_for(lazy="dynamic")
-class DynaLoader(WriteOnlyLoader):
-    impl_class = DynamicAttributeImpl
+class _DynaLoader(_WriteOnlyLoader):
+    impl_class = _DynamicAttributeImpl
 
 
-class AppenderMixin(AbstractCollectionWriter[_T]):
+class _AppenderMixin(_AbstractCollectionWriter[_T]):
     """A mixin that expects to be mixing in a Query class with
     AbstractAppender.
 
@@ -124,7 +124,7 @@ class AppenderMixin(AbstractCollectionWriter[_T]):
     _order_by_clauses: Tuple[ColumnElement[Any], ...]
 
     def __init__(
-        self, attr: DynamicAttributeImpl, state: InstanceState[_T]
+        self, attr: _DynamicAttributeImpl, state: InstanceState[_T]
     ) -> None:
         Query.__init__(
             self,  # type: ignore[arg-type]
@@ -283,7 +283,7 @@ class AppenderMixin(AbstractCollectionWriter[_T]):
         self._remove_impl(item)
 
 
-class AppenderQuery(AppenderMixin[_T], Query[_T]):  # type: ignore[misc]
+class AppenderQuery(_AppenderMixin[_T], Query[_T]):  # type: ignore[misc]
     """A dynamic query that supports basic collection storage operations.
 
     Methods on :class:`.AppenderQuery` include all methods of
@@ -294,7 +294,7 @@ class AppenderQuery(AppenderMixin[_T], Query[_T]):  # type: ignore[misc]
     """
 
 
-def mixin_user_query(cls: Any) -> type[AppenderMixin[Any]]:
+def mixin_user_query(cls: Any) -> type[_AppenderMixin[Any]]:
     """Return a new class with AppenderQuery functionality layered over."""
     name = "Appender" + cls.__name__
-    return type(name, (AppenderMixin, cls), {"query_class": cls})
+    return type(name, (_AppenderMixin, cls), {"query_class": cls})
diff --git a/lib/sqlalchemy/orm/identity.py b/lib/sqlalchemy/orm/identity.py
index 23682f7ef22ff8dd549761bec78e14b146299e12..d7c97c6b9e543f154aac9e53dbd21c1356a5fe40 100644 (file)
@@ -123,7 +123,7 @@ class IdentityMap:
         return len(self._dict)
 
 
-class WeakInstanceDict(IdentityMap):
+class _WeakInstanceDict(IdentityMap):
     _dict: Dict[_IdentityKeyType[Any], InstanceState[Any]]
 
     def __getitem__(self, key: _IdentityKeyType[_O]) -> _O:
diff --git a/lib/sqlalchemy/orm/instrumentation.py b/lib/sqlalchemy/orm/instrumentation.py
index 8104e3cabd22b92d9ab01efa44989e1a3d7d988f..5f50031cac4250c77d315051a5ed243d665417b9 100644 (file)
@@ -65,7 +65,7 @@ from ..util.typing import Literal
 
 if TYPE_CHECKING:
     from ._typing import _RegistryType
-    from .attributes import AttributeImpl
+    from .attributes import _AttributeImpl
     from .attributes import QueryableAttribute
     from .collections import _AdaptedCollectionProtocol
     from .collections import _CollectionFactoryType
@@ -469,7 +469,7 @@ class ClassManager(
     def instrument_collection_class(
         self, key: str, collection_class: Type[Collection[Any]]
     ) -> _CollectionFactoryType:
-        return collections.prepare_instrumentation(collection_class)
+        return collections._prepare_instrumentation(collection_class)
 
     def initialize_collection(
         self,
@@ -489,7 +489,7 @@ class ClassManager(
         else:
             return key in self.local_attrs
 
-    def get_impl(self, key: str) -> AttributeImpl:
+    def get_impl(self, key: str) -> _AttributeImpl:
         return self[key].impl
 
     @property
diff --git a/lib/sqlalchemy/orm/interfaces.py b/lib/sqlalchemy/orm/interfaces.py
index a9d7207d5d5c56a81848f1a833ef6a7ec3460c5d..4ad141288900b9ebe4b877952ca572bc6db6f423 100644 (file)
@@ -85,13 +85,13 @@ if typing.TYPE_CHECKING:
     from .attributes import InstrumentedAttribute
     from .base import Mapped
     from .context import _MapperEntity
-    from .context import ORMCompileState
+    from .context import _ORMCompileState
     from .context import QueryContext
     from .decl_api import RegistryType
     from .decl_base import _ClassScanMapperConfig
     from .loading import _PopulatorDict
     from .mapper import Mapper
-    from .path_registry import AbstractEntityRegistry
+    from .path_registry import _AbstractEntityRegistry
     from .query import Query
     from .session import Session
     from .state import InstanceState
@@ -479,9 +479,9 @@ class MapperProperty(
 
     def setup(
         self,
-        context: ORMCompileState,
+        context: _ORMCompileState,
         query_entity: _MapperEntity,
-        path: AbstractEntityRegistry,
+        path: _AbstractEntityRegistry,
         adapter: Optional[ORMAdapter],
         **kwargs: Any,
     ) -> None:
@@ -495,9 +495,9 @@ class MapperProperty(
 
     def create_row_processor(
         self,
-        context: ORMCompileState,
+        context: _ORMCompileState,
         query_entity: _MapperEntity,
-        path: AbstractEntityRegistry,
+        path: _AbstractEntityRegistry,
         mapper: Mapper[Any],
         result: Result[Unpack[TupleAny]],
         adapter: Optional[ORMAdapter],
@@ -1005,7 +1005,7 @@ class StrategizedProperty(MapperProperty[_T]):
         )
 
     def _get_context_loader(
-        self, context: ORMCompileState, path: AbstractEntityRegistry
+        self, context: _ORMCompileState, path: _AbstractEntityRegistry
     ) -> Optional[_LoadElement]:
         load: Optional[_LoadElement] = None
 
@@ -1047,9 +1047,9 @@ class StrategizedProperty(MapperProperty[_T]):
 
     def setup(
         self,
-        context: ORMCompileState,
+        context: _ORMCompileState,
         query_entity: _MapperEntity,
-        path: AbstractEntityRegistry,
+        path: _AbstractEntityRegistry,
         adapter: Optional[ORMAdapter],
         **kwargs: Any,
     ) -> None:
@@ -1064,9 +1064,9 @@ class StrategizedProperty(MapperProperty[_T]):
 
     def create_row_processor(
         self,
-        context: ORMCompileState,
+        context: _ORMCompileState,
         query_entity: _MapperEntity,
-        path: AbstractEntityRegistry,
+        path: _AbstractEntityRegistry,
         mapper: Mapper[Any],
         result: Result[Unpack[TupleAny]],
         adapter: Optional[ORMAdapter],
@@ -1259,7 +1259,7 @@ class CompileStateOption(HasCacheKey, ORMOption):
 
     _is_compile_state = True
 
-    def process_compile_state(self, compile_state: ORMCompileState) -> None:
+    def process_compile_state(self, compile_state: _ORMCompileState) -> None:
         """Apply a modification to a given :class:`.ORMCompileState`.
 
         This method is part of the implementation of a particular
@@ -1270,7 +1270,7 @@ class CompileStateOption(HasCacheKey, ORMOption):
 
     def process_compile_state_replaced_entities(
         self,
-        compile_state: ORMCompileState,
+        compile_state: _ORMCompileState,
         mapper_entities: Sequence[_MapperEntity],
     ) -> None:
         """Apply a modification to a given :class:`.ORMCompileState`,
@@ -1297,7 +1297,7 @@ class LoaderOption(CompileStateOption):
 
     def process_compile_state_replaced_entities(
         self,
-        compile_state: ORMCompileState,
+        compile_state: _ORMCompileState,
         mapper_entities: Sequence[_MapperEntity],
     ) -> None:
         self.process_compile_state(compile_state)
@@ -1436,9 +1436,9 @@ class LoaderStrategy:
 
     def setup_query(
         self,
-        compile_state: ORMCompileState,
+        compile_state: _ORMCompileState,
         query_entity: _MapperEntity,
-        path: AbstractEntityRegistry,
+        path: _AbstractEntityRegistry,
         loadopt: Optional[_LoadElement],
         adapter: Optional[ORMAdapter],
         **kwargs: Any,
@@ -1454,9 +1454,9 @@ class LoaderStrategy:
 
     def create_row_processor(
         self,
-        context: ORMCompileState,
+        context: _ORMCompileState,
         query_entity: _MapperEntity,
-        path: AbstractEntityRegistry,
+        path: _AbstractEntityRegistry,
         loadopt: Optional[_LoadElement],
         mapper: Mapper[Any],
         result: Result[Unpack[TupleAny]],
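Note on the interfaces.py hunks above: the MapperProperty / StrategizedProperty / LoaderStrategy hook signatures keep their shape; only the annotated types gained underscores. A third-party property that wants to stay type-checked can guard the now-private imports under TYPE_CHECKING. A minimal sketch, where MyProperty and its no-op setup() are hypothetical:

    from __future__ import annotations

    from typing import Any, TYPE_CHECKING

    from sqlalchemy.orm.interfaces import MapperProperty

    if TYPE_CHECKING:
        # private as of this change; imported for annotations only,
        # at the extension's own risk
        from sqlalchemy.orm.context import _ORMCompileState
        from sqlalchemy.orm.path_registry import _AbstractEntityRegistry


    class MyProperty(MapperProperty[Any]):
        """Hypothetical third-party property; hook signature unchanged."""

        def setup(
            self,
            context: _ORMCompileState,
            query_entity: Any,
            path: _AbstractEntityRegistry,
            adapter: Any,
            **kwargs: Any,
        ) -> None:
            pass  # no per-query setup in this sketch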
diff --git a/lib/sqlalchemy/orm/loading.py b/lib/sqlalchemy/orm/loading.py
index b79bb5fb6fb12974cb9d2633b8d47b354eead7de..8fcc89e7359bb2ee834feda8a85fb6a8d1caa737 100644 (file)

@@ -36,8 +36,8 @@ from .base import _DEFER_FOR_STATE
 from .base import _RAISE_FOR_STATE
 from .base import _SET_DEFERRED_EXPIRED
 from .base import PassiveFlag
+from .context import _ORMCompileState
 from .context import FromStatement
-from .context import ORMCompileState
 from .context import QueryContext
 from .util import _none_set
 from .util import state_str
@@ -323,7 +323,7 @@ def merge_frozen_result(session, statement, frozen_result, load=True):
         # flush current contents if we expect to load data
         session._autoflush()
 
-    ctx = querycontext.ORMSelectCompileState._create_entities_collection(
+    ctx = querycontext._ORMSelectCompileState._create_entities_collection(
         statement, legacy=False
     )
 
@@ -393,7 +393,7 @@ def merge_result(
     else:
         frozen_result = None
 
-    ctx = querycontext.ORMSelectCompileState._create_entities_collection(
+    ctx = querycontext._ORMSelectCompileState._create_entities_collection(
         query, legacy=True
     )
 
@@ -488,7 +488,7 @@ def get_from_identity(
         return None
 
 
-def load_on_ident(
+def _load_on_ident(
     session: Session,
     statement: Union[Select, FromStatement],
     key: Optional[_IdentityKeyType],
@@ -510,7 +510,7 @@ def load_on_ident(
     else:
         ident = identity_token = None
 
-    return load_on_pk_identity(
+    return _load_on_pk_identity(
         session,
         statement,
         ident,
@@ -527,7 +527,7 @@ def load_on_ident(
     )
 
 
-def load_on_pk_identity(
+def _load_on_pk_identity(
     session: Session,
     statement: Union[Select, FromStatement],
     primary_key_identity: Optional[Tuple[Any, ...]],
@@ -557,7 +557,7 @@ def load_on_pk_identity(
         statement._compile_options
         is SelectState.default_select_compile_options
     ):
-        compile_options = ORMCompileState.default_compile_options
+        compile_options = _ORMCompileState.default_compile_options
     else:
         compile_options = statement._compile_options
 
@@ -1027,7 +1027,7 @@ def _instance_processor(
                 _load_supers = [selectin_load_via]
 
             for _selectinload_entity in _load_supers:
-                if PostLoad.path_exists(
+                if _PostLoad.path_exists(
                     context, load_path, _selectinload_entity
                 ):
                     continue
@@ -1038,7 +1038,7 @@ def _instance_processor(
                     _polymorphic_from,
                     option_entities,
                 )
-                PostLoad.callable_for_path(
+                _PostLoad.callable_for_path(
                     context,
                     load_path,
                     _selectinload_entity.mapper,
@@ -1047,7 +1047,7 @@ def _instance_processor(
                     _selectinload_entity,
                 )
 
-    post_load = PostLoad.for_context(context, load_path, only_load_props)
+    post_load = _PostLoad.for_context(context, load_path, only_load_props)
 
     if refresh_state:
         refresh_identity_key = refresh_state.key
@@ -1526,7 +1526,7 @@ def _decorate_polymorphic_switch(
     return polymorphic_instance
 
 
-class PostLoad:
+class _PostLoad:
     """Track loaders and states for "post load" operations."""
 
     __slots__ = "loaders", "states", "load_keys"
@@ -1587,7 +1587,7 @@ class PostLoad:
         if path.path in context.post_load_paths:
             pl = context.post_load_paths[path.path]
         else:
-            pl = context.post_load_paths[path.path] = PostLoad()
+            pl = context.post_load_paths[path.path] = _PostLoad()
         pl.loaders[token] = (
             context,
             token,
@@ -1598,7 +1598,7 @@ class PostLoad:
         )
 
 
-def load_scalar_attributes(mapper, state, attribute_names, passive):
+def _load_scalar_attributes(mapper, state, attribute_names, passive):
     """initiate a column-based attribute refresh operation."""
 
     # assert mapper is _state_mapper(state)
@@ -1630,7 +1630,7 @@ def load_scalar_attributes(mapper, state, attribute_names, passive):
             # columns needed already, this implicitly undefers that column
             stmt = FromStatement(mapper, statement)
 
-            return load_on_ident(
+            return _load_on_ident(
                 session,
                 stmt,
                 None,
@@ -1671,7 +1671,7 @@ def load_scalar_attributes(mapper, state, attribute_names, passive):
         )
         return
 
-    result = load_on_ident(
+    result = _load_on_ident(
         session,
         select(mapper).set_label_style(LABEL_STYLE_TABLENAME_PLUS_COL),
         identity_key,
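loading.load_on_ident() and load_on_pk_identity() were already private by convention and are now private by name. For code that nonetheless called them, a version-tolerant lookup is one way to bridge 2.0 and 2.1; a sketch, not an endorsement of relying on these internals:

    from sqlalchemy.orm import loading

    # prefer the 2.1 underscored name, fall back to the 2.0 spelling
    _load_on_pk_identity = getattr(
        loading, "_load_on_pk_identity", None
    ) or loading.load_on_pk_identity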
diff --git a/lib/sqlalchemy/orm/mapper.py b/lib/sqlalchemy/orm/mapper.py
index 53d2fa40eade1bc569bac605916d3b685cc927f4..d22878e7d792aeb3b2b1041aa32180e72e48bcda 100644 (file)
@@ -98,12 +98,12 @@ if TYPE_CHECKING:
     from ._typing import _ORMColumnExprArgument
     from ._typing import _RegistryType
     from .decl_api import registry
-    from .dependency import DependencyProcessor
+    from .dependency import _DependencyProcessor
     from .descriptor_props import CompositeProperty
     from .descriptor_props import SynonymProperty
     from .events import MapperEvents
     from .instrumentation import ClassManager
-    from .path_registry import CachingEntityRegistry
+    from .path_registry import _CachingEntityRegistry
     from .properties import ColumnProperty
     from .relationships import RelationshipProperty
     from .state import InstanceState
@@ -919,7 +919,7 @@ class Mapper(
     _identity_class: Type[_O]
 
     _delete_orphans: List[Tuple[str, Type[Any]]]
-    _dependency_processors: List[DependencyProcessor]
+    _dependency_processors: List[_DependencyProcessor]
     _memoized_values: Dict[Any, Callable[[], Any]]
     _inheriting_mappers: util.WeakSequence[Mapper[Any]]
     _all_tables: Set[TableClause]
@@ -1192,7 +1192,7 @@ class Mapper(
         return self.persist_selectable
 
     @util.memoized_property
-    def _path_registry(self) -> CachingEntityRegistry:
+    def _path_registry(self) -> _CachingEntityRegistry:
         return PathRegistry.per_mapper(self)
 
     def _configure_inheritance(self):
@@ -1517,7 +1517,7 @@ class Mapper(
             self.class_,
             mapper=self,
             expired_attribute_loader=util.partial(
-                loading.load_scalar_attributes, self
+                loading._load_scalar_attributes, self
             ),
             # finalize flag means instrument the __init__ method
             # and call the class_instrument event
diff --git a/lib/sqlalchemy/orm/path_registry.py b/lib/sqlalchemy/orm/path_registry.py
index 4ee8ac71b84d6b872cfc317e4d844cdd409bd965..2fc632aaea430257455efab45f287d6fc8a728e9 100644 (file)
@@ -47,7 +47,9 @@ if TYPE_CHECKING:
 
     def is_root(path: PathRegistry) -> TypeGuard[RootRegistry]: ...
 
-    def is_entity(path: PathRegistry) -> TypeGuard[AbstractEntityRegistry]: ...
+    def is_entity(
+        path: PathRegistry,
+    ) -> TypeGuard[_AbstractEntityRegistry]: ...
 
 else:
     is_root = operator.attrgetter("is_root")
@@ -183,7 +185,7 @@ class PathRegistry(HasCacheKey):
         return id(self)
 
     @overload
-    def __getitem__(self, entity: _StrPathToken) -> TokenRegistry: ...
+    def __getitem__(self, entity: _StrPathToken) -> _TokenRegistry: ...
 
     @overload
     def __getitem__(self, entity: int) -> _PathElementType: ...
@@ -194,12 +196,12 @@ class PathRegistry(HasCacheKey):
     @overload
     def __getitem__(
         self, entity: _InternalEntityType[Any]
-    ) -> AbstractEntityRegistry: ...
+    ) -> _AbstractEntityRegistry: ...
 
     @overload
     def __getitem__(
         self, entity: StrategizedProperty[Any]
-    ) -> PropRegistry: ...
+    ) -> _PropRegistry: ...
 
     def __getitem__(
         self,
@@ -211,11 +213,11 @@ class PathRegistry(HasCacheKey):
             StrategizedProperty[Any],
         ],
     ) -> Union[
-        TokenRegistry,
+        _TokenRegistry,
         _PathElementType,
         _PathRepresentation,
-        PropRegistry,
-        AbstractEntityRegistry,
+        _PropRegistry,
+        _AbstractEntityRegistry,
     ]:
         raise NotImplementedError()
 
@@ -315,20 +317,20 @@ class PathRegistry(HasCacheKey):
 
     @overload
     @classmethod
-    def per_mapper(cls, mapper: Mapper[Any]) -> CachingEntityRegistry: ...
+    def per_mapper(cls, mapper: Mapper[Any]) -> _CachingEntityRegistry: ...
 
     @overload
     @classmethod
-    def per_mapper(cls, mapper: AliasedInsp[Any]) -> SlotsEntityRegistry: ...
+    def per_mapper(cls, mapper: AliasedInsp[Any]) -> _SlotsEntityRegistry: ...
 
     @classmethod
     def per_mapper(
         cls, mapper: _InternalEntityType[Any]
-    ) -> AbstractEntityRegistry:
+    ) -> _AbstractEntityRegistry:
         if mapper.is_mapper:
-            return CachingEntityRegistry(cls.root, mapper)
+            return _CachingEntityRegistry(cls.root, mapper)
         else:
-            return SlotsEntityRegistry(cls.root, mapper)
+            return _SlotsEntityRegistry(cls.root, mapper)
 
     @classmethod
     def coerce(cls, raw: _PathRepresentation) -> PathRegistry:
@@ -351,22 +353,22 @@ class PathRegistry(HasCacheKey):
         return f"{self.__class__.__name__}({self.path!r})"
 
 
-class CreatesToken(PathRegistry):
+class _CreatesToken(PathRegistry):
     __slots__ = ()
 
     is_aliased_class: bool
     is_root: bool
 
-    def token(self, token: _StrPathToken) -> TokenRegistry:
+    def token(self, token: _StrPathToken) -> _TokenRegistry:
         if token.endswith(f":{_WILDCARD_TOKEN}"):
-            return TokenRegistry(self, token)
+            return _TokenRegistry(self, token)
         elif token.endswith(f":{_DEFAULT_TOKEN}"):
-            return TokenRegistry(self.root, token)
+            return _TokenRegistry(self.root, token)
         else:
             raise exc.ArgumentError(f"invalid token: {token}")
 
 
-class RootRegistry(CreatesToken):
+class RootRegistry(_CreatesToken):
     """Root registry, defers to mappers so that
     paths are maintained per-root-mapper.
 
@@ -384,11 +386,11 @@ class RootRegistry(CreatesToken):
 
     def _getitem(
         self, entity: Any
-    ) -> Union[TokenRegistry, AbstractEntityRegistry]:
+    ) -> Union[_TokenRegistry, _AbstractEntityRegistry]:
         if entity in PathToken._intern:
             if TYPE_CHECKING:
                 assert isinstance(entity, _StrPathToken)
-            return TokenRegistry(self, PathToken._intern[entity])
+            return _TokenRegistry(self, PathToken._intern[entity])
         else:
             try:
                 return entity._path_registry  # type: ignore
@@ -430,15 +432,15 @@ class PathToken(orm_base.InspectionAttr, HasCacheKey, str):
             return result
 
 
-class TokenRegistry(PathRegistry):
+class _TokenRegistry(PathRegistry):
     __slots__ = ("token", "parent", "path", "natural_path")
 
     inherit_cache = True
 
     token: _StrPathToken
-    parent: CreatesToken
+    parent: _CreatesToken
 
-    def __init__(self, parent: CreatesToken, token: _StrPathToken):
+    def __init__(self, parent: _CreatesToken, token: _StrPathToken):
         token = PathToken.intern(token)
 
         self.token = token
@@ -458,10 +460,10 @@ class TokenRegistry(PathRegistry):
             return
 
         if TYPE_CHECKING:
-            assert isinstance(parent, AbstractEntityRegistry)
+            assert isinstance(parent, _AbstractEntityRegistry)
         if not parent.is_aliased_class:
             for mp_ent in parent.mapper.iterate_to_root():
-                yield TokenRegistry(parent.parent[mp_ent], self.token)
+                yield _TokenRegistry(parent.parent[mp_ent], self.token)
         elif (
             parent.is_aliased_class
             and cast(
@@ -473,7 +475,7 @@ class TokenRegistry(PathRegistry):
             for ent in cast(
                 "AliasedInsp[Any]", parent.entity
             )._with_polymorphic_entities:
-                yield TokenRegistry(parent.parent[ent], self.token)
+                yield _TokenRegistry(parent.parent[ent], self.token)
         else:
             yield self
 
@@ -486,9 +488,11 @@ class TokenRegistry(PathRegistry):
             return
 
         if TYPE_CHECKING:
-            assert isinstance(parent, AbstractEntityRegistry)
+            assert isinstance(parent, _AbstractEntityRegistry)
         for mp_ent in parent.mapper.iterate_to_root():
-            yield TokenRegistry(parent.parent[mp_ent], self.token).natural_path
+            yield _TokenRegistry(
+                parent.parent[mp_ent], self.token
+            ).natural_path
         if (
             parent.is_aliased_class
             and cast(
@@ -501,7 +505,7 @@ class TokenRegistry(PathRegistry):
                 "AliasedInsp[Any]", parent.entity
             )._with_polymorphic_entities:
                 yield (
-                    TokenRegistry(parent.parent[ent], self.token).natural_path
+                    _TokenRegistry(parent.parent[ent], self.token).natural_path
                 )
         else:
             yield self.natural_path
@@ -516,7 +520,7 @@ class TokenRegistry(PathRegistry):
         __getitem__ = _getitem
 
 
-class PropRegistry(PathRegistry):
+class _PropRegistry(PathRegistry):
     __slots__ = (
         "prop",
         "parent",
@@ -538,13 +542,13 @@ class PropRegistry(PathRegistry):
     entity: Optional[_InternalEntityType[Any]]
 
     def __init__(
-        self, parent: AbstractEntityRegistry, prop: StrategizedProperty[Any]
+        self, parent: _AbstractEntityRegistry, prop: StrategizedProperty[Any]
     ):
 
         # restate this path in terms of the
         # given StrategizedProperty's parent.
         insp = cast("_InternalEntityType[Any]", parent[-1])
-        natural_parent: AbstractEntityRegistry = parent
+        natural_parent: _AbstractEntityRegistry = parent
 
         # inherit "is_unnatural" from the parent
         self.is_unnatural = parent.parent.is_unnatural or bool(
@@ -627,7 +631,7 @@ class PropRegistry(PathRegistry):
         self._default_path_loader_key = self.prop._default_path_loader_key
         self._loader_key = ("loader", self.natural_path)
 
-    def _truncate_recursive(self) -> PropRegistry:
+    def _truncate_recursive(self) -> _PropRegistry:
         earliest = None
         for i, token in enumerate(reversed(self.path[:-1])):
             if token is self.prop:
@@ -639,23 +643,23 @@ class PropRegistry(PathRegistry):
             return self.coerce(self.path[0 : -(earliest + 1)])  # type: ignore
 
     @property
-    def entity_path(self) -> AbstractEntityRegistry:
+    def entity_path(self) -> _AbstractEntityRegistry:
         assert self.entity is not None
         return self[self.entity]
 
     def _getitem(
         self, entity: Union[int, slice, _InternalEntityType[Any]]
-    ) -> Union[AbstractEntityRegistry, _PathElementType, _PathRepresentation]:
+    ) -> Union[_AbstractEntityRegistry, _PathElementType, _PathRepresentation]:
         if isinstance(entity, (int, slice)):
             return self.path[entity]
         else:
-            return SlotsEntityRegistry(self, entity)
+            return _SlotsEntityRegistry(self, entity)
 
     if not TYPE_CHECKING:
         __getitem__ = _getitem
 
 
-class AbstractEntityRegistry(CreatesToken):
+class _AbstractEntityRegistry(_CreatesToken):
     __slots__ = (
         "key",
         "parent",
@@ -668,14 +672,14 @@ class AbstractEntityRegistry(CreatesToken):
     has_entity = True
     is_entity = True
 
-    parent: Union[RootRegistry, PropRegistry]
+    parent: Union[RootRegistry, _PropRegistry]
     key: _InternalEntityType[Any]
     entity: _InternalEntityType[Any]
     is_aliased_class: bool
 
     def __init__(
         self,
-        parent: Union[RootRegistry, PropRegistry],
+        parent: Union[RootRegistry, _PropRegistry],
         entity: _InternalEntityType[Any],
     ):
         self.key = entity
@@ -719,7 +723,7 @@ class AbstractEntityRegistry(CreatesToken):
         else:
             self.natural_path = self.path
 
-    def _truncate_recursive(self) -> AbstractEntityRegistry:
+    def _truncate_recursive(self) -> _AbstractEntityRegistry:
         return self.parent._truncate_recursive()[self.entity]
 
     @property
@@ -743,31 +747,31 @@ class AbstractEntityRegistry(CreatesToken):
         if isinstance(entity, (int, slice)):
             return self.path[entity]
         elif entity in PathToken._intern:
-            return TokenRegistry(self, PathToken._intern[entity])
+            return _TokenRegistry(self, PathToken._intern[entity])
         else:
-            return PropRegistry(self, entity)
+            return _PropRegistry(self, entity)
 
     if not TYPE_CHECKING:
         __getitem__ = _getitem
 
 
-class SlotsEntityRegistry(AbstractEntityRegistry):
+class _SlotsEntityRegistry(_AbstractEntityRegistry):
     # for aliased class, return lightweight, no-cycles created
     # version
     inherit_cache = True
 
 
 class _ERDict(Dict[Any, Any]):
-    def __init__(self, registry: CachingEntityRegistry):
+    def __init__(self, registry: _CachingEntityRegistry):
         self.registry = registry
 
-    def __missing__(self, key: Any) -> PropRegistry:
-        self[key] = item = PropRegistry(self.registry, key)
+    def __missing__(self, key: Any) -> _PropRegistry:
+        self[key] = item = _PropRegistry(self.registry, key)
 
         return item
 
 
-class CachingEntityRegistry(AbstractEntityRegistry):
+class _CachingEntityRegistry(_AbstractEntityRegistry):
     # for long lived mapper, return dict based caching
     # version that creates reference cycles
 
@@ -777,7 +781,7 @@ class CachingEntityRegistry(AbstractEntityRegistry):
 
     def __init__(
         self,
-        parent: Union[RootRegistry, PropRegistry],
+        parent: Union[RootRegistry, _PropRegistry],
         entity: _InternalEntityType[Any],
     ):
         super().__init__(parent, entity)
@@ -790,7 +794,7 @@ class CachingEntityRegistry(AbstractEntityRegistry):
         if isinstance(entity, (int, slice)):
             return self.path[entity]
         elif isinstance(entity, PathToken):
-            return TokenRegistry(self, entity)
+            return _TokenRegistry(self, entity)
         else:
             return self._cache[entity]
 
@@ -802,9 +806,9 @@ if TYPE_CHECKING:
 
     def path_is_entity(
         path: PathRegistry,
-    ) -> TypeGuard[AbstractEntityRegistry]: ...
+    ) -> TypeGuard[_AbstractEntityRegistry]: ...
 
-    def path_is_property(path: PathRegistry) -> TypeGuard[PropRegistry]: ...
+    def path_is_property(path: PathRegistry) -> TypeGuard[_PropRegistry]: ...
 
 else:
     path_is_entity = operator.attrgetter("is_entity")
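With the registry classes underscored, isinstance() checks against AbstractEntityRegistry or PropRegistry no longer resolve; the attribute-style flags the module itself relies on (is_root, is_entity, path_is_entity()) remain. A sketch, with a throwaway User mapping defined purely for illustration:

    from sqlalchemy import inspect
    from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column
    from sqlalchemy.orm.path_registry import PathRegistry


    class Base(DeclarativeBase):
        pass


    class User(Base):
        __tablename__ = "user_account"
        id: Mapped[int] = mapped_column(primary_key=True)


    path = PathRegistry.per_mapper(inspect(User))
    # duck-typed flags survive the renames; isinstance() against the
    # old public class names does not
    assert path.is_entity and not path.is_root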
diff --git a/lib/sqlalchemy/orm/persistence.py b/lib/sqlalchemy/orm/persistence.py
index 369fc59986cbb58aff321dda7022738afcfdaff6..464a0bcdc22f06fe1d94a13d0b973612fc0897ec 100644 (file)
@@ -37,7 +37,7 @@ from ..sql.elements import BooleanClauseList
 from ..sql.selectable import LABEL_STYLE_TABLENAME_PLUS_COL
 
 
-def save_obj(base_mapper, states, uowtransaction, single=False):
+def _save_obj(base_mapper, states, uowtransaction, single=False):
     """Issue ``INSERT`` and/or ``UPDATE`` statements for a list
     of objects.
 
@@ -51,7 +51,7 @@ def save_obj(base_mapper, states, uowtransaction, single=False):
     # if batch=false, call _save_obj separately for each object
     if not single and not base_mapper.batch:
         for state in _sort_states(base_mapper, states):
-            save_obj(base_mapper, [state], uowtransaction, single=True)
+            _save_obj(base_mapper, [state], uowtransaction, single=True)
         return
 
     states_to_update = []
@@ -120,7 +120,7 @@ def save_obj(base_mapper, states, uowtransaction, single=False):
     )
 
 
-def post_update(base_mapper, states, uowtransaction, post_update_cols):
+def _post_update(base_mapper, states, uowtransaction, post_update_cols):
     """Issue UPDATE statements on behalf of a relationship() which
     specifies post_update.
 
@@ -165,7 +165,7 @@ def post_update(base_mapper, states, uowtransaction, post_update_cols):
         )
 
 
-def delete_obj(base_mapper, states, uowtransaction):
+def _delete_obj(base_mapper, states, uowtransaction):
     """Issue ``DELETE`` statements for a list of objects.
 
     This is called within the context of a UOWTransaction during a
@@ -622,7 +622,7 @@ def _collect_update_commands(
             # occurs after the UPDATE is emitted however we invoke it here
             # explicitly in the absence of our invoking an UPDATE
             for m, equated_pairs in mapper._table_to_equated[table]:
-                sync.populate(
+                sync._populate(
                     state,
                     m,
                     state,
@@ -1551,7 +1551,7 @@ def _finalize_insert_update_commands(base_mapper, uowtransaction, states):
             stmt = future.select(mapper).set_label_style(
                 LABEL_STYLE_TABLENAME_PLUS_COL
             )
-            loading.load_on_ident(
+            loading._load_on_ident(
                 uowtransaction.session,
                 stmt,
                 state.key,
@@ -1720,7 +1720,7 @@ def _postfetch(
     # TODO: this still goes a little too often.  would be nice to
     # have definitive list of "columns that changed" here
     for m, equated_pairs in mapper._table_to_equated[table]:
-        sync.populate(
+        sync._populate(
             state,
             m,
             state,
@@ -1733,7 +1733,7 @@ def _postfetch(
 
 def _postfetch_bulk_save(mapper, dict_, table):
     for m, equated_pairs in mapper._table_to_equated[table]:
-        sync.bulk_populate_inherit_keys(dict_, m, equated_pairs)
+        sync._bulk_populate_inherit_keys(dict_, m, equated_pairs)
 
 
 def _connections_for_states(base_mapper, uowtransaction, states):
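save_obj(), post_update() and delete_obj() were never supported entry points, and the underscores make that explicit. Observing flush-time persistence from the outside goes through the event system instead; a sketch of the supported hook, with report_flush() as an illustrative name:

    from sqlalchemy import event
    from sqlalchemy.orm import Session


    @event.listens_for(Session, "after_flush")
    def report_flush(session, flush_context):
        # flush_context is the UOWTransaction that drives _save_obj()
        # and _delete_obj() internally
        print("flushed:", len(session.new), "new,",
              len(session.deleted), "deleted")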
diff --git a/lib/sqlalchemy/orm/properties.py b/lib/sqlalchemy/orm/properties.py
index 5c49222be1536900b4c12019489b4af496cf2f5c..4b17c0c5d3640ba19ccbf4d8c280f666a21cb444 100644 (file)
@@ -245,7 +245,7 @@ class ColumnProperty(
         strategies = util.preloaded.orm_strategies
         return state.InstanceState._instance_level_callable_processor(
             self.parent.class_manager,
-            strategies.LoadDeferredColumns(self.key),
+            strategies._LoadDeferredColumns(self.key),
             self.key,
         )
 
@@ -257,7 +257,7 @@ class ColumnProperty(
         strategies = util.preloaded.orm_strategies
         return state.InstanceState._instance_level_callable_processor(
             self.parent.class_manager,
-            strategies.LoadDeferredColumns(self.key, True),
+            strategies._LoadDeferredColumns(self.key, True),
             self.key,
         )
 
@@ -294,7 +294,7 @@ class ColumnProperty(
         if not self.instrument:
             return
 
-        attributes.register_descriptor(
+        attributes._register_descriptor(
             mapper.class_,
             self.key,
             comparator=self.comparator_factory(self, mapper),
diff --git a/lib/sqlalchemy/orm/query.py b/lib/sqlalchemy/orm/query.py
index fc1cf2b121111d85a484bd7f98e86c67c9b52eb6..8f58143e6145e5d26b19dc272c4255b84a8c6032 100644 (file)
@@ -49,8 +49,8 @@ from .base import _assertions
 from .context import _column_descriptions
 from .context import _determine_last_joined_entity
 from .context import _legacy_filter_by_entity_zero
+from .context import _ORMCompileState
 from .context import FromStatement
-from .context import ORMCompileState
 from .context import QueryContext
 from .interfaces import ORMColumnDescription
 from .interfaces import ORMColumnsClauseRole
@@ -209,7 +209,7 @@ class Query(
     _memoized_select_entities = ()
 
     _compile_options: Union[Type[CacheableOptions], CacheableOptions] = (
-        ORMCompileState.default_compile_options
+        _ORMCompileState.default_compile_options
     )
 
     _with_options: Tuple[ExecutableOption, ...]
@@ -1137,7 +1137,7 @@ class Query(
 
         # we still implement _get_impl() so that baked query can override
         # it
-        return self._get_impl(ident, loading.load_on_pk_identity)
+        return self._get_impl(ident, loading._load_on_pk_identity)
 
     def _get_impl(
         self,
@@ -3284,7 +3284,7 @@ class Query(
 
     def _compile_state(
         self, for_statement: bool = False, **kw: Any
-    ) -> ORMCompileState:
+    ) -> _ORMCompileState:
         """Create an out-of-compiler ORMCompileState object.
 
         The ORMCompileState object is normally created directly as a result
@@ -3309,8 +3309,8 @@ class Query(
         # query._statement is not None as we have the ORM Query here
         # however this is the more general path.
         compile_state_cls = cast(
-            ORMCompileState,
-            ORMCompileState._get_plugin_class_for_plugin(stmt, "orm"),
+            _ORMCompileState,
+            _ORMCompileState._get_plugin_class_for_plugin(stmt, "orm"),
         )
 
         return compile_state_cls.create_for_statement(stmt, None)
@@ -3345,7 +3345,7 @@ class AliasOption(interfaces.LoaderOption):
 
         """
 
-    def process_compile_state(self, compile_state: ORMCompileState) -> None:
+    def process_compile_state(self, compile_state: _ORMCompileState) -> None:
         pass
 
 
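Query.get() continues to route through _get_impl(), now handed the underscored loading._load_on_pk_identity; nothing changes at the public surface. A self-contained sketch against in-memory SQLite, with an illustrative Account model:

    from sqlalchemy import create_engine
    from sqlalchemy.orm import DeclarativeBase, Mapped, Session, mapped_column


    class Base(DeclarativeBase):
        pass


    class Account(Base):
        __tablename__ = "account"
        id: Mapped[int] = mapped_column(primary_key=True)


    engine = create_engine("sqlite://")
    Base.metadata.create_all(engine)

    with Session(engine) as session:
        session.add(Account(id=1))
        session.commit()
        # same public call; only the internal loader function was renamed
        assert session.get(Account, 1) is not None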
diff --git a/lib/sqlalchemy/orm/relationships.py b/lib/sqlalchemy/orm/relationships.py
index 49b7079936b2e3c117df13f57bac21f8b7c1b5f1..3a9c4d3ad84cc99fe4e48907e0d5a3ff908b8234 100644 (file)
@@ -107,12 +107,12 @@ if typing.TYPE_CHECKING:
     from .clsregistry import _class_resolver
     from .clsregistry import _ModNS
     from .decl_base import _ClassScanMapperConfig
-    from .dependency import DependencyProcessor
+    from .dependency import _DependencyProcessor
     from .mapper import Mapper
     from .query import Query
     from .session import Session
     from .state import InstanceState
-    from .strategies import LazyLoader
+    from .strategies import _LazyLoader
     from .util import AliasedClass
     from .util import AliasedInsp
     from ..sql._typing import _CoreAdapterProto
@@ -362,7 +362,7 @@ class RelationshipProperty(
 
     _overlaps: Sequence[str]
 
-    _lazy_strategy: LazyLoader
+    _lazy_strategy: _LazyLoader
 
     _persistence_only = dict(
         passive_deletes=False,
@@ -372,12 +372,12 @@ class RelationshipProperty(
         cascade_backrefs=False,
     )
 
-    _dependency_processor: Optional[DependencyProcessor] = None
+    _dependency_processor: Optional[_DependencyProcessor] = None
 
     primaryjoin: ColumnElement[bool]
     secondaryjoin: Optional[ColumnElement[bool]]
     secondary: Optional[FromClause]
-    _join_condition: JoinCondition
+    _join_condition: _JoinCondition
     order_by: _RelationshipOrderByArg
 
     _user_defined_foreign_keys: Set[ColumnElement[Any]]
@@ -559,7 +559,7 @@ class RelationshipProperty(
                 )
 
     def instrument_class(self, mapper: Mapper[Any]) -> None:
-        attributes.register_descriptor(
+        attributes._register_descriptor(
             mapper.class_,
             self.key,
             comparator=self.comparator_factory(self, mapper),
@@ -1687,7 +1687,7 @@ class RelationshipProperty(
         self._join_condition._warn_for_conflicting_sync_targets()
         super().do_init()
         self._lazy_strategy = cast(
-            "LazyLoader", self._get_strategy((("lazy", "select"),))
+            "_LazyLoader", self._get_strategy((("lazy", "select"),))
         )
 
     def _setup_registry_dependencies(self) -> None:
@@ -1921,7 +1921,7 @@ class RelationshipProperty(
         self.target = self.entity.persist_selectable
 
     def _setup_join_conditions(self) -> None:
-        self._join_condition = jc = JoinCondition(
+        self._join_condition = jc = _JoinCondition(
             parent_persist_selectable=self.parent.persist_selectable,
             child_persist_selectable=self.entity.persist_selectable,
             parent_local_selectable=self.parent.local_table,
@@ -2193,7 +2193,7 @@ class RelationshipProperty(
             self.uselist = self.direction is not MANYTOONE
         if not self.viewonly:
             self._dependency_processor = (  # type: ignore
-                dependency.DependencyProcessor.from_relationship
+                dependency._DependencyProcessor.from_relationship
             )(self)
 
     @util.memoized_property
@@ -2305,7 +2305,7 @@ def _annotate_columns(element: _CE, annotations: _AnnotationDict) -> _CE:
     return element
 
 
-class JoinCondition:
+class _JoinCondition:
     primaryjoin_initial: Optional[ColumnElement[bool]]
     primaryjoin: ColumnElement[bool]
     secondaryjoin: Optional[ColumnElement[bool]]
diff --git a/lib/sqlalchemy/orm/session.py b/lib/sqlalchemy/orm/session.py
index a23239e098ee55c35d3aeb6eb03e0556ef687015..eee6a433114576951fae9dd68bb3a08f4bc389b9 100644 (file)
@@ -58,8 +58,8 @@ from .base import object_mapper
 from .base import object_state
 from .base import PassiveFlag
 from .base import state_str
+from .context import _ORMCompileState
 from .context import FromStatement
-from .context import ORMCompileState
 from .identity import IdentityMap
 from .query import Query
 from .state import InstanceState
@@ -349,7 +349,7 @@ class ORMExecuteState(util.MemoizedSlots):
 
     """
 
-    _compile_state_cls: Optional[Type[ORMCompileState]]
+    _compile_state_cls: Optional[Type[_ORMCompileState]]
     _starting_event_idx: int
     _events_todo: List[Any]
     _update_execution_options: Optional[_ExecuteOptions]
@@ -361,7 +361,7 @@ class ORMExecuteState(util.MemoizedSlots):
         parameters: Optional[_CoreAnyExecuteParams],
         execution_options: _ExecuteOptions,
         bind_arguments: _BindArguments,
-        compile_state_cls: Optional[Type[ORMCompileState]],
+        compile_state_cls: Optional[Type[_ORMCompileState]],
         events_todo: List[_InstanceLevelDispatch[Session]],
     ):
         """Construct a new :class:`_orm.ORMExecuteState`.
@@ -655,8 +655,8 @@ class ORMExecuteState(util.MemoizedSlots):
         self,
     ) -> Optional[
         Union[
-            context.ORMCompileState.default_compile_options,
-            Type[context.ORMCompileState.default_compile_options],
+            context._ORMCompileState.default_compile_options,
+            Type[context._ORMCompileState.default_compile_options],
         ]
     ]:
         if not self.is_select:
@@ -667,7 +667,7 @@ class ORMExecuteState(util.MemoizedSlots):
             return None
 
         if opts is not None and opts.isinstance(
-            context.ORMCompileState.default_compile_options
+            context._ORMCompileState.default_compile_options
         ):
             return opts  # type: ignore
         else:
@@ -782,8 +782,8 @@ class ORMExecuteState(util.MemoizedSlots):
     def update_delete_options(
         self,
     ) -> Union[
-        bulk_persistence.BulkUDCompileState.default_update_options,
-        Type[bulk_persistence.BulkUDCompileState.default_update_options],
+        bulk_persistence._BulkUDCompileState.default_update_options,
+        Type[bulk_persistence._BulkUDCompileState.default_update_options],
     ]:
         """Return the update_delete_options that will be used for this
         execution."""
@@ -794,11 +794,11 @@ class ORMExecuteState(util.MemoizedSlots):
                 "statement so there are no update options."
             )
         uo: Union[
-            bulk_persistence.BulkUDCompileState.default_update_options,
-            Type[bulk_persistence.BulkUDCompileState.default_update_options],
+            bulk_persistence._BulkUDCompileState.default_update_options,
+            Type[bulk_persistence._BulkUDCompileState.default_update_options],
         ] = self.execution_options.get(
             "_sa_orm_update_options",
-            bulk_persistence.BulkUDCompileState.default_update_options,
+            bulk_persistence._BulkUDCompileState.default_update_options,
         )
         return uo
 
@@ -1747,7 +1747,7 @@ class Session(_SessionClassMethods, EventTarget):
             raise sa_exc.ArgumentError(
                 "autocommit=True is no longer supported"
             )
-        self.identity_map = identity.WeakInstanceDict()
+        self.identity_map = identity._WeakInstanceDict()
 
         if not future:
             raise sa_exc.ArgumentError(
@@ -2160,7 +2160,7 @@ class Session(_SessionClassMethods, EventTarget):
             )
             if TYPE_CHECKING:
                 assert isinstance(
-                    compile_state_cls, context.AbstractORMCompileState
+                    compile_state_cls, context._AbstractORMCompileState
                 )
         else:
             compile_state_cls = None
@@ -2602,7 +2602,7 @@ class Session(_SessionClassMethods, EventTarget):
 
         all_states = self.identity_map.all_states() + list(self._new)
         self.identity_map._kill()
-        self.identity_map = identity.WeakInstanceDict()
+        self.identity_map = identity._WeakInstanceDict()
         self._new = {}
         self._deleted = {}
 
@@ -3175,7 +3175,7 @@ class Session(_SessionClassMethods, EventTarget):
 
         stmt: Select[Unpack[TupleAny]] = sql.select(object_mapper(instance))
         if (
-            loading.load_on_ident(
+            loading._load_on_ident(
                 self,
                 stmt,
                 state.key,
@@ -3707,7 +3707,7 @@ class Session(_SessionClassMethods, EventTarget):
         return self._get_impl(
             entity,
             ident,
-            loading.load_on_pk_identity,
+            loading._load_on_pk_identity,
             options=options,
             populate_existing=populate_existing,
             with_for_update=with_for_update,
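ORMExecuteState itself remains public; only the compile-state and bulk-persistence classes behind its accessors moved to underscored names. A do_orm_execute hook can therefore key off the public flags rather than the private classes; a sketch:

    from sqlalchemy import event
    from sqlalchemy.orm import ORMExecuteState, Session


    @event.listens_for(Session, "do_orm_execute")
    def intercept(state: ORMExecuteState) -> None:
        # rely on the public is_select / is_update flags instead of
        # inspecting the private _ORMCompileState machinery
        if state.is_select:
            pass  # e.g. state.update_execution_options(populate_existing=True)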
diff --git a/lib/sqlalchemy/orm/state.py b/lib/sqlalchemy/orm/state.py
index 594f6837d518843ef6775f9ba47aab2e4a719a0c..da6dd456eff57356cbec9b551bd5e7fa4f9fbe5d 100644 (file)
@@ -53,7 +53,7 @@ if TYPE_CHECKING:
     from ._typing import _IdentityKeyType
     from ._typing import _InstanceDict
     from ._typing import _LoaderCallable
-    from .attributes import AttributeImpl
+    from .attributes import _AttributeImpl
     from .attributes import History
     from .base import PassiveFlag
     from .collections import _AdaptedCollectionProtocol
@@ -579,7 +579,7 @@ class InstanceState(interfaces.InspectionAttrInfo, Generic[_O]):
     def get_history(self, key: str, passive: PassiveFlag) -> History:
         return self.manager[key].impl.get_history(self, self.dict, passive)
 
-    def get_impl(self, key: str) -> AttributeImpl:
+    def get_impl(self, key: str) -> _AttributeImpl:
         return self.manager[key].impl
 
     def _get_pending_mutation(self, key: str) -> PendingCollection:
@@ -874,7 +874,7 @@ class InstanceState(interfaces.InspectionAttrInfo, Generic[_O]):
     def _modified_event(
         self,
         dict_: _InstanceDict,
-        attr: Optional[AttributeImpl],
+        attr: Optional[_AttributeImpl],
         previous: Any,
         collection: bool = False,
         is_userland: bool = False,
@@ -973,7 +973,9 @@ class InstanceState(interfaces.InspectionAttrInfo, Generic[_O]):
                 del self.callables[key]
 
     def _commit_all(
-        self, dict_: _InstanceDict, instance_dict: Optional[IdentityMap] = None
+        self,
+        dict_: _InstanceDict,
+        instance_dict: Optional[IdentityMap] = None,
     ) -> None:
         """commit all attributes unconditionally.
 
diff --git a/lib/sqlalchemy/orm/strategies.py b/lib/sqlalchemy/orm/strategies.py
index c89a12efd667b1db1f648d7715c9135d37eab779..e7f333690257c359b860d3d77a959af4bdab6ab1 100644 (file)
@@ -39,8 +39,8 @@ from .base import LoaderCallableStatus
 from .base import PASSIVE_OFF
 from .base import PassiveFlag
 from .context import _column_descriptions
-from .context import ORMCompileState
-from .context import ORMSelectCompileState
+from .context import _ORMCompileState
+from .context import _ORMSelectCompileState
 from .context import QueryContext
 from .interfaces import LoaderStrategy
 from .interfaces import StrategizedProperty
@@ -84,7 +84,7 @@ def _register_attribute(
     uselist = useobject and prop.uselist
 
     if useobject and prop.single_parent:
-        listen_hooks.append(single_parent_validator)
+        listen_hooks.append(_single_parent_validator)
 
     if prop.key in prop.parent.validators:
         fn, opts = prop.parent.validators[prop.key]
@@ -95,7 +95,7 @@ def _register_attribute(
         )
 
     if useobject:
-        listen_hooks.append(unitofwork.track_cascade_events)
+        listen_hooks.append(unitofwork._track_cascade_events)
 
     # need to assemble backref listeners
     # after the singleparentvalidator, mapper validator
@@ -103,7 +103,7 @@ def _register_attribute(
         backref = prop.back_populates
         if backref and prop._effective_sync_backref:
             listen_hooks.append(
-                lambda desc, prop: attributes.backref_listeners(
+                lambda desc, prop: attributes._backref_listeners(
                     desc, backref, uselist
                 )
             )
@@ -123,7 +123,7 @@ def _register_attribute(
         if prop is m._props.get(
             prop.key
         ) and not m.class_manager._attr_has_impl(prop.key):
-            desc = attributes.register_attribute_impl(
+            desc = attributes._register_attribute_impl(
                 m.class_,
                 prop.key,
                 parent_token=prop,
@@ -149,7 +149,7 @@ def _register_attribute(
 
 
 @properties.ColumnProperty.strategy_for(instrument=False, deferred=False)
-class UninstrumentedColumnLoader(LoaderStrategy):
+class _UninstrumentedColumnLoader(LoaderStrategy):
     """Represent a non-instrumented MapperProperty.
 
     The polymorphic_on argument of mapper() often results in this,
@@ -194,7 +194,7 @@ class UninstrumentedColumnLoader(LoaderStrategy):
 
 @log.class_logger
 @properties.ColumnProperty.strategy_for(instrument=True, deferred=False)
-class ColumnLoader(LoaderStrategy):
+class _ColumnLoader(LoaderStrategy):
     """Provide loading behavior for a :class:`.ColumnProperty`."""
 
     __slots__ = "columns", "is_composite"
@@ -286,7 +286,7 @@ class ColumnLoader(LoaderStrategy):
 
 @log.class_logger
 @properties.ColumnProperty.strategy_for(query_expression=True)
-class ExpressionColumnLoader(ColumnLoader):
+class _ExpressionColumnLoader(_ColumnLoader):
     def __init__(self, parent, strategy_key):
         super().__init__(parent, strategy_key)
 
@@ -379,7 +379,7 @@ class ExpressionColumnLoader(ColumnLoader):
     deferred=True, instrument=True, raiseload=True
 )
 @properties.ColumnProperty.strategy_for(do_nothing=True)
-class DeferredColumnLoader(LoaderStrategy):
+class _DeferredColumnLoader(LoaderStrategy):
     """Provide loading behavior for a deferred :class:`.ColumnProperty`."""
 
     __slots__ = "columns", "group", "raiseload"
@@ -522,7 +522,7 @@ class DeferredColumnLoader(LoaderStrategy):
                 p.key
                 for p in localparent.iterate_properties
                 if isinstance(p, StrategizedProperty)
-                and isinstance(p.strategy, DeferredColumnLoader)
+                and isinstance(p.strategy, _DeferredColumnLoader)
                 and p.group == self.group
             ]
         else:
@@ -542,7 +542,7 @@ class DeferredColumnLoader(LoaderStrategy):
         if self.raiseload:
             self._invoke_raise_load(state, passive, "raise")
 
-        loading.load_scalar_attributes(
+        loading._load_scalar_attributes(
             state.mapper, state, set(group), PASSIVE_OFF
         )
 
@@ -554,7 +554,7 @@ class DeferredColumnLoader(LoaderStrategy):
         )
 
 
-class LoadDeferredColumns:
+class _LoadDeferredColumns:
     """serializable loader object used by DeferredColumnLoader"""
 
     def __init__(self, key: str, raiseload: bool = False):
@@ -578,7 +578,7 @@ class LoadDeferredColumns:
         return strategy._load_for_state(state, passive)
 
 
-class AbstractRelationshipLoader(LoaderStrategy):
+class _AbstractRelationshipLoader(LoaderStrategy):
     """LoaderStratgies which deal with related objects."""
 
     __slots__ = "mapper", "target", "uselist", "entity"
@@ -617,7 +617,7 @@ class AbstractRelationshipLoader(LoaderStrategy):
 
 @log.class_logger
 @relationships.RelationshipProperty.strategy_for(do_nothing=True)
-class DoNothingLoader(LoaderStrategy):
+class _DoNothingLoader(LoaderStrategy):
     """Relationship loader that makes no change to the object's state.
 
     Compared to NoLoader, this loader does not initialize the
@@ -630,7 +630,7 @@ class DoNothingLoader(LoaderStrategy):
 @log.class_logger
 @relationships.RelationshipProperty.strategy_for(lazy="noload")
 @relationships.RelationshipProperty.strategy_for(lazy=None)
-class NoLoader(AbstractRelationshipLoader):
+class _NoLoader(_AbstractRelationshipLoader):
     """Provide loading behavior for a :class:`.Relationship`
     with "lazy=None".
 
@@ -674,8 +674,8 @@ class NoLoader(AbstractRelationshipLoader):
 @relationships.RelationshipProperty.strategy_for(lazy="raise")
 @relationships.RelationshipProperty.strategy_for(lazy="raise_on_sql")
 @relationships.RelationshipProperty.strategy_for(lazy="baked_select")
-class LazyLoader(
-    AbstractRelationshipLoader, util.MemoizedSlots, log.Identified
+class _LazyLoader(
+    _AbstractRelationshipLoader, util.MemoizedSlots, log.Identified
 ):
     """Provide loading behavior for a :class:`.Relationship`
     with "lazy=True", that is loads when first accessed.
@@ -1022,7 +1022,7 @@ class LazyLoader(
             _raw_columns=[clauseelement],
             _propagate_attrs=clauseelement._propagate_attrs,
             _label_style=LABEL_STYLE_TABLENAME_PLUS_COL,
-            _compile_options=ORMCompileState.default_compile_options,
+            _compile_options=_ORMCompileState.default_compile_options,
         )
         load_options = QueryContext.default_load_options
 
@@ -1076,7 +1076,7 @@ class LazyLoader(
             if self._raise_on_sql and not passive & PassiveFlag.NO_RAISE:
                 self._invoke_raise_load(state, passive, "raise_on_sql")
 
-            return loading.load_on_pk_identity(
+            return loading._load_on_pk_identity(
                 session,
                 stmt,
                 primary_key_identity,
@@ -1094,7 +1094,7 @@ class LazyLoader(
                 if (
                     rev.direction is interfaces.MANYTOONE
                     and rev._use_get
-                    and not isinstance(rev.strategy, LazyLoader)
+                    and not isinstance(rev.strategy, _LazyLoader)
                 ):
                     strategy_options.Load._construct_for_existing_path(
                         compile_context.compile_options._current_path[
@@ -1202,7 +1202,7 @@ class LazyLoader(
                 InstanceState._instance_level_callable_processor
             )(
                 mapper.class_manager,
-                LoadLazyAttribute(
+                _LoadLazyAttribute(
                     key,
                     self,
                     loadopt,
@@ -1232,7 +1232,7 @@ class LazyLoader(
             populators["new"].append((self.key, reset_for_lazy_callable))
 
 
-class LoadLazyAttribute:
+class _LoadLazyAttribute:
     """semi-serializable loader object used by LazyLoader
 
     Historically, this object would be carried along with instances that
@@ -1284,7 +1284,7 @@ class LoadLazyAttribute:
         )
 
 
-class PostLoader(AbstractRelationshipLoader):
+class _PostLoader(_AbstractRelationshipLoader):
     """A relationship loader that emits a second SELECT statement."""
 
     __slots__ = ()
@@ -1332,7 +1332,7 @@ class PostLoader(AbstractRelationshipLoader):
                     }
                 )
 
-        if loading.PostLoad.path_exists(
+        if loading._PostLoad.path_exists(
             context, effective_path, self.parent_property
         ):
             return effective_path, False, execution_options, recursion_depth
@@ -1361,7 +1361,7 @@ class PostLoader(AbstractRelationshipLoader):
 
 
 @relationships.RelationshipProperty.strategy_for(lazy="immediate")
-class ImmediateLoader(PostLoader):
+class _ImmediateLoader(_PostLoader):
     __slots__ = ("join_depth",)
 
     def __init__(self, parent, strategy_key):
@@ -1403,7 +1403,7 @@ class ImmediateLoader(PostLoader):
         else:
             flags = attributes.PASSIVE_OFF | PassiveFlag.NO_RAISE
 
-        loading.PostLoad.callable_for_path(
+        loading._PostLoad.callable_for_path(
             context,
             effective_path,
             self.parent,
@@ -1463,7 +1463,7 @@ class ImmediateLoader(PostLoader):
 
 @log.class_logger
 @relationships.RelationshipProperty.strategy_for(lazy="subquery")
-class SubqueryLoader(PostLoader):
+class _SubqueryLoader(_PostLoader):
     __slots__ = ("join_depth",)
 
     def __init__(self, parent, strategy_key):
@@ -1871,12 +1871,12 @@ class SubqueryLoader(PostLoader):
         # compiled query but swapping the params, seems only marginally
         # less time spent but more complicated
         orig_query = context.query._execution_options.get(
-            ("orig_query", SubqueryLoader), context.query
+            ("orig_query", _SubqueryLoader), context.query
         )
 
         # make a new compile_state for the query that's probably cached, but
         # we're sort of undoing a bit of that caching :(
-        compile_state_cls = ORMCompileState._get_plugin_class_for_plugin(
+        compile_state_cls = _ORMCompileState._get_plugin_class_for_plugin(
             orig_query, "orm"
         )
 
@@ -1933,7 +1933,7 @@ class SubqueryLoader(PostLoader):
         q._execution_options = context.query._execution_options.merge_with(
             context.execution_options,
             {
-                ("orig_query", SubqueryLoader): orig_query,
+                ("orig_query", _SubqueryLoader): orig_query,
                 ("subquery_paths", None): (subq_path, rewritten_path),
             },
         )
@@ -2002,7 +2002,7 @@ class SubqueryLoader(PostLoader):
         if not run_loader:
             return
 
-        if not isinstance(context.compile_state, ORMSelectCompileState):
+        if not isinstance(context.compile_state, _ORMSelectCompileState):
             # issue 7505 - subqueryload() in 1.3 and previous would silently
             # degrade for from_statement() without warning. this behavior
             # is restored here
@@ -2116,7 +2116,7 @@ class SubqueryLoader(PostLoader):
 @log.class_logger
 @relationships.RelationshipProperty.strategy_for(lazy="joined")
 @relationships.RelationshipProperty.strategy_for(lazy=False)
-class JoinedLoader(AbstractRelationshipLoader):
+class _JoinedLoader(_AbstractRelationshipLoader):
     """Provide loading behavior for a :class:`.Relationship`
     using joined eager loading.
 
@@ -2941,7 +2941,7 @@ class JoinedLoader(AbstractRelationshipLoader):
 
 @log.class_logger
 @relationships.RelationshipProperty.strategy_for(lazy="selectin")
-class SelectInLoader(PostLoader, util.MemoizedSlots):
+class _SelectInLoader(_PostLoader, util.MemoizedSlots):
     __slots__ = (
         "join_depth",
         "omit_join",
@@ -3119,7 +3119,7 @@ class SelectInLoader(PostLoader, util.MemoizedSlots):
         else:
             effective_entity = self.entity
 
-        loading.PostLoad.callable_for_path(
+        loading._PostLoad.callable_for_path(
             context,
             selectin_path,
             self.parent,
@@ -3212,7 +3212,7 @@ class SelectInLoader(PostLoader, util.MemoizedSlots):
         q = Select._create_raw_select(
             _raw_columns=[bundle_sql, entity_sql],
             _label_style=LABEL_STYLE_TABLENAME_PLUS_COL,
-            _compile_options=ORMCompileState.default_compile_options,
+            _compile_options=_ORMCompileState.default_compile_options,
             _propagate_attrs={
                 "compile_state_plugin": "orm",
                 "plugin_subject": effective_entity,
@@ -3447,7 +3447,7 @@ class SelectInLoader(PostLoader, util.MemoizedSlots):
                     )
 
 
-def single_parent_validator(desc, prop):
+def _single_parent_validator(desc, prop):
     def _do_check(state, value, oldvalue, initiator):
         if value is not None and initiator.key == prop.key:
             hasparent = initiator.hasparent(attributes.instance_state(value))
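Every built-in loader strategy is underscored here, while the strategy_for() registration decorator and the LoaderStrategy base in interfaces.py keep their public names. A purely hypothetical no-op strategy registered under a made-up lazy="noop_demo" key (custom strategies are not a documented extension point, so this is a sketch only):

    from sqlalchemy.orm.interfaces import LoaderStrategy
    from sqlalchemy.orm.relationships import RelationshipProperty


    @RelationshipProperty.strategy_for(lazy="noop_demo")
    class NoopDemoLoader(LoaderStrategy):
        """Hypothetical strategy that leaves attributes untouched."""

        __slots__ = ()

        def setup_query(self, *args, **kwargs):
            pass  # nothing to add to the SELECT

        def create_row_processor(self, *args, **kwargs):
            pass  # nothing to do per row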
diff --git a/lib/sqlalchemy/orm/strategy_options.py b/lib/sqlalchemy/orm/strategy_options.py
index d62fba989043e326963c727bfaa309c1479d6e97..2ecbe246290f1d89536493ac85b1d387f4d1c2d8 100644 (file)
@@ -35,13 +35,13 @@ from ._typing import insp_is_mapper_property
 from .attributes import QueryableAttribute
 from .base import InspectionAttr
 from .interfaces import LoaderOption
+from .path_registry import _AbstractEntityRegistry
 from .path_registry import _DEFAULT_TOKEN
 from .path_registry import _StrPathToken
+from .path_registry import _TokenRegistry
 from .path_registry import _WILDCARD_TOKEN
-from .path_registry import AbstractEntityRegistry
 from .path_registry import path_is_property
 from .path_registry import PathRegistry
-from .path_registry import TokenRegistry
 from .util import _orm_full_deannotate
 from .util import AliasedInsp
 from .. import exc as sa_exc
@@ -66,7 +66,7 @@ if typing.TYPE_CHECKING:
     from ._typing import _EntityType
     from ._typing import _InternalEntityType
     from .context import _MapperEntity
-    from .context import ORMCompileState
+    from .context import _ORMCompileState
     from .context import QueryContext
     from .interfaces import _StrategyKey
     from .interfaces import MapperProperty
@@ -913,7 +913,7 @@ class _AbstractLoad(traversals.GenerativeOnTraversal, LoaderOption):
 
     def process_compile_state_replaced_entities(
         self,
-        compile_state: ORMCompileState,
+        compile_state: _ORMCompileState,
         mapper_entities: Sequence[_MapperEntity],
     ) -> None:
         if not compile_state.compile_options._enable_eagerloads:
@@ -928,7 +928,7 @@ class _AbstractLoad(traversals.GenerativeOnTraversal, LoaderOption):
             not bool(compile_state.current_path),
         )
 
-    def process_compile_state(self, compile_state: ORMCompileState) -> None:
+    def process_compile_state(self, compile_state: _ORMCompileState) -> None:
         if not compile_state.compile_options._enable_eagerloads:
             return
 
@@ -941,7 +941,7 @@ class _AbstractLoad(traversals.GenerativeOnTraversal, LoaderOption):
 
     def _process(
         self,
-        compile_state: ORMCompileState,
+        compile_state: _ORMCompileState,
         mapper_entities: Sequence[_MapperEntity],
         raiseerr: bool,
     ) -> None:
@@ -1042,7 +1042,7 @@ class Load(_AbstractLoad):
 
     @classmethod
     def _construct_for_existing_path(
-        cls, path: AbstractEntityRegistry
+        cls, path: _AbstractEntityRegistry
     ) -> Load:
         load = cls.__new__(cls)
         load.path = path
@@ -1139,7 +1139,7 @@ class Load(_AbstractLoad):
 
     def _process(
         self,
-        compile_state: ORMCompileState,
+        compile_state: _ORMCompileState,
         mapper_entities: Sequence[_MapperEntity],
         raiseerr: bool,
     ) -> None:
@@ -1428,7 +1428,7 @@ class _WildcardLoad(_AbstractLoad):
         if attr.endswith(_DEFAULT_TOKEN):
             attr = f"{attr.split(':')[0]}:{_WILDCARD_TOKEN}"
 
-        effective_path = cast(AbstractEntityRegistry, parent.path).token(attr)
+        effective_path = cast(_AbstractEntityRegistry, parent.path).token(attr)
 
         assert effective_path.is_token
 
@@ -2233,7 +2233,7 @@ class _TokenStrategyLoad(_LoadElement):
             ("loader", natural_path)
             for natural_path in (
                 cast(
-                    TokenRegistry, effective_path
+                    _TokenRegistry, effective_path
                 )._generate_natural_for_superclasses()
             )
         ]
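None of the user-facing option constructors change; _TokenRegistry and _AbstractEntityRegistry surface only in internal casts like the one above. A sketch of the unchanged public API, with a minimal parent/child mapping defined inline for illustration:

    from typing import List

    from sqlalchemy import ForeignKey, select
    from sqlalchemy.orm import (
        DeclarativeBase,
        Mapped,
        mapped_column,
        relationship,
        selectinload,
    )


    class Base(DeclarativeBase):
        pass


    class Parent(Base):
        __tablename__ = "parent"
        id: Mapped[int] = mapped_column(primary_key=True)
        children: Mapped[List["Child"]] = relationship()


    class Child(Base):
        __tablename__ = "child"
        id: Mapped[int] = mapped_column(primary_key=True)
        parent_id: Mapped[int] = mapped_column(ForeignKey("parent.id"))


    # option construction and chaining work exactly as before
    stmt = select(Parent).options(selectinload(Parent.children))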
diff --git a/lib/sqlalchemy/orm/sync.py b/lib/sqlalchemy/orm/sync.py
index db09a3e90270bb506a582700377e1c0664b548bb..980c4793b908ba65b73b055e086806f72a0adc98 100644 (file)
@@ -19,7 +19,7 @@ from . import util as orm_util
 from .base import PassiveFlag
 
 
-def populate(
+def _populate(
     source,
     source_mapper,
     dest,
@@ -62,7 +62,7 @@ def populate(
             uowcommit.attributes[("pk_cascaded", dest, r)] = True
 
 
-def bulk_populate_inherit_keys(source_dict, source_mapper, synchronize_pairs):
+def _bulk_populate_inherit_keys(source_dict, source_mapper, synchronize_pairs):
     # a simplified version of populate() used by bulk insert mode
     for l, r in synchronize_pairs:
         try:
@@ -78,7 +78,7 @@ def bulk_populate_inherit_keys(source_dict, source_mapper, synchronize_pairs):
             _raise_col_to_prop(True, source_mapper, l, source_mapper, r, err)
 
 
-def clear(dest, dest_mapper, synchronize_pairs):
+def _clear(dest, dest_mapper, synchronize_pairs):
     for l, r in synchronize_pairs:
         if (
             r.primary_key
@@ -96,7 +96,7 @@ def clear(dest, dest_mapper, synchronize_pairs):
             _raise_col_to_prop(True, None, l, dest_mapper, r, err)
 
 
-def update(source, source_mapper, dest, old_prefix, synchronize_pairs):
+def _update(source, source_mapper, dest, old_prefix, synchronize_pairs):
     for l, r in synchronize_pairs:
         try:
             oldvalue = source_mapper._get_committed_attr_by_column(
@@ -111,7 +111,7 @@ def update(source, source_mapper, dest, old_prefix, synchronize_pairs):
         dest[old_prefix + r.key] = oldvalue
 
 
-def populate_dict(source, source_mapper, dict_, synchronize_pairs):
+def _populate_dict(source, source_mapper, dict_, synchronize_pairs):
     for l, r in synchronize_pairs:
         try:
             value = source_mapper._get_state_attr_by_column(
@@ -123,7 +123,7 @@ def populate_dict(source, source_mapper, dict_, synchronize_pairs):
         dict_[r.key] = value
 
 
-def source_modified(uowcommit, source, source_mapper, synchronize_pairs):
+def _source_modified(uowcommit, source, source_mapper, synchronize_pairs):
     """return true if the source object has changes from an old to a
     new value on the given synchronize pairs
 
diff --git a/lib/sqlalchemy/orm/unitofwork.py b/lib/sqlalchemy/orm/unitofwork.py
index 7e2df2b0effd3847ed17d512bb5cb06f7d27af4b..34d53ccf84b0d15be758b68b896255c8ec2389ad 100644 (file)
@@ -32,7 +32,7 @@ from ..util import topological
 
 
 if TYPE_CHECKING:
-    from .dependency import DependencyProcessor
+    from .dependency import _DependencyProcessor
     from .interfaces import MapperProperty
     from .mapper import Mapper
     from .session import Session
@@ -40,7 +40,7 @@ if TYPE_CHECKING:
     from .state import InstanceState
 
 
-def track_cascade_events(descriptor, prop):
+def _track_cascade_events(descriptor, prop):
     """Establish event listeners on object attributes which handle
     cascade-on-set/append.
 
@@ -155,10 +155,12 @@ def track_cascade_events(descriptor, prop):
 
 
 class UOWTransaction:
+    """Manages the internal state of a unit of work flush operation."""
+
     session: Session
     transaction: SessionTransaction
     attributes: Dict[str, Any]
-    deps: util.defaultdict[Mapper[Any], Set[DependencyProcessor]]
+    deps: util.defaultdict[Mapper[Any], Set[_DependencyProcessor]]
     mappers: util.defaultdict[Mapper[Any], Set[InstanceState[Any]]]
 
     def __init__(self, session: Session):
@@ -301,7 +303,7 @@ class UOWTransaction:
     def register_preprocessor(self, processor, fromparent):
         key = (processor, fromparent)
         if key not in self.presort_actions:
-            self.presort_actions[key] = Preprocess(processor, fromparent)
+            self.presort_actions[key] = _Preprocess(processor, fromparent)
 
     def register_object(
         self,
@@ -344,8 +346,8 @@ class UOWTransaction:
         cols.update(post_update_cols)
 
     def _per_mapper_flush_actions(self, mapper):
-        saves = SaveUpdateAll(self, mapper.base_mapper)
-        deletes = DeleteAll(self, mapper.base_mapper)
+        saves = _SaveUpdateAll(self, mapper.base_mapper)
+        deletes = _DeleteAll(self, mapper.base_mapper)
         self.dependencies.add((saves, deletes))
 
         for dep in mapper._dependency_processors:
@@ -487,7 +489,7 @@ class UOWTransaction:
             self.session._register_persistent(other)
 
 
-class IterateMappersMixin:
+class _IterateMappersMixin:
     __slots__ = ()
 
     def _mappers(self, uow):
@@ -501,7 +503,7 @@ class IterateMappersMixin:
             return self.dependency_processor.mapper.self_and_descendants
 
 
-class Preprocess(IterateMappersMixin):
+class _Preprocess(_IterateMappersMixin):
     __slots__ = (
         "dependency_processor",
         "fromparent",
@@ -551,7 +553,7 @@ class Preprocess(IterateMappersMixin):
             return False
 
 
-class PostSortRec:
+class _PostSortRec:
     __slots__ = ("disabled",)
 
     def __new__(cls, uow, *args):
@@ -567,7 +569,7 @@ class PostSortRec:
         self.execute(uow)
 
 
-class ProcessAll(IterateMappersMixin, PostSortRec):
+class _ProcessAll(_IterateMappersMixin, _PostSortRec):
     __slots__ = "dependency_processor", "isdelete", "fromparent", "sort_key"
 
     def __init__(self, uow, dependency_processor, isdelete, fromparent):
@@ -612,7 +614,7 @@ class ProcessAll(IterateMappersMixin, PostSortRec):
                     yield state
 
 
-class PostUpdateAll(PostSortRec):
+class _PostUpdateAll(_PostSortRec):
     __slots__ = "mapper", "isdelete", "sort_key"
 
     def __init__(self, uow, mapper, isdelete):
@@ -626,10 +628,10 @@ class PostUpdateAll(PostSortRec):
         states, cols = uow.post_update_states[self.mapper]
         states = [s for s in states if uow.states[s][0] == self.isdelete]
 
-        persistence.post_update(self.mapper, states, uow, cols)
+        persistence._post_update(self.mapper, states, uow, cols)
 
 
-class SaveUpdateAll(PostSortRec):
+class _SaveUpdateAll(_PostSortRec):
     __slots__ = ("mapper", "sort_key")
 
     def __init__(self, uow, mapper):
@@ -639,7 +641,7 @@ class SaveUpdateAll(PostSortRec):
 
     @util.preload_module("sqlalchemy.orm.persistence")
     def execute(self, uow):
-        util.preloaded.orm_persistence.save_obj(
+        util.preloaded.orm_persistence._save_obj(
             self.mapper,
             uow.states_for_mapper_hierarchy(self.mapper, False, False),
             uow,
@@ -650,11 +652,11 @@ class SaveUpdateAll(PostSortRec):
             uow.states_for_mapper_hierarchy(self.mapper, False, False)
         )
         base_mapper = self.mapper.base_mapper
-        delete_all = DeleteAll(uow, base_mapper)
+        delete_all = _DeleteAll(uow, base_mapper)
         for state in states:
             # keep saves before deletes -
             # this ensures 'row switch' operations work
-            action = SaveUpdateState(uow, state)
+            action = _SaveUpdateState(uow, state)
             uow.dependencies.add((action, delete_all))
             yield action
 
@@ -666,7 +668,7 @@ class SaveUpdateAll(PostSortRec):
         return "%s(%s)" % (self.__class__.__name__, self.mapper)
 
 
-class DeleteAll(PostSortRec):
+class _DeleteAll(_PostSortRec):
     __slots__ = ("mapper", "sort_key")
 
     def __init__(self, uow, mapper):
@@ -676,7 +678,7 @@ class DeleteAll(PostSortRec):
 
     @util.preload_module("sqlalchemy.orm.persistence")
     def execute(self, uow):
-        util.preloaded.orm_persistence.delete_obj(
+        util.preloaded.orm_persistence._delete_obj(
             self.mapper,
             uow.states_for_mapper_hierarchy(self.mapper, True, False),
             uow,
@@ -687,11 +689,11 @@ class DeleteAll(PostSortRec):
             uow.states_for_mapper_hierarchy(self.mapper, True, False)
         )
         base_mapper = self.mapper.base_mapper
-        save_all = SaveUpdateAll(uow, base_mapper)
+        save_all = _SaveUpdateAll(uow, base_mapper)
         for state in states:
             # keep saves before deletes -
             # this ensures 'row switch' operations work
-            action = DeleteState(uow, state)
+            action = _DeleteState(uow, state)
             uow.dependencies.add((save_all, action))
             yield action
 
@@ -703,7 +705,7 @@ class DeleteAll(PostSortRec):
         return "%s(%s)" % (self.__class__.__name__, self.mapper)
 
 
-class ProcessState(PostSortRec):
+class _ProcessState(_PostSortRec):
     __slots__ = "dependency_processor", "isdelete", "state", "sort_key"
 
     def __init__(self, uow, dependency_processor, isdelete, state):
@@ -739,7 +741,7 @@ class ProcessState(PostSortRec):
         )
 
 
-class SaveUpdateState(PostSortRec):
+class _SaveUpdateState(_PostSortRec):
     __slots__ = "state", "mapper", "sort_key"
 
     def __init__(self, uow, state):
@@ -756,7 +758,7 @@ class SaveUpdateState(PostSortRec):
             r for r in recs if r.__class__ is cls_ and r.mapper is mapper
         ]
         recs.difference_update(our_recs)
-        persistence.save_obj(
+        persistence._save_obj(
             mapper, [self.state] + [r.state for r in our_recs], uow
         )
 
@@ -767,7 +769,7 @@ class SaveUpdateState(PostSortRec):
         )
 
 
-class DeleteState(PostSortRec):
+class _DeleteState(_PostSortRec):
     __slots__ = "state", "mapper", "sort_key"
 
     def __init__(self, uow, state):
@@ -785,7 +787,7 @@ class DeleteState(PostSortRec):
         ]
         recs.difference_update(our_recs)
         states = [self.state] + [r.state for r in our_recs]
-        persistence.delete_obj(
+        persistence._delete_obj(
             mapper, [s for s in states if uow.states[s][0]], uow
         )
 
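The `uow.dependencies.add((saves, deletes))` and `(save_all, action)` tuples above are how the unit of work encodes its "keep saves before deletes" rule: each tuple is an ordering edge that is later sorted topologically (note the `from ..util import topological` context line). A toy sketch of the same idea, assuming nothing about SQLAlchemy's own `topological` helper:

    # Illustrative only: ordering flush actions from (before, after)
    # edges via the standard library, not SQLAlchemy's util.topological.
    from graphlib import TopologicalSorter

    ts = TopologicalSorter()
    ts.add("deletes", "saves")   # edge (saves, deletes): saves run first
    ts.add("saves", "presort")   # presort actions precede saves
    print(list(ts.static_order()))  # ['presort', 'saves', 'deletes']
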
diff --git a/lib/sqlalchemy/orm/util.py b/lib/sqlalchemy/orm/util.py
index eb74514d47f9706265a8ca96ed2d5dc6afeaa19b..670f99f73d377495e58d58f2db077e6db628abcb 100644 (file)
--- a/lib/sqlalchemy/orm/util.py
+++ b/lib/sqlalchemy/orm/util.py
@@ -104,9 +104,9 @@ if typing.TYPE_CHECKING:
     from ._typing import _InternalEntityType
     from ._typing import _ORMCOLEXPR
     from .context import _MapperEntity
-    from .context import ORMCompileState
+    from .context import _ORMCompileState
     from .mapper import Mapper
-    from .path_registry import AbstractEntityRegistry
+    from .path_registry import _AbstractEntityRegistry
     from .query import Query
     from .relationships import RelationshipProperty
     from ..engine import Row
@@ -1137,7 +1137,7 @@ class AliasedInsp(
         return self.mapper.class_
 
     @property
-    def _path_registry(self) -> AbstractEntityRegistry:
+    def _path_registry(self) -> _AbstractEntityRegistry:
         if self._use_mapper_path:
             return self.mapper._path_registry
         else:
@@ -1467,7 +1467,7 @@ class LoaderCriteriaOption(CriteriaOption):
                 else:
                     stack.extend(subclass.__subclasses__())
 
-    def _should_include(self, compile_state: ORMCompileState) -> bool:
+    def _should_include(self, compile_state: _ORMCompileState) -> bool:
         if (
             compile_state.select_statement._annotations.get(
                 "for_loader_criteria", None
@@ -1497,12 +1497,12 @@ class LoaderCriteriaOption(CriteriaOption):
 
     def process_compile_state_replaced_entities(
         self,
-        compile_state: ORMCompileState,
+        compile_state: _ORMCompileState,
         mapper_entities: Iterable[_MapperEntity],
     ) -> None:
         self.process_compile_state(compile_state)
 
-    def process_compile_state(self, compile_state: ORMCompileState) -> None:
+    def process_compile_state(self, compile_state: _ORMCompileState) -> None:
         """Apply a modification to a given :class:`.CompileState`."""
 
         # if options to limit the criteria to immediate query only,
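`LoaderCriteriaOption` itself keeps its public name in this sweep; only the `_ORMCompileState` type hints around it change. It is the object produced by the documented `with_loader_criteria()` option, roughly like so (a standard usage sketch, not code from this commit; `User` and its `deleted` column are assumed mapped elsewhere):

    from sqlalchemy import select
    from sqlalchemy.orm import with_loader_criteria

    # Attaches a LoaderCriteriaOption that adds WHERE criteria for User
    # wherever that entity appears in the statement.
    stmt = select(User).options(
        with_loader_criteria(User, User.deleted == False)  # noqa: E712
    )
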
diff --git a/lib/sqlalchemy/orm/writeonly.py b/lib/sqlalchemy/orm/writeonly.py
index 6e5756d42da1ade0cfa36f00acd0cbcc0303e85c..7552dec332ed4de3f9ef5eea7227b5c2edcabf54 100644 (file)
--- a/lib/sqlalchemy/orm/writeonly.py
+++ b/lib/sqlalchemy/orm/writeonly.py
@@ -84,7 +84,7 @@ class WriteOnlyHistory(Generic[_T]):
 
     def __init__(
         self,
-        attr: WriteOnlyAttributeImpl,
+        attr: _WriteOnlyAttributeImpl,
         state: InstanceState[_T],
         passive: PassiveFlag,
         apply_to: Optional[WriteOnlyHistory[_T]] = None,
@@ -147,8 +147,8 @@ class WriteOnlyHistory(Generic[_T]):
             self.deleted_items.add(value)
 
 
-class WriteOnlyAttributeImpl(
-    attributes.HasCollectionAdapter, attributes.AttributeImpl
+class _WriteOnlyAttributeImpl(
+    attributes._HasCollectionAdapter, attributes._AttributeImpl
 ):
     uses_objects: bool = True
     default_accepts_scalar_loader: bool = False
@@ -233,7 +233,7 @@ class WriteOnlyAttributeImpl(
         else:
             history = self._get_collection_history(state, passive)
             data = history.added_plus_unchanged
-        return DynamicCollectionAdapter(data)  # type: ignore[return-value]
+        return _DynamicCollectionAdapter(data)  # type: ignore[return-value]
 
     @util.memoized_property
     def _append_token(  # type:ignore[override]
@@ -442,8 +442,8 @@ class WriteOnlyAttributeImpl(
 
 @log.class_logger
 @relationships.RelationshipProperty.strategy_for(lazy="write_only")
-class WriteOnlyLoader(strategies.AbstractRelationshipLoader, log.Identified):
-    impl_class = WriteOnlyAttributeImpl
+class _WriteOnlyLoader(strategies._AbstractRelationshipLoader, log.Identified):
+    impl_class = _WriteOnlyAttributeImpl
 
     def init_class_attribute(self, mapper: Mapper[Any]) -> None:
         self.is_class_level = True
@@ -468,7 +468,7 @@ class WriteOnlyLoader(strategies.AbstractRelationshipLoader, log.Identified):
         )
 
 
-class DynamicCollectionAdapter:
+class _DynamicCollectionAdapter:
     """simplified CollectionAdapter for internal API consistency"""
 
     data: Collection[Any]
@@ -489,7 +489,7 @@ class DynamicCollectionAdapter:
         return True
 
 
-class AbstractCollectionWriter(Generic[_T]):
+class _AbstractCollectionWriter(Generic[_T]):
     """Virtual collection which includes append/remove methods that synchronize
     into the attribute event system.
 
@@ -501,7 +501,9 @@ class AbstractCollectionWriter(Generic[_T]):
     instance: _T
     _from_obj: Tuple[FromClause, ...]
 
-    def __init__(self, attr: WriteOnlyAttributeImpl, state: InstanceState[_T]):
+    def __init__(
+        self, attr: _WriteOnlyAttributeImpl, state: InstanceState[_T]
+    ):
         instance = state.obj()
         if TYPE_CHECKING:
             assert instance
@@ -552,7 +554,7 @@ class AbstractCollectionWriter(Generic[_T]):
         )
 
 
-class WriteOnlyCollection(AbstractCollectionWriter[_T]):
+class WriteOnlyCollection(_AbstractCollectionWriter[_T]):
     """Write-only collection which can synchronize changes into the
     attribute event system.
 
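The split in writeonly.py is worth noting: `WriteOnlyCollection` stays public, since it is what users touch via `WriteOnlyMapped`, while the plumbing behind it (`_WriteOnlyAttributeImpl`, `_WriteOnlyLoader`, `_DynamicCollectionAdapter`) goes private. Day-to-day use is unaffected; a minimal sketch with made-up models:

    from sqlalchemy import ForeignKey
    from sqlalchemy.orm import (
        DeclarativeBase,
        Mapped,
        WriteOnlyMapped,
        mapped_column,
        relationship,
    )

    class Base(DeclarativeBase):
        pass

    class User(Base):
        __tablename__ = "user_account"
        id: Mapped[int] = mapped_column(primary_key=True)
        # user.addresses is a WriteOnlyCollection: never loaded
        # implicitly, mutated via add()/remove(), queried via select()
        addresses: WriteOnlyMapped["Address"] = relationship()

    class Address(Base):
        __tablename__ = "address"
        id: Mapped[int] = mapped_column(primary_key=True)
        user_id: Mapped[int] = mapped_column(ForeignKey("user_account.id"))
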
diff --git a/test/ext/test_extendedattr.py b/test/ext/test_extendedattr.py
index 47756c94958e5edcd77c5419b71b4cd3bcd822b3..6452c7e344929845bd11ec5833443349d271a7f4 100644 (file)
--- a/test/ext/test_extendedattr.py
+++ b/test/ext/test_extendedattr.py
@@ -30,7 +30,7 @@ def _register_attribute(class_, key, **kw):
     kw.setdefault("comparator", object())
     kw.setdefault("parententity", object())
 
-    attributes.register_attribute(class_, key, **kw)
+    attributes._register_attribute(class_, key, **kw)
 
 
 @decorator
diff --git a/test/orm/declarative/test_clsregistry.py b/test/orm/declarative/test_clsregistry.py
index 0cf775e4d27aa9c00220aeb89dfc13e8b129a734..705bae88e51d2a981b18db50a824569a34be2a9d 100644 (file)
--- a/test/orm/declarative/test_clsregistry.py
+++ b/test/orm/declarative/test_clsregistry.py
@@ -36,7 +36,7 @@ class ClsRegistryTest(fixtures.TestBase):
         base = registry()
         f1 = MockClass(base, "foo.bar.Foo")
         f2 = MockClass(base, "foo.bar.Foo")
-        clsregistry.add_class("Foo", f1, base._class_registry)
+        clsregistry._add_class("Foo", f1, base._class_registry)
         gc_collect()
 
         with expect_warnings(
@@ -44,7 +44,7 @@ class ClsRegistryTest(fixtures.TestBase):
             "same class name and module name as foo.bar.Foo, and "
             "will be replaced in the string-lookup table."
         ):
-            clsregistry.add_class(
+            clsregistry._add_class(
                 "Foo",
                 f2,
                 base._class_registry,
@@ -54,8 +54,8 @@ class ClsRegistryTest(fixtures.TestBase):
         base = registry()
         f1 = MockClass(base, "foo.bar.Foo")
         f2 = MockClass(base, "foo.alt.Foo")
-        clsregistry.add_class("Foo", f1, base._class_registry)
-        clsregistry.add_class("Foo", f2, base._class_registry)
+        clsregistry._add_class("Foo", f1, base._class_registry)
+        clsregistry._add_class("Foo", f2, base._class_registry)
         name_resolver, resolver = clsregistry._resolver(f1, MockProp())
 
         gc_collect()
@@ -71,9 +71,9 @@ class ClsRegistryTest(fixtures.TestBase):
         f1 = MockClass(base, "foo.bar.Foo")
         f2 = MockClass(base, "foo.alt.Foo")
         f3 = MockClass(base, "bat.alt.Hoho")
-        clsregistry.add_class("Foo", f1, base._class_registry)
-        clsregistry.add_class("Foo", f2, base._class_registry)
-        clsregistry.add_class("HoHo", f3, base._class_registry)
+        clsregistry._add_class("Foo", f1, base._class_registry)
+        clsregistry._add_class("Foo", f2, base._class_registry)
+        clsregistry._add_class("HoHo", f3, base._class_registry)
         name_resolver, resolver = clsregistry._resolver(f1, MockProp())
 
         gc_collect()
@@ -89,9 +89,9 @@ class ClsRegistryTest(fixtures.TestBase):
         f1 = MockClass(base, "foo.bar.Foo")
         f2 = MockClass(base, "foo.alt.Foo")
         f3 = MockClass(base, "bat.alt.Foo")
-        clsregistry.add_class("Foo", f1, base._class_registry)
-        clsregistry.add_class("Foo", f2, base._class_registry)
-        clsregistry.add_class("Foo", f3, base._class_registry)
+        clsregistry._add_class("Foo", f1, base._class_registry)
+        clsregistry._add_class("Foo", f2, base._class_registry)
+        clsregistry._add_class("Foo", f3, base._class_registry)
         name_resolver, resolver = clsregistry._resolver(f1, MockProp())
 
         gc_collect()
@@ -126,8 +126,8 @@ class ClsRegistryTest(fixtures.TestBase):
 
         f1 = MockClass(registry, "existent.Foo")
         f2 = MockClass(registry, "existent.existent.Foo")
-        clsregistry.add_class("Foo", f1, registry._class_registry)
-        clsregistry.add_class("Foo", f2, registry._class_registry)
+        clsregistry._add_class("Foo", f1, registry._class_registry)
+        clsregistry._add_class("Foo", f2, registry._class_registry)
 
         class MyClass(Base):
             __tablename__ = "my_table"
@@ -145,8 +145,8 @@ class ClsRegistryTest(fixtures.TestBase):
         base = registry()
         f1 = MockClass(base, "foo.bar.Foo")
         f2 = MockClass(base, "foo.alt.Foo")
-        clsregistry.add_class("Foo", f1, base._class_registry)
-        clsregistry.add_class("Foo", f2, base._class_registry)
+        clsregistry._add_class("Foo", f1, base._class_registry)
+        clsregistry._add_class("Foo", f2, base._class_registry)
         name_resolver, resolver = clsregistry._resolver(f1, MockProp())
 
         gc_collect()
@@ -170,8 +170,8 @@ class ClsRegistryTest(fixtures.TestBase):
         base = registry()
         f1 = MockClass(base, "foo.bar.Foo")
         f2 = MockClass(base, "foo.alt.Foo")
-        clsregistry.add_class("Foo", f1, base._class_registry)
-        clsregistry.add_class("Foo", f2, base._class_registry)
+        clsregistry._add_class("Foo", f1, base._class_registry)
+        clsregistry._add_class("Foo", f2, base._class_registry)
 
         gc_collect()
 
@@ -198,8 +198,8 @@ class ClsRegistryTest(fixtures.TestBase):
         base = registry()
         f1 = MockClass(base, "foo.bar.Foo")
         f2 = MockClass(base, "foo.alt.Foo")
-        clsregistry.add_class("Foo", f1, base._class_registry)
-        clsregistry.add_class("Foo", f2, base._class_registry)
+        clsregistry._add_class("Foo", f1, base._class_registry)
+        clsregistry._add_class("Foo", f2, base._class_registry)
 
         del f2
         gc_collect()
@@ -221,8 +221,8 @@ class ClsRegistryTest(fixtures.TestBase):
         for i in range(3):
             f1 = MockClass(base, "foo.bar.Foo")
             f2 = MockClass(base, "foo.alt.Foo")
-            clsregistry.add_class("Foo", f1, base._class_registry)
-            clsregistry.add_class("Foo", f2, base._class_registry)
+            clsregistry._add_class("Foo", f1, base._class_registry)
+            clsregistry._add_class("Foo", f2, base._class_registry)
 
             eq_(len(clsregistry._registries), 11)
 
@@ -238,8 +238,8 @@ class ClsRegistryTest(fixtures.TestBase):
         base = registry()
         f1 = MockClass(base, "foo.bar.Foo")
         f2 = MockClass(base, "foo.alt.Foo")
-        clsregistry.add_class("Foo", f1, base._class_registry)
-        clsregistry.add_class("Foo", f2, base._class_registry)
+        clsregistry._add_class("Foo", f1, base._class_registry)
+        clsregistry._add_class("Foo", f2, base._class_registry)
 
         dupe_reg = base._class_registry["Foo"]
         dupe_reg.contents = [lambda: None]
@@ -266,7 +266,7 @@ class ClsRegistryTest(fixtures.TestBase):
 
         base = registry()
         f1 = MockClass(base, "foo.bar.Foo")
-        clsregistry.add_class("Foo", f1, base._class_registry)
+        clsregistry._add_class("Foo", f1, base._class_registry)
         reg = base._class_registry["_sa_module_registry"]
 
         mod_entry = reg["foo"]["bar"]
@@ -291,7 +291,7 @@ class ClsRegistryTest(fixtures.TestBase):
     def test_module_reg_no_class(self):
         base = registry()
         f1 = MockClass(base, "foo.bar.Foo")
-        clsregistry.add_class("Foo", f1, base._class_registry)
+        clsregistry._add_class("Foo", f1, base._class_registry)
         reg = base._class_registry["_sa_module_registry"]
         mod_entry = reg["foo"]["bar"]  # noqa
         name_resolver, resolver = clsregistry._resolver(f1, MockProp())
@@ -314,11 +314,11 @@ class ClsRegistryTest(fixtures.TestBase):
     def test_module_reg_cleanout_two_sub(self):
         base = registry()
         f1 = MockClass(base, "foo.bar.Foo")
-        clsregistry.add_class("Foo", f1, base._class_registry)
+        clsregistry._add_class("Foo", f1, base._class_registry)
         reg = base._class_registry["_sa_module_registry"]
 
         f2 = MockClass(base, "foo.alt.Bar")
-        clsregistry.add_class("Bar", f2, base._class_registry)
+        clsregistry._add_class("Bar", f2, base._class_registry)
         assert reg["foo"]["bar"]
         del f1
         gc_collect()
@@ -332,7 +332,7 @@ class ClsRegistryTest(fixtures.TestBase):
     def test_module_reg_cleanout_sub_to_base(self):
         base = registry()
         f3 = MockClass(base, "bat.bar.Hoho")
-        clsregistry.add_class("Hoho", f3, base._class_registry)
+        clsregistry._add_class("Hoho", f3, base._class_registry)
         reg = base._class_registry["_sa_module_registry"]
 
         assert reg["bat"]["bar"]
@@ -343,7 +343,7 @@ class ClsRegistryTest(fixtures.TestBase):
     def test_module_reg_cleanout_cls_to_base(self):
         base = registry()
         f4 = MockClass(base, "single.Blat")
-        clsregistry.add_class("Blat", f4, base._class_registry)
+        clsregistry._add_class("Blat", f4, base._class_registry)
         reg = base._class_registry["_sa_module_registry"]
         assert reg["single"]
         del f4
diff --git a/test/orm/declarative/test_tm_future_annotations_sync.py b/test/orm/declarative/test_tm_future_annotations_sync.py
index 579cd7a57a9f95b1827fe235adb372d29fbccb02..ca2e01242f63a8cf4eff8c356c988ea540926bcb 100644 (file)
--- a/test/orm/declarative/test_tm_future_annotations_sync.py
+++ b/test/orm/declarative/test_tm_future_annotations_sync.py
@@ -75,12 +75,12 @@ from sqlalchemy.orm import remote
 from sqlalchemy.orm import Session
 from sqlalchemy.orm import undefer
 from sqlalchemy.orm import WriteOnlyMapped
-from sqlalchemy.orm.attributes import CollectionAttributeImpl
+from sqlalchemy.orm.attributes import _CollectionAttributeImpl
 from sqlalchemy.orm.collections import attribute_keyed_dict
 from sqlalchemy.orm.collections import KeyFuncDict
-from sqlalchemy.orm.dynamic import DynamicAttributeImpl
+from sqlalchemy.orm.dynamic import _DynamicAttributeImpl
 from sqlalchemy.orm.properties import MappedColumn
-from sqlalchemy.orm.writeonly import WriteOnlyAttributeImpl
+from sqlalchemy.orm.writeonly import _WriteOnlyAttributeImpl
 from sqlalchemy.schema import CreateTable
 from sqlalchemy.sql.base import _NoArg
 from sqlalchemy.sql.sqltypes import Enum
@@ -2595,10 +2595,10 @@ class RelationshipLHSTest(fixtures.TestBase, testing.AssertsCompiledSQL):
         Base.registry.dispose()
 
     @testing.combinations(
-        (Relationship, CollectionAttributeImpl),
-        (Mapped, CollectionAttributeImpl),
-        (WriteOnlyMapped, WriteOnlyAttributeImpl),
-        (DynamicMapped, DynamicAttributeImpl),
+        (Relationship, _CollectionAttributeImpl),
+        (Mapped, _CollectionAttributeImpl),
+        (WriteOnlyMapped, _WriteOnlyAttributeImpl),
+        (DynamicMapped, _DynamicAttributeImpl),
         argnames="mapped_cls,implcls",
     )
     def test_use_relationship(self, decl_base, mapped_cls, implcls):
diff --git a/test/orm/declarative/test_typed_mapping.py b/test/orm/declarative/test_typed_mapping.py
index ba0c8c916036e68be14283015262c9e252d99063..6d487692644f338d5cd697344f653bbdf55b8137 100644 (file)
--- a/test/orm/declarative/test_typed_mapping.py
+++ b/test/orm/declarative/test_typed_mapping.py
@@ -66,12 +66,12 @@ from sqlalchemy.orm import remote
 from sqlalchemy.orm import Session
 from sqlalchemy.orm import undefer
 from sqlalchemy.orm import WriteOnlyMapped
-from sqlalchemy.orm.attributes import CollectionAttributeImpl
+from sqlalchemy.orm.attributes import _CollectionAttributeImpl
 from sqlalchemy.orm.collections import attribute_keyed_dict
 from sqlalchemy.orm.collections import KeyFuncDict
-from sqlalchemy.orm.dynamic import DynamicAttributeImpl
+from sqlalchemy.orm.dynamic import _DynamicAttributeImpl
 from sqlalchemy.orm.properties import MappedColumn
-from sqlalchemy.orm.writeonly import WriteOnlyAttributeImpl
+from sqlalchemy.orm.writeonly import _WriteOnlyAttributeImpl
 from sqlalchemy.schema import CreateTable
 from sqlalchemy.sql.base import _NoArg
 from sqlalchemy.sql.sqltypes import Enum
@@ -2586,10 +2586,10 @@ class RelationshipLHSTest(fixtures.TestBase, testing.AssertsCompiledSQL):
         Base.registry.dispose()
 
     @testing.combinations(
-        (Relationship, CollectionAttributeImpl),
-        (Mapped, CollectionAttributeImpl),
-        (WriteOnlyMapped, WriteOnlyAttributeImpl),
-        (DynamicMapped, DynamicAttributeImpl),
+        (Relationship, _CollectionAttributeImpl),
+        (Mapped, _CollectionAttributeImpl),
+        (WriteOnlyMapped, _WriteOnlyAttributeImpl),
+        (DynamicMapped, _DynamicAttributeImpl),
         argnames="mapped_cls,implcls",
     )
     def test_use_relationship(self, decl_base, mapped_cls, implcls):
diff --git a/test/orm/test_attributes.py b/test/orm/test_attributes.py
index 4b9d3b2e0255087f747c47dee7591e05660eaa5e..c70df6a583262c265a63d3fb7475937f9f54c1e1 100644 (file)
--- a/test/orm/test_attributes.py
+++ b/test/orm/test_attributes.py
@@ -42,7 +42,7 @@ def _register_attribute(class_, key, **kw):
     kw.setdefault("comparator", object())
     kw.setdefault("parententity", object())
 
-    attributes.register_attribute(class_, key, **kw)
+    attributes._register_attribute(class_, key, **kw)
 
 
 class AttributeImplAPITest(fixtures.MappedTest):
@@ -866,7 +866,7 @@ class AttributesTest(fixtures.ORMTest):
         )
         assert attributes.manager_of_class(Foo).is_instrumented("collection")
         assert isinstance(Foo().collection, set)
-        attributes.unregister_attribute(Foo, "collection")
+        attributes._unregister_attribute(Foo, "collection")
         assert not attributes.manager_of_class(Foo).is_instrumented(
             "collection"
         )
@@ -902,7 +902,7 @@ class AttributesTest(fixtures.ORMTest):
             useobject=True,
         )
         assert isinstance(Foo().collection, MyDict)
-        attributes.unregister_attribute(Foo, "collection")
+        attributes._unregister_attribute(Foo, "collection")
 
         class MyColl:
             pass
diff --git a/test/orm/test_bind.py b/test/orm/test_bind.py
index 317ebdc468d4225d7f29ecd6d1bffe1cdb11d78c..56e9422e430a627515cf07a43a92d4044937a84d 100644 (file)
--- a/test/orm/test_bind.py
+++ b/test/orm/test_bind.py
@@ -465,19 +465,19 @@ class BindIntegrationTest(_fixtures.FixtureTest):
 
         with (
             mock.patch(
-                "sqlalchemy.orm.context.ORMCompileState."
+                "sqlalchemy.orm.context._ORMCompileState."
                 "orm_setup_cursor_result"
             ),
             mock.patch(
-                "sqlalchemy.orm.context.ORMCompileState.orm_execute_statement"
+                "sqlalchemy.orm.context._ORMCompileState.orm_execute_statement"
             ),
             mock.patch(
                 "sqlalchemy.orm.bulk_persistence."
-                "BulkORMInsert.orm_execute_statement"
+                "_BulkORMInsert.orm_execute_statement"
             ),
             mock.patch(
                 "sqlalchemy.orm.bulk_persistence."
-                "BulkUDCompileState.orm_setup_cursor_result"
+                "_BulkUDCompileState.orm_setup_cursor_result"
             ),
         ):
             sess.execute(statement)
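Renames like these are why the test suite churns here: `mock.patch` targets are plain strings, resolved only when the patch starts, so `ORMCompileState` becoming `_ORMCompileState` breaks them without any import error. Using `mock.patch.object` makes the dependency explicit and fail at the lookup site instead (a sketch equivalent to the first patch above):

    from unittest import mock
    from sqlalchemy.orm import context

    # Resolving the class eagerly means a stale name raises
    # AttributeError here rather than deep inside patch start-up.
    patcher = mock.patch.object(
        context._ORMCompileState, "orm_setup_cursor_result"
    )
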
diff --git a/test/orm/test_collection.py b/test/orm/test_collection.py
index d07dadb239b03870c26b81e22478f61dd18d0c2d..90c12fc7727cb7a75bda220877e0f3008691dfd9 100644 (file)
--- a/test/orm/test_collection.py
+++ b/test/orm/test_collection.py
@@ -44,7 +44,7 @@ def _register_attribute(class_, key, **kw):
     kw.setdefault("comparator", object())
     kw.setdefault("parententity", object())
 
-    return attributes.register_attribute(class_, key, **kw)
+    return attributes._register_attribute(class_, key, **kw)
 
 
 class Canary:
diff --git a/test/orm/test_deprecations.py b/test/orm/test_deprecations.py
index 81d0d926f5c5466234126bff157e18e18a522544..b99bc643a1861d7a001f59570c22652c746036c8 100644 (file)
--- a/test/orm/test_deprecations.py
+++ b/test/orm/test_deprecations.py
@@ -940,7 +940,7 @@ class InstrumentationTest(fixtures.ORMTest):
             pass
 
         instrumentation.register_class(Foo)
-        attributes.register_attribute(
+        attributes._register_attribute(
             Foo,
             "attr",
             parententity=object(),
diff --git a/test/orm/test_expire.py b/test/orm/test_expire.py
index 22f61563318cfe3e865d95dd47b1cf0b62d4ecde..2b15c2443c222b337b5f770d943d06b0aef19e0c 100644 (file)
--- a/test/orm/test_expire.py
+++ b/test/orm/test_expire.py
@@ -1585,7 +1585,7 @@ class ExpireTest(_fixtures.FixtureTest):
         u1 = sess.query(User).options(defer(User.name)).first()
         assert isinstance(
             attributes.instance_state(u1).callables["name"],
-            strategies.LoadDeferredColumns,
+            strategies._LoadDeferredColumns,
         )
 
         # expire the attr, it gets the InstanceState callable
@@ -1688,7 +1688,7 @@ class ExpireTest(_fixtures.FixtureTest):
         u1 = sess.query(User).options(lazyload(User.addresses)).first()
         assert isinstance(
             attributes.instance_state(u1).callables["addresses"],
-            strategies.LoadLazyAttribute,
+            strategies._LoadLazyAttribute,
         )
         # expire, it goes away from callables as of 1.4 and is considered
         # to be expired
@@ -1712,7 +1712,7 @@ class ExpireTest(_fixtures.FixtureTest):
         )
         assert isinstance(
             attributes.instance_state(u1).callables["addresses"],
-            strategies.LoadLazyAttribute,
+            strategies._LoadLazyAttribute,
         )
 
         # load the attr, goes away
diff --git a/test/orm/test_froms.py b/test/orm/test_froms.py
index 51c86a5f1dad90dbc35e5603b5d147e46d46ffa0..9a1ff1ee442014303810bb37ba5a77dee0c2d4d1 100644 (file)
--- a/test/orm/test_froms.py
+++ b/test/orm/test_froms.py
@@ -30,7 +30,7 @@ from sqlalchemy.orm import Mapped
 from sqlalchemy.orm import mapped_column
 from sqlalchemy.orm import relationship
 from sqlalchemy.orm import Session
-from sqlalchemy.orm.context import ORMSelectCompileState
+from sqlalchemy.orm.context import _ORMSelectCompileState
 from sqlalchemy.sql import column
 from sqlalchemy.sql import table
 from sqlalchemy.sql.selectable import LABEL_STYLE_TABLENAME_PLUS_COL
@@ -1893,7 +1893,7 @@ class MixedEntitiesTest(QueryTest, AssertsCompiledSQL):
                 .order_by(User.id)
             )
 
-        compile_state = ORMSelectCompileState.create_for_statement(stmt, None)
+        compile_state = _ORMSelectCompileState.create_for_statement(stmt, None)
         is_(compile_state._primary_entity, None)
 
     def test_column_queries_one(self):
diff --git a/test/orm/test_instrumentation.py b/test/orm/test_instrumentation.py
index b4ce5b1f2e1f7372be863ef6eb7acfe684b44964..a6dc5428b2a3ae96235011401b3aa77fe922c7b6 100644 (file)
--- a/test/orm/test_instrumentation.py
+++ b/test/orm/test_instrumentation.py
@@ -736,7 +736,7 @@ class MiscTest(fixtures.MappedTest):
             pass
 
         manager = instrumentation.register_class(A)
-        attributes.register_attribute(
+        attributes._register_attribute(
             A,
             "x",
             comparator=object(),
diff --git a/test/orm/test_mapper.py b/test/orm/test_mapper.py
index 4b3bb99c5b1f1e27f9f14be6aabbd7484060be33..8bb8bb32c2ae02f96b22846444fed2a9b9d0d303 100644 (file)
--- a/test/orm/test_mapper.py
+++ b/test/orm/test_mapper.py
@@ -654,11 +654,11 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
             pass
 
         from sqlalchemy.testing import mock
-        from sqlalchemy.orm.attributes import register_attribute_impl
+        from sqlalchemy.orm.attributes import _register_attribute_impl
 
         with mock.patch(
-            "sqlalchemy.orm.attributes.register_attribute_impl",
-            side_effect=register_attribute_impl,
+            "sqlalchemy.orm.attributes._register_attribute_impl",
+            side_effect=_register_attribute_impl,
         ) as some_mock:
             self.mapper(A, users, properties={"bs": relationship(B)})
             self.mapper(B, addresses)
diff --git a/test/orm/test_rel_fn.py b/test/orm/test_rel_fn.py
index 83ffff3c91bdbec43660a1203b062614a7f3f72a..d25c3671d95c5f6259d15c718c6dc75b0025f687 100644 (file)
--- a/test/orm/test_rel_fn.py
+++ b/test/orm/test_rel_fn.py
@@ -214,7 +214,7 @@ class _JoinFixtures:
             else:
                 return True
 
-        return relationships.JoinCondition(
+        return relationships._JoinCondition(
             self.three_tab_a,
             self.three_tab_b,
             self.three_tab_a,
@@ -230,7 +230,7 @@ class _JoinFixtures:
         )
 
     def _join_fixture_m2m(self, **kw):
-        return relationships.JoinCondition(
+        return relationships._JoinCondition(
             self.m2mleft,
             self.m2mright,
             self.m2mleft,
@@ -248,7 +248,7 @@ class _JoinFixtures:
         j1 = self._join_fixture_m2m()
         return (
             j1,
-            relationships.JoinCondition(
+            relationships._JoinCondition(
                 self.m2mright,
                 self.m2mleft,
                 self.m2mright,
@@ -261,7 +261,7 @@ class _JoinFixtures:
         )
 
     def _join_fixture_o2m(self, **kw):
-        return relationships.JoinCondition(
+        return relationships._JoinCondition(
             self.left,
             self.right,
             self.left,
@@ -271,7 +271,7 @@ class _JoinFixtures:
         )
 
     def _join_fixture_m2o(self, **kw):
-        return relationships.JoinCondition(
+        return relationships._JoinCondition(
             self.right,
             self.left,
             self.right,
@@ -281,7 +281,7 @@ class _JoinFixtures:
         )
 
     def _join_fixture_o2m_selfref(self, **kw):
-        return relationships.JoinCondition(
+        return relationships._JoinCondition(
             self.selfref,
             self.selfref,
             self.selfref,
@@ -291,7 +291,7 @@ class _JoinFixtures:
         )
 
     def _join_fixture_m2o_selfref(self, **kw):
-        return relationships.JoinCondition(
+        return relationships._JoinCondition(
             self.selfref,
             self.selfref,
             self.selfref,
@@ -302,7 +302,7 @@ class _JoinFixtures:
         )
 
     def _join_fixture_o2m_composite_selfref(self, **kw):
-        return relationships.JoinCondition(
+        return relationships._JoinCondition(
             self.composite_selfref,
             self.composite_selfref,
             self.composite_selfref,
@@ -312,7 +312,7 @@ class _JoinFixtures:
         )
 
     def _join_fixture_m2o_composite_selfref(self, **kw):
-        return relationships.JoinCondition(
+        return relationships._JoinCondition(
             self.composite_selfref,
             self.composite_selfref,
             self.composite_selfref,
@@ -326,7 +326,7 @@ class _JoinFixtures:
         )
 
     def _join_fixture_o2m_composite_selfref_func(self, **kw):
-        return relationships.JoinCondition(
+        return relationships._JoinCondition(
             self.composite_selfref,
             self.composite_selfref,
             self.composite_selfref,
@@ -342,7 +342,7 @@ class _JoinFixtures:
         )
 
     def _join_fixture_o2m_composite_selfref_func_remote_side(self, **kw):
-        return relationships.JoinCondition(
+        return relationships._JoinCondition(
             self.composite_selfref,
             self.composite_selfref,
             self.composite_selfref,
@@ -359,7 +359,7 @@ class _JoinFixtures:
         )
 
     def _join_fixture_o2m_composite_selfref_func_annotated(self, **kw):
-        return relationships.JoinCondition(
+        return relationships._JoinCondition(
             self.composite_selfref,
             self.composite_selfref,
             self.composite_selfref,
@@ -375,7 +375,7 @@ class _JoinFixtures:
         )
 
     def _join_fixture_compound_expression_1(self, **kw):
-        return relationships.JoinCondition(
+        return relationships._JoinCondition(
             self.left,
             self.right,
             self.left,
@@ -389,7 +389,7 @@ class _JoinFixtures:
         )
 
     def _join_fixture_compound_expression_2(self, **kw):
-        return relationships.JoinCondition(
+        return relationships._JoinCondition(
             self.left,
             self.right,
             self.left,
@@ -401,7 +401,7 @@ class _JoinFixtures:
         )
 
     def _join_fixture_compound_expression_1_non_annotated(self, **kw):
-        return relationships.JoinCondition(
+        return relationships._JoinCondition(
             self.left,
             self.right,
             self.left,
@@ -418,7 +418,7 @@ class _JoinFixtures:
         right = self.base_w_sub_rel.join(
             self.rel_sub, self.base_w_sub_rel.c.id == self.rel_sub.c.id
         )
-        return relationships.JoinCondition(
+        return relationships._JoinCondition(
             self.base_w_sub_rel,
             right,
             self.base_w_sub_rel,
@@ -432,7 +432,7 @@ class _JoinFixtures:
         left = self.base.join(
             self.sub_w_base_rel, self.base.c.id == self.sub_w_base_rel.c.id
         )
-        return relationships.JoinCondition(
+        return relationships._JoinCondition(
             left,
             self.base,
             self.sub_w_base_rel,
@@ -449,7 +449,7 @@ class _JoinFixtures:
         right = self.base.join(
             self.sub_w_base_rel, self.base.c.id == self.sub_w_base_rel.c.id
         )
-        return relationships.JoinCondition(
+        return relationships._JoinCondition(
             left,
             right,
             self.sub,
@@ -463,7 +463,7 @@ class _JoinFixtures:
         right = self.base.join(
             self.sub_w_sub_rel, self.base.c.id == self.sub_w_sub_rel.c.id
         )
-        return relationships.JoinCondition(
+        return relationships._JoinCondition(
             left,
             right,
             self.sub,
@@ -477,7 +477,7 @@ class _JoinFixtures:
         right = self.base.join(
             self.right_w_base_rel, self.base.c.id == self.right_w_base_rel.c.id
         )
-        return relationships.JoinCondition(
+        return relationships._JoinCondition(
             self.right_w_base_rel,
             right,
             self.right_w_base_rel,
@@ -490,7 +490,7 @@ class _JoinFixtures:
         right = self.base.join(
             self.right_w_base_rel, self.base.c.id == self.right_w_base_rel.c.id
         )
-        return relationships.JoinCondition(
+        return relationships._JoinCondition(
             self.right_w_base_rel,
             right,
             self.right_w_base_rel,
@@ -504,7 +504,7 @@ class _JoinFixtures:
         left = self.base.join(self.sub, self.base.c.id == self.sub.c.id)
 
         # see test_relationships->AmbiguousJoinInterpretedAsSelfRef
-        return relationships.JoinCondition(
+        return relationships._JoinCondition(
             left,
             self.sub,
             left,
@@ -513,7 +513,7 @@ class _JoinFixtures:
         )
 
     def _join_fixture_o2m_to_annotated_func(self, **kw):
-        return relationships.JoinCondition(
+        return relationships._JoinCondition(
             self.left,
             self.right,
             self.left,
@@ -524,7 +524,7 @@ class _JoinFixtures:
         )
 
     def _join_fixture_o2m_to_oldstyle_func(self, **kw):
-        return relationships.JoinCondition(
+        return relationships._JoinCondition(
             self.left,
             self.right,
             self.left,
@@ -536,7 +536,7 @@ class _JoinFixtures:
         )
 
     def _join_fixture_overlapping_composite_fks(self, **kw):
-        return relationships.JoinCondition(
+        return relationships._JoinCondition(
             self.composite_target,
             self.composite_multi_ref,
             self.composite_target,
@@ -550,7 +550,7 @@ class _JoinFixtures:
         )
 
     def _join_fixture_o2m_o_side_none(self, **kw):
-        return relationships.JoinCondition(
+        return relationships._JoinCondition(
             self.left,
             self.right,
             self.left,
@@ -563,7 +563,7 @@ class _JoinFixtures:
         )
 
     def _join_fixture_purely_single_o2m(self, **kw):
-        return relationships.JoinCondition(
+        return relationships._JoinCondition(
             self.purely_single_col,
             self.purely_single_col,
             self.purely_single_col,
@@ -576,7 +576,7 @@ class _JoinFixtures:
         )
 
     def _join_fixture_purely_single_m2o(self, **kw):
-        return relationships.JoinCondition(
+        return relationships._JoinCondition(
             self.purely_single_col,
             self.purely_single_col,
             self.purely_single_col,
@@ -592,7 +592,7 @@ class _JoinFixtures:
         def fn(a, b):
             return (a == b) | (b == a)
 
-        return relationships.JoinCondition(
+        return relationships._JoinCondition(
             self.selfref,
             self.selfref,
             self.selfref,
@@ -626,7 +626,7 @@ class _JoinFixtures:
         sub_w_sub_rel__flag = self.base.c.flag._annotate(
             {"parentmapper": prop.mapper}
         )
-        return relationships.JoinCondition(
+        return relationships._JoinCondition(
             local_selectable,
             remote_selectable,
             local_selectable,
@@ -1116,7 +1116,7 @@ class DetermineJoinTest(_JoinFixtures, fixtures.TestBase, AssertsCompiledSQL):
             "providing a list of those columns which "
             "should be counted as containing a foreign "
             "key reference to the parent table.",
-            relationships.JoinCondition,
+            relationships._JoinCondition,
             self.left,
             self.right_multi_fk,
             self.left,
@@ -1126,7 +1126,7 @@ class DetermineJoinTest(_JoinFixtures, fixtures.TestBase, AssertsCompiledSQL):
 
     def test_determine_join_no_fks_o2m(self):
         self._assert_raises_no_join(
-            relationships.JoinCondition,
+            relationships._JoinCondition,
             "Whatever.foo",
             None,
             self.left,
@@ -1138,7 +1138,7 @@ class DetermineJoinTest(_JoinFixtures, fixtures.TestBase, AssertsCompiledSQL):
 
     def test_determine_join_ambiguous_fks_m2m(self):
         self._assert_raises_ambig_join(
-            relationships.JoinCondition,
+            relationships._JoinCondition,
             "Whatever.foo",
             self.m2msecondary_ambig_fks,
             self.m2mleft,
@@ -1151,7 +1151,7 @@ class DetermineJoinTest(_JoinFixtures, fixtures.TestBase, AssertsCompiledSQL):
 
     def test_determine_join_no_fks_m2m(self):
         self._assert_raises_no_join(
-            relationships.JoinCondition,
+            relationships._JoinCondition,
             "Whatever.foo",
             self.m2msecondary_no_fks,
             self.m2mleft,
@@ -1163,7 +1163,7 @@ class DetermineJoinTest(_JoinFixtures, fixtures.TestBase, AssertsCompiledSQL):
         )
 
     def _join_fixture_fks_ambig_m2m(self):
-        return relationships.JoinCondition(
+        return relationships._JoinCondition(
             self.m2mleft,
             self.m2mright,
             self.m2mleft,
diff --git a/test/orm/test_selectin_relations.py b/test/orm/test_selectin_relations.py
index d46362abdc891f3d14b0c7fc3deafcec8f01cfa0..c29da9f87c07f7ad995c32c6b23d1d0e77a2c9ea 100644 (file)
--- a/test/orm/test_selectin_relations.py
+++ b/test/orm/test_selectin_relations.py
@@ -2386,7 +2386,7 @@ class ChunkingTest(fixtures.DeclarativeMappedTest):
 
         def go():
             with mock.patch(
-                "sqlalchemy.orm.strategies.SelectInLoader._chunksize", 47
+                "sqlalchemy.orm.strategies._SelectInLoader._chunksize", 47
             ):
                 q = session.query(A).options(selectinload(A.bs)).order_by(A.id)
 
@@ -2458,7 +2458,7 @@ class ChunkingTest(fixtures.DeclarativeMappedTest):
 
         def go():
             with mock.patch(
-                "sqlalchemy.orm.strategies.SelectInLoader._chunksize", 47
+                "sqlalchemy.orm.strategies._SelectInLoader._chunksize", 47
             ):
                 q = session.query(B).options(selectinload(B.a)).order_by(B.id)
 
diff --git a/test/orm/test_session.py b/test/orm/test_session.py
index 6e9720774eb9c60e58cfa96824466468c0056db2..1495932744a5dc8d1b3eb6d80e2a46202b936a47 100644 (file)
--- a/test/orm/test_session.py
+++ b/test/orm/test_session.py
@@ -2245,7 +2245,7 @@ class SessionInterface(fixtures.MappedTest):
             )
 
             with mock.patch(
-                "sqlalchemy.orm.session.loading.load_on_ident"
+                "sqlalchemy.orm.session.loading._load_on_ident"
             ) as load_on_ident:
                 s.refresh(m1, with_for_update={"read": True})
                 s.refresh(m1, with_for_update=True)
diff --git a/test/orm/test_sync.py b/test/orm/test_sync.py
index 10d73cb8d6404a8644b57c3894dcf4dfa355c070..42efad952b65aa68d2b339536d894a4797f27862 100644 (file)
--- a/test/orm/test_sync.py
+++ b/test/orm/test_sync.py
@@ -80,7 +80,7 @@ class SyncTest(
         pairs = [(a_mapper.c.id, b_mapper.c.id)]
         a1.obj().id = 7
         assert "id" not in b1.obj().__dict__
-        sync.populate(a1, a_mapper, b1, b_mapper, pairs, uowcommit, False)
+        sync._populate(a1, a_mapper, b1, b_mapper, pairs, uowcommit, False)
         eq_(b1.obj().id, 7)
         eq_(b1.obj().__dict__["id"], 7)
         assert ("pk_cascaded", b1, b_mapper.c.id) not in uowcommit.attributes
@@ -90,7 +90,7 @@ class SyncTest(
         pairs = [(a_mapper.c.id, b_mapper.c.id)]
         a1.obj().id = 7
         assert "id" not in b1.obj().__dict__
-        sync.populate(a1, a_mapper, b1, b_mapper, pairs, uowcommit, True)
+        sync._populate(a1, a_mapper, b1, b_mapper, pairs, uowcommit, True)
         eq_(b1.obj().id, 7)
         eq_(b1.obj().__dict__["id"], 7)
         eq_(uowcommit.attributes[("pk_cascaded", b1, b_mapper.c.id)], True)
@@ -102,7 +102,7 @@ class SyncTest(
             orm_exc.UnmappedColumnError,
             "Can't execute sync rule for source column 't2.id'; "
             r"mapper 'Mapper\[A\(t1\)\]' does not map this column.",
-            sync.populate,
+            sync._populate,
             a1,
             a_mapper,
             b1,
@@ -120,7 +120,7 @@ class SyncTest(
             r"Can't execute sync rule for destination "
             r"column 't1.id'; "
             r"mapper 'Mapper\[B\(t2\)\]' does not map this column.",
-            sync.populate,
+            sync._populate,
             a1,
             a_mapper,
             b1,
@@ -135,7 +135,7 @@ class SyncTest(
         pairs = [(a_mapper.c.id, b_mapper.c.t1id)]
         b1.obj().t1id = 8
         eq_(b1.obj().__dict__["t1id"], 8)
-        sync.clear(b1, b_mapper, pairs)
+        sync._clear(b1, b_mapper, pairs)
         eq_(b1.obj().__dict__["t1id"], None)
 
     def test_clear_pk(self):
@@ -147,7 +147,7 @@ class SyncTest(
             AssertionError,
             "Dependency rule on column 't1.id' tried to blank-out primary key "
             "column 't2.id' on instance '<B",
-            sync.clear,
+            sync._clear,
             b1,
             b_mapper,
             pairs,
@@ -161,7 +161,7 @@ class SyncTest(
             "Can't execute sync rule for destination "
             r"column 't1.foo'; mapper 'Mapper\[B\(t2\)\]' does not "
             "map this column.",
-            sync.clear,
+            sync._clear,
             b1,
             b_mapper,
             pairs,
@@ -174,7 +174,7 @@ class SyncTest(
         a1.obj().id = 12
         pairs = [(a_mapper.c.id, b_mapper.c.id)]
         dest = {}
-        sync.update(a1, a_mapper, dest, "old_", pairs)
+        sync._update(a1, a_mapper, dest, "old_", pairs)
         eq_(dest, {"id": 12, "old_id": 10})
 
     def test_update_unmapped(self):
@@ -185,7 +185,7 @@ class SyncTest(
             orm_exc.UnmappedColumnError,
             "Can't execute sync rule for source column 't2.id'; "
             r"mapper 'Mapper\[A\(t1\)\]' does not map this column.",
-            sync.update,
+            sync._update,
             a1,
             a_mapper,
             dest,
@@ -198,7 +198,7 @@ class SyncTest(
         a1.obj().id = 10
         pairs = [(a_mapper.c.id, b_mapper.c.id)]
         dest = {}
-        sync.populate_dict(a1, a_mapper, dest, pairs)
+        sync._populate_dict(a1, a_mapper, dest, pairs)
         eq_(dest, {"id": 10})
 
     def test_populate_dict_unmapped(self):
@@ -210,7 +210,7 @@ class SyncTest(
             orm_exc.UnmappedColumnError,
             "Can't execute sync rule for source column 't2.id'; "
             r"mapper 'Mapper\[A\(t1\)\]' does not map this column.",
-            sync.populate_dict,
+            sync._populate_dict,
             a1,
             a_mapper,
             dest,
@@ -221,11 +221,11 @@ class SyncTest(
         uowcommit, a1, b1, a_mapper, b_mapper = self._fixture()
         a1.obj().id = 10
         pairs = [(a_mapper.c.id, b_mapper.c.id)]
-        eq_(sync.source_modified(uowcommit, a1, a_mapper, pairs), False)
+        eq_(sync._source_modified(uowcommit, a1, a_mapper, pairs), False)
 
     def test_source_modified_no_pairs(self):
         uowcommit, a1, b1, a_mapper, b_mapper = self._fixture()
-        eq_(sync.source_modified(uowcommit, a1, a_mapper, []), False)
+        eq_(sync._source_modified(uowcommit, a1, a_mapper, []), False)
 
     def test_source_modified_modified(self):
         uowcommit, a1, b1, a_mapper, b_mapper = self._fixture()
@@ -233,7 +233,7 @@ class SyncTest(
         a1._commit_all(a1.dict)
         a1.obj().id = 12
         pairs = [(a_mapper.c.id, b_mapper.c.id)]
-        eq_(sync.source_modified(uowcommit, a1, a_mapper, pairs), True)
+        eq_(sync._source_modified(uowcommit, a1, a_mapper, pairs), True)
 
     def test_source_modified_composite(self):
         uowcommit, a1, b1, a_mapper, b_mapper = self._fixture()
@@ -244,7 +244,7 @@ class SyncTest(
             (a_mapper.c.id, b_mapper.c.id),
             (a_mapper.c.foo, b_mapper.c.id),
         ]
-        eq_(sync.source_modified(uowcommit, a1, a_mapper, pairs), True)
+        eq_(sync._source_modified(uowcommit, a1, a_mapper, pairs), True)
 
     def test_source_modified_composite_unmodified(self):
         uowcommit, a1, b1, a_mapper, b_mapper = self._fixture()
@@ -254,7 +254,7 @@ class SyncTest(
             (a_mapper.c.id, b_mapper.c.id),
             (a_mapper.c.foo, b_mapper.c.id),
         ]
-        eq_(sync.source_modified(uowcommit, a1, a_mapper, pairs), False)
+        eq_(sync._source_modified(uowcommit, a1, a_mapper, pairs), False)
 
     def test_source_modified_no_unmapped(self):
         uowcommit, a1, b1, a_mapper, b_mapper = self._fixture()
@@ -263,7 +263,7 @@ class SyncTest(
             orm_exc.UnmappedColumnError,
             "Can't execute sync rule for source column 't2.id'; "
             r"mapper 'Mapper\[A\(t1\)\]' does not map this column.",
-            sync.source_modified,
+            sync._source_modified,
             uowcommit,
             a1,
             a_mapper,