git.ipfire.org Git - thirdparty/sqlalchemy/sqlalchemy.git/commitdiff
Revert 903b18828461bb8cb8dca4acc56809b3df2b14d5
author Mike Bayer <mike_mp@zzzcomputing.com>
Tue, 26 May 2020 03:26:18 +0000 (23:26 -0400)
committer Mike Bayer <mike_mp@zzzcomputing.com>
Tue, 26 May 2020 03:26:18 +0000 (23:26 -0400)
This was accidentally pushed just now.

Change-Id: I4da4151c4a81e5cf72146f8dcab3537301ccaae9

lib/sqlalchemy/ext/baked.py
lib/sqlalchemy/orm/loading.py
test/ext/test_baked.py

index fdcaec5595fdf07473e84fa0603a6839405e4212..112e245f78742ba930e2df17f61dc22bf29a04d2 100644 (file)
@@ -383,7 +383,7 @@ class Result(object):
         return str(self._as_query())
 
     def __iter__(self):
-        return self._iter().__iter__()
+        return iter(self._iter())
 
     def _iter(self):
         bq = self.bq
@@ -463,15 +463,16 @@ class Result(object):
         Equivalent to :meth:`_query.Query.first`.
 
         """
-
         bq = self.bq.with_criteria(lambda q: q.slice(0, 1))
-        return (
+        ret = list(
             bq.for_session(self.session)
             .params(self._params)
             ._using_post_criteria(self._post_criteria)
-            ._iter()
-            .first()
         )
+        if len(ret) > 0:
+            return ret[0]
+        else:
+            return None
 
     def one(self):
         """Return exactly one result or raise an exception.
@@ -479,7 +480,19 @@ class Result(object):
         Equivalent to :meth:`_query.Query.one`.
 
         """
-        return self._iter().one()
+        try:
+            ret = self.one_or_none()
+        except orm_exc.MultipleResultsFound as err:
+            util.raise_(
+                orm_exc.MultipleResultsFound(
+                    "Multiple rows were found for one()"
+                ),
+                replace_context=err,
+            )
+        else:
+            if ret is None:
+                raise orm_exc.NoResultFound("No row was found for one()")
+            return ret
 
     def one_or_none(self):
         """Return one or zero results, or raise an exception for multiple
@@ -490,7 +503,17 @@ class Result(object):
         .. versionadded:: 1.0.9
 
         """
-        return self._iter().one_or_none()
+        ret = list(self)
+
+        l = len(ret)
+        if l == 1:
+            return ret[0]
+        elif l == 0:
+            return None
+        else:
+            raise orm_exc.MultipleResultsFound(
+                "Multiple rows were found for one_or_none()"
+            )
 
     def all(self):
         """Return all rows.
index 102e74b99752e31d52c6cbf2696bee363f029cc3..616e757a39fe8329fed55c853966cb237f785256 100644 (file)
@@ -14,6 +14,8 @@ as well as some of the attribute loading strategies.
 """
 from __future__ import absolute_import
 
+import collections
+
 from . import attributes
 from . import exc as orm_exc
 from . import path_registry
@@ -586,86 +588,48 @@ def _instance_processor(
 
     identity_class = mapper._identity_class
 
-    compile_state = context.compile_state
-
-    populators = {}
+    populators = collections.defaultdict(list)
 
     props = mapper._prop_set
     if only_load_props is not None:
         props = props.intersection(mapper._props[k] for k in only_load_props)
 
-    getters = path.get(compile_state.attributes, "getters", None)
-    if getters is None:
-        # directives given to us by the ColumnLoader.setup_query()
-        # methods.   Turn these directives into getters against the
-        # actual result set.
-        quick_populators = path.get(
-            context.attributes, "memoized_setups", _none_set
-        )
-        cached_populators = {
-            "new": [],
-            "expire": [],
-            "quick": [],
-            "todo": [],
-            "delayed": [],
-            "existing": [],
-            "eager": [],
-        }
-
-        pk_cols = mapper.primary_key
+    quick_populators = path.get(
+        context.attributes, "memoized_setups", _none_set
+    )
 
-        if adapter:
-            pk_cols = [adapter.columns[c] for c in pk_cols]
-        getters = {
-            "populators": cached_populators,
-            "primary_key_getter": result._tuple_getter(pk_cols),
-        }
-
-        for prop in props:
-            if prop in quick_populators:
-                # this is an inlined path just for column-based attributes.
-                col = quick_populators[prop]
-                if col is _DEFER_FOR_STATE:
-                    cached_populators["new"].append(
-                        (prop.key, prop._deferred_column_loader)
-                    )
-                elif col is _SET_DEFERRED_EXPIRED:
-                    # note that in this path, we are no longer
-                    # searching in the result to see if the column might
-                    # be present in some unexpected way.
-                    cached_populators["expire"].append((prop.key, False))
-                elif col is _RAISE_FOR_STATE:
-                    cached_populators["new"].append(
-                        (prop.key, prop._raise_column_loader)
-                    )
-                else:
-                    getter = None
-                    if not getter:
-                        getter = result._getter(col, False)
-                    if getter:
-                        cached_populators["quick"].append((prop.key, getter))
-                    else:
-                        # fall back to the ColumnProperty itself, which
-                        # will iterate through all of its columns
-                        # to see if one fits
-                        prop.create_row_processor(
-                            context,
-                            path,
-                            mapper,
-                            result,
-                            adapter,
-                            cached_populators,
-                        )
+    for prop in props:
+        if prop in quick_populators:
+            # this is an inlined path just for column-based attributes.
+            col = quick_populators[prop]
+            if col is _DEFER_FOR_STATE:
+                populators["new"].append(
+                    (prop.key, prop._deferred_column_loader)
+                )
+            elif col is _SET_DEFERRED_EXPIRED:
+                # note that in this path, we are no longer
+                # searching in the result to see if the column might
+                # be present in some unexpected way.
+                populators["expire"].append((prop.key, False))
+            elif col is _RAISE_FOR_STATE:
+                populators["new"].append((prop.key, prop._raise_column_loader))
             else:
-                cached_populators["todo"].append(prop)
-        path.set(compile_state.attributes, "getters", getters)
-
-    cached_populators = getters["populators"]
-    populators = {k: list(v) for k, v in cached_populators.items()}
-    for prop in cached_populators["todo"]:
-        prop.create_row_processor(
-            context, path, mapper, result, adapter, populators
-        )
+                getter = None
+                if not getter:
+                    getter = result._getter(col, False)
+                if getter:
+                    populators["quick"].append((prop.key, getter))
+                else:
+                    # fall back to the ColumnProperty itself, which
+                    # will iterate through all of its columns
+                    # to see if one fits
+                    prop.create_row_processor(
+                        context, path, mapper, result, adapter, populators
+                    )
+        else:
+            prop.create_row_processor(
+                context, path, mapper, result, adapter, populators
+            )
 
     propagated_loader_options = context.propagated_loader_options
     load_path = (
@@ -743,7 +707,11 @@ def _instance_processor(
     else:
         refresh_identity_key = None
 
-        primary_key_getter = getters["primary_key_getter"]
+        pk_cols = mapper.primary_key
+
+        if adapter:
+            pk_cols = [adapter.columns[c] for c in pk_cols]
+        tuple_getter = result._tuple_getter(pk_cols)
 
     if mapper.allow_partial_pks:
         is_not_primary_key = _none_set.issuperset
@@ -764,11 +732,7 @@ def _instance_processor(
         else:
             # look at the row, see if that identity is in the
             # session, or we have to create a new one
-            identitykey = (
-                identity_class,
-                primary_key_getter(row),
-                identity_token,
-            )
+            identitykey = (identity_class, tuple_getter(row), identity_token)
 
             instance = session_identity_map.get(identitykey)
 
@@ -911,7 +875,7 @@ def _instance_processor(
         def ensure_no_pk(row):
             identitykey = (
                 identity_class,
-                primary_key_getter(row),
+                tuple_getter(row),
                 identity_token,
             )
             if not is_not_primary_key(identitykey[1]):
index 8b9fe8c116070bad35ac6aa6bae4a9008c63ba9e..ecb5e3919beec94500ec2508f0fa9f85864c4564 100644 (file)
@@ -180,7 +180,7 @@ class LikeQueryTest(BakedTest):
 
         assert_raises_message(
             orm_exc.MultipleResultsFound,
-            "Multiple rows were found when one or none was required",
+            "Multiple rows were found for one_or_none()",
             bq(Session()).one_or_none,
         )
 
@@ -192,7 +192,7 @@ class LikeQueryTest(BakedTest):
 
         assert_raises_message(
             orm_exc.NoResultFound,
-            "No row was found when one was required",
+            "No row was found for one()",
             bq(Session()).one,
         )
 
@@ -213,7 +213,7 @@ class LikeQueryTest(BakedTest):
 
         assert_raises_message(
             orm_exc.MultipleResultsFound,
-            "Multiple rows were found when exactly one was required",
+            "Multiple rows were found for one()",
             bq(Session()).one,
         )