]> git.ipfire.org Git - thirdparty/sqlalchemy/sqlalchemy.git/commitdiff
Result initial introduction
authorMike Bayer <mike_mp@zzzcomputing.com>
Tue, 4 Jun 2019 21:29:20 +0000 (17:29 -0400)
committerMike Bayer <mike_mp@zzzcomputing.com>
Fri, 21 Feb 2020 22:53:33 +0000 (17:53 -0500)
This builds on cc718cccc0bf8a01abdf4068c7ea4f3 which moved
RowProxy to Row, allowing Row to be more like a named tuple.

- KeyedTuple in ORM is replaced with Row

- ResultSetMetaData broken out into "simple" and "cursor" versions
  for ORM and Core, as well as LegacyCursor version.

- Row now has _mapping attribute that supplies full mapping behavior.
Row and SimpleRow both have named tuple behavior otherwise.
LegacyRow has some mapping features on the tuple which emit
deprecation warnings (e.g. keys(), values(), etc).   the biggest
change for mapping->tuple is the behavior of __contains__ which
moves from testing of "key in row" to "value in row".

- ResultProxy breaks into ResultProxy and FutureResult (interim),
the latter has the newer APIs.   Made available to dialects
using execution options.

- internal reflection methods and most tests move off of implicit
Row mapping behavior and move to row._mapping, result.mappings()
method using future result

- a new strategy system for cursor handling replaces the various
subclasses of RowProxy

- some execution context adjustments. We will leave EC in but
refined things like get_result_proxy() and out parameter handling.
Dialects for 1.4 will need to adjust from get_result_proxy()
to get_result_cursor_strategy(), if they are using this method

- out parameter handling now accommodated by get_out_parameter_values()
EC method.   Oracle changes for this.  external dialect for
DB2 for example will also need to adjust for this.

- deprecate case_insensitive flag for engine / result, this
feature is not used

mapping-methods on Row are deprecated, and replaced with
Row._mapping.<meth>, including:

   row.keys()  -> use row._mapping.keys()
   row.items()  -> use row._mapping.items()
   row.values() -> use row._mapping.values()
   key in row  -> use key in row._mapping
   int in row  -> use int < len(row)

Fixes: #4710
Fixes: #4878
Change-Id: Ieb9085e9bcff564359095b754da9ae0af55679f0

66 files changed:
doc/build/changelog/migration_14.rst
doc/build/changelog/migration_20.rst
doc/build/changelog/unreleased_14/4710.rst [new file with mode: 0644]
doc/build/changelog/unreleased_14/4710_row.rst [deleted file]
doc/build/changelog/unreleased_14/4878.rst [new file with mode: 0644]
doc/build/core/connections.rst
doc/build/core/future.rst [new file with mode: 0644]
doc/build/core/index.rst
doc/build/core/tutorial.rst
doc/build/index.rst
doc/build/orm/loading_columns.rst
doc/build/orm/query.rst
doc/build/orm/tutorial.rst
lib/sqlalchemy/cextension/resultproxy.c
lib/sqlalchemy/dialects/mssql/base.py
lib/sqlalchemy/dialects/mysql/mysqlconnector.py
lib/sqlalchemy/dialects/oracle/base.py
lib/sqlalchemy/dialects/oracle/cx_oracle.py
lib/sqlalchemy/dialects/postgresql/base.py
lib/sqlalchemy/dialects/postgresql/psycopg2.py
lib/sqlalchemy/engine/__init__.py
lib/sqlalchemy/engine/base.py
lib/sqlalchemy/engine/create.py
lib/sqlalchemy/engine/default.py
lib/sqlalchemy/engine/interfaces.py
lib/sqlalchemy/engine/result.py
lib/sqlalchemy/engine/row.py [new file with mode: 0644]
lib/sqlalchemy/ext/baked.py
lib/sqlalchemy/future/__init__.py
lib/sqlalchemy/future/result.py [new file with mode: 0644]
lib/sqlalchemy/orm/loading.py
lib/sqlalchemy/orm/mapper.py
lib/sqlalchemy/orm/persistence.py
lib/sqlalchemy/orm/query.py
lib/sqlalchemy/sql/compiler.py
lib/sqlalchemy/sql/selectable.py
lib/sqlalchemy/testing/profiling.py
lib/sqlalchemy/testing/suite/test_results.py
lib/sqlalchemy/testing/warnings.py
lib/sqlalchemy/util/__init__.py
lib/sqlalchemy/util/_collections.py
regen_callcounts.tox.ini
test/aaa_profiling/test_resultset.py
test/aaa_profiling/test_zoomark.py
test/base/test_utils.py
test/dialect/postgresql/test_types.py
test/dialect/test_sqlite.py
test/orm/test_assorted_eager.py
test/orm/test_deprecations.py
test/orm/test_loading.py
test/orm/test_pickled.py
test/orm/test_query.py
test/orm/test_unitofwork.py
test/profiles.txt
test/requirements.py
test/sql/test_compiler.py
test/sql/test_defaults.py
test/sql/test_deprecations.py
test/sql/test_functions.py
test/sql/test_insert_exec.py
test/sql/test_query.py
test/sql/test_resultset.py
test/sql/test_returning.py
test/sql/test_type_expressions.py
test/sql/test_types.py
test/sql/test_unicode.py

index c7819a5ae6c3bed634db7b36cd6243fbcc2520b1..35b2a7f91fde655ee2139265dcb76ef7590b7ff3 100644 (file)
@@ -320,6 +320,148 @@ details.
 
 :ticket:`4645`
 
+.. _change_4710_core:
+
+RowProxy is no longer a "proxy"; is now called Row and behaves like an enhanced named tuple
+-------------------------------------------------------------------------------------------
+
+The :class:`.RowProxy` class, which represents individual database result rows
+in a Core result set, is now called :class:`.Row` and is no longer a "proxy"
+object; what this means is that when the :class:`.Row` object is returned, the
+row is a simple tuple that contains the data in its final form, already having
+been processed by result-row handling functions associated with datatypes
+(examples include turning a date string from the database into a ``datetime``
+object, a JSON string into a Python ``json.loads()`` result, etc.).
+
+The immediate rationale for this is so that the row can act more like a Python
+named tuple, rather than a mapping, where the values in the tuple are the
+subject of the ``__contains__`` operator on the tuple, rather than the keys.
+With :class:`.Row` acting like a named tuple, it is then suitable for use as as
+replacement for the ORM's :class:`.KeyedTuple` object, leading to an eventual
+API where both the ORM and Core deliver result sets that  behave identically.
+Unification of major patterns within ORM and Core is a major goal of SQLAlchemy
+2.0, and release 1.4 aims to have most or all of the underlying architectural
+patterns in place in order to support this process.   The note in
+:ref:`change_4710_orm` describes the ORM's use of the :class:`.Row` class.
+
+For release 1.4, the :class:`.Row` class provides an additional subclass
+:class:`.LegacyRow`, which is used by Core and provides a backwards-compatible
+version of :class:`.RowProxy` while emitting deprecation warnings for those API
+features and behaviors that will be moved.  ORM :class:`.Query` now makes use
+of :class:`.Row` directly as a replacement for :class:`.KeyedTuple`.
+
+The :class:`.LegacyRow` class is a transitional class where the
+``__contains__`` method is still testing against the keys, not the values,
+while emitting a deprecation warning when the operation succeeds.
+Additionally, all the other mapping-like methods on the previous
+:class:`.RowProxy` are deprecated, including :meth:`.LegacyRow.keys`,
+:meth:`.LegacyRow.items`, etc.  For mapping-like behaviors from a :class:`.Row`
+object, including support for these methods as well as a key-oriented
+``__contains__`` operator, the API going forward will be to first access a
+special attribute :attr:`.Row._mapping`, which will then provide a complete
+mapping interface to the row, rather than a tuple interface.
+
+Rationale: To behave more like a named tuple rather than a mapping
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The difference between a named tuple and a mapping as far as boolean operators
+can be summarized.   Given a "named tuple" in pseudocode as::
+
+    row = (id: 5,  name: 'some name')
+
+The biggest cross-incompatible difference is the behavior of ``__contains__``::
+
+    "id" in row          # True for a mapping, False for a named tuple
+    "some name" in row   # False for a mapping, True for a named tuple
+
+In 1.4, when a :class:`.LegacyRow` is returned by a Core result set, the above
+``"id" in row`` comparison will continue to succeed, however a deprecation
+warning will be emitted.   To use the "in" operator as a mapping, use the
+:attr:`.Row._mapping` attribute::
+
+    "id" in row._mapping
+
+SQLAlchemy 2.0's result object will feature a ``.mappings()`` modifier so that
+these mappings can be received directly::
+
+    # using sqlalchemy.future package
+    for row in result.mappings():
+        row["id"]
+
+Proxying behavior goes away, was also unnecessary in modern usage
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The refactor of :class:`.Row` to behave like a tuple requires that all
+data values be fully available up front.  This is an internal behavior change
+from that of :class:`.RowProxy`, where result-row processing functions would
+be invoked at the point of accessing an element of the row, instead of
+when the row was first fetched.     This means for example when retrieving
+a datetime value from SQLite, the data for the row as present in the
+:class:`.RowProxy` object would previously have looked like::
+
+    row_proxy = (1, '2019-12-31 19:56:58.272106')
+
+and then upon access via ``__getitem__``, the ``datetime.strptime()`` function
+would be used on the fly to convert the above string date into a ``datetime``
+object.     With the new architecture, the ``datetime()`` object is present
+in the tuple when it is returned, the ``datetime.strptime()`` function
+having been called just once up front::
+
+    row = (1, datetime.datetime(2019, 12, 31, 19, 56, 58, 272106))
+
+The :class:`.RowProxy` and :class:`.Row` objects in SQLAlchemy are where the
+majority of SQLAlchemy's C extension code takes place.   This code has been
+highly refactored to provide the new behavior in an efficient manner, and
+overall performance has been improved as the design of :class:`.Row` is now
+considerably simpler.
+
+The rationale behind the previous  behavior assumed a usage model where a
+result row might have dozens or hundreds of columns present, where most of
+those columns would not be accessed, and for which a majority of those columns
+would require some result-value processing function.  By invoking the
+processing function only when needed, the goal was that lots of result
+processing functions would not be necessary, thus increasing performance.
+
+There are many reasons why the above assumptions do not hold:
+
+1. the vast majority of row-processing functions called were to unicode decode
+   a bytestring into a Python unicode string under Python 2.   This was right
+   as Python Unicode was beginning to see use and before Python 3 existed.
+   Once Python 3 was introduced, within a few years, all Python DBAPIs took
+   on the proper role of supporting the delivering of Python Unicode objects directly, under
+   both Python 2 and Python 3, as an option in the former case and as the only
+   way forward in the latter case.  Eventually, in most cases it became
+   the default for Python 2 as well.   SQLAlchemy's Python 2 support still
+   enables explicit string-to-unicode conversion for some DBAPIs such as
+   cx_Oracle, however it is now performed at the DBAPI level rather than
+   as a standard SQLAlchemy result row processing function.
+
+2. The above string conversion, when it is used, was made to be extremely
+   performant via the C extensions, so much so that even in 1.4, SQLAlchemy's
+   byte-to-unicode codec hook is plugged into cx_Oracle where it has been
+   observed to be more performant than cx_Oracle's own hook; this meant that
+   the overhead for converting all strings in a row was not as significant
+   as it originally was in any case.
+
+3. Row processing functions are not used in most other cases; the
+   exceptions are SQLite's datetime support, JSON support for some backends,
+   some numeric handlers such as string to ``Decimal``.   In the case of
+   ``Decimal``, Python 3 also standardized on the highly performant ``cdecimal``
+   implementation, which is not the case in Python 2 which continues to use
+   the much less performant pure Python version.
+
+4. Fetching full rows where only a few columns are needed is not common within
+   real-world use cases  In the early days of SQLAlchemy, database code from other
+   languages of the form "row = fetch('SELECT * FROM table')" was common;
+   using SQLAlchemy's expression language however, code observed in the wild
+   typically makes use of the specific columns needed.
+
+.. seealso::
+
+    :ref:`change_4710_orm`
+
+:ticket:`4710`
+
 New Features - ORM
 ==================
 
@@ -398,6 +540,58 @@ as was present previously.
 Behavioral Changes - ORM
 ========================
 
+.. _change_4710_orm:
+
+The "KeyedTuple" object returned by Query is replaced by Row
+-------------------------------------------------------------
+
+As discussed at :ref:`change_4710_core`, the Core :class:`.RowProxy` object
+is now replaced by a class called :class:`.Row`.    The base :class:`.Row`
+object now behaves more fully like a named tuple, and as such it is now
+used as the basis for tuple-like results returned by the :class:`.Query`
+object, rather than the previous "KeyedTuple" class.
+
+The rationale is so that by SQLAlchemy 2.0, both Core and ORM SELECT statements
+will return result rows using the same :class:`.Row` object which behaves  like
+a named tuple.  Dictionary-like functionality is available from :class:`.Row`
+via the :attr:`.Row._mapping` attribute.   In the interim, Core result sets
+will make use of a :class:`.Row` subclass :class:`.LegacyRow` which maintains
+the previous dict/tuple hybrid behavior for backwards compatibility while the
+:class:`.Row` class will be used directly for ORM tuple results returned
+by the :class:`.Query` object.
+
+Effort has been made to get most of the featureset of :class:`.Row` to be
+available within the ORM, meaning that access by string name as well
+as entity / column should work::
+
+    row = s.query(User, Address).join(User.addresses).first()
+
+    row._mapping[User]  # same as row[0]
+    row._mapping[Address]  # same as row[1]
+    row._mapping["User"]  # same as row[0]
+    row._mapping["Address"]  # same as row[1]
+
+    u1 = aliased(User)
+    row = s.query(u1).only_return_tuples(True).first()
+    row._mapping[u1]  # same as row[0]
+
+
+    row = (
+        s.query(User.id, Address.email_address)
+        .join(User.addresses)
+        .first()
+    )
+
+    row._mapping[User.id]  # same as row[0]
+    row._mapping["id"]  # same as row[0]
+    row._mapping[users.c.id]  # same as row[0]
+
+.. seealso::
+
+    :ref:`change_4710_core`
+
+:ticket:`4710`.
+
 .. _change_5074:
 
 Session does not immediately create a new SessionTransaction object
@@ -1079,61 +1273,6 @@ as::
 
 :ticket:`4753`
 
-.. _change_4710_row:
-
-The "RowProxy" is no longer a "proxy", now called ``Row``
----------------------------------------------------------
-
-Since the beginning of SQLAlchemy, the Core result objects exposed to the
-user are the :class:`.ResultProxy` and ``RowProxy`` objects.   The name
-"proxy" refers to the `GOF Proxy Pattern <https://en.wikipedia.org/wiki/Proxy_pattern>`_,
-emphasizing that these objects are presenting a facade around the DBAPI
-``cursor`` object and the tuple-like objects returned by methods such
-as ``cursor.fetchone()``; as methods on the result and row proxy objects
-are invoked, the underlying methods or data members of the ``cursor`` and
-the tuple-like objects returned are invoked.
-
-In particular, SQLAlchemy's row-processing functions would be invoked
-as a particular column in a row is accessed.  By row-processing functions,
-we refer to functions such as that of the :class:`.Unicode` datatype, which under
-Python 2 would often convert Python string objects to Python unicode
-objects, as well as numeric functions that produce ``Decimal`` objects,
-SQLite datetime functions that produce ``datetime`` objects from string
-representations, as well as any-number of user-defined functions which can
-be created using :class:`.TypeDecorator`.
-
-The rationale for this pattern was performance, where the anticipated use
-case of fetching a row from a legacy database that contained dozens of
-columns would not need to run, for example, a unicode converter on every
-element of each row, if only a few columns in the row were being fetched.
-SQLAlchemy eventually gained C extensions which allowed for additional
-performance gains within this process.
-
-As part of SQLAlchemy 1.4's goal of migrating towards SQLAlchemy 2.0's updated
-usage patterns, row objects will be made to behave more like tuples.  To
-suit this, the "proxy" behavior of :class:`.Row` has been removed and instead
-the row is populated with its final data values upon construction.  This
-in particular allows an operation such as ``obj in row`` to work as that
-of a tuple where it tests for containment of ``obj`` in the row itself,
-rather than considering it to be a key in a mapping as is the case now.
-For the moment, ``obj in row`` still does a key lookup,
-that is, detects if the row has a particular column name as ``obj``, however
-this behavior is deprecated and in 2.0 the :class:`.Row` will behave fully
-as a tuple-like object; lookup of keys will be via the ``._mapping``
-attribute.
-
-The result of removing the proxy behavior from rows is that the C code has been
-simplified and the performance of many operations is improved both with and
-without the C extensions in use.   Modern Python DBAPIs handle unicode
-conversion natively in most cases, and SQLAlchemy's unicode handlers are
-very fast in any case, so the expense of unicode conversion
-is a non-issue.
-
-This change by itself has no behavioral impact on the row, but is part of
-a larger series of changes in :ticket:`4710` which unifies the Core row/result
-facade with that of the ORM.
-
-:ticket:`4710`
 
 
 .. _change_4449:
index bdb35b86d465c2211a8fb6a2b66d1182fd573a1b..1949b2bf53c723cd011caf3d3dab4f850e5861c6 100644 (file)
@@ -545,6 +545,41 @@ equally::
         result[0].all()  # same as result.scalars().all()
         result[2:5].all()  # same as result.columns('c', 'd', 'e').all()
 
+Result rows unified between Core and ORM on named-tuple interface
+==================================================================
+
+Already part of 1.4, the previous ``KeyedTuple`` class that was used when
+selecting rows from the :class:`.Query` object has been replaced by the
+:class:`.Row` class, which is the base of the same :class:`.Row` that comes
+back with Core statement results (in 1.4 it is the :class:`.LegacyRow` class).
+
+This :class:`.Row` behaves like a named tuple, in that it acts as a sequence
+but also supports attribute name access, e.g. ``row.some_column``.  However,
+it also provides the previous "mapping" behavior via the special attribute
+``row._mapping``, which produces a Python mapping such that keyed access
+such as ``row["some_column"]`` can be used.
+
+In order to receive results as mappings up front, the ``mappings()`` modifier
+on the result can be used::
+
+    result = session.execute(stmt)
+    for row in result.mappings():
+        print("the user is: %s" % row["User"])
+
+The :class:`.Row` class as used by the ORM also supports access via entity
+or attribute::
+
+    stmt = select(User, Address).join(User.addresses)
+
+    for row in session.execute(stmt).mappings():
+        print("the user is: %s  the address is: %s" % (
+            row[User],
+            row[Address]
+        ))
+
+.. seealso::
+
+    :ref:`change_4710_core`
 
 Declarative becomes a first class API
 =====================================
diff --git a/doc/build/changelog/unreleased_14/4710.rst b/doc/build/changelog/unreleased_14/4710.rst
new file mode 100644 (file)
index 0000000..8ba8bb9
--- /dev/null
@@ -0,0 +1,33 @@
+.. change::
+    :tags: change, engine
+    :tickets: 4710
+
+    The ``RowProxy`` class is no longer a "proxy" object, and is instead
+    directly populated with the post-processed contents of the DBAPI row tuple
+    upon construction.   Now named :class:`.Row`, the mechanics of how the
+    Python-level value processors have been simplified, particularly as it impacts the
+    format of the C code, so that a DBAPI row is processed into a result tuple
+    up front.   The object returned by the :class:`.ResultProxy` is now the
+    :class:`.LegacyRow` subclass, which maintains mapping/tuple hybrid behavior,
+    however the base :class:`.Row` class now behaves more fully like a named
+    tuple.
+
+    .. seealso::
+
+        :ref:`change_4710_core`
+
+
+.. change::
+    :tags: change, orm
+    :tickets: 4710
+
+    The "KeyedTuple" class returned by :class:`.Query` is now replaced with the
+    Core :class:`.Row` class, which behaves in the same way as KeyedTuple.
+    In SQLAlchemy 2.0, both Core and ORM will return result rows using the same
+    :class:`.Row` object.   In the interim, Core uses a backwards-compatibility
+    class :class:`.LegacyRow` that maintains the former mapping/tuple hybrid
+    behavior used by "RowProxy".
+
+    .. seealso::
+
+        :ref:`change_4710_orm`
\ No newline at end of file
diff --git a/doc/build/changelog/unreleased_14/4710_row.rst b/doc/build/changelog/unreleased_14/4710_row.rst
deleted file mode 100644 (file)
index b9e417b..0000000
+++ /dev/null
@@ -1,13 +0,0 @@
-.. change::
-    :tags: feature, engine
-
-    The ``RowProxy`` class is no longer a "proxy" object, and is instead
-    directly populated with the post-processed contents of the DBAPI row tuple
-    upon construction.   Now named :class:`.Row`, the mechanics of how the
-    Python-level value processors have been simplified, particularly as it impacts the
-    format of the C code, so that a DBAPI row is processed into a result tuple
-    up front.   See the migration notes for further details.
-
-    .. seealso::
-
-        :ref:`change_4710_row`
diff --git a/doc/build/changelog/unreleased_14/4878.rst b/doc/build/changelog/unreleased_14/4878.rst
new file mode 100644 (file)
index 0000000..5795053
--- /dev/null
@@ -0,0 +1,11 @@
+.. change::
+    :tags: change, engine
+    :tickets: 4878
+
+    The :paramref:`.case_sensitive` flag on :func:`.create_engine` is
+    deprecated; this flag was part of the transition of the result row object
+    to allow case sensitive column matching as the default, while providing
+    backwards compatibility for the former matching method.   All string access
+    for a row should be assumed to be case sensitive just like any other Python
+    mapping.
+
index e205a37b5d81c20aee1046d38594fb010d3f587b..5619377de8c630e8fb29df85e5942d1574cfd33b 100644 (file)
@@ -635,6 +635,9 @@ The above will respond to ``create_engine("mysql+foodialect://")`` and load the
 Connection / Engine API
 =======================
 
+.. autoclass:: BaseResult
+    :members:
+
 .. autoclass:: Connection
    :members:
 
@@ -650,14 +653,22 @@ Connection / Engine API
 .. autoclass:: ExceptionContext
    :members:
 
+.. autoclass:: LegacyRow
+    :members:
+
 .. autoclass:: NestedTransaction
     :members:
 
 .. autoclass:: ResultProxy
     :members:
+    :inherited-members:
 
 .. autoclass:: Row
     :members:
+    :private-members: _fields, _mapping
+
+.. autoclass:: RowMapping
+    :members:
 
 .. autoclass:: Transaction
     :members:
diff --git a/doc/build/core/future.rst b/doc/build/core/future.rst
new file mode 100644 (file)
index 0000000..ffe8b67
--- /dev/null
@@ -0,0 +1,13 @@
+.. _core_future_toplevel:
+
+SQLAlchemy 2.0 Future (Core)
+============================
+
+.. module:: sqlalchemy.future
+
+
+.. autofunction:: sqlalchemy.future.select
+
+.. autoclass:: sqlalchemy.future.Result
+    :members:
+    :inherited-members:
index 26c26af07ae2a2e3f17d16da55c0e806ca5462a3..a3574341a4cef9f7515a7c8e5d87ed3ece31bc32 100644 (file)
@@ -17,3 +17,4 @@ Language provides a schema-centric usage paradigm.
     types
     engines_connections
     api_basics
+    future
\ No newline at end of file
index 9b58222f22497f9dd5fd3805e50a69db5f5a36d8..89316bcb9aeb6834f14c38e2751e1052e88ff240 100644 (file)
@@ -408,10 +408,40 @@ of :class:`.Row` objects:
     (2, u'wendy', u'Wendy Williams')
 
 Above, we see that printing each :class:`.Row` produces a simple
-tuple-like result.  The :class:`.Row` behaves like a hybrid between
-a Python mapping and tuple, with several methods of retrieving the data
-in each column.  One common way is
-as a Python mapping of strings, using the string names of columns:
+tuple-like result.  The most canonical way in Python to access the values
+of these tuples as rows are fetched is through tuple assignment:
+
+.. sourcecode:: pycon+sql
+
+    {sql}>>> result = conn.execute(s)
+    SELECT users.id, users.name, users.fullname
+    FROM users
+    ()
+
+    {stop}>>> for id, name, fullname in result:
+    ...     print("name:", name, "; fullname: ", fullname)
+    name: jack ; fullname:  Jack Jones
+    name: wendy ; fullname:  Wendy Williams
+
+The :class:`.Row` object actually behaves like a Python named tuple, so
+we may also access these attributes from the row itself using attribute
+access:
+
+.. sourcecode:: pycon+sql
+
+    {sql}>>> result = conn.execute(s)
+    SELECT users.id, users.name, users.fullname
+    FROM users
+    ()
+
+    {stop}>>> for row in result:
+    ...     print("name:", row.name, "; fullname: ", row.fullname)
+    name: jack ; fullname:  Jack Jones
+    name: wendy ; fullname:  Wendy Williams
+
+To access columns via name using strings, either when the column name is
+progammatically generated, or contains non-ascii characters, the
+:attr:`.Row._mapping` view may be used that provides dictionary-like access:
 
 .. sourcecode:: pycon+sql
 
@@ -421,10 +451,28 @@ as a Python mapping of strings, using the string names of columns:
     ()
 
     {stop}>>> row = result.fetchone()
-    >>> print("name:", row['name'], "; fullname:", row['fullname'])
+    >>> print("name:", row._mapping['name'], "; fullname:", row._mapping['fullname'])
     name: jack ; fullname: Jack Jones
 
-Another way is as a Python sequence, using integer indexes:
+.. deprecated:: 1.4
+
+    In versions of SQLAlchemy prior to 1.4, the above access using
+    :attr:`.Row._mapping` would proceed against the row object itself, that
+    is::
+
+        row = result.fetchone()
+        name, fullname = row["name"], row["fullname"]
+
+    This pattern is now deprecated and will be removed in SQLAlchemy 2.0, so
+    that the :class:`.Row` object may now behave fully like a Python named
+    tuple.
+
+.. versionchanged:: 1.4  Added :attr:`.Row._mapping` which provides for
+   dictionary-like access to a :class:`.Row`, superseding the use of string/
+   column keys against the :class:`.Row` object directly.
+
+As the :class:`.Row` is a tuple, sequence (i.e. integer or slice) access
+may be used as well:
 
 .. sourcecode:: pycon+sql
 
@@ -435,18 +483,27 @@ Another way is as a Python sequence, using integer indexes:
 A more specialized method of column access is to use the SQL construct that
 directly corresponds to a particular column as the mapping key; in this
 example, it means we would use the  :class:`.Column` objects selected in our
-SELECT directly as keys:
+SELECT directly as keys in conjunction with the :attr:`.Row._mapping`
+collection:
 
 .. sourcecode:: pycon+sql
 
     {sql}>>> for row in conn.execute(s):
-    ...     print("name:", row[users.c.name], "; fullname:", row[users.c.fullname])
+    ...     print("name:", row._mapping[users.c.name], "; fullname:", row._mapping[users.c.fullname])
     SELECT users.id, users.name, users.fullname
     FROM users
     ()
     {stop}name: jack ; fullname: Jack Jones
     name: wendy ; fullname: Wendy Williams
 
+.. sidebar:: Rows are changing
+
+    The :class:`.Row` class was known as :class:`.RowProxy` for all
+    SQLAlchemy versions through 1.3.  In 1.4, the objects returned by
+    :class:`.ResultProxy` are actually a subclass of :class:`.Row` known as
+    :class:`.LegacyRow`.   See :ref:`change_4710_core` for background on this
+    change.
+
 The :class:`.ResultProxy` object features "auto-close" behavior that closes the
 underlying DBAPI ``cursor`` object when all pending result rows have been
 fetched.   If a :class:`.ResultProxy` is to be discarded before such an
@@ -897,14 +954,14 @@ when the result-columns are fetched using the actual column object as a key.
 Fetching the ``email_address`` column would be::
 
     >>> row = result.fetchone()
-    >>> row[addresses.c.email_address]
+    >>> row._mapping[addresses.c.email_address]
     'jack@yahoo.com'
 
 If on the other hand we used a string column key, the usual rules of name-
 based matching still apply, and we'd get an ambiguous column error for
 the ``id`` value::
 
-    >>> row["id"]
+    >>> row._mapping["id"]
     Traceback (most recent call last):
     ...
     InvalidRequestError: Ambiguous column name 'id' in result set column descriptions
index 9513c89941e5ba6c2f0e30a577591ddcf81f7b83..cbed036dd53720862aad7b3e563996b2c6aaabc2 100644 (file)
@@ -91,6 +91,7 @@ are documented here.  In contrast to the ORM's domain-centric mode of usage, the
   :doc:`Core Event Interfaces <core/events>` |
   :doc:`Creating Custom SQL Constructs <core/compiler>` |
 
+* **SQLAlchemy 2.0 Compatibility:** :doc:`SQLAlchemy 2.0 Future (Core) <core/future>`
 
 Dialect Documentation
 ======================
index 25e64b0ab35e0c76f140d609740f8a4f621142be..f36f520b02b3ecb29025fc87fe66bcee84183edf 100644 (file)
@@ -295,7 +295,7 @@ are fetched.  The method :meth:`.Bundle.create_row_processor` is given
 the :class:`.Query` and a set of "row processor" functions at query execution
 time; these processor functions when given a result row will return the
 individual attribute value, which can then be adapted into any kind of
-return data structure.  Below illustrates replacing the usual :class:`.KeyedTuple`
+return data structure.  Below illustrates replacing the usual :class:`.Row`
 return structure with a straight Python dictionary::
 
     from sqlalchemy.orm import Bundle
index a192bad941436e5a7778ffa62083b03ec5830698..7d54de354d339db39c33644e5dbcb185c5e85bf3 100644 (file)
@@ -33,9 +33,6 @@ ORM-Specific Query Constructs
 .. autoclass:: sqlalchemy.orm.query.Bundle
     :members:
 
-.. autoclass:: sqlalchemy.util.KeyedTuple
-    :members: keys, _fields, _asdict
-
 .. autoclass:: sqlalchemy.orm.strategy_options.Load
     :members:
 
index 9842ece51f1bfaa37c65ec40be87cd893f5dfe54..d1b2a7f56df9e33b941a9cae98833a24a3309513 100644 (file)
@@ -617,7 +617,7 @@ is expressed as tuples:
     fred Fred Flintstone
 
 The tuples returned by :class:`~sqlalchemy.orm.query.Query` are *named*
-tuples, supplied by the :class:`.KeyedTuple` class, and can be treated much like an
+tuples, supplied by the :class:`.Row` class, and can be treated much like an
 ordinary Python object. The names are
 the same as the attribute's name for an attribute, and the class name for a
 class:
@@ -991,29 +991,21 @@ method:
 
 To use an entirely string-based statement, a :func:`.text` construct
 representing a complete statement can be passed to
-:meth:`~sqlalchemy.orm.query.Query.from_statement()`.  Without additional
-specifiers, the columns in the string SQL are matched to the model columns
-based on name, such as below where we use just an asterisk to represent
-loading all columns:
+:meth:`~sqlalchemy.orm.query.Query.from_statement()`.   Without further
+specification, the ORM will match columns in the ORM mapping to the result
+returned by the SQL statement based on column name::
 
 .. sourcecode:: python+sql
 
     {sql}>>> session.query(User).from_statement(
-    ...                     text("SELECT * FROM users where name=:name")).\
-    ...                     params(name='ed').all()
+    ...  text("SELECT * FROM users where name=:name")).params(name='ed').all()
     SELECT * FROM users where name=?
     ('ed',)
     {stop}[<User(name='ed', fullname='Ed Jones', nickname='eddie')>]
 
-Matching columns on name works for simple cases but can become unwieldy when
-dealing with complex statements that contain duplicate column names or when
-using anonymized ORM constructs that don't easily match to specific names.
-Additionally, there is typing behavior present in our mapped columns that
-we might find necessary when handling result rows.  For these cases,
-the :func:`~.expression.text` construct allows us to link its textual SQL
-to Core or ORM-mapped column expressions positionally; we can achieve this
-by passing column expressions as positional arguments to the
-:meth:`.TextClause.columns` method:
+For better targeting of mapped columns to a textual SELECT, as well as  to
+match on a specific subset of columns in arbitrary order, individual mapped
+columns are passed in the desired order to :meth:`.TextClause.columns`:
 
 .. sourcecode:: python+sql
 
@@ -1025,13 +1017,6 @@ by passing column expressions as positional arguments to the
     ('ed',)
     {stop}[<User(name='ed', fullname='Ed Jones', nickname='eddie')>]
 
-.. versionadded:: 1.1
-
-    The :meth:`.TextClause.columns` method now accepts column expressions
-    which will be matched positionally to a plain text SQL result set,
-    eliminating the need for column names to match or even be unique in the
-    SQL statement.
-
 When selecting from a :func:`~.expression.text` construct, the :class:`.Query`
 may still specify what columns and entities are to be returned; instead of
 ``query(User)`` we can also ask for the columns individually, as in
index f6523359dde6bb1add66a51de9252836c5e559e4..3c44010b8910f9960aa64af63a030e7203afc9f5 100644 (file)
@@ -277,14 +277,9 @@ BaseRow_getitem(BaseRow *self, Py_ssize_t i)
 }
 
 static PyObject *
-BaseRow_getitem_by_object(BaseRow *self, PyObject *key)
+BaseRow_getitem_by_object(BaseRow *self, PyObject *key, int asmapping)
 {
     PyObject *record, *indexobject;
-    PyObject *exc_module, *exception, *cstr_obj;
-#if PY_MAJOR_VERSION >= 3
-    PyObject *bytes;
-#endif
-    char *cstr_key;
     long index;
     int key_fallback = 0;
 
@@ -308,49 +303,14 @@ BaseRow_getitem_by_object(BaseRow *self, PyObject *key)
     }
 
     if (indexobject == Py_None) {
-        exc_module = PyImport_ImportModule("sqlalchemy.exc");
-        if (exc_module == NULL)
-            return NULL;
-
-        exception = PyObject_GetAttrString(exc_module,
-                                           "InvalidRequestError");
-        Py_DECREF(exc_module);
-        if (exception == NULL)
-            return NULL;
-
-        cstr_obj = PyTuple_GetItem(record, 2);
-        if (cstr_obj == NULL)
-            return NULL;
+        PyObject *tmp;
 
-        cstr_obj = PyObject_Str(cstr_obj);
-        if (cstr_obj == NULL)
-            return NULL;
-
-/*
-       FIXME: raise encoding error exception (in both versions below)
-       if the key contains non-ascii chars, instead of an
-       InvalidRequestError without any message like in the
-       python version.
-*/
-
-
-#if PY_MAJOR_VERSION >= 3
-        bytes = PyUnicode_AsASCIIString(cstr_obj);
-        if (bytes == NULL)
-            return NULL;
-        cstr_key = PyBytes_AS_STRING(bytes);
-#else
-        cstr_key = PyString_AsString(cstr_obj);
-#endif
-        if (cstr_key == NULL) {
-            Py_DECREF(cstr_obj);
+        tmp = PyObject_CallMethod(self->parent, "_raise_for_ambiguous_column_name", "(O)", record);
+        if (tmp == NULL) {
             return NULL;
         }
-        Py_DECREF(cstr_obj);
+        Py_DECREF(tmp);
 
-        PyErr_Format(exception,
-                "Ambiguous column name '%.200s' in "
-                "result set column descriptions", cstr_key);
         return NULL;
     }
 
@@ -363,6 +323,16 @@ BaseRow_getitem_by_object(BaseRow *self, PyObject *key)
         /* -1 can be either the actual value, or an error flag. */
         return NULL;
 
+    if (!asmapping) {
+        PyObject *tmp;
+
+        tmp = PyObject_CallMethod(self->parent, "_warn_for_nonint", "O", key);
+        if (tmp == NULL) {
+            return NULL;
+        }
+        Py_DECREF(tmp);
+    }
+
     return BaseRow_getitem(self, index);
 
 }
@@ -400,17 +370,7 @@ BaseRow_subscript_impl(BaseRow *self, PyObject *key, int asmapping)
         Py_DECREF(values);
         return result;
     } else {
-        /*
-         // if we want to warn for non-integer access by getitem,
-         // that would happen here.
-         if (!asmapping) {
-            tmp = PyObject_CallMethod(self->parent, "_warn_for_nonint", "");
-            if (tmp == NULL) {
-                return NULL;
-            }
-            Py_DECREF(tmp);
-        }*/
-        return BaseRow_getitem_by_object(self, key);
+        return BaseRow_getitem_by_object(self, key, asmapping);
     }
 }
 
index 4339551a32f3adfd9e7b3b76b70b254eaf6395f0..a3855cc2c0eb86a10d8320ac6c13ee17bcd4c43f 100644 (file)
@@ -685,7 +685,6 @@ import operator
 import re
 
 from . import information_schema as ischema
-from ... import engine
 from ... import exc
 from ... import schema as sa_schema
 from ... import sql
@@ -693,6 +692,7 @@ from ... import types as sqltypes
 from ... import util
 from ...engine import default
 from ...engine import reflection
+from ...engine import result as _result
 from ...sql import compiler
 from ...sql import elements
 from ...sql import expression
@@ -1431,8 +1431,9 @@ class MSTypeCompiler(compiler.GenericTypeCompiler):
 class MSExecutionContext(default.DefaultExecutionContext):
     _enable_identity_insert = False
     _select_lastrowid = False
-    _result_proxy = None
     _lastrowid = None
+    _rowcount = None
+    _result_strategy = None
 
     def _opt_encode(self, statement):
         if not self.dialect.supports_unicode_statements:
@@ -1500,6 +1501,10 @@ class MSExecutionContext(default.DefaultExecutionContext):
         """Disable IDENTITY_INSERT if enabled."""
 
         conn = self.root_connection
+
+        if self.isinsert or self.isupdate or self.isdelete:
+            self._rowcount = self.cursor.rowcount
+
         if self._select_lastrowid:
             if self.dialect.use_scope_identity:
                 conn._cursor_execute(
@@ -1516,10 +1521,13 @@ class MSExecutionContext(default.DefaultExecutionContext):
             row = self.cursor.fetchall()[0]
             self._lastrowid = int(row[0])
 
-        if (
+        elif (
             self.isinsert or self.isupdate or self.isdelete
         ) and self.compiled.returning:
-            self._result_proxy = engine.FullyBufferedResultProxy(self)
+            fbcr = _result.FullyBufferedCursorFetchStrategy
+            self._result_strategy = fbcr.create_from_buffer(
+                self.cursor, self.cursor.description, self.cursor.fetchall()
+            )
 
         if self._enable_identity_insert:
             conn._cursor_execute(
@@ -1537,6 +1545,13 @@ class MSExecutionContext(default.DefaultExecutionContext):
     def get_lastrowid(self):
         return self._lastrowid
 
+    @property
+    def rowcount(self):
+        if self._rowcount is not None:
+            return self._rowcount
+        else:
+            return self.cursor.rowcount
+
     def handle_dbapi_exception(self, e):
         if self._enable_identity_insert:
             try:
@@ -1551,11 +1566,13 @@ class MSExecutionContext(default.DefaultExecutionContext):
             except Exception:
                 pass
 
-    def get_result_proxy(self):
-        if self._result_proxy:
-            return self._result_proxy
+    def get_result_cursor_strategy(self, result):
+        if self._result_strategy:
+            return self._result_strategy
         else:
-            return engine.ResultProxy(self)
+            return super(MSExecutionContext, self).get_result_cursor_strategy(
+                result
+            )
 
 
 class MSSQLCompiler(compiler.SQLCompiler):
@@ -2570,7 +2587,7 @@ class MSDialect(default.DefaultDialect):
         if self.server_version_info < MS_2005_VERSION:
             return []
 
-        rp = connection.execute(
+        rp = connection.execution_options(future_result=True).execute(
             sql.text(
                 "select ind.index_id, ind.is_unique, ind.name "
                 "from sys.indexes as ind join sys.tables as tab on "
@@ -2587,13 +2604,13 @@ class MSDialect(default.DefaultDialect):
             .columns(name=sqltypes.Unicode())
         )
         indexes = {}
-        for row in rp:
+        for row in rp.mappings():
             indexes[row["index_id"]] = {
                 "name": row["name"],
                 "unique": row["is_unique"] == 1,
                 "column_names": [],
             }
-        rp = connection.execute(
+        rp = connection.execution_options(future_result=True).execute(
             sql.text(
                 "select ind_col.index_id, ind_col.object_id, col.name "
                 "from sys.columns as col "
@@ -2611,7 +2628,7 @@ class MSDialect(default.DefaultDialect):
             )
             .columns(name=sqltypes.Unicode())
         )
-        for row in rp:
+        for row in rp.mappings():
             if row["index_id"] in indexes:
                 indexes[row["index_id"]]["column_names"].append(row["name"])
 
@@ -2657,12 +2674,10 @@ class MSDialect(default.DefaultDialect):
             [columns], whereclause, order_by=[columns.c.ordinal_position]
         )
 
-        c = connection.execute(s)
+        c = connection.execution_options(future_result=True).execute(s)
         cols = []
-        while True:
-            row = c.fetchone()
-            if row is None:
-                break
+
+        for row in c.mappings():
             (
                 name,
                 type_,
@@ -2785,9 +2800,9 @@ class MSDialect(default.DefaultDialect):
                 C.c.table_schema == owner,
             ),
         )
-        c = connection.execute(s)
+        c = connection.execution_options(future_result=True).execute(s)
         constraint_name = None
-        for row in c:
+        for row in c.mappings():
             if "PRIMARY" in row[TC.c.constraint_type.name]:
                 pkeys.append(row[0])
                 if constraint_name is None:
index 962db750c127c640c5c190d4a695fe9d06581d90..66a429d35ac9983867af369c48296400f78bc1f1 100644 (file)
@@ -26,17 +26,11 @@ import re
 from .base import BIT
 from .base import MySQLCompiler
 from .base import MySQLDialect
-from .base import MySQLExecutionContext
 from .base import MySQLIdentifierPreparer
 from ... import processors
 from ... import util
 
 
-class MySQLExecutionContext_mysqlconnector(MySQLExecutionContext):
-    def get_lastrowid(self):
-        return self.cursor.lastrowid
-
-
 class MySQLCompiler_mysqlconnector(MySQLCompiler):
     def visit_mod_binary(self, binary, operator, **kw):
         if self.dialect._mysqlconnector_double_percents:
@@ -100,7 +94,6 @@ class MySQLDialect_mysqlconnector(MySQLDialect):
     supports_native_decimal = True
 
     default_paramstyle = "format"
-    execution_ctx_cls = MySQLExecutionContext_mysqlconnector
     statement_compiler = MySQLCompiler_mysqlconnector
 
     preparer = MySQLIdentifierPreparer_mysqlconnector
index 8c69bf09765db84ff8130195a013435b5ada5d44..f6de4de68f4571fdb7d61b8587f16d83c808257f 100644 (file)
@@ -931,6 +931,12 @@ class OracleCompiler(compiler.SQLCompiler):
             binds.append(
                 self.bindparam_string(self._truncate_bindparam(outparam))
             )
+
+            # ensure the ExecutionContext.get_out_parameters() method is
+            # *not* called; the cx_Oracle dialect wants to handle these
+            # parameters separately
+            self.has_out_parameters = False
+
             columns.append(self.process(col_expr, within_columns_clause=False))
 
             self._add_to_result_map(
@@ -1454,9 +1460,11 @@ class OracleDialect(default.DefaultDialect):
 
         q += " AND ".join(clauses)
 
-        result = connection.execute(sql.text(q), **params)
+        result = connection.execution_options(future_result=True).execute(
+            sql.text(q), **params
+        )
         if desired_owner:
-            row = result.first()
+            row = result.mappings().first()
             if row:
                 return (
                     row["table_name"],
@@ -1467,7 +1475,7 @@ class OracleDialect(default.DefaultDialect):
             else:
                 return None, None, None, None
         else:
-            rows = result.fetchall()
+            rows = result.mappings().all()
             if len(rows) > 1:
                 raise AssertionError(
                     "There are multiple tables visible to the schema, you "
@@ -1621,8 +1629,10 @@ class OracleDialect(default.DefaultDialect):
 
         row = result.first()
         if row:
-            if "compression" in row and enabled.get(row.compression, False):
-                if "compress_for" in row:
+            if "compression" in row._fields and enabled.get(
+                row.compression, False
+            ):
+                if "compress_for" in row._fields:
                     options["oracle_compress"] = row.compress_for
                 else:
                     options["oracle_compress"] = True
index b000a46155a29e1c7d8ccc75abdb6d5926ca44d4..69423992f78f18b4cc0b1fc6bc4f49c4fcbc4a19 100644 (file)
@@ -569,9 +569,10 @@ class OracleExecutionContext_cx_oracle(OracleExecutionContext):
                     param[toname] = param[fromname]
                     del param[fromname]
 
-    def _handle_out_parameters(self):
-        # if a single execute, check for outparams
-        if len(self.compiled_parameters) == 1:
+    def _generate_out_parameter_vars(self):
+        # check for has_out_parameters or RETURNING, create cx_Oracle.var
+        # objects if so
+        if self.compiled.returning or self.compiled.has_out_parameters:
             quoted_bind_names = self.compiled._quoted_bind_names
             for bindparam in self.compiled.binds.values():
                 if bindparam.isoutparam:
@@ -645,7 +646,7 @@ class OracleExecutionContext_cx_oracle(OracleExecutionContext):
             include_types=self.dialect._include_setinputsizes,
         )
 
-        self._handle_out_parameters()
+        self._generate_out_parameter_vars()
 
         self._generate_cursor_outputtype_handler()
 
@@ -656,66 +657,51 @@ class OracleExecutionContext_cx_oracle(OracleExecutionContext):
 
         return c
 
-    def get_result_proxy(self):
-        if self.out_parameters and self.compiled.returning:
+    def get_out_parameter_values(self, out_param_names):
+        # this method should not be called when the compiler has
+        # RETURNING as we've turned the has_out_parameters flag set to
+        # False.
+        assert not self.compiled.returning
+
+        return [
+            self.dialect._paramval(self.out_parameters[name])
+            for name in out_param_names
+        ]
+
+    def get_result_cursor_strategy(self, result):
+        if self.compiled and self.out_parameters and self.compiled.returning:
+            # create a fake cursor result from the out parameters. unlike
+            # get_out_parameter_values(), the result-row handlers here will be
+            # applied at the Result level
             returning_params = [
                 self.dialect._returningval(self.out_parameters["ret_%d" % i])
                 for i in range(len(self.out_parameters))
             ]
-            return ReturningResultProxy(self, returning_params)
-
-        result = _result.ResultProxy(self)
-
-        if self.out_parameters:
-            if (
-                self.compiled_parameters is not None
-                and len(self.compiled_parameters) == 1
-            ):
-                result.out_parameters = out_parameters = {}
-
-                for bind, name in self.compiled.bind_names.items():
-                    if name in self.out_parameters:
-                        type_ = bind.type
-                        impl_type = type_.dialect_impl(self.dialect)
-                        dbapi_type = impl_type.get_dbapi_type(
-                            self.dialect.dbapi
-                        )
-                        result_processor = impl_type.result_processor(
-                            self.dialect, dbapi_type
-                        )
-                        if result_processor is not None:
-                            out_parameters[name] = result_processor(
-                                self.dialect._paramval(
-                                    self.out_parameters[name]
-                                )
-                            )
-                        else:
-                            out_parameters[name] = self.dialect._paramval(
-                                self.out_parameters[name]
-                            )
-            else:
-                result.out_parameters = dict(
-                    (k, self._dialect._paramval(v))
-                    for k, v in self.out_parameters.items()
-                )
 
-        return result
+            return ReturningResultStrategy(
+                result, result.cursor, returning_params
+            )
+        else:
+            return super(
+                OracleExecutionContext_cx_oracle, self
+            ).get_result_cursor_strategy(result)
 
 
-class ReturningResultProxy(_result.FullyBufferedResultProxy):
-    """Result proxy which stuffs the _returning clause + outparams
-    into the fetch."""
+class ReturningResultStrategy(_result.FullyBufferedCursorFetchStrategy):
+    __slots__ = ("_returning_params",)
 
-    def __init__(self, context, returning_params):
+    def __init__(self, result, dbapi_cursor, returning_params):
         self._returning_params = returning_params
-        super(ReturningResultProxy, self).__init__(context)
 
-    def _cursor_description(self):
-        returning = self.context.compiled.returning
-        return [
+        returning = result.context.compiled.returning
+        cursor_description = [
             (getattr(col, "name", col.anon_label), None) for col in returning
         ]
 
+        super(ReturningResultStrategy, self).__init__(
+            dbapi_cursor, cursor_description
+        )
+
     def _buffer_rows(self):
         return collections.deque([tuple(self._returning_params)])
 
index ee81fc020acf8752a5dde3c10919a9657e576203..b30e7770498bf39e63f6a5afeedd478bd7f4cae6 100644 (file)
@@ -3538,18 +3538,18 @@ class PGDialect(default.DefaultDialect):
         enums = []
         enum_by_name = {}
         for enum in c.fetchall():
-            key = (enum["schema"], enum["name"])
+            key = (enum.schema, enum.name)
             if key in enum_by_name:
-                enum_by_name[key]["labels"].append(enum["label"])
+                enum_by_name[key]["labels"].append(enum.label)
             else:
                 enum_by_name[key] = enum_rec = {
-                    "name": enum["name"],
-                    "schema": enum["schema"],
-                    "visible": enum["visible"],
+                    "name": enum.name,
+                    "schema": enum.schema,
+                    "visible": enum.visible,
                     "labels": [],
                 }
-                if enum["label"] is not None:
-                    enum_rec["labels"].append(enum["label"])
+                if enum.label is not None:
+                    enum_rec["labels"].append(enum.label)
                 enums.append(enum_rec)
         return enums
 
@@ -3568,10 +3568,11 @@ class PGDialect(default.DefaultDialect):
         """
 
         s = sql.text(SQL_DOMAINS).columns(attname=sqltypes.Unicode)
-        c = connection.execute(s)
+        c = connection.execution_options(future_result=True).execute(s)
 
         domains = {}
-        for domain in c.fetchall():
+        for domain in c.mappings():
+            domain = domain
             # strip (30) from character varying(30)
             attype = re.search(r"([^\(]+)", domain["attype"]).group(1)
             # 'visible' just means whether or not the domain is in a
index e4867fa0bd877f8b73f3609b28ddc1109af32c8f..0b6afc337de93b52dcca681e43084c93cb1f754a 100644 (file)
@@ -458,7 +458,6 @@ from ... import exc
 from ... import processors
 from ... import types as sqltypes
 from ... import util
-from ...engine import result as _result
 from ...util import collections_abc
 
 try:
@@ -577,13 +576,12 @@ class PGExecutionContext_psycopg2(PGExecutionContext):
         ident = "c_%s_%s" % (hex(id(self))[2:], hex(_server_side_id())[2:])
         return self._dbapi_connection.cursor(ident)
 
-    def get_result_proxy(self):
+    def get_result_cursor_strategy(self, result):
         self._log_notices(self.cursor)
 
-        if self._is_server_side:
-            return _result.BufferedRowResultProxy(self)
-        else:
-            return _result.ResultProxy(self)
+        return super(PGExecutionContext, self).get_result_cursor_strategy(
+            result
+        )
 
     def _log_notices(self, cursor):
         # check also that notices is an iterable, after it's already
index a129069a402e5aca2636943b6e97156c6a87cf82..7f0270b425217eef530e94d6f7224a137cfba9d8 100644 (file)
@@ -38,10 +38,23 @@ from .result import BufferedColumnResultProxy  # noqa
 from .result import BufferedColumnRow  # noqa
 from .result import BufferedRowResultProxy  # noqa
 from .result import FullyBufferedResultProxy  # noqa
+from .result import LegacyRow  # noqa
+from .result import result_tuple  # noqa
 from .result import ResultProxy  # noqa
 from .result import Row  # noqa
+from .result import RowMapping  # noqa
 from .util import connection_memoize  # noqa
 from ..sql import ddl  # noqa
 
 
 __all__ = ("create_engine", "engine_from_config", "create_mock_engine")
+
+
+def __go(lcls):
+    from .. import future
+    from . import result
+
+    result._future_Result = future.Result
+
+
+__go(locals())
index 29df67dcb47465ad2f9f6bcc94a270508b490c43..ce6c2e9c67bffa18da6eb06991ed288342a8dea0 100644 (file)
@@ -1297,12 +1297,7 @@ class Connection(Connectable):
         if context.compiled:
             context.post_exec()
 
-        if context.is_crud or context.is_text:
-            result = context._setup_crud_result_proxy()
-        else:
-            result = context.get_result_proxy()
-            if result._metadata is None:
-                result._soft_close()
+        result = context._setup_result_proxy()
 
         if context.should_autocommit and self._root.__transaction is None:
             self._root._commit_impl(autocommit=True)
@@ -1310,6 +1305,8 @@ class Connection(Connectable):
         # for "connectionless" execution, we have to close this
         # Connection after the statement is complete.
         if self.should_close_with_result:
+            assert not context._is_future_result
+
             # ResultProxy already exhausted rows / has no rows.
             # close us now
             if result._soft_closed:
index 5198c8cd600c057b6b4b5f19dca76bb317669183..3d50b0828c37348885ec0024ed61f799438d4ea9 100644 (file)
@@ -34,6 +34,13 @@ from ..sql import compiler
         'expressions, or an "empty set" SELECT, at statement execution'
         "time.",
     ),
+    case_sensitive=(
+        "1.4",
+        "The :paramref:`.create_engine.case_sensitive` parameter "
+        "is deprecated and will be removed in a future release. "
+        "Applications should work with result column names in a case "
+        "sensitive fashion.",
+    ),
 )
 def create_engine(url, **kwargs):
     """Create a new :class:`.Engine` instance.
index 7d36345fda7dfbf0b62fcfc8472e6cfbdcf6e0a9..7efc4bda2fb65586f45f800d3b8eaf7217921d98 100644 (file)
@@ -19,7 +19,7 @@ import re
 import weakref
 
 from . import interfaces
-from . import result
+from . import result as _result
 from .. import event
 from .. import exc
 from .. import pool
@@ -201,6 +201,13 @@ class DefaultDialect(interfaces.Dialect):
             'expressions, or an "empty set" SELECT, at statement execution'
             "time.",
         ),
+        case_sensitive=(
+            "1.4",
+            "The :paramref:`.create_engine.case_sensitive` parameter "
+            "is deprecated and will be removed in a future release. "
+            "Applications should work with result column names in a case "
+            "sensitive fashion.",
+        ),
     )
     def __init__(
         self,
@@ -667,6 +674,8 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
     returned_defaults = None
     _is_implicit_returning = False
     _is_explicit_returning = False
+    _is_future_result = False
+    _is_server_side = False
 
     # a hook for SQLite's translation of
     # result column names
@@ -725,6 +734,9 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
         # we get here
         assert compiled.can_execute
 
+        self._is_future_result = connection._execution_options.get(
+            "future_result", False
+        )
         self.execution_options = compiled.execution_options.union(
             connection._execution_options
         )
@@ -860,6 +872,10 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
         self.dialect = connection.dialect
         self.is_text = True
 
+        self._is_future_result = connection._execution_options.get(
+            "future_result", False
+        )
+
         # plain text statement
         self.execution_options = connection._execution_options
 
@@ -1035,6 +1051,11 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
     def pre_exec(self):
         pass
 
+    def get_out_parameter_values(self, names):
+        raise NotImplementedError(
+            "This dialect does not support OUT parameters"
+        )
+
     def post_exec(self):
         pass
 
@@ -1051,27 +1072,18 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
     def get_lastrowid(self):
         """return self.cursor.lastrowid, or equivalent, after an INSERT.
 
-        This may involve calling special cursor functions,
-        issuing a new SELECT on the cursor (or a new one),
-        or returning a stored value that was
+        This may involve calling special cursor functions, issuing a new SELECT
+        on the cursor (or a new one), or returning a stored value that was
         calculated within post_exec().
 
-        This function will only be called for dialects
-        which support "implicit" primary key generation,
-        keep preexecute_autoincrement_sequences set to False,
-        and when no explicit id value was bound to the
-        statement.
+        This function will only be called for dialects which support "implicit"
+        primary key generation, keep preexecute_autoincrement_sequences set to
+        False, and when no explicit id value was bound to the statement.
 
-        The function is called once, directly after
-        post_exec() and before the transaction is committed
-        or ResultProxy is generated.   If the post_exec()
-        method assigns a value to `self._lastrowid`, the
-        value is used in place of calling get_lastrowid().
-
-        Note that this method is *not* equivalent to the
-        ``lastrowid`` method on ``ResultProxy``, which is a
-        direct proxy to the DBAPI ``lastrowid`` accessor
-        in all cases.
+        The function is called once for an INSERT statement that would need to
+        return the last inserted primary key for those dialects that make use
+        of the lastrowid concept.  In these cases, it is called directly after
+        :meth:`.ExecutionContext.post_exec`.
 
         """
         return self.cursor.lastrowid
@@ -1079,11 +1091,13 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
     def handle_dbapi_exception(self, e):
         pass
 
-    def get_result_proxy(self):
+    def get_result_cursor_strategy(self, result):
         if self._is_server_side:
-            return result.BufferedRowResultProxy(self)
+            strat_cls = _result.BufferedRowCursorFetchStrategy
         else:
-            return result.ResultProxy(self)
+            strat_cls = _result.DefaultCursorFetchStrategy
+
+        return strat_cls.create(result)
 
     @property
     def rowcount(self):
@@ -1095,6 +1109,49 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
     def supports_sane_multi_rowcount(self):
         return self.dialect.supports_sane_multi_rowcount
 
+    def _setup_result_proxy(self):
+        if self.is_crud or self.is_text:
+            result = self._setup_crud_result_proxy()
+        else:
+            result = _result.ResultProxy._create_for_context(self)
+
+        if (
+            self.compiled
+            and not self.isddl
+            and self.compiled.has_out_parameters
+        ):
+            self._setup_out_parameters(result)
+
+        return result
+
+    def _setup_out_parameters(self, result):
+
+        out_bindparams = [
+            (param, name)
+            for param, name in self.compiled.bind_names.items()
+            if param.isoutparam
+        ]
+        out_parameters = {}
+
+        for bindparam, raw_value in zip(
+            [param for param, name in out_bindparams],
+            self.get_out_parameter_values(
+                [name for param, name in out_bindparams]
+            ),
+        ):
+
+            type_ = bindparam.type
+            impl_type = type_.dialect_impl(self.dialect)
+            dbapi_type = impl_type.get_dbapi_type(self.dialect.dbapi)
+            result_processor = impl_type.result_processor(
+                self.dialect, dbapi_type
+            )
+            if result_processor is not None:
+                raw_value = result_processor(raw_value)
+            out_parameters[bindparam.key] = raw_value
+
+        result.out_parameters = out_parameters
+
     def _setup_crud_result_proxy(self):
         if self.isinsert and not self.executemany:
             if (
@@ -1108,11 +1165,11 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
             elif not self._is_implicit_returning:
                 self._setup_ins_pk_from_empty()
 
-        result = self.get_result_proxy()
+        result = _result.ResultProxy._create_for_context(self)
 
         if self.isinsert:
             if self._is_implicit_returning:
-                row = result.fetchone()
+                row = result._onerow()
                 self.returned_defaults = row
                 self._setup_ins_pk_from_implicit_returning(row)
                 result._soft_close()
@@ -1121,7 +1178,7 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
                 result._soft_close()
                 result._metadata = None
         elif self.isupdate and self._is_implicit_returning:
-            row = result.fetchone()
+            row = result._onerow()
             self.returned_defaults = row
             result._soft_close()
             result._metadata = None
@@ -1179,8 +1236,13 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
         key_getter = self.compiled._key_getters_for_crud_column[2]
         table = self.compiled.statement.table
         compiled_params = self.compiled_parameters[0]
+
+        # TODO: why are we using keyed index here?  can't we get the ints?
+        # can compiler build up the structure here as far as what was
+        # explicit and what comes back in returning?
+        row_mapping = row._mapping
         self.inserted_primary_key = [
-            row[col] if value is None else value
+            row_mapping[col] if value is None else value
             for col, value in [
                 (col, compiled_params.get(key_getter(col), None))
                 for col in table.primary_key
index 237eb0f2f0765797987e210c10ff055a5326cd2e..3d4308df29f0915bdf19a7870ef07d86a0dd1aee 100644 (file)
@@ -1044,6 +1044,44 @@ class ExecutionContext(object):
 
         raise NotImplementedError()
 
+    def get_out_parameter_values(self, out_param_names):
+        """Return a sequence of OUT parameter values from a cursor.
+
+        For dialects that support OUT parameters, this method will be called
+        when there is a :class:`.SQLCompiler` object which has the
+        :attr:`.SQLCompiler.has_out_parameters` flag set.  This flag in turn
+        will be set to True if the statement itself has :class:`.BindParameter`
+        objects that have the ``.isoutparam`` flag set which are consumed by
+        the :meth:`.SQLCompiler.visit_bindparam` method.  If the dialect
+        compiler produces :class:`.BindParameter` objects with ``.isoutparam``
+        set which are not handled by :meth:`.SQLCompiler.visit_bindparam`, it
+        should set this flag explicitly.
+
+        The list of names that were rendered for each bound parameter
+        is passed to the method.  The method should then return a sequence of
+        values corresponding to the list of parameter objects. Unlike in
+        previous SQLAlchemy versions, the values can be the **raw values** from
+        the DBAPI; the execution context will apply the appropriate type
+        handler based on what's present in self.compiled.binds and update the
+        values.  The processed dictionary will then be made available via the
+        ``.out_parameters`` collection on the result object.  Note that
+        SQLAlchemy 1.4 has multiple kinds of result object as part of the 2.0
+        transition.
+
+        .. versionadded:: 1.4 - added
+           :meth:`.ExecutionContext.get_out_parameter_values`, which is invoked
+           automatically by the :class:`.DefaultExecutionContext` when there
+           are :class:`.BindParameter` objects with the ``.isoutparam`` flag
+           set.  This replaces the practice of setting out parameters within
+           the now-removed ``get_result_proxy()`` method.
+
+        .. seealso::
+
+            :meth:`.ExecutionContext.get_result_cursor_strategy`
+
+        """
+        raise NotImplementedError()
+
     def post_exec(self):
         """Called after the execution of a compiled statement.
 
@@ -1054,12 +1092,67 @@ class ExecutionContext(object):
 
         raise NotImplementedError()
 
-    def result(self):
-        """Return a result object corresponding to this ExecutionContext.
+    def get_result_cursor_strategy(self, result):
+        """Return a result cursor strategy for a given result object.
 
-        Returns a ResultProxy.
-        """
+        This method is implemented by the :class:`.DefaultDialect` and is
+        only needed by implementing dialects in the case where some special
+        steps regarding the cursor must be taken, such as manufacturing
+        fake results from some other element of the cursor, or pre-buffering
+        the cursor's results.
+
+        A simplified version of the default implementation is::
+
+            from sqlalchemy.engine.result import DefaultCursorFetchStrategy
+
+            class MyExecutionContext(DefaultExecutionContext):
+                def get_result_cursor_strategy(self, result):
+                    return DefaultCursorFetchStrategy.create(result)
+
+        Above, the :class:`.DefaultCursorFetchStrategy` will be applied
+        to the result object.   For results that are pre-buffered from a
+        cursor that might be closed, an implementation might be::
 
+
+            from sqlalchemy.engine.result import (
+                FullyBufferedCursorFetchStrategy
+            )
+
+            class MyExecutionContext(DefaultExecutionContext):
+                _pre_buffered_result = None
+
+                def pre_exec(self):
+                    if self.special_condition_prebuffer_cursor():
+                        self._pre_buffered_result = (
+                            self.cursor.description,
+                            self.cursor.fetchall()
+                        )
+
+                def get_result_cursor_strategy(self, result):
+                    if self._pre_buffered_result:
+                        description, cursor_buffer = self._pre_buffered_result
+                        return (
+                            FullyBufferedCursorFetchStrategy.
+                                create_from_buffer(
+                                    result, description, cursor_buffer
+                            )
+                        )
+                    else:
+                        return DefaultCursorFetchStrategy.create(result)
+
+        This method replaces the previous not-quite-documented
+        ``get_result_proxy()`` method.
+
+        .. versionadded:: 1.4  - result objects now interpret cursor results
+           based on a pluggable "strategy" object, which is delivered
+           by the :class:`.ExecutionContext` via the
+           :meth:`.ExecutionContext.get_result_cursor_strategy` method.
+
+        .. seealso::
+
+            :meth:`.ExecutionContext.get_out_parameter_values`
+
+        """
         raise NotImplementedError()
 
     def handle_dbapi_exception(self, e):
index 13738cb469ac0937e843e7eae55adcb66367dd82..1a63c307bce6752a83e2b7d1218aae23d12fcecd 100644 (file)
@@ -5,13 +5,20 @@
 # This module is part of SQLAlchemy and is released under
 # the MIT License: http://www.opensource.org/licenses/mit-license.php
 
-"""Define result set constructs including :class:`.ResultProxy`
-and :class:`.Row`."""
+"""Define result set constructs including :class:`.Result`"""
 
 
 import collections
+import functools
 import operator
 
+from .row import _baserow_usecext
+from .row import BaseRow  # noqa
+from .row import LegacyRow  # noqa
+from .row import Row  # noqa
+from .row import RowMapping  # noqa
+from .row import RowProxy  # noqa
+from .row import rowproxy_reconstructor  # noqa
 from .. import exc
 from .. import util
 from ..sql import expression
@@ -21,264 +28,81 @@ from ..sql.compiler import RM_NAME
 from ..sql.compiler import RM_OBJECTS
 from ..sql.compiler import RM_RENDERED_NAME
 from ..sql.compiler import RM_TYPE
-from ..util.compat import collections_abc
 
+if _baserow_usecext:
+    from sqlalchemy.cresultproxy import tuplegetter as _tuplegetter
 
 _UNPICKLED = util.symbol("unpickled")
 
-# This reconstructor is necessary so that pickles with the C extension or
-# without use the same Binary format.
-try:
-    # We need a different reconstructor on the C extension so that we can
-    # add extra checks that fields have correctly been initialized by
-    # __setstate__.
-    from sqlalchemy.cresultproxy import safe_rowproxy_reconstructor
-
-    # The extra function embedding is needed so that the
-    # reconstructor function has the same signature whether or not
-    # the extension is present.
-    def rowproxy_reconstructor(cls, state):
-        return safe_rowproxy_reconstructor(cls, state)
+# cyclical import for sqlalchemy.future
+_future_Result = None
 
+# metadata entry tuple indexes.
+# using raw tuple is faster than namedtuple.
+MD_INDEX = 0  # integer index in cursor.description
+MD_OBJECTS = 1  # other string keys and ColumnElement obj that can match
+MD_LOOKUP_KEY = 2  # string key we usually expect for key-based lookup
+MD_RENDERED_NAME = 3  # name that is usually in cursor.description
+MD_PROCESSOR = 4  # callable to process a result value into a row
+MD_UNTRANSLATED = 5  # raw name from cursor.description
 
-except ImportError:
-
-    def rowproxy_reconstructor(cls, state):
-        obj = cls.__new__(cls)
-        obj.__setstate__(state)
-        return obj
 
+class ResultMetaData(object):
+    __slots__ = ()
 
-try:
-    from sqlalchemy.cresultproxy import BaseRow
-    from sqlalchemy.cresultproxy import tuplegetter as _tuplegetter
+    def _has_key(self, key):
+        return key in self._keymap
 
-    _baserow_usecext = True
-except ImportError:
-    _baserow_usecext = False
+    def _key_fallback(self, key):
+        if isinstance(key, int):
+            raise IndexError(key)
+        else:
+            raise KeyError(key)
 
-    class BaseRow(object):
-        __slots__ = ("_parent", "_data", "_keymap")
 
-        def __init__(self, parent, processors, keymap, data):
-            """Row objects are constructed by ResultProxy objects."""
+class SimpleResultMetaData(ResultMetaData):
+    __slots__ = "keys", "_keymap", "_processors"
 
-            self._parent = parent
+    def __init__(self, keys, extra=None):
+        self.keys = list(keys)
 
-            self._data = tuple(
-                [
-                    proc(value) if proc else value
-                    for proc, value in zip(processors, data)
-                ]
-            )
-            self._keymap = keymap
+        len_keys = len(keys)
 
-        def __reduce__(self):
-            return (
-                rowproxy_reconstructor,
-                (self.__class__, self.__getstate__()),
+        self._keymap = {
+            name: (index, name) for index, name in enumerate(self.keys)
+        }
+        if not _baserow_usecext:
+            self._keymap.update(
+                {
+                    index: (index, None, self.keys[index])
+                    for index in range(len_keys)
+                }
             )
-
-        def _values_impl(self):
-            return list(self)
-
-        def __iter__(self):
-            return iter(self._data)
-
-        def __len__(self):
-            return len(self._data)
-
-        def __hash__(self):
-            return hash(self._data)
-
-        def _get_by_key_impl(self, key):
-            try:
+        if extra:
+            for key, ex in zip(keys, extra):
                 rec = self._keymap[key]
-            except KeyError:
-                rec = self._parent._key_fallback(key)
-            except TypeError:
-                # the non-C version detects a slice using TypeError.
-                # this is pretty inefficient for the slice use case
-                # but is more efficient for the integer use case since we
-                # don't have to check it up front.
-                if isinstance(key, slice):
-                    return tuple(self._data[key])
-                else:
-                    raise
-            if rec[MD_INDEX] is None:
-                raise exc.InvalidRequestError(
-                    "Ambiguous column name '%s' in "
-                    "result set column descriptions" % rec[MD_LOOKUP_KEY]
-                )
-
-            return self._data[rec[MD_INDEX]]
-
-        def _get_by_key_impl_mapping(self, key):
-            # the C code has two different methods so that we can distinguish
-            # between tuple-like keys (integers, slices) and mapping-like keys
-            # (strings, objects)
-            return self._get_by_key_impl(key)
-
-        def __getattr__(self, name):
-            try:
-                return self._get_by_key_impl_mapping(name)
-            except KeyError as e:
-                raise AttributeError(e.args[0])
-
-
-class Row(BaseRow, collections_abc.Sequence):
-    """Represent a single result row.
-
-    The :class:`.Row` object is retrieved from a database result, from the
-    :class:`.ResultProxy` object using methods like
-    :meth:`.ResultProxy.fetchall`.
-
-    The :class:`.Row` object seeks to act mostly like a Python named
-    tuple, but also provides some Python dictionary behaviors at the same time.
-
-    .. seealso::
-
-        :ref:`coretutorial_selecting` - includes examples of selecting
-        rows from SELECT statements.
-
-    .. versionchanged 1.4::
-
-        Renamed ``RowProxy`` to :class:`.Row`.  :class:`.Row` is no longer a
-        "proxy" object in that it contains the final form of data within it.
-
-    """
-
-    __slots__ = ()
-
-    def __contains__(self, key):
-        return self._parent._has_key(key)
+                self._keymap.update({e: rec for e in ex})
+        self._processors = [None] * len(keys)
 
     def __getstate__(self):
-        return {"_parent": self._parent, "_data": self._data}
+        return {"keys": self.keys}
 
     def __setstate__(self, state):
-        self._parent = parent = state["_parent"]
-        self._data = state["_data"]
-        self._keymap = parent._keymap
-
-    def _op(self, other, op):
-        return (
-            op(tuple(self), tuple(other))
-            if isinstance(other, Row)
-            else op(tuple(self), other)
-        )
-
-    __hash__ = BaseRow.__hash__
-
-    def __lt__(self, other):
-        return self._op(other, operator.lt)
-
-    def __le__(self, other):
-        return self._op(other, operator.le)
-
-    def __ge__(self, other):
-        return self._op(other, operator.ge)
-
-    def __gt__(self, other):
-        return self._op(other, operator.gt)
-
-    def __eq__(self, other):
-        return self._op(other, operator.eq)
-
-    def __ne__(self, other):
-        return self._op(other, operator.ne)
-
-    def __repr__(self):
-        return repr(sql_util._repr_row(self))
-
-    def has_key(self, key):
-        """Return True if this :class:`.Row` contains the given key.
-
-        Through the SQLAlchemy 1.x series, the ``__contains__()`` method
-        of :class:`.Row` also links to :meth:`.Row.has_key`, in that
-        an expression such as ::
-
-            "some_col" in row
-
-        Will return True if the row contains a column named ``"some_col"``,
-        in the way that a Python mapping works.
-
-        However, it is planned that the 2.0 series of SQLAlchemy will reverse
-        this behavior so that ``__contains__()`` will refer to a value being
-        present in the row, in the way that a Python tuple works.
-
-        """
-
-        return self._parent._has_key(key)
-
-    def __getitem__(self, key):
-        return self._get_by_key_impl(key)
-
-    def items(self):
-        """Return a list of tuples, each tuple containing a key/value pair.
-
-        This method is analogous to the Python dictionary ``.items()`` method,
-        except that it returns a list, not an iterator.
-
-        """
-
-        return [(key, self[key]) for key in self.keys()]
-
-    def keys(self):
-        """Return the list of keys as strings represented by this
-        :class:`.Row`.
-
-        This method is analogous to the Python dictionary ``.keys()`` method,
-        except that it returns a list, not an iterator.
-
-        """
-
-        return [k for k in self._parent.keys if k is not None]
-
-    def iterkeys(self):
-        """Return a an iterator against the :meth:`.Row.keys` method.
-
-        This method is analogous to the Python-2-only dictionary
-        ``.iterkeys()`` method.
-
-        """
-        return iter(self._parent.keys)
-
-    def itervalues(self):
-        """Return a an iterator against the :meth:`.Row.values` method.
-
-        This method is analogous to the Python-2-only dictionary
-        ``.itervalues()`` method.
-
-        """
-        return iter(self)
-
-    def values(self):
-        """Return the values represented by this :class:`.Row` as a list.
-
-        This method is analogous to the Python dictionary ``.values()`` method,
-        except that it returns a list, not an iterator.
-
-        """
-
-        return self._values_impl()
+        self.__init__(state["keys"])
 
+    def _has_key(self, key):
+        return key in self._keymap
 
-BaseRowProxy = BaseRow
-RowProxy = Row
+    def _contains(self, value, row):
+        return value in row._data
 
 
-# metadata entry tuple indexes.
-# using raw tuple is faster than namedtuple.
-MD_INDEX = 0  # integer index in cursor.description
-MD_OBJECTS = 1  # other string keys and ColumnElement obj that can match
-MD_LOOKUP_KEY = 2  # string key we usually expect for key-based lookup
-MD_RENDERED_NAME = 3  # name that is usually in cursor.description
-MD_PROCESSOR = 4  # callable to process a result value into a row
-MD_UNTRANSLATED = 5  # raw name from cursor.description
+def result_tuple(fields, extra=None):
+    parent = SimpleResultMetaData(fields, extra)
+    return functools.partial(Row, parent, parent._processors, parent._keymap)
 
 
-class ResultMetaData(object):
+class CursorResultMetaData(ResultMetaData):
     """Handle cursor.description, applying additional info from an execution
     context."""
 
@@ -654,6 +478,7 @@ class ResultMetaData(object):
         match_map = self._create_description_match_map(
             result_columns, case_sensitive, loose_column_name_matching
         )
+
         self.matched_on_name = True
         for (
             idx,
@@ -671,15 +496,6 @@ class ResultMetaData(object):
                 mapped_type = ctx_rec[2]
             yield idx, colname, mapped_type, coltype, obj, untranslated
 
-    def _merge_cols_by_none(self, context, cursor_description):
-        for (
-            idx,
-            colname,
-            untranslated,
-            coltype,
-        ) in self._colnames_from_description(context, cursor_description):
-            yield idx, colname, sqltypes.NULLTYPE, coltype, None, untranslated
-
     @classmethod
     def _create_description_match_map(
         cls,
@@ -695,6 +511,7 @@ class ResultMetaData(object):
         d = {}
         for elem in result_columns:
             key = elem[RM_RENDERED_NAME]
+
             if not case_sensitive:
                 key = key.lower()
             if key in d:
@@ -717,13 +534,134 @@ class ResultMetaData(object):
                     d.setdefault(
                         r_key, (elem[RM_NAME], elem[RM_OBJECTS], elem[RM_TYPE])
                     )
+
         return d
 
+    def _merge_cols_by_none(self, context, cursor_description):
+        for (
+            idx,
+            colname,
+            untranslated,
+            coltype,
+        ) in self._colnames_from_description(context, cursor_description):
+            yield idx, colname, sqltypes.NULLTYPE, coltype, None, untranslated
+
+    def _key_fallback(self, key, raiseerr=True):
+        if raiseerr:
+            raise exc.NoSuchColumnError(
+                "Could not locate column in row for column '%s'"
+                % util.string_or_unprintable(key)
+            )
+        else:
+            return None
+
+    def _raise_for_ambiguous_column_name(self, rec):
+        raise exc.InvalidRequestError(
+            "Ambiguous column name '%s' in "
+            "result set column descriptions" % rec[MD_LOOKUP_KEY]
+        )
+
+    def _warn_for_nonint(self, key):
+        raise TypeError(
+            "TypeError: tuple indices must be integers or slices, not %s"
+            % type(key).__name__
+        )
+
+    def _getter(self, key, raiseerr=True):
+        try:
+            rec = self._keymap[key]
+        except KeyError:
+            rec = self._key_fallback(key, raiseerr)
+            if rec is None:
+                return None
+
+        index, obj = rec[0:2]
+
+        if index is None:
+            self._raise_for_ambiguous_column_name(rec)
+
+        return operator.methodcaller("_get_by_key_impl_mapping", index)
+
+    def _tuple_getter(self, keys, raiseerr=True):
+        """Given a list of keys, return a callable that will deliver a tuple.
+
+        This is strictly used by the ORM and the keys are Column objects.
+        However, this might be some nice-ish feature if we could find a very
+        clean way of presenting it.
+
+        note that in the new world of "row._mapping", this is a mapping-getter.
+        maybe the name should indicate that somehow.
+
+
+        """
+        indexes = []
+        for key in keys:
+            try:
+                rec = self._keymap[key]
+            except KeyError:
+                rec = self._key_fallback(key, raiseerr)
+                if rec is None:
+                    return None
+
+            index, obj = rec[0:2]
+
+            if index is None:
+                self._raise_for_ambiguous_column_name(obj)
+            indexes.append(index)
+
+        if _baserow_usecext:
+            return _tuplegetter(*indexes)
+        else:
+            return self._pure_py_tuplegetter(*indexes)
+
+    def _pure_py_tuplegetter(self, *indexes):
+        getters = [
+            operator.methodcaller("_get_by_key_impl_mapping", index)
+            for index in indexes
+        ]
+        return lambda rec: tuple(getter(rec) for getter in getters)
+
+    def __getstate__(self):
+        return {
+            "_keymap": {
+                key: (rec[MD_INDEX], _UNPICKLED, key)
+                for key, rec in self._keymap.items()
+                if isinstance(key, util.string_types + util.int_types)
+            },
+            "keys": self.keys,
+            "case_sensitive": self.case_sensitive,
+            "matched_on_name": self.matched_on_name,
+        }
+
+    def __setstate__(self, state):
+        self._processors = [None for _ in range(len(state["keys"]))]
+        self._keymap = state["_keymap"]
+
+        self.keys = state["keys"]
+        self.case_sensitive = state["case_sensitive"]
+        self.matched_on_name = state["matched_on_name"]
+
+
+class LegacyCursorResultMetaData(CursorResultMetaData):
+    def _contains(self, value, row):
+        key = value
+        if key in self._keymap:
+            util.warn_deprecated(
+                "Using the 'in' operator to test for string or column "
+                "keys, or integer indexes, in a :class:`.Row` object is "
+                "deprecated and will "
+                "be removed in a future release. "
+                "Use the `Row._fields` or `Row._mapping` attribute, i.e. "
+                "'key in row._fields'"
+            )
+            return True
+        else:
+            return self._key_fallback(key, False) is not None
+
     def _key_fallback(self, key, raiseerr=True):
         map_ = self._keymap
         result = None
-        # lowercase col support will be deprecated, at the
-        # create_engine() / dialect level
+
         if isinstance(key, util.string_types):
             result = map_.get(key if self.case_sensitive else key.lower())
         elif isinstance(key, expression.ColumnElement):
@@ -786,379 +724,464 @@ class ResultMetaData(object):
             map_[key] = result
         return result
 
+    def _warn_for_nonint(self, key):
+        util.warn_deprecated_20(
+            "Using non-integer/slice indices on Row is deprecated and will "
+            "be removed in version 2.0; please use row._mapping[<key>], or "
+            "the mappings() accessor on the sqlalchemy.future result object.",
+            stacklevel=4,
+        )
+
     def _has_key(self, key):
         if key in self._keymap:
             return True
         else:
             return self._key_fallback(key, False) is not None
 
-    def _getter(self, key, raiseerr=True):
-        try:
-            rec = self._keymap[key]
-        except KeyError:
-            rec = self._key_fallback(key, raiseerr)
-            if rec is None:
-                return None
 
-        index, obj = rec[0:2]
+class CursorFetchStrategy(object):
+    """Define a cursor strategy for a result object.
 
-        if index is None:
-            raise exc.InvalidRequestError(
-                "Ambiguous column name '%s' in "
-                "result set column descriptions" % rec[MD_LOOKUP_KEY]
-            )
+    Subclasses define different ways of fetching rows, typically but
+    not necessarily using a DBAPI cursor object.
 
-        return operator.methodcaller("_get_by_key_impl", index)
+    .. versionadded:: 1.4
 
-    def _tuple_getter(self, keys, raiseerr=True):
-        """Given a list of keys, return a callable that will deliver a tuple.
+    """
 
-        This is strictly used by the ORM and the keys are Column objects.
-        However, this might be some nice-ish feature if we could find a very
-        clean way of presenting it.
+    __slots__ = ("dbapi_cursor", "cursor_description")
 
-        note that in the new world of "row._mapping", this is a mapping-getter.
-        maybe the name should indicate that somehow.
+    def __init__(self, dbapi_cursor, cursor_description):
+        self.dbapi_cursor = dbapi_cursor
+        self.cursor_description = cursor_description
 
+    @classmethod
+    def create(cls, result):
+        raise NotImplementedError()
 
-        """
-        indexes = []
-        for key in keys:
-            try:
-                rec = self._keymap[key]
-            except KeyError:
-                rec = self._key_fallback(key, raiseerr)
-                if rec is None:
-                    return None
+    def soft_close(self, result):
+        raise NotImplementedError()
 
-            index, obj = rec[0:2]
+    def hard_close(self, result):
+        raise NotImplementedError()
 
-            if index is None:
-                raise exc.InvalidRequestError(
-                    "Ambiguous column name '%s' in "
-                    "result set column descriptions" % obj
-                )
-            indexes.append(index)
+    def fetchone(self):
+        raise NotImplementedError()
 
-        if _baserow_usecext:
-            return _tuplegetter(*indexes)
-        else:
-            return self._pure_py_tuplegetter(*indexes)
+    def fetchmany(self, size=None):
+        raise NotImplementedError()
 
-    def _pure_py_tuplegetter(self, *indexes):
-        getters = [
-            operator.methodcaller("_get_by_key_impl", index)
-            for index in indexes
-        ]
-        return lambda rec: tuple(getter(rec) for getter in getters)
+    def fetchall(self):
+        raise NotImplementedError()
 
-    def __getstate__(self):
-        return {
-            "_keymap": {
-                key: (rec[MD_INDEX], _UNPICKLED, key)
-                for key, rec in self._keymap.items()
-                if isinstance(key, util.string_types + util.int_types)
-            },
-            "keys": self.keys,
-            "case_sensitive": self.case_sensitive,
-            "matched_on_name": self.matched_on_name,
-        }
 
-    def __setstate__(self, state):
-        self._processors = [None for _ in range(len(state["keys"]))]
-        self._keymap = state["_keymap"]
+class NoCursorDQLFetchStrategy(CursorFetchStrategy):
+    """Cursor strategy for a DQL result that has no open cursor.
 
-        self.keys = state["keys"]
-        self.case_sensitive = state["case_sensitive"]
-        self.matched_on_name = state["matched_on_name"]
+    This is a result set that can return rows, i.e. for a SELECT, or for an
+    INSERT, UPDATE, DELETE that includes RETURNING. However it is in the state
+    where the cursor is closed and no rows remain available.  The owning result
+    object may or may not be "hard closed", which determines if the fetch
+    methods send empty results or raise for closed result.
 
+    """
 
-class ResultProxy(object):
-    """A facade around a DBAPI cursor object.
+    __slots__ = ("closed",)
 
-    Returns database rows via the :class:`.Row` class, which provides
-    additional API features and behaviors on top of the raw data returned
-    by the DBAPI.
+    def __init__(self, closed):
+        self.closed = closed
+        self.cursor_description = None
 
-    .. seealso::
+    def soft_close(self, result):
+        pass
 
-        :ref:`coretutorial_selecting` - introductory material for accessing
-        :class:`.ResultProxy` and :class:`.Row` objects.
+    def hard_close(self, result):
+        self.closed = True
+
+    def fetchone(self):
+        return self._non_result(None)
+
+    def fetchmany(self, size=None):
+        return self._non_result([])
+
+    def fetchall(self):
+        return self._non_result([])
+
+    def _non_result(self, default):
+        if self.closed:
+            raise exc.ResourceClosedError("This result object is closed.")
+        else:
+            return default
+
+
+class NoCursorDMLFetchStrategy(CursorFetchStrategy):
+    """Cursor strategy for a DML result that has no open cursor.
+
+    This is a result set that does not return rows, i.e. for an INSERT,
+    UPDATE, DELETE that does not include RETURNING.
 
     """
 
-    _process_row = Row
-    out_parameters = None
-    _autoclose_connection = False
-    _metadata = None
-    _soft_closed = False
-    closed = False
+    __slots__ = ("closed",)
 
-    def __init__(self, context):
-        self.context = context
-        self.dialect = context.dialect
-        self.cursor = self._saved_cursor = context.cursor
-        self.connection = context.root_connection
-        self._echo = (
-            self.connection._echo and context.engine._should_log_debug()
+    def __init__(self, closed):
+        self.closed = closed
+        self.cursor_description = None
+
+    def soft_close(self, result):
+        pass
+
+    def hard_close(self, result):
+        self.closed = True
+
+    def fetchone(self):
+        return self._non_result(None)
+
+    def fetchmany(self, size=None):
+        return self._non_result([])
+
+    def fetchall(self):
+        return self._non_result([])
+
+    def _non_result(self, default):
+        raise exc.ResourceClosedError(
+            "This result object does not return rows. "
+            "It has been closed automatically."
         )
-        self._init_metadata()
 
-    def _getter(self, key, raiseerr=True):
-        try:
-            getter = self._metadata._getter
-        except AttributeError:
-            return self._non_result(None)
-        else:
-            return getter(key, raiseerr)
 
-    def _tuple_getter(self, key, raiseerr=True):
-        try:
-            getter = self._metadata._tuple_getter
-        except AttributeError:
-            return self._non_result(None)
-        else:
-            return getter(key, raiseerr)
+class DefaultCursorFetchStrategy(CursorFetchStrategy):
+    """Call fetch methods from a DBAPI cursor.
 
-    def _has_key(self, key):
-        try:
-            has_key = self._metadata._has_key
-        except AttributeError:
-            return self._non_result(None)
+    Alternate versions of this class may instead buffer the rows from
+    cursors or not use cursors at all.
+
+    """
+
+    @classmethod
+    def create(cls, result):
+        dbapi_cursor = result.cursor
+        description = dbapi_cursor.description
+
+        if description is None:
+            return NoCursorDMLFetchStrategy(False)
         else:
-            return has_key(key)
+            return cls(dbapi_cursor, description)
 
-    def _init_metadata(self):
-        cursor_description = self._cursor_description()
-        if cursor_description is not None:
-            if (
-                self.context.compiled
-                and "compiled_cache" in self.context.execution_options
-            ):
-                if self.context.compiled._cached_metadata:
-                    self._metadata = self.context.compiled._cached_metadata
-                else:
-                    # TODO: what we hope to do here is have "Legacy" be
-                    # the default in 1.4 but a flag (somewhere?) will have it
-                    # use non-legacy. ORM should be able to use non-legacy
-                    self._metadata = (
-                        self.context.compiled._cached_metadata
-                    ) = ResultMetaData(self, cursor_description)
-            else:
-                self._metadata = ResultMetaData(self, cursor_description)
-            if self._echo:
-                self.context.engine.logger.debug(
-                    "Col %r", tuple(x[0] for x in cursor_description)
-                )
+    def soft_close(self, result):
+        result.cursor_strategy = NoCursorDQLFetchStrategy(False)
 
-    def keys(self):
-        """Return the list of string keys that would represented by each
-        :class:`.Row`."""
+    def hard_close(self, result):
+        result.cursor_strategy = NoCursorDQLFetchStrategy(True)
 
-        if self._metadata:
-            return self._metadata.keys
+    def fetchone(self):
+        return self.dbapi_cursor.fetchone()
+
+    def fetchmany(self, size=None):
+        if size is None:
+            return self.dbapi_cursor.fetchmany()
         else:
-            return []
+            return self.dbapi_cursor.fetchmany(size)
 
-    @util.memoized_property
-    def rowcount(self):
-        """Return the 'rowcount' for this result.
+    def fetchall(self):
+        return self.dbapi_cursor.fetchall()
 
-        The 'rowcount' reports the number of rows *matched*
-        by the WHERE criterion of an UPDATE or DELETE statement.
 
-        .. note::
+class BufferedRowCursorFetchStrategy(DefaultCursorFetchStrategy):
+    """A cursor fetch strategy with row buffering behavior.
 
-           Notes regarding :attr:`.ResultProxy.rowcount`:
+    This strategy buffers the contents of a selection of rows
+    before ``fetchone()`` is called.  This is to allow the results of
+    ``cursor.description`` to be available immediately, when
+    interfacing with a DB-API that requires rows to be consumed before
+    this information is available (currently psycopg2, when used with
+    server-side cursors).
 
+    The pre-fetching behavior fetches only one row initially, and then
+    grows its buffer size by a fixed amount with each successive need
+    for additional rows up the ``max_row_buffer`` size, which defaults
+    to 1000::
 
-           * This attribute returns the number of rows *matched*,
-             which is not necessarily the same as the number of rows
-             that were actually *modified* - an UPDATE statement, for example,
-             may have no net change on a given row if the SET values
-             given are the same as those present in the row already.
-             Such a row would be matched but not modified.
-             On backends that feature both styles, such as MySQL,
-             rowcount is configured by default to return the match
-             count in all cases.
+        with psycopg2_engine.connect() as conn:
 
-           * :attr:`.ResultProxy.rowcount` is *only* useful in conjunction
-             with an UPDATE or DELETE statement.  Contrary to what the Python
-             DBAPI says, it does *not* return the
-             number of rows available from the results of a SELECT statement
-             as DBAPIs cannot support this functionality when rows are
-             unbuffered.
+            result = conn.execution_options(
+                stream_results=True, max_row_buffer=50
+                ).execute("select * from table")
 
-           * :attr:`.ResultProxy.rowcount` may not be fully implemented by
-             all dialects.  In particular, most DBAPIs do not support an
-             aggregate rowcount result from an executemany call.
-             The :meth:`.ResultProxy.supports_sane_rowcount` and
-             :meth:`.ResultProxy.supports_sane_multi_rowcount` methods
-             will report from the dialect if each usage is known to be
-             supported.
+    .. versionadded:: 1.4 ``max_row_buffer`` may now exceed 1000 rows.
 
-           * Statements that use RETURNING may not return a correct
-             rowcount.
+    .. seealso::
 
-        """
-        try:
-            return self.context.rowcount
-        except BaseException as e:
-            self.connection._handle_dbapi_exception(
-                e, None, None, self.cursor, self.context
-            )
+        :ref:`psycopg2_execution_options`
+    """
 
-    @property
-    def lastrowid(self):
-        """return the 'lastrowid' accessor on the DBAPI cursor.
+    __slots__ = ("_max_row_buffer", "_rowbuffer", "_bufsize")
 
-        This is a DBAPI specific method and is only functional
-        for those backends which support it, for statements
-        where it is appropriate.  It's behavior is not
-        consistent across backends.
+    def __init__(
+        self, max_row_buffer, dbapi_cursor, description, initial_buffer
+    ):
+        super(BufferedRowCursorFetchStrategy, self).__init__(
+            dbapi_cursor, description
+        )
 
-        Usage of this method is normally unnecessary when
-        using insert() expression constructs; the
-        :attr:`~ResultProxy.inserted_primary_key` attribute provides a
-        tuple of primary key values for a newly inserted row,
-        regardless of database backend.
+        self._max_row_buffer = max_row_buffer
+        self._growth_factor = 5
+        self._rowbuffer = initial_buffer
+
+        self._bufsize = min(self._max_row_buffer, self._growth_factor)
+
+    @classmethod
+    def create(cls, result):
+        """Buffered row strategy has to buffer the first rows *before*
+        cursor.description is fetched so that it works with named cursors
+        correctly
 
         """
-        try:
-            return self._saved_cursor.lastrowid
-        except BaseException as e:
-            self.connection._handle_dbapi_exception(
-                e, None, None, self._saved_cursor, self.context
+
+        dbapi_cursor = result.cursor
+
+        initial_buffer = collections.deque(dbapi_cursor.fetchmany(1))
+
+        description = dbapi_cursor.description
+
+        if description is None:
+            return NoCursorDMLFetchStrategy(False)
+        else:
+            max_row_buffer = result.context.execution_options.get(
+                "max_row_buffer", 1000
+            )
+            return cls(
+                max_row_buffer, dbapi_cursor, description, initial_buffer
             )
 
-    @property
-    def returns_rows(self):
-        """True if this :class:`.ResultProxy` returns rows.
+    def __buffer_rows(self):
+        size = self._bufsize
+        self._rowbuffer = collections.deque(self.dbapi_cursor.fetchmany(size))
+        if size < self._max_row_buffer:
+            self._bufsize = min(
+                self._max_row_buffer, size * self._growth_factor
+            )
 
-        I.e. if it is legal to call the methods
-        :meth:`~.ResultProxy.fetchone`,
-        :meth:`~.ResultProxy.fetchmany`
-        :meth:`~.ResultProxy.fetchall`.
+    def soft_close(self, result):
+        self._rowbuffer.clear()
+        super(BufferedRowCursorFetchStrategy, self).soft_close(result)
 
-        """
-        return self._metadata is not None
+    def hard_close(self, result):
+        self._rowbuffer.clear()
+        super(BufferedRowCursorFetchStrategy, self).hard_close(result)
 
-    @property
-    def is_insert(self):
-        """True if this :class:`.ResultProxy` is the result
-        of a executing an expression language compiled
-        :func:`.expression.insert` construct.
+    def fetchone(self):
+        if not self._rowbuffer:
+            self.__buffer_rows()
+            if not self._rowbuffer:
+                return None
+        return self._rowbuffer.popleft()
 
-        When True, this implies that the
-        :attr:`inserted_primary_key` attribute is accessible,
-        assuming the statement did not include
-        a user defined "returning" construct.
+    def fetchmany(self, size=None):
+        if size is None:
+            return self.fetchall()
+        result = []
+        for x in range(0, size):
+            row = self.fetchone()
+            if row is None:
+                break
+            result.append(row)
+        return result
 
-        """
-        return self.context.isinsert
+    def fetchall(self):
+        self._rowbuffer.extend(self.dbapi_cursor.fetchall())
+        ret = self._rowbuffer
+        self._rowbuffer = collections.deque()
+        return ret
 
-    def _cursor_description(self):
-        """May be overridden by subclasses."""
 
-        return self._saved_cursor.description
+class FullyBufferedCursorFetchStrategy(DefaultCursorFetchStrategy):
+    """A cursor strategy that buffers rows fully upon creation.
 
-    def _soft_close(self):
-        """Soft close this :class:`.ResultProxy`.
+    Used for operations where a result is to be delivered
+    after the database conversation can not be continued,
+    such as MSSQL INSERT...OUTPUT after an autocommit.
 
-        This releases all DBAPI cursor resources, but leaves the
-        ResultProxy "open" from a semantic perspective, meaning the
-        fetchXXX() methods will continue to return empty results.
+    """
 
-        This method is called automatically when:
+    __slots__ = ("_rowbuffer",)
 
-        * all result rows are exhausted using the fetchXXX() methods.
-        * cursor.description is None.
+    def __init__(self, dbapi_cursor, description, initial_buffer=None):
+        super(FullyBufferedCursorFetchStrategy, self).__init__(
+            dbapi_cursor, description
+        )
+        if initial_buffer is not None:
+            self._rowbuffer = collections.deque(initial_buffer)
+        else:
+            self._rowbuffer = self._buffer_rows()
 
-        This method is **not public**, but is documented in order to clarify
-        the "autoclose" process used.
+    @classmethod
+    def create_from_buffer(cls, dbapi_cursor, description, buffer):
+        return cls(dbapi_cursor, description, buffer)
 
-        .. versionadded:: 1.0.0
+    def _buffer_rows(self):
+        return collections.deque(self.dbapi_cursor.fetchall())
 
-        .. seealso::
+    def soft_close(self, result):
+        self._rowbuffer.clear()
+        super(FullyBufferedCursorFetchStrategy, self).soft_close(result)
 
-            :meth:`.ResultProxy.close`
+    def hard_close(self, result):
+        self._rowbuffer.clear()
+        super(FullyBufferedCursorFetchStrategy, self).hard_close(result)
 
+    def fetchone(self):
+        if self._rowbuffer:
+            return self._rowbuffer.popleft()
+        else:
+            return None
 
-        """
-        if self._soft_closed:
-            return
-        self._soft_closed = True
-        cursor = self.cursor
-        self.connection._safe_close_cursor(cursor)
-        if self._autoclose_connection:
-            self.connection.close()
-        self.cursor = None
+    def fetchmany(self, size=None):
+        if size is None:
+            return self.fetchall()
+        result = []
+        for x in range(0, size):
+            row = self.fetchone()
+            if row is None:
+                break
+            result.append(row)
+        return result
 
-    def close(self):
-        """Close this ResultProxy.
+    def fetchall(self):
+        ret = self._rowbuffer
+        self._rowbuffer = collections.deque()
+        return ret
 
-        This closes out the underlying DBAPI cursor corresponding
-        to the statement execution, if one is still present.  Note that the
-        DBAPI cursor is automatically released when the :class:`.ResultProxy`
-        exhausts all available rows.  :meth:`.ResultProxy.close` is generally
-        an optional method except in the case when discarding a
-        :class:`.ResultProxy` that still has additional rows pending for fetch.
 
-        In the case of a result that is the product of
-        :ref:`connectionless execution <dbengine_implicit>`,
-        the underlying :class:`.Connection` object is also closed, which
-        :term:`releases` DBAPI connection resources.
+class BaseResult(object):
+    """Base class for database result objects.
 
-        After this method is called, it is no longer valid to call upon
-        the fetch methods, which will raise a :class:`.ResourceClosedError`
-        on subsequent use.
 
-        .. versionchanged:: 1.0.0 - the :meth:`.ResultProxy.close` method
-           has been separated out from the process that releases the underlying
-           DBAPI cursor resource.   The "auto close" feature of the
-           :class:`.Connection` now performs a so-called "soft close", which
-           releases the underlying DBAPI cursor, but allows the
-           :class:`.ResultProxy` to still behave as an open-but-exhausted
-           result set; the actual :meth:`.ResultProxy.close` method is never
-           called.    It is still safe to discard a :class:`.ResultProxy`
-           that has been fully exhausted without calling this method.
+    :class:`.BaseResult` is the base class for the 1.x style
+    :class:`.ResultProxy` class as well as the 2.x style
+    :class:`.future.Result` class.
 
-        .. seealso::
+    """
 
-            :ref:`connections_toplevel`
+    out_parameters = None
+    _metadata = None
+    _soft_closed = False
+    closed = False
 
-        """
+    @classmethod
+    def _create_for_context(cls, context):
+        if context._is_future_result:
+            obj = object.__new__(_future_Result)
+        else:
+            obj = object.__new__(ResultProxy)
+        obj.__init__(context)
+        return obj
 
-        if not self.closed:
-            self._soft_close()
-            self.closed = True
+    def __init__(self, context):
+        self.context = context
+        self.dialect = context.dialect
+        self.cursor = context.cursor
+        self.connection = context.root_connection
+        self._echo = (
+            self.connection._echo and context.engine._should_log_debug()
+        )
+        self._init_metadata()
 
-    def __iter__(self):
-        """Implement iteration protocol."""
+    def _init_metadata(self):
+        self.cursor_strategy = strat = self.context.get_result_cursor_strategy(
+            self
+        )
 
-        while True:
-            row = self.fetchone()
-            if row is None:
-                return
+        if strat.cursor_description is not None:
+            if self.context.compiled:
+                if self.context.compiled._cached_metadata:
+                    self._metadata = self.context.compiled._cached_metadata
+                else:
+                    self._metadata = (
+                        self.context.compiled._cached_metadata
+                    ) = self._cursor_metadata(self, strat.cursor_description)
             else:
-                yield row
+                self._metadata = self._cursor_metadata(
+                    self, strat.cursor_description
+                )
+            if self._echo:
+                self.context.engine.logger.debug(
+                    "Col %r", tuple(x[0] for x in strat.cursor_description)
+                )
+        # leave cursor open so that execution context can continue
+        # setting up things like rowcount
+
+    def keys(self):
+        """Return the list of string keys that would represented by each
+        :class:`.Row`."""
+
+        if self._metadata:
+            return self._metadata.keys
+        else:
+            return []
+
+    def _getter(self, key, raiseerr=True):
+        try:
+            getter = self._metadata._getter
+        except AttributeError:
+            return self.cursor_strategy._non_result(None)
+        else:
+            return getter(key, raiseerr)
+
+    def _tuple_getter(self, key, raiseerr=True):
+        try:
+            getter = self._metadata._tuple_getter
+        except AttributeError:
+            return self.cursor_strategy._non_result(None)
+        else:
+            return getter(key, raiseerr)
+
+    def _has_key(self, key):
+        try:
+            has_key = self._metadata._has_key
+        except AttributeError:
+            return self.cursor_strategy._non_result(None)
+        else:
+            return has_key(key)
+
+    def _soft_close(self, hard=False):
+        """Soft close this :class:`.ResultProxy`.
+
+        This releases all DBAPI cursor resources, but leaves the
+        ResultProxy "open" from a semantic perspective, meaning the
+        fetchXXX() methods will continue to return empty results.
+
+        This method is called automatically when:
+
+        * all result rows are exhausted using the fetchXXX() methods.
+        * cursor.description is None.
+
+        This method is **not public**, but is documented in order to clarify
+        the "autoclose" process used.
 
-    def __next__(self):
-        """Implement the Python next() protocol.
+        .. versionadded:: 1.0.0
 
-        This method, mirrored as both ``.next()`` and  ``.__next__()``, is part
-        of Python's API for producing iterator-like behavior.
+        .. seealso::
+
+            :meth:`.ResultProxy.close`
 
-        .. versionadded:: 1.2
 
         """
-        row = self.fetchone()
-        if row is None:
-            raise StopIteration()
+
+        if (not hard and self._soft_closed) or (hard and self.closed):
+            return
+
+        if hard:
+            self.closed = True
+            self.cursor_strategy.hard_close(self)
         else:
-            return row
+            self.cursor_strategy.soft_close(self)
 
-    next = __next__
+        if not self._soft_closed:
+            cursor = self.cursor
+            self.cursor = None
+            self.connection._safe_close_cursor(cursor)
+            self._soft_closed = True
 
     @util.memoized_property
     def inserted_primary_key(self):
@@ -1340,37 +1363,196 @@ class ResultProxy(object):
 
         return self.dialect.supports_sane_multi_rowcount
 
-    def _fetchone_impl(self):
+    @util.memoized_property
+    def rowcount(self):
+        """Return the 'rowcount' for this result.
+
+        The 'rowcount' reports the number of rows *matched*
+        by the WHERE criterion of an UPDATE or DELETE statement.
+
+        .. note::
+
+           Notes regarding :attr:`.ResultProxy.rowcount`:
+
+
+           * This attribute returns the number of rows *matched*,
+             which is not necessarily the same as the number of rows
+             that were actually *modified* - an UPDATE statement, for example,
+             may have no net change on a given row if the SET values
+             given are the same as those present in the row already.
+             Such a row would be matched but not modified.
+             On backends that feature both styles, such as MySQL,
+             rowcount is configured by default to return the match
+             count in all cases.
+
+           * :attr:`.ResultProxy.rowcount` is *only* useful in conjunction
+             with an UPDATE or DELETE statement.  Contrary to what the Python
+             DBAPI says, it does *not* return the
+             number of rows available from the results of a SELECT statement
+             as DBAPIs cannot support this functionality when rows are
+             unbuffered.
+
+           * :attr:`.ResultProxy.rowcount` may not be fully implemented by
+             all dialects.  In particular, most DBAPIs do not support an
+             aggregate rowcount result from an executemany call.
+             The :meth:`.ResultProxy.supports_sane_rowcount` and
+             :meth:`.ResultProxy.supports_sane_multi_rowcount` methods
+             will report from the dialect if each usage is known to be
+             supported.
+
+           * Statements that use RETURNING may not return a correct
+             rowcount.
+
+        """
         try:
-            return self.cursor.fetchone()
-        except AttributeError:
-            return self._non_result(None)
+            return self.context.rowcount
+        except BaseException as e:
+            self.connection._handle_dbapi_exception(
+                e, None, None, self.cursor, self.context
+            )
+
+    @property
+    def lastrowid(self):
+        """return the 'lastrowid' accessor on the DBAPI cursor.
+
+        This is a DBAPI specific method and is only functional
+        for those backends which support it, for statements
+        where it is appropriate.  It's behavior is not
+        consistent across backends.
+
+        Usage of this method is normally unnecessary when
+        using insert() expression constructs; the
+        :attr:`~ResultProxy.inserted_primary_key` attribute provides a
+        tuple of primary key values for a newly inserted row,
+        regardless of database backend.
 
-    def _fetchmany_impl(self, size=None):
+        """
         try:
-            if size is None:
-                return self.cursor.fetchmany()
+            return self.context.get_lastrowid()
+        except BaseException as e:
+            self.connection._handle_dbapi_exception(
+                e, None, None, self.cursor, self.context
+            )
+
+    @property
+    def returns_rows(self):
+        """True if this :class:`.ResultProxy` returns rows.
+
+        I.e. if it is legal to call the methods
+        :meth:`~.ResultProxy.fetchone`,
+        :meth:`~.ResultProxy.fetchmany`
+        :meth:`~.ResultProxy.fetchall`.
+
+        """
+        return self._metadata is not None
+
+    @property
+    def is_insert(self):
+        """True if this :class:`.ResultProxy` is the result
+        of a executing an expression language compiled
+        :func:`.expression.insert` construct.
+
+        When True, this implies that the
+        :attr:`inserted_primary_key` attribute is accessible,
+        assuming the statement did not include
+        a user defined "returning" construct.
+
+        """
+        return self.context.isinsert
+
+
+class ResultProxy(BaseResult):
+    """A facade around a DBAPI cursor object.
+
+    Returns database rows via the :class:`.Row` class, which provides
+    additional API features and behaviors on top of the raw data returned
+    by the DBAPI.
+
+    Within the scope of the 1.x series of SQLAlchemy, the :class:`.ResultProxy`
+    will in fact return instances of the :class:`.LegacyRow` class, which
+    maintains Python mapping (i.e. dictionary) like behaviors upon the object
+    itself.  Going forward, the :attr:`.Row._mapping` attribute should be used
+    for dictionary behaviors.
+
+    .. seealso::
+
+        :ref:`coretutorial_selecting` - introductory material for accessing
+        :class:`.ResultProxy` and :class:`.Row` objects.
+
+    """
+
+    _autoclose_connection = False
+    _process_row = LegacyRow
+    _cursor_metadata = LegacyCursorResultMetaData
+    _cursor_strategy_cls = DefaultCursorFetchStrategy
+
+    def __iter__(self):
+        """Implement iteration protocol."""
+
+        while True:
+            row = self.fetchone()
+            if row is None:
+                return
             else:
-                return self.cursor.fetchmany(size)
-        except AttributeError:
-            return self._non_result([])
+                yield row
 
-    def _fetchall_impl(self):
-        try:
-            return self.cursor.fetchall()
-        except AttributeError:
-            return self._non_result([])
+    def close(self):
+        """Close this ResultProxy.
 
-    def _non_result(self, default):
-        if self._metadata is None:
-            raise exc.ResourceClosedError(
-                "This result object does not return rows. "
-                "It has been closed automatically."
-            )
-        elif self.closed:
-            raise exc.ResourceClosedError("This result object is closed.")
+        This closes out the underlying DBAPI cursor corresponding
+        to the statement execution, if one is still present.  Note that the
+        DBAPI cursor is automatically released when the :class:`.ResultProxy`
+        exhausts all available rows.  :meth:`.ResultProxy.close` is generally
+        an optional method except in the case when discarding a
+        :class:`.ResultProxy` that still has additional rows pending for fetch.
+
+        In the case of a result that is the product of
+        :ref:`connectionless execution <dbengine_implicit>`,
+        the underlying :class:`.Connection` object is also closed, which
+        :term:`releases` DBAPI connection resources.
+
+        .. deprecated:: 2.0 "connectionless" execution is deprecated and will
+           be removed in version 2.0.   Version 2.0 will feature the
+           :class:`.Result` object that will no longer affect the status
+           of the originating connection in any case.
+
+        After this method is called, it is no longer valid to call upon
+        the fetch methods, which will raise a :class:`.ResourceClosedError`
+        on subsequent use.
+
+        .. seealso::
+
+            :ref:`connections_toplevel`
+
+        """
+        self._soft_close(hard=True)
+
+    def _soft_close(self, hard=False):
+        soft_closed = self._soft_closed
+        super(ResultProxy, self)._soft_close(hard=hard)
+        if (
+            not soft_closed
+            and self._soft_closed
+            and self._autoclose_connection
+        ):
+            self.connection.close()
+
+    def __next__(self):
+        """Implement the Python next() protocol.
+
+        This method, mirrored as both ``.next()`` and  ``.__next__()``, is part
+        of Python's API for producing iterator-like behavior.
+
+        .. versionadded:: 1.2
+
+        """
+        row = self.fetchone()
+        if row is None:
+            raise StopIteration()
         else:
-            return default
+            return row
+
+    next = __next__
 
     def process_rows(self, rows):
         process_row = self._process_row
@@ -1406,7 +1588,7 @@ class ResultProxy(object):
         """
 
         try:
-            l = self.process_rows(self._fetchall_impl())
+            l = self.process_rows(self.cursor_strategy.fetchall())
             self._soft_close()
             return l
         except BaseException as e:
@@ -1432,7 +1614,7 @@ class ResultProxy(object):
         """
 
         try:
-            l = self.process_rows(self._fetchmany_impl(size))
+            l = self.process_rows(self.cursor_strategy.fetchmany(size))
             if len(l) == 0:
                 self._soft_close()
             return l
@@ -1441,6 +1623,9 @@ class ResultProxy(object):
                 e, None, None, self.cursor, self.context
             )
 
+    def _onerow(self):
+        return self.fetchone()
+
     def fetchone(self):
         """Fetch one row, just like DB-API ``cursor.fetchone()``.
 
@@ -1457,7 +1642,7 @@ class ResultProxy(object):
 
         """
         try:
-            row = self._fetchone_impl()
+            row = self.cursor_strategy.fetchone()
             if row is not None:
                 return self.process_rows([row])[0]
             else:
@@ -1477,11 +1662,8 @@ class ResultProxy(object):
         :return: a :class:`.Row` object, or None if no rows remain
 
         """
-        if self._metadata is None:
-            return self._non_result(None)
-
         try:
-            row = self._fetchone_impl()
+            row = self.cursor_strategy.fetchone()
         except BaseException as e:
             self.connection._handle_dbapi_exception(
                 e, None, None, self.cursor, self.context
@@ -1514,128 +1696,26 @@ class ResultProxy(object):
 class BufferedRowResultProxy(ResultProxy):
     """A ResultProxy with row buffering behavior.
 
-    ``ResultProxy`` that buffers the contents of a selection of rows
-    before ``fetchone()`` is called.  This is to allow the results of
-    ``cursor.description`` to be available immediately, when
-    interfacing with a DB-API that requires rows to be consumed before
-    this information is available (currently psycopg2, when used with
-    server-side cursors).
-
-    The pre-fetching behavior fetches only one row initially, and then
-    grows its buffer size by a fixed amount with each successive need
-    for additional rows up the ``max_row_buffer`` size, which defaults
-    to 1000::
-
-        with psycopg2_engine.connect() as conn:
-
-            result = conn.execution_options(
-                stream_results=True, max_row_buffer=50
-                ).execute("select * from table")
+    .. deprecated::  1.4 this class is now supplied using a strategy object.
+       See :class:`.BufferedRowCursorFetchStrategy`.
 
-    .. versionadded:: 1.4 ``max_row_buffer`` may now exceed 1000 rows.
-
-    .. seealso::
-
-        :ref:`psycopg2_execution_options`
     """
 
-    def _init_metadata(self):
-        self._max_row_buffer = self.context.execution_options.get(
-            "max_row_buffer", 1000
-        )
-        self._growth_factor = 5
-        self.__buffer_rows()
-        super(BufferedRowResultProxy, self)._init_metadata()
-
-    def __buffer_rows(self):
-        if self.cursor is None:
-            return
-        size = getattr(self, "_bufsize", 1)
-        self.__rowbuffer = collections.deque(self.cursor.fetchmany(size))
-        if size < self._max_row_buffer:
-            self._bufsize = min(
-                self._max_row_buffer, size * self._growth_factor
-            )
-
-    def _soft_close(self, **kw):
-        self.__rowbuffer.clear()
-        super(BufferedRowResultProxy, self)._soft_close(**kw)
-
-    def _fetchone_impl(self):
-        if self.cursor is None:
-            return self._non_result(None)
-        if not self.__rowbuffer:
-            self.__buffer_rows()
-            if not self.__rowbuffer:
-                return None
-        return self.__rowbuffer.popleft()
-
-    def _fetchmany_impl(self, size=None):
-        if size is None:
-            return self._fetchall_impl()
-        result = []
-        for x in range(0, size):
-            row = self._fetchone_impl()
-            if row is None:
-                break
-            result.append(row)
-        return result
-
-    def _fetchall_impl(self):
-        if self.cursor is None:
-            return self._non_result([])
-        self.__rowbuffer.extend(self.cursor.fetchall())
-        ret = self.__rowbuffer
-        self.__rowbuffer = collections.deque()
-        return ret
+    _cursor_strategy_cls = BufferedRowCursorFetchStrategy
 
 
 class FullyBufferedResultProxy(ResultProxy):
     """A result proxy that buffers rows fully upon creation.
 
-    Used for operations where a result is to be delivered
-    after the database conversation can not be continued,
-    such as MSSQL INSERT...OUTPUT after an autocommit.
+    .. deprecated::  1.4 this class is now supplied using a strategy object.
+       See :class:`.FullyBufferedCursorFetchStrategy`.
 
     """
 
-    def _init_metadata(self):
-        super(FullyBufferedResultProxy, self)._init_metadata()
-        self.__rowbuffer = self._buffer_rows()
-
-    def _buffer_rows(self):
-        return collections.deque(self.cursor.fetchall())
-
-    def _soft_close(self, **kw):
-        self.__rowbuffer.clear()
-        super(FullyBufferedResultProxy, self)._soft_close(**kw)
-
-    def _fetchone_impl(self):
-        if self.__rowbuffer:
-            return self.__rowbuffer.popleft()
-        else:
-            return self._non_result(None)
-
-    def _fetchmany_impl(self, size=None):
-        if size is None:
-            return self._fetchall_impl()
-        result = []
-        for x in range(0, size):
-            row = self._fetchone_impl()
-            if row is None:
-                break
-            result.append(row)
-        return result
-
-    def _fetchall_impl(self):
-        if not self.cursor:
-            return self._non_result([])
-        ret = self.__rowbuffer
-        self.__rowbuffer = collections.deque()
-        return ret
+    _cursor_strategy_cls = FullyBufferedCursorFetchStrategy
 
 
-class BufferedColumnRow(Row):
+class BufferedColumnRow(LegacyRow):
     """Row is now BufferedColumn in all cases"""
 
 
diff --git a/lib/sqlalchemy/engine/row.py b/lib/sqlalchemy/engine/row.py
new file mode 100644 (file)
index 0000000..b4347a5
--- /dev/null
@@ -0,0 +1,501 @@
+# engine/row.py
+# Copyright (C) 2005-2020 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""Define row constructs including :class:`.Row`."""
+
+
+import operator
+
+from .. import util
+from ..sql import util as sql_util
+from ..util.compat import collections_abc
+
+
+MD_INDEX = 0  # integer index in cursor.description
+
+# This reconstructor is necessary so that pickles with the C extension or
+# without use the same Binary format.
+try:
+    # We need a different reconstructor on the C extension so that we can
+    # add extra checks that fields have correctly been initialized by
+    # __setstate__.
+    from sqlalchemy.cresultproxy import safe_rowproxy_reconstructor
+
+    # The extra function embedding is needed so that the
+    # reconstructor function has the same signature whether or not
+    # the extension is present.
+    def rowproxy_reconstructor(cls, state):
+        return safe_rowproxy_reconstructor(cls, state)
+
+
+except ImportError:
+
+    def rowproxy_reconstructor(cls, state):
+        obj = cls.__new__(cls)
+        obj.__setstate__(state)
+        return obj
+
+
+try:
+    from sqlalchemy.cresultproxy import BaseRow
+
+    _baserow_usecext = True
+except ImportError:
+    _baserow_usecext = False
+
+    class BaseRow(object):
+        __slots__ = ("_parent", "_data", "_keymap")
+
+        def __init__(self, parent, processors, keymap, data):
+            """Row objects are constructed by ResultProxy objects."""
+
+            self._parent = parent
+
+            self._data = tuple(
+                [
+                    proc(value) if proc else value
+                    for proc, value in zip(processors, data)
+                ]
+            )
+            self._keymap = keymap
+
+        def __reduce__(self):
+            return (
+                rowproxy_reconstructor,
+                (self.__class__, self.__getstate__()),
+            )
+
+        def _values_impl(self):
+            return list(self)
+
+        def __iter__(self):
+            return iter(self._data)
+
+        def __len__(self):
+            return len(self._data)
+
+        def __hash__(self):
+            return hash(self._data)
+
+        def _subscript_impl(self, key, ismapping):
+            try:
+                rec = self._keymap[key]
+            except KeyError:
+                rec = self._parent._key_fallback(key)
+            except TypeError:
+                # the non-C version detects a slice using TypeError.
+                # this is pretty inefficient for the slice use case
+                # but is more efficient for the integer use case since we
+                # don't have to check it up front.
+                if isinstance(key, slice):
+                    return tuple(self._data[key])
+                else:
+                    raise
+
+            mdindex = rec[MD_INDEX]
+            if mdindex is None:
+                self._parent._raise_for_ambiguous_column_name(rec)
+            elif not ismapping and mdindex != key and not isinstance(key, int):
+                self._parent._warn_for_nonint(key)
+
+            # TODO: warn for non-int here, RemovedIn20Warning when available
+
+            return self._data[mdindex]
+
+        def _get_by_key_impl(self, key):
+            return self._subscript_impl(key, False)
+
+        def _get_by_key_impl_mapping(self, key):
+            # the C code has two different methods so that we can distinguish
+            # between tuple-like keys (integers, slices) and mapping-like keys
+            # (strings, objects)
+            return self._subscript_impl(key, True)
+
+        def __getattr__(self, name):
+            try:
+                return self._get_by_key_impl_mapping(name)
+            except KeyError as e:
+                raise AttributeError(e.args[0])
+
+
+class Row(BaseRow, collections_abc.Sequence):
+    """Represent a single result row.
+
+    The :class:`.Row` object represents a row of a database result.  It is
+    typically associated in the 1.x series of SQLAlchemy with the
+    :class:`.ResultProxy` object, however is also used by the ORM for
+    tuple-like results as of SQLAlchemy 1.4.
+
+    The :class:`.Row` object seeks to act as much like a Python named
+    tuple as possible.   For mapping (i.e. dictionary) behavior on a row,
+    such as testing for containment of keys, refer to the :attr:`.Row._mapping`
+    attribute.
+
+    .. seealso::
+
+        :ref:`coretutorial_selecting` - includes examples of selecting
+        rows from SELECT statements.
+
+        :class:`.LegacyRow` - Compatibility interface introduced in SQLAlchemy
+        1.4.
+
+    .. versionchanged:: 1.4
+
+        Renamed ``RowProxy`` to :class:`.Row`.  :class:`.Row` is no longer a
+        "proxy" object in that it contains the final form of data within it,
+        and now acts mostly like a named tuple.  Mapping-like functionality is
+        moved to the :attr:`.Row._mapping` attribute, but will remain available
+        in SQLAlchemy 1.x series via the :class:`.LegacyRow` class that is used
+        by :class:`.ResultProxy`.   See :ref:`change_4710_core` for background
+        on this change.
+
+    """
+
+    __slots__ = ()
+
+    @property
+    def _mapping(self):
+        """Return a :class:`.RowMapping` for this :class:`.Row`.
+
+        This object provides a consistent Python mapping (i.e. dictionary)
+        interface for the data contained within the row.   The :class:`.Row`
+        by itself behaves like a named tuple, however in the 1.4 series of
+        SQLAlchemy, the :class:`.LegacyRow` class is still used by Core which
+        continues to have mapping-like behaviors against the row object
+        itself.
+
+        .. seealso::
+
+            :attr:`.Row._fields`
+
+        .. versionadded:: 1.4
+
+        """
+
+        return RowMapping(self)
+
+    def __contains__(self, key):
+        return key in self._data
+
+    def __getitem__(self, key):
+        return self._data[key]
+
+    def __getstate__(self):
+        return {"_parent": self._parent, "_data": self._data}
+
+    def __setstate__(self, state):
+        self._parent = parent = state["_parent"]
+        self._data = state["_data"]
+        self._keymap = parent._keymap
+
+    def _op(self, other, op):
+        return (
+            op(tuple(self), tuple(other))
+            if isinstance(other, Row)
+            else op(tuple(self), other)
+        )
+
+    __hash__ = BaseRow.__hash__
+
+    def __lt__(self, other):
+        return self._op(other, operator.lt)
+
+    def __le__(self, other):
+        return self._op(other, operator.le)
+
+    def __ge__(self, other):
+        return self._op(other, operator.ge)
+
+    def __gt__(self, other):
+        return self._op(other, operator.gt)
+
+    def __eq__(self, other):
+        return self._op(other, operator.eq)
+
+    def __ne__(self, other):
+        return self._op(other, operator.ne)
+
+    def __repr__(self):
+        return repr(sql_util._repr_row(self))
+
+    @util.deprecated(
+        "1.4",
+        "The :meth:`.Row.keys` method is deprecated and will be removed in a "
+        "future release.  Use the namedtuple standard accessor "
+        ":attr:`.Row._fields`, or for full mapping behavior use  "
+        "row._mapping.keys() ",
+    )
+    def keys(self):
+        """Return the list of keys as strings represented by this
+        :class:`.Row`.
+
+        This method is analogous to the Python dictionary ``.keys()`` method,
+        except that it returns a list, not an iterator.
+
+        .. seealso::
+
+            :attr:`.Row._fields`
+
+            :attr:`.Row._mapping`
+
+        """
+        return [k for k in self._parent.keys if k is not None]
+
+    @property
+    def _fields(self):
+        """Return a tuple of string keys as represented by this
+        :class:`.Row`.
+
+        This attribute is analogous to the Python named tuple ``._fields``
+        attribute.
+
+        .. versionadded:: 1.4
+
+        .. seealso::
+
+            :attr:`.Row._mapping`
+
+        """
+        return tuple([k for k in self._parent.keys if k is not None])
+
+    def _asdict(self):
+        """Return a new dict which maps field names to their corresponding
+        values.
+
+        This method is analogous to the Python named tuple ``._asdict()``
+        method, and works by applying the ``dict()`` constructor to the
+        :attr:`.Row._mapping` attribute.
+
+        .. versionadded:: 1.4
+
+        .. seealso::
+
+            :attr:`.Row._mapping`
+
+        """
+        return dict(self._mapping)
+
+    def _replace(self):
+        raise NotImplementedError()
+
+    @property
+    def _field_defaults(self):
+        raise NotImplementedError()
+
+
+class LegacyRow(Row):
+    """A subclass of :class:`.Row` that delivers 1.x SQLAlchemy behaviors
+    for Core.
+
+    The :class:`.LegacyRow` class is where most of the Python mapping
+    (i.e. dictionary-like)
+    behaviors are implemented for the row object.  The mapping behavior
+    of :class:`.Row` going forward is accessible via the :class:`.Row._mapping`
+    attribute.
+
+    .. versionadded:: 1.4 - added :class:`.LegacyRow` which encapsulates most
+       of the deprecated behaviors of :class:`.Row`.
+
+    """
+
+    def __contains__(self, key):
+        return self._parent._contains(key, self)
+
+    def __getitem__(self, key):
+        return self._get_by_key_impl(key)
+
+    @util.deprecated(
+        "1.4",
+        "The :meth:`.LegacyRow.has_key` method is deprecated and will be "
+        "removed in a future release.  To test for key membership, use "
+        "the :attr:`Row._mapping` attribute, i.e. 'key in row._mapping`.",
+    )
+    def has_key(self, key):
+        """Return True if this :class:`.LegacyRow` contains the given key.
+
+        Through the SQLAlchemy 1.x series, the ``__contains__()`` method of
+        :class:`.Row` (or :class:`.LegacyRow` as of SQLAlchemy 1.4)  also links
+        to :meth:`.Row.has_key`, in that an expression such as ::
+
+            "some_col" in row
+
+        Will return True if the row contains a column named ``"some_col"``,
+        in the way that a Python mapping works.
+
+        However, it is planned that the 2.0 series of SQLAlchemy will reverse
+        this behavior so that ``__contains__()`` will refer to a value being
+        present in the row, in the way that a Python tuple works.
+
+        .. seealso::
+
+            :ref:`change_4710_core`
+
+        """
+
+        return self._parent._has_key(key)
+
+    @util.deprecated(
+        "1.4",
+        "The :meth:`.LegacyRow.items` method is deprecated and will be "
+        "removed in a future release.  Use the :attr:`Row._mapping` "
+        "attribute, i.e., 'row._mapping.items()'.",
+    )
+    def items(self):
+        """Return a list of tuples, each tuple containing a key/value pair.
+
+        This method is analogous to the Python dictionary ``.items()`` method,
+        except that it returns a list, not an iterator.
+
+        """
+
+        return [(key, self[key]) for key in self.keys()]
+
+    @util.deprecated(
+        "1.4",
+        "The :meth:`.LegacyRow.iterkeys` method is deprecated and will be "
+        "removed in a future release.  Use the :attr:`Row._mapping` "
+        "attribute, i.e., 'row._mapping.keys()'.",
+    )
+    def iterkeys(self):
+        """Return a an iterator against the :meth:`.Row.keys` method.
+
+        This method is analogous to the Python-2-only dictionary
+        ``.iterkeys()`` method.
+
+        """
+        return iter(self._parent.keys)
+
+    @util.deprecated(
+        "1.4",
+        "The :meth:`.LegacyRow.itervalues` method is deprecated and will be "
+        "removed in a future release.  Use the :attr:`Row._mapping` "
+        "attribute, i.e., 'row._mapping.values()'.",
+    )
+    def itervalues(self):
+        """Return a an iterator against the :meth:`.Row.values` method.
+
+        This method is analogous to the Python-2-only dictionary
+        ``.itervalues()`` method.
+
+        """
+        return iter(self)
+
+    @util.deprecated(
+        "1.4",
+        "The :meth:`.LegacyRow.values` method is deprecated and will be "
+        "removed in a future release.  Use the :attr:`Row._mapping` "
+        "attribute, i.e., 'row._mapping.values()'.",
+    )
+    def values(self):
+        """Return the values represented by this :class:`.Row` as a list.
+
+        This method is analogous to the Python dictionary ``.values()`` method,
+        except that it returns a list, not an iterator.
+
+        """
+
+        return self._values_impl()
+
+
+BaseRowProxy = BaseRow
+RowProxy = Row
+
+
+class ROMappingView(
+    collections_abc.KeysView,
+    collections_abc.ValuesView,
+    collections_abc.ItemsView,
+):
+    __slots__ = (
+        "_mapping",
+        "_items",
+    )
+
+    def __init__(self, mapping, items):
+        self._mapping = mapping
+        self._items = items
+
+    def __len__(self):
+        return len(self._items)
+
+    def __repr__(self):
+        return "{0.__class__.__name__}({0._mapping!r})".format(self)
+
+    def __iter__(self):
+        return iter(self._items)
+
+    def __contains__(self, item):
+        return item in self._items
+
+    def __eq__(self, other):
+        return list(other) == list(self)
+
+    def __ne__(self, other):
+        return list(other) != list(self)
+
+
+class RowMapping(collections_abc.Mapping):
+    """A ``Mapping`` that maps column names and objects to :class:`.Row` values.
+
+    The :class:`.RowMapping` is available from a :class:`.Row` via the
+    :attr:`.Row._mapping` attribute and supplies Python mapping (i.e.
+    dictionary) access to the  contents of the row.   This includes support
+    for testing of containment of specific keys (string column names or
+    objects), as well as iteration of keys, values, and items::
+
+        for row in result:
+            if 'a' in row._mapping:
+                print("Column 'a': %s" % row._mapping['a'])
+
+            print("Column b: %s" % row._mapping[table.c.b])
+
+
+    .. versionadded:: 1.4 The :class:`.RowMapping` object replaces the
+       mapping-like access previously provided by a database result row,
+       which now seeks to behave mostly like a named tuple.
+
+    """
+
+    __slots__ = ("row",)
+
+    def __init__(self, row):
+        self.row = row
+
+    def __getitem__(self, key):
+        return self.row._get_by_key_impl_mapping(key)
+
+    def __iter__(self):
+        return (k for k in self.row._parent.keys if k is not None)
+
+    def __len__(self):
+        return len(self.row)
+
+    def __contains__(self, key):
+        return self.row._parent._has_key(key)
+
+    def items(self):
+        """Return a view of key/value tuples for the elements in the
+        underlying :class:`.Row`.
+
+        """
+        return ROMappingView(self, [(key, self[key]) for key in self.keys()])
+
+    def keys(self):
+        """Return a view of 'keys' for string column names represented
+        by the underlying :class:`.Row`.
+
+        """
+        return ROMappingView(
+            self, [k for k in self.row._parent.keys if k is not None]
+        )
+
+    def values(self):
+        """Return a view of values for the values represented in the
+        underlying :class:`.Row`.
+
+        """
+        return ROMappingView(self, self.row._values_impl())
index 7d9b964ac0e5413ebebba304e97f8dae1ceb34cd..cafe69093a47dd419a4d7187f1f352c408ee3b88 100644 (file)
@@ -25,6 +25,7 @@ from ..orm.session import Session
 from ..sql import func
 from ..sql import literal_column
 from ..sql import util as sql_util
+from ..util import collections_abc
 
 
 log = logging.getLogger(__name__)
@@ -472,7 +473,7 @@ class Result(object):
         """
         try:
             ret = self.one()
-            if not isinstance(ret, tuple):
+            if not isinstance(ret, collections_abc.Sequence):
                 return ret
             return ret[0]
         except orm_exc.NoResultFound:
index 808ef076a1c9dc25edbba8e686a374c1f626f782..f8836112ec61f89af14a6949ad0887776d75db49 100644 (file)
@@ -9,7 +9,8 @@
 
 """
 
+from .result import Result  # noqa
 from ..sql.selectable import Select
 from ..util.langhelpers import public_factory
 
-select = public_factory(Select._create_select, ".expression.select")
+select = public_factory(Select._create_select, ".future.select")
diff --git a/lib/sqlalchemy/future/result.py b/lib/sqlalchemy/future/result.py
new file mode 100644 (file)
index 0000000..583ff95
--- /dev/null
@@ -0,0 +1,171 @@
+import operator
+
+from .. import util
+from ..engine.result import _baserow_usecext
+from ..engine.result import BaseResult
+from ..engine.result import CursorResultMetaData
+from ..engine.result import DefaultCursorFetchStrategy
+from ..engine.result import Row
+from ..sql import util as sql_util
+from ..sql.base import _generative
+from ..sql.base import Generative
+
+
+class Result(Generative, BaseResult):
+    """Interim "future" result proxy so that dialects can build on
+    upcoming 2.0 patterns.
+
+
+    """
+
+    _process_row = Row
+    _cursor_metadata = CursorResultMetaData
+    _cursor_strategy_cls = DefaultCursorFetchStrategy
+
+    _column_slice_filter = None
+    _post_creational_filter = None
+
+    def close(self):
+        """Close this :class:`.Result`.
+
+        This closes out the underlying DBAPI cursor corresponding
+        to the statement execution, if one is still present.  Note that the
+        DBAPI cursor is automatically released when the :class:`.Result`
+        exhausts all available rows.  :meth:`.Result.close` is generally
+        an optional method except in the case when discarding a
+        :class:`.Result` that still has additional rows pending for fetch.
+
+        After this method is called, it is no longer valid to call upon
+        the fetch methods, which will raise a :class:`.ResourceClosedError`
+        on subsequent use.
+
+        .. seealso::
+
+            :ref:`connections_toplevel`
+
+        """
+        self._soft_close(hard=True)
+
+    def columns(self, *col_expressions):
+        indexes = []
+        for key in col_expressions:
+            try:
+                rec = self._keymap[key]
+            except KeyError:
+                rec = self._key_fallback(key, True)
+                if rec is None:
+                    return None
+
+            index, obj = rec[0:2]
+
+            if index is None:
+                self._metadata._raise_for_ambiguous_column_name(obj)
+            indexes.append(index)
+        return self._column_slices(indexes)
+
+    def scalars(self):
+        result = self._column_slices(0)
+        result._post_creational_filter = operator.itemgetter(0)
+        return result
+
+    @_generative
+    def _column_slices(self, indexes):
+        if _baserow_usecext:
+            self._column_slice_filter = self._metadata._tuplegetter(*indexes)
+        else:
+            self._column_slice_filter = self._metadata._pure_py_tuplegetter(
+                *indexes
+            )
+
+    @_generative
+    def mappings(self):
+        self._post_creational_filter = operator.attrgetter("_mapping")
+
+    def _row_getter(self):
+        process_row = self._process_row
+        metadata = self._metadata
+        keymap = metadata._keymap
+        processors = metadata._processors
+
+        fns = ()
+
+        if self._echo:
+            log = self.context.engine.logger.debug
+
+            def log_row(row):
+                log("Row %r", sql_util._repr_row(row))
+                return row
+
+            fns += (log_row,)
+
+        if self._column_slice_filter:
+            fns += (self._column_slice_filter,)
+
+        if self._post_creational_filter:
+            fns += (self._post_creational_filter,)
+
+        def make_row(row):
+            row = process_row(metadata, processors, keymap, row)
+            for fn in fns:
+                row = fn(row)
+            return row
+
+        return make_row
+
+    def _safe_fetchone_impl(self):
+        try:
+            return self.cursor_strategy.fetchone()
+        except BaseException as e:
+            self.connection._handle_dbapi_exception(
+                e, None, None, self.cursor, self.context
+            )
+
+    def _safe_fetchall_impl(self):
+        try:
+            result = self.cursor_strategy.fetchall()
+            self._soft_close()
+            return result
+        except BaseException as e:
+            self.connection._handle_dbapi_exception(
+                e, None, None, self.cursor, self.context
+            )
+
+    def _safe_fetchmany_impl(self, size=None):
+        try:
+            l = self.process_rows(self.cursor_strategy.fetchmany(size))
+            if len(l) == 0:
+                self._soft_close()
+            return l
+        except BaseException as e:
+            self.connection._handle_dbapi_exception(
+                e, None, None, self.cursor, self.context
+            )
+
+    def __iter__(self):
+        getter = self._row_getter()
+        return (getter(r) for r in self._safe_fetchall_impl())
+
+    def _onerow(self):
+        getter = self._row_getter()
+        row = self._safe_fetchone_impl()
+        if row is None:
+            return None
+        else:
+            return getter(row)
+
+    def all(self):
+        getter = self._row_getter()
+        return [getter(r) for r in self._safe_fetchall_impl()]
+
+    def first(self):
+        getter = self._row_getter()
+        row = self._safe_fetchone_impl()
+        if row is None:
+            return None
+        else:
+            row = getter(row)
+            second_row = self._safe_fetchone_impl()
+            if second_row is not None:
+                self._soft_close()
+                util.warn("Additional rows remain")
+            return row
index 617f027d976c098fddafcf0258e09f925b50b77f..193980e6c32a5a19f581b3b433a58a5eb52c5d15 100644 (file)
@@ -28,6 +28,7 @@ from .util import aliased
 from .util import state_str
 from .. import exc as sa_exc
 from .. import util
+from ..engine import result_tuple
 from ..sql import util as sql_util
 
 
@@ -56,7 +57,7 @@ def instances(query, cursor, context):
                 )
 
     try:
-        (process, labels) = list(
+        (process, labels, extra) = list(
             zip(
                 *[
                     query_entity.row_processor(query, context, cursor)
@@ -66,7 +67,7 @@ def instances(query, cursor, context):
         )
 
         if not single_entity:
-            keyed_tuple = util.lightweight_named_tuple("result", labels)
+            keyed_tuple = result_tuple(labels, extra)
 
         while True:
             context.partials = {}
@@ -138,7 +139,9 @@ def merge_result(querylib, query, iterator, load=True):
             ]
             result = []
             keys = [ent._label_name for ent in query._entities]
-            keyed_tuple = util.lightweight_named_tuple("result", keys)
+            keyed_tuple = result_tuple(
+                keys, [ent.entities for ent in query._entities]
+            )
             for row in iterator:
                 newrow = list(row)
                 for i in mapped_entities:
@@ -190,7 +193,6 @@ def load_on_ident(
     query, key, refresh_state=None, with_for_update=None, only_load_props=None
 ):
     """Load the given identity key from the database."""
-
     if key is not None:
         ident = key[1]
         identity_token = key[2]
@@ -452,10 +454,19 @@ def _instance_processor(
     instance_state = attributes.instance_state
     instance_dict = attributes.instance_dict
     session_id = context.session.hash_key
-    version_check = context.version_check
     runid = context.runid
     identity_token = context.identity_token
 
+    version_check = context.version_check
+    if version_check:
+        version_id_col = mapper.version_id_col
+        if version_id_col is not None:
+            if adapter:
+                version_id_col = adapter.columns[version_id_col]
+            version_id_getter = result._getter(version_id_col)
+        else:
+            version_id_getter = None
+
     if not refresh_state and _polymorphic_from is not None:
         key = ("loader", path.path)
         if key in context.attributes and context.attributes[key].strategy == (
@@ -539,8 +550,10 @@ def _instance_processor(
                 currentload = not isnew
                 loaded_instance = False
 
-                if version_check and not currentload:
-                    _validate_version_id(mapper, state, dict_, row, adapter)
+                if version_check and version_id_getter and not currentload:
+                    _validate_version_id(
+                        mapper, state, dict_, row, version_id_getter
+                    )
 
             else:
                 # create a new instance
@@ -667,7 +680,7 @@ def _instance_processor(
         def ensure_no_pk(row):
             identitykey = (
                 identity_class,
-                tuple([row[column] for column in pk_cols]),
+                tuple_getter(row),
                 identity_token,
             )
             if not is_not_primary_key(identitykey[1]):
@@ -812,20 +825,11 @@ def _populate_partial(
     return to_load
 
 
-def _validate_version_id(mapper, state, dict_, row, adapter):
+def _validate_version_id(mapper, state, dict_, row, getter):
 
-    version_id_col = mapper.version_id_col
-
-    if version_id_col is None:
-        return
-
-    if adapter:
-        version_id_col = adapter.columns[version_id_col]
-
-    if (
-        mapper._get_state_attr_by_column(state, dict_, mapper.version_id_col)
-        != row[version_id_col]
-    ):
+    if mapper._get_state_attr_by_column(
+        state, dict_, mapper.version_id_col
+    ) != getter(row):
         raise orm_exc.StaleDataError(
             "Instance '%s' has version id '%s' which "
             "does not match database-loaded version id '%s'."
@@ -834,7 +838,7 @@ def _validate_version_id(mapper, state, dict_, row, adapter):
                 mapper._get_state_attr_by_column(
                     state, dict_, mapper.version_id_col
                 ),
-                row[version_id_col],
+                getter(row),
             )
         )
 
index 82e68fd07f70dbf3b0f7ee3416f730a5d1ed8c19..b84d41260f1277b6ff3ff32e54e58bf557ccaf1e 100644 (file)
@@ -2631,7 +2631,7 @@ class Mapper(sql_base.HasCacheKey, InspectionAttr):
         """Return an identity-map key for use in storing/retrieving an
         item from the identity map.
 
-        :param row: A :class:`.RowProxy` instance.  The columns which are
+        :param row: A :class:`.Row` instance.  The columns which are
          mapped by this :class:`.Mapper` should be locatable in the row,
          preferably via the :class:`.Column` object directly (as is the case
          when a :func:`.select` construct is executed), or via string names of
index 31b8b0a2080c0be30bfeab4411ec9e1919313241..95c5f8fa2c78f822fbaf82f57324321f88f035d2 100644 (file)
@@ -1522,7 +1522,7 @@ def _postfetch(
     if returning_cols:
         row = result.context.returned_defaults
         if row is not None:
-            for col in returning_cols:
+            for row_value, col in zip(row, returning_cols):
                 # pk cols returned from insert are handled
                 # distinctly, don't step on the values here
                 if col.primary_key and result.context.isinsert:
@@ -1534,7 +1534,7 @@ def _postfetch(
                 # when using declarative w/ single table inheritance
                 prop = mapper._columntoproperty.get(col)
                 if prop:
-                    dict_[prop.key] = row[col]
+                    dict_[prop.key] = row_value
                     if refresh_flush:
                         load_evt_attrs.append(prop.key)
 
index f19ec5673b7b330fc35ce96e79c8e12625bc0418..d237aa3bf244a3caa63841e0aff6d8938bf4e4ba 100644 (file)
@@ -47,6 +47,7 @@ from .. import inspection
 from .. import log
 from .. import sql
 from .. import util
+from ..engine import result_tuple
 from ..sql import coercions
 from ..sql import expression
 from ..sql import roles
@@ -56,6 +57,7 @@ from ..sql.base import _generative
 from ..sql.base import ColumnCollection
 from ..sql.base import Generative
 from ..sql.selectable import ForUpdateArg
+from ..util import collections_abc
 
 
 __all__ = ["Query", "QueryContext", "aliased"]
@@ -3320,7 +3322,7 @@ class Query(Generative):
         """
         try:
             ret = self.one()
-            if not isinstance(ret, tuple):
+            if not isinstance(ret, collections_abc.Sequence):
                 return ret
             return ret[0]
         except orm_exc.NoResultFound:
@@ -4259,7 +4261,7 @@ class _MapperEntity(_QueryEntity):
             polymorphic_discriminator=self._polymorphic_discriminator,
         )
 
-        return _instance, self._label_name
+        return _instance, self._label_name, self.entities
 
     def setup_context(self, query, context):
         adapter = self._get_entity_clauses(query, context)
@@ -4414,7 +4416,7 @@ class Bundle(InspectionAttr):
             :ref:`bundles` - includes an example of subclassing.
 
         """
-        keyed_tuple = util.lightweight_named_tuple("result", labels)
+        keyed_tuple = result_tuple(labels, [() for l in labels])
 
         def proc(row):
             return keyed_tuple([proc(row) for proc in procs])
@@ -4517,7 +4519,7 @@ class _BundleEntity(_QueryEntity):
             ent.setup_context(query, context)
 
     def row_processor(self, query, context, result):
-        procs, labels = zip(
+        procs, labels, extra = zip(
             *[
                 ent.row_processor(query, context, result)
                 for ent in self._entities
@@ -4526,7 +4528,7 @@ class _BundleEntity(_QueryEntity):
 
         proc = self.bundle.create_row_processor(query, procs, labels)
 
-        return proc, self._label_name
+        return proc, self._label_name, ()
 
 
 class _ColumnEntity(_QueryEntity):
@@ -4675,7 +4677,8 @@ class _ColumnEntity(_QueryEntity):
             column = context.adapter.columns[column]
 
         getter = result._getter(column)
-        return getter, self._label_name
+
+        return getter, self._label_name, (self.expr, self.column)
 
     def setup_context(self, query, context):
         column = query._adapt_clause(self.column, False, True)
index ed463ebe37a1fa41936d56513ab346b64b3e4a20..2e1809b0719255bee0c6af1b7e151a196a0d1824 100644 (file)
@@ -647,6 +647,10 @@ class SQLCompiler(Compiled):
 
     """
 
+    has_out_parameters = False
+    """if True, there are bindparam() objects that have the isoutparam
+    flag set."""
+
     insert_prefetch = update_prefetch = ()
 
     def __init__(
@@ -1006,7 +1010,7 @@ class SQLCompiler(Compiled):
     @util.dependencies("sqlalchemy.engine.result")
     def _create_result_map(self, result):
         """utility method used for unit tests only."""
-        return result.ResultMetaData._create_description_match_map(
+        return result.CursorResultMetaData._create_description_match_map(
             self._result_columns
         )
 
@@ -1901,6 +1905,8 @@ class SQLCompiler(Compiled):
                     )
 
         self.binds[bindparam.key] = self.binds[name] = bindparam
+        if bindparam.isoutparam:
+            self.has_out_parameters = True
 
         if post_compile:
             if render_postcompile:
index b2ec32c131dc19261443cc1fabd5ef3f247b6b7a..2cb5f8390c42d920ea60230d9de389d5943b35b6 100644 (file)
@@ -3180,6 +3180,36 @@ class Select(
 
     @classmethod
     def _create_select(cls, *entities):
+        r"""Construct a new :class:`.Select` using the 2.x style API.
+
+        .. versionadded:: 2.0 - the :func:`.future.select` construct is
+           the same construct as the one returned by
+           :func:`.sql.expression.select`, except that the function only
+           accepts the "columns clause" entities up front; the rest of the
+           state of the SELECT should be built up using generative methods.
+
+        Similar functionality is also available via the
+        :meth:`.FromClause.select` method on any :class:`.FromClause`.
+
+        .. seealso::
+
+            :ref:`coretutorial_selecting` - Core Tutorial description of
+            :func:`.select`.
+
+        :param \*entities:
+          Entities to SELECT from.  For Core usage, this is typically a series
+          of :class:`.ColumnElement` and / or :class:`.FromClause`
+          objects which will form the columns clause of the resulting
+          statement.   For those objects that are instances of
+          :class:`.FromClause` (typically :class:`.Table` or :class:`.Alias`
+          objects), the :attr:`.FromClause.c` collection is extracted
+          to form a collection of :class:`.ColumnElement` objects.
+
+          This parameter will also accept :class:`.Text` constructs as
+          given, as well as ORM-mapped classes.
+
+        """
+
         self = cls.__new__(cls)
         self._raw_columns = [
             coercions.expect(roles.ColumnsClauseRole, ent)
@@ -3430,7 +3460,8 @@ class Select(
             "The select() function in SQLAlchemy 2.0 will accept a "
             "series of columns / tables and other entities only, "
             "passed positionally. For forwards compatibility, use the "
-            "sqlalchemy.future.select() construct."
+            "sqlalchemy.future.select() construct.",
+            stacklevel=4,
         )
 
         self._auto_correlate = correlate
index 0026b5f8c529cfcd930a0fbc664bd58d46a12dde..05a0fde49edc927522ff791c1667cae405ae905d 100644 (file)
@@ -289,7 +289,6 @@ def count_functions(variance=0.05):
     print(("Pstats calls: %d Expected %s" % (callcount, expected_count)))
     stats.sort_stats(_profile_stats.sort)
     stats.print_stats()
-
     if _profile_stats.force_write:
         _profile_stats.replace(callcount)
     elif expected_count:
index d77d13efac6bc02a7ca3b381ab277e0854ac8dcd..5186e189ca6ac52c82ebaa0922319839ae1cfe14 100644 (file)
@@ -51,13 +51,21 @@ class RowFetchTest(fixtures.TablesTest):
             [{"id": 1, "today": datetime.datetime(2006, 5, 12, 12, 0, 0)}],
         )
 
+    def test_via_attr(self):
+        row = config.db.execute(
+            self.tables.plain_pk.select().order_by(self.tables.plain_pk.c.id)
+        ).first()
+
+        eq_(row.id, 1)
+        eq_(row.data, "d1")
+
     def test_via_string(self):
         row = config.db.execute(
             self.tables.plain_pk.select().order_by(self.tables.plain_pk.c.id)
         ).first()
 
-        eq_(row["id"], 1)
-        eq_(row["data"], "d1")
+        eq_(row._mapping["id"], 1)
+        eq_(row._mapping["data"], "d1")
 
     def test_via_int(self):
         row = config.db.execute(
@@ -72,8 +80,8 @@ class RowFetchTest(fixtures.TablesTest):
             self.tables.plain_pk.select().order_by(self.tables.plain_pk.c.id)
         ).first()
 
-        eq_(row[self.tables.plain_pk.c.id], 1)
-        eq_(row[self.tables.plain_pk.c.data], "d1")
+        eq_(row._mapping[self.tables.plain_pk.c.id], 1)
+        eq_(row._mapping[self.tables.plain_pk.c.data], "d1")
 
     @requirements.duplicate_names_in_cursor_description
     def test_row_with_dupe_names(self):
@@ -102,7 +110,7 @@ class RowFetchTest(fixtures.TablesTest):
         s2 = select([datetable.c.id, s.label("somelabel")])
         row = config.db.execute(s2).first()
 
-        eq_(row["somelabel"], datetime.datetime(2006, 5, 12, 12, 0, 0))
+        eq_(row.somelabel, datetime.datetime(2006, 5, 12, 12, 0, 0))
 
 
 class PercentSchemaNamesTest(fixtures.TablesTest):
@@ -191,11 +199,11 @@ class PercentSchemaNamesTest(fixtures.TablesTest):
             row = config.db.execute(
                 table.select().order_by(table.c["percent%"])
             ).first()
-            eq_(row["percent%"], 5)
-            eq_(row["spaces % more spaces"], 12)
+            eq_(row._mapping["percent%"], 5)
+            eq_(row._mapping["spaces % more spaces"], 12)
 
-            eq_(row[table.c["percent%"]], 5)
-            eq_(row[table.c["spaces % more spaces"]], 12)
+            eq_(row._mapping[table.c["percent%"]], 5)
+            eq_(row._mapping[table.c["spaces % more spaces"]], 12)
 
         config.db.execute(
             percent_table.update().values(
index 08f543b4737f766d94087519cc1b93837dc59648..cc11e556c2e5660a0567a55e7d1f455f4f26d9a4 100644 (file)
@@ -31,6 +31,7 @@ def setup_filters():
         "ignore", category=DeprecationWarning, message=".*inspect.get.*argspec"
     )
 
+    # ignore 2.0 warnings unless we are explicitly testing for them
     warnings.filterwarnings("ignore", category=sa_exc.RemovedIn20Warning)
 
 
index 434c5cb79c8b30fbd9ba52194a2cc9384469b7a0..b0ceb802a453dfd8957ff07a38cd8efdf761f8fe 100644 (file)
@@ -23,8 +23,6 @@ from ._collections import IdentitySet  # noqa
 from ._collections import ImmutableContainer  # noqa
 from ._collections import immutabledict  # noqa
 from ._collections import ImmutableProperties  # noqa
-from ._collections import KeyedTuple  # noqa
-from ._collections import lightweight_named_tuple  # noqa
 from ._collections import LRUCache  # noqa
 from ._collections import ordered_column_set  # noqa
 from ._collections import OrderedDict  # noqa
index ac8d0aa822469b6e3d88d47d7857a4124c0c2004..2770cc2397d9cdf5340a3622fe25e3ddb8befd44 100644 (file)
@@ -24,108 +24,6 @@ from .compat import threading
 EMPTY_SET = frozenset()
 
 
-class AbstractKeyedTuple(tuple):
-    __slots__ = ()
-
-    def keys(self):
-        """Return a list of string key names for this :class:`.KeyedTuple`.
-
-        .. seealso::
-
-            :attr:`.KeyedTuple._fields`
-
-        """
-
-        return list(self._fields)
-
-
-class KeyedTuple(AbstractKeyedTuple):
-    """``tuple`` subclass that adds labeled names.
-
-    E.g.::
-
-        >>> k = KeyedTuple([1, 2, 3], labels=["one", "two", "three"])
-        >>> k.one
-        1
-        >>> k.two
-        2
-
-    Result rows returned by :class:`.Query` that contain multiple
-    ORM entities and/or column expressions make use of this
-    class to return rows.
-
-    The :class:`.KeyedTuple` exhibits similar behavior to the
-    ``collections.namedtuple()`` construct provided in the Python
-    standard library, however is architected very differently.
-    Unlike ``collections.namedtuple()``, :class:`.KeyedTuple` is
-    does not rely on creation of custom subtypes in order to represent
-    a new series of keys, instead each :class:`.KeyedTuple` instance
-    receives its list of keys in place.   The subtype approach
-    of ``collections.namedtuple()`` introduces significant complexity
-    and performance overhead, which is not necessary for the
-    :class:`.Query` object's use case.
-
-    .. seealso::
-
-        :ref:`ormtutorial_querying`
-
-    """
-
-    def __new__(cls, vals, labels=None):
-        t = tuple.__new__(cls, vals)
-        if labels:
-            t.__dict__.update(zip(labels, vals))
-        else:
-            labels = []
-        t.__dict__["_labels"] = labels
-        return t
-
-    @property
-    def _fields(self):
-        """Return a tuple of string key names for this :class:`.KeyedTuple`.
-
-        This method provides compatibility with ``collections.namedtuple()``.
-
-        .. seealso::
-
-            :meth:`.KeyedTuple.keys`
-
-        """
-        return tuple([l for l in self._labels if l is not None])
-
-    def __setattr__(self, key, value):
-        raise AttributeError("Can't set attribute: %s" % key)
-
-    def _asdict(self):
-        """Return the contents of this :class:`.KeyedTuple` as a dictionary.
-
-        This method provides compatibility with ``collections.namedtuple()``,
-        with the exception that the dictionary returned is **not** ordered.
-
-        """
-        return {key: self.__dict__[key] for key in self.keys()}
-
-
-class _LW(AbstractKeyedTuple):
-    __slots__ = ()
-
-    def __new__(cls, vals):
-        return tuple.__new__(cls, vals)
-
-    def __reduce__(self):
-        # for pickling, degrade down to the regular
-        # KeyedTuple, thus avoiding anonymous class pickling
-        # difficulties
-        return KeyedTuple, (list(self), self._real_fields)
-
-    def _asdict(self):
-        """Return the contents of this :class:`.KeyedTuple` as a dictionary."""
-
-        d = dict(zip(self._real_fields, self))
-        d.pop(None, None)
-        return d
-
-
 class ImmutableContainer(object):
     def _immutable(self, *arg, **kw):
         raise TypeError("%s object is immutable" % self.__class__.__name__)
@@ -965,35 +863,6 @@ class LRUCache(dict):
             self._mutex.release()
 
 
-_lw_tuples = LRUCache(100)
-
-
-def lightweight_named_tuple(name, fields):
-    hash_ = (name,) + tuple(fields)
-    tp_cls = _lw_tuples.get(hash_)
-    if tp_cls:
-        return tp_cls
-
-    tp_cls = type(
-        name,
-        (_LW,),
-        dict(
-            [
-                (field, _property_getters[idx])
-                for idx, field in enumerate(fields)
-                if field is not None
-            ]
-            + [("__slots__", ())]
-        ),
-    )
-
-    tp_cls._real_fields = fields
-    tp_cls._fields = tuple([f for f in fields if f is not None])
-
-    _lw_tuples[hash_] = tp_cls
-    return tp_cls
-
-
 class ScopedRegistry(object):
     """A Registry that can store one or multiple instances of a single
     class on the basis of a "scope" function.
index 87153511ff975a437a62019a9c2b0228e75c8032..8f21dae3df0297059c15b706a8e97a3ddf496879 100644 (file)
@@ -30,7 +30,7 @@ setenv=
     PYTHONPATH=
     PYTHONNOUSERSITE=1
     sqla_nocext: DISABLE_SQLALCHEMY_CEXT=1
-    cext: REQUIRE_SQLALCHEMY_CEXT=1
+    sqla_cext: REQUIRE_SQLALCHEMY_CEXT=1
     db_sqlite: SQLITE={env:TOX_SQLITE:--db sqlite}
     db_postgresql: POSTGRESQL={env:TOX_POSTGRESQL:--db postgresql}
     db_mysql: MYSQL={env:TOX_MYSQL:--db mysql --db pymysql}
index 51fcbde651240925fb2ebe24ea7856bc3baad3ee..87908f016a5c74e811cd05154e6435cde25fd275 100644 (file)
@@ -8,6 +8,7 @@ from sqlalchemy import String
 from sqlalchemy import Table
 from sqlalchemy import testing
 from sqlalchemy import Unicode
+from sqlalchemy.engine.result import LegacyRow
 from sqlalchemy.engine.result import Row
 from sqlalchemy.testing import AssertsExecutionResults
 from sqlalchemy.testing import eq_
@@ -158,6 +159,9 @@ class RowTest(fixtures.TestBase):
             def __init__(self):
                 pass
 
+            def _warn_for_nonint(self, arg):
+                pass
+
         metadata = MockMeta()
 
         keymap = {}
@@ -167,7 +171,7 @@ class RowTest(fixtures.TestBase):
             keymap[index] = (index, key)
         return row_cls(metadata, processors, keymap, row)
 
-    def _test_getitem_value_refcounts(self, seq_factory):
+    def _test_getitem_value_refcounts_legacy(self, seq_factory):
         col1, col2 = object(), object()
 
         def proc1(value):
@@ -178,7 +182,7 @@ class RowTest(fixtures.TestBase):
             [(col1, "a"), (col2, "b")],
             [proc1, None],
             seq_factory([value1, value2]),
-            Row,
+            LegacyRow,
         )
 
         v1_refcount = sys.getrefcount(value1)
@@ -194,8 +198,36 @@ class RowTest(fixtures.TestBase):
         eq_(sys.getrefcount(value1), v1_refcount)
         eq_(sys.getrefcount(value2), v2_refcount)
 
+    def _test_getitem_value_refcounts_new(self, seq_factory):
+        col1, col2 = object(), object()
+
+        def proc1(value):
+            return value
+
+        value1, value2 = "x", "y"
+        row = self._rowproxy_fixture(
+            [(col1, "a"), (col2, "b")],
+            [proc1, None],
+            seq_factory([value1, value2]),
+            Row,
+        )
+
+        v1_refcount = sys.getrefcount(value1)
+        v2_refcount = sys.getrefcount(value2)
+        for i in range(10):
+            row._mapping[col1]
+            row._mapping["a"]
+            row._mapping[col2]
+            row._mapping["b"]
+            row[0]
+            row[1]
+            row[0:2]
+        eq_(sys.getrefcount(value1), v1_refcount)
+        eq_(sys.getrefcount(value2), v2_refcount)
+
     def test_value_refcounts_pure_tuple(self):
-        self._test_getitem_value_refcounts(tuple)
+        self._test_getitem_value_refcounts_legacy(tuple)
+        self._test_getitem_value_refcounts_new(tuple)
 
     def test_value_refcounts_custom_seq(self):
         class CustomSeq(object):
@@ -208,4 +240,5 @@ class RowTest(fixtures.TestBase):
             def __iter__(self):
                 return iter(self.data)
 
-        self._test_getitem_value_refcounts(CustomSeq)
+        self._test_getitem_value_refcounts_legacy(CustomSeq)
+        self._test_getitem_value_refcounts_new(CustomSeq)
index abe9943927506fe5faf2c83a153a3722a3b07413..9f9cf7a94009d61e52cb5c263bad46f7f71d7d87 100644 (file)
@@ -440,9 +440,12 @@ class ZooMarkTest(replay_fixture.ReplayFixtureTest):
 
             # Edit
 
-            SDZ = engine.execute(
-                Zoo.select(Zoo.c.Name == "San Diego Zoo")
-            ).first()
+            SDZ = (
+                engine.execute(Zoo.select(Zoo.c.Name == "San Diego Zoo"))
+                .first()
+                ._mapping
+            )
+
             engine.execute(
                 Zoo.update(Zoo.c.ID == SDZ["ID"]),
                 Name="The San Diego Zoo",
@@ -453,9 +456,12 @@ class ZooMarkTest(replay_fixture.ReplayFixtureTest):
 
             # Test edits
 
-            SDZ = engine.execute(
-                Zoo.select(Zoo.c.Name == "The San Diego Zoo")
-            ).first()
+            SDZ = (
+                engine.execute(Zoo.select(Zoo.c.Name == "The San Diego Zoo"))
+                .first()
+                ._mapping
+            )
+
             assert SDZ["Founded"] == datetime.date(1900, 1, 1), SDZ["Founded"]
 
             # Change it back
@@ -470,9 +476,12 @@ class ZooMarkTest(replay_fixture.ReplayFixtureTest):
 
             # Test re-edits
 
-            SDZ = engine.execute(
-                Zoo.select(Zoo.c.Name == "San Diego Zoo")
-            ).first()
+            SDZ = (
+                engine.execute(Zoo.select(Zoo.c.Name == "San Diego Zoo"))
+                .first()
+                ._mapping
+            )
+
             assert SDZ["Founded"] == datetime.date(1935, 9, 13)
 
     def _baseline_7_multiview(self):
index 1f7c0cf623a3144c0dba49bc5fce51226b3dced8..48e464a01e5f4c93870df04e1eba5e16443e85ce 100644 (file)
@@ -9,6 +9,7 @@ from sqlalchemy import exc
 from sqlalchemy import sql
 from sqlalchemy import testing
 from sqlalchemy import util
+from sqlalchemy.engine import result
 from sqlalchemy.sql import column
 from sqlalchemy.sql.base import DedupeColumnCollection
 from sqlalchemy.testing import assert_raises
@@ -41,30 +42,30 @@ class _KeyedTupleTest(object):
         eq_(str(keyed_tuple), "()")
         eq_(len(keyed_tuple), 0)
 
-        eq_(list(keyed_tuple.keys()), [])
+        eq_(list(keyed_tuple._mapping.keys()), [])
         eq_(keyed_tuple._fields, ())
         eq_(keyed_tuple._asdict(), {})
 
-    def test_values_but_no_labels(self):
-        keyed_tuple = self._fixture([1, 2], [])
+    def test_values_none_labels(self):
+        keyed_tuple = self._fixture([1, 2], [None, None])
         eq_(str(keyed_tuple), "(1, 2)")
         eq_(len(keyed_tuple), 2)
 
-        eq_(list(keyed_tuple.keys()), [])
+        eq_(list(keyed_tuple._mapping.keys()), [])
         eq_(keyed_tuple._fields, ())
         eq_(keyed_tuple._asdict(), {})
 
         eq_(keyed_tuple[0], 1)
         eq_(keyed_tuple[1], 2)
 
-    def test_basic_creation(self):
+    def test_creation(self):
         keyed_tuple = self._fixture([1, 2], ["a", "b"])
         eq_(str(keyed_tuple), "(1, 2)")
-        eq_(list(keyed_tuple.keys()), ["a", "b"])
+        eq_(list(keyed_tuple._mapping.keys()), ["a", "b"])
         eq_(keyed_tuple._fields, ("a", "b"))
         eq_(keyed_tuple._asdict(), {"a": 1, "b": 2})
 
-    def test_basic_index_access(self):
+    def test_index_access(self):
         keyed_tuple = self._fixture([1, 2], ["a", "b"])
         eq_(keyed_tuple[0], 1)
         eq_(keyed_tuple[1], 2)
@@ -74,7 +75,11 @@ class _KeyedTupleTest(object):
 
         assert_raises(IndexError, should_raise)
 
-    def test_basic_attribute_access(self):
+    def test_slice_access(self):
+        keyed_tuple = self._fixture([1, 2], ["a", "b"])
+        eq_(keyed_tuple[0:2], (1, 2))
+
+    def test_attribute_access(self):
         keyed_tuple = self._fixture([1, 2], ["a", "b"])
         eq_(keyed_tuple.a, 1)
         eq_(keyed_tuple.b, 2)
@@ -84,11 +89,26 @@ class _KeyedTupleTest(object):
 
         assert_raises(AttributeError, should_raise)
 
+    def test_contains(self):
+        keyed_tuple = self._fixture(["x", "y"], ["a", "b"])
+
+        is_true("x" in keyed_tuple)
+        is_false("z" in keyed_tuple)
+
+        is_true("z" not in keyed_tuple)
+        is_false("x" not in keyed_tuple)
+
+        # we don't do keys
+        is_false("a" in keyed_tuple)
+        is_false("z" in keyed_tuple)
+        is_true("a" not in keyed_tuple)
+        is_true("z" not in keyed_tuple)
+
     def test_none_label(self):
         keyed_tuple = self._fixture([1, 2, 3], ["a", None, "b"])
         eq_(str(keyed_tuple), "(1, 2, 3)")
 
-        eq_(list(keyed_tuple.keys()), ["a", "b"])
+        eq_(list(keyed_tuple._mapping.keys()), ["a", "b"])
         eq_(keyed_tuple._fields, ("a", "b"))
         eq_(keyed_tuple._asdict(), {"a": 1, "b": 3})
 
@@ -105,7 +125,7 @@ class _KeyedTupleTest(object):
         keyed_tuple = self._fixture([1, 2, 3], ["a", "b", "b"])
         eq_(str(keyed_tuple), "(1, 2, 3)")
 
-        eq_(list(keyed_tuple.keys()), ["a", "b", "b"])
+        eq_(list(keyed_tuple._mapping.keys()), ["a", "b", "b"])
         eq_(keyed_tuple._fields, ("a", "b", "b"))
         eq_(keyed_tuple._asdict(), {"a": 1, "b": 3})
 
@@ -124,7 +144,8 @@ class _KeyedTupleTest(object):
 
         eq_(keyed_tuple.a, 1)
 
-        assert_raises(AttributeError, setattr, keyed_tuple, "a", 5)
+        # eh
+        # assert_raises(AttributeError, setattr, keyed_tuple, "a", 5)
 
         def should_raise():
             keyed_tuple[0] = 100
@@ -140,19 +161,14 @@ class _KeyedTupleTest(object):
 
             eq_(str(kt), "(1, 2, 3)")
 
-            eq_(list(kt.keys()), ["a", "b"])
+            eq_(list(kt._mapping.keys()), ["a", "b"])
             eq_(kt._fields, ("a", "b"))
             eq_(kt._asdict(), {"a": 1, "b": 3})
 
 
-class KeyedTupleTest(_KeyedTupleTest, fixtures.TestBase):
-    def _fixture(self, values, labels):
-        return util.KeyedTuple(values, labels)
-
-
 class LWKeyedTupleTest(_KeyedTupleTest, fixtures.TestBase):
     def _fixture(self, values, labels):
-        return util.lightweight_named_tuple("n", labels)(values)
+        return result.result_tuple(labels)(values)
 
 
 class WeakSequenceTest(fixtures.TestBase):
index be05dec7bcee5178f6da451922c195e716611ce7..26e7bb8d69aa7698d41113cd6f4b4db43cb4af51 100644 (file)
@@ -1357,8 +1357,8 @@ class ArrayRoundTripTest(object):
         )
         results = arrtable.select().execute().fetchall()
         eq_(len(results), 1)
-        eq_(results[0]["intarr"], [1, 2, 3])
-        eq_(results[0]["strarr"], [util.u("abc"), util.u("def")])
+        eq_(results[0].intarr, [1, 2, 3])
+        eq_(results[0].strarr, [util.u("abc"), util.u("def")])
 
     def test_insert_array_w_null(self):
         arrtable = self.tables.arrtable
@@ -1367,8 +1367,8 @@ class ArrayRoundTripTest(object):
         )
         results = arrtable.select().execute().fetchall()
         eq_(len(results), 1)
-        eq_(results[0]["intarr"], [1, None, 3])
-        eq_(results[0]["strarr"], [util.u("abc"), None])
+        eq_(results[0].intarr, [1, None, 3])
+        eq_(results[0].strarr, [util.u("abc"), None])
 
     def test_array_where(self):
         arrtable = self.tables.arrtable
@@ -1383,7 +1383,7 @@ class ArrayRoundTripTest(object):
             .fetchall()
         )
         eq_(len(results), 1)
-        eq_(results[0]["intarr"], [1, 2, 3])
+        eq_(results[0].intarr, [1, 2, 3])
 
     def test_array_concat(self):
         arrtable = self.tables.arrtable
@@ -1422,9 +1422,9 @@ class ArrayRoundTripTest(object):
             arrtable.select(order_by=[arrtable.c.intarr]).execute().fetchall()
         )
         eq_(len(results), 2)
-        eq_(results[0]["strarr"], [util.ue("m\xe4\xe4"), util.ue("m\xf6\xf6")])
+        eq_(results[0].strarr, [util.ue("m\xe4\xe4"), util.ue("m\xf6\xf6")])
         eq_(
-            results[1]["strarr"],
+            results[1].strarr,
             [[util.ue("m\xe4\xe4")], [util.ue("m\xf6\xf6")]],
         )
 
index 7675b8aa65b78f7785e3d8d08ca316e689edb848..01ef5f084cade3a2bdea9437468996ac78fc6489 100644 (file)
@@ -924,16 +924,16 @@ class AttachedDBTest(fixtures.TestBase):
 
         self.conn.execute(ct.insert(), {"id": 1, "name": "foo"})
         row = self.conn.execute(ct.select()).first()
-        eq_(row["id"], 1)
-        eq_(row["name"], "foo")
+        eq_(row._mapping["id"], 1)
+        eq_(row._mapping["name"], "foo")
 
     def test_col_targeting_union(self):
         ct = self._fixture()
 
         self.conn.execute(ct.insert(), {"id": 1, "name": "foo"})
         row = self.conn.execute(ct.select().union(ct.select())).first()
-        eq_(row["id"], 1)
-        eq_(row["name"], "foo")
+        eq_(row._mapping["id"], 1)
+        eq_(row._mapping["name"], "foo")
 
 
 class SQLTest(fixtures.TestBase, AssertsCompiledSQL):
index 21534d6bc9907d804c0c7d845a651bd32ffbeb1a..5c62165f9aa343e2ffa28b912f818a93e15dfd73 100644 (file)
@@ -539,10 +539,10 @@ class EagerTest3(fixtures.MappedTest):
         arb_result = arb_data.execute().fetchall()
 
         # order the result list descending based on 'max'
-        arb_result.sort(key=lambda a: a["max"], reverse=True)
+        arb_result.sort(key=lambda a: a._mapping["max"], reverse=True)
 
         # extract just the "data_id" from it
-        arb_result = [row["data_id"] for row in arb_result]
+        arb_result = [row._mapping["data_id"] for row in arb_result]
 
         arb_data = arb_data.alias("arb")
 
index cb41b384c06980f00f1b36b4b068360d54ddcb68..bf045223494431817a91edaa40b8b8c871ca2f02 100644 (file)
@@ -445,6 +445,30 @@ class DeprecatedQueryTest(_fixtures.FixtureTest, AssertsCompiledSQL):
                 [User(id=7), User(id=8), User(id=9), User(id=10)],
             )
 
+    def test_text_as_column(self):
+        User = self.classes.User
+
+        s = create_session()
+
+        # TODO: this works as of "use rowproxy for ORM keyed tuple"
+        # Ieb9085e9bcff564359095b754da9ae0af55679f0
+        # but im not sure how this relates to things here
+        q = s.query(User.id, text("users.name"))
+        self.assert_compile(
+            q, "SELECT users.id AS users_id, users.name FROM users"
+        )
+        eq_(q.all(), [(7, "jack"), (8, "ed"), (9, "fred"), (10, "chuck")])
+
+        # same here, this was "passing string names to Query.columns"
+        # deprecation message, that's gone here?
+        assert_raises_message(
+            exc.ArgumentError,
+            "Textual column expression 'name' should be explicitly",
+            s.query,
+            User.id,
+            "name",
+        )
+
     def test_query_as_scalar(self):
         User = self.classes.User
 
index 5e993a27f9062f2b384b7389667f0708a53d7c6b..1c1a2c4df35be1211a654bff82b9ed3d32be179e 100644 (file)
@@ -1,6 +1,7 @@
 from sqlalchemy import exc
 from sqlalchemy import select
 from sqlalchemy import testing
+from sqlalchemy.engine import result_tuple
 from sqlalchemy.orm import aliased
 from sqlalchemy.orm import loading
 from sqlalchemy.orm import mapper
@@ -10,7 +11,6 @@ from sqlalchemy.testing import mock
 from sqlalchemy.testing.assertions import assert_raises
 from sqlalchemy.testing.assertions import assert_raises_message
 from sqlalchemy.testing.assertions import eq_
-from sqlalchemy.util import KeyedTuple
 from . import _fixtures
 
 # class GetFromIdentityTest(_fixtures.FixtureTest):
@@ -146,7 +146,7 @@ class MergeResultTest(_fixtures.FixtureTest):
         it = loading.merge_result(q, collection)
         it = list(it)
         eq_([(x.id, y) for x, y in it], [(1, 1), (2, 2), (7, 7), (8, 8)])
-        eq_(list(it[0].keys()), ["User", "id"])
+        eq_(list(it[0]._mapping.keys()), ["User", "id"])
 
     def test_entity_col_mix_keyed_tuple(self):
         s, (u1, u2, u3, u4) = self._fixture()
@@ -154,14 +154,16 @@ class MergeResultTest(_fixtures.FixtureTest):
 
         q = s.query(User, User.id)
 
+        row = result_tuple(["User", "id"])
+
         def kt(*x):
-            return KeyedTuple(x, ["User", "id"])
+            return row(x)
 
         collection = [kt(u1, 1), kt(u2, 2), kt(u3, 7), kt(u4, 8)]
         it = loading.merge_result(q, collection)
         it = list(it)
         eq_([(x.id, y) for x, y in it], [(1, 1), (2, 2), (7, 7), (8, 8)])
-        eq_(list(it[0].keys()), ["User", "id"])
+        eq_(list(it[0]._mapping.keys()), ["User", "id"])
 
     def test_none_entity(self):
         s, (u1, u2, u3, u4) = self._fixture()
@@ -170,8 +172,10 @@ class MergeResultTest(_fixtures.FixtureTest):
         ua = aliased(User)
         q = s.query(User, ua)
 
+        row = result_tuple(["User", "useralias"])
+
         def kt(*x):
-            return KeyedTuple(x, ["User", "useralias"])
+            return row(x)
 
         collection = [kt(u1, u2), kt(u1, None), kt(u2, u3)]
         it = loading.merge_result(q, collection)
index 4da583a0c18bc4ac3800db1e1a9fcddbc78a03c0..5c4d1e22d166b97efad6f6f0928767c3487347a9 100644 (file)
@@ -756,14 +756,14 @@ class TupleLabelTest(_fixtures.FixtureTest):
                 if pickled is not False:
                     row = pickle.loads(pickle.dumps(row, pickled))
 
-                eq_(list(row.keys()), ["User", "Address"])
+                eq_(list(row._fields), ["User", "Address"])
                 eq_(row.User, row[0])
                 eq_(row.Address, row[1])
 
             for row in sess.query(User.name, User.id.label("foobar")):
                 if pickled is not False:
                     row = pickle.loads(pickle.dumps(row, pickled))
-                eq_(list(row.keys()), ["name", "foobar"])
+                eq_(list(row._fields), ["name", "foobar"])
                 eq_(row.name, row[0])
                 eq_(row.foobar, row[1])
 
@@ -772,7 +772,7 @@ class TupleLabelTest(_fixtures.FixtureTest):
             ):
                 if pickled is not False:
                     row = pickle.loads(pickle.dumps(row, pickled))
-                eq_(list(row.keys()), ["name", "foobar"])
+                eq_(list(row._fields), ["name", "foobar"])
                 eq_(row.name, row[0])
                 eq_(row.foobar, row[1])
 
@@ -784,7 +784,7 @@ class TupleLabelTest(_fixtures.FixtureTest):
             ):
                 if pickled is not False:
                     row = pickle.loads(pickle.dumps(row, pickled))
-                eq_(list(row.keys()), ["User"])
+                eq_(list(row._fields), ["User"])
                 eq_(row.User, row[0])
 
             oalias = aliased(Order, name="orders")
@@ -793,12 +793,12 @@ class TupleLabelTest(_fixtures.FixtureTest):
             ):
                 if pickled is not False:
                     row = pickle.loads(pickle.dumps(row, pickled))
-                eq_(list(row.keys()), ["User", "orders"])
+                eq_(list(row._fields), ["User", "orders"])
                 eq_(row.User, row[0])
                 eq_(row.orders, row[1])
 
             for row in sess.query(User.name + "hoho", User.name):
-                eq_(list(row.keys()), ["name"])
+                eq_(list(row._fields), ["name"])
                 eq_(row[0], row.name + "hoho")
 
             if pickled is not False:
index 882255cc862c958e29751b453ee4d4c0e2372907..aabee82ad58747f6ecf8ee897bcd73437a0e7f7e 100644 (file)
@@ -111,6 +111,7 @@ class OnlyReturnTuplesTest(QueryTest):
         is_false(query.is_single_entity)
         row = query.first()
         assert isinstance(row, collections_abc.Sequence)
+        assert isinstance(row._mapping, collections_abc.Mapping)
 
     def test_multiple_entity_false(self):
         User = self.classes.User
@@ -118,6 +119,7 @@ class OnlyReturnTuplesTest(QueryTest):
         is_false(query.is_single_entity)
         row = query.first()
         assert isinstance(row, collections_abc.Sequence)
+        assert isinstance(row._mapping, collections_abc.Mapping)
 
     def test_multiple_entity_true(self):
         User = self.classes.User
@@ -125,6 +127,7 @@ class OnlyReturnTuplesTest(QueryTest):
         is_false(query.is_single_entity)
         row = query.first()
         assert isinstance(row, collections_abc.Sequence)
+        assert isinstance(row._mapping, collections_abc.Mapping)
 
 
 class RowTupleTest(QueryTest):
@@ -141,8 +144,48 @@ class RowTupleTest(QueryTest):
             .filter(User.id == 7)
             .first()
         )
-        assert row.id == 7
-        assert row.uname == "jack"
+
+        eq_(row.id, 7)
+        eq_(row.uname, "jack")
+
+    def test_entity_mapping_access(self):
+        User, users = self.classes.User, self.tables.users
+        Address, addresses = self.classes.Address, self.tables.addresses
+
+        mapper(User, users, properties={"addresses": relationship(Address)})
+        mapper(Address, addresses)
+
+        s = Session()
+
+        row = s.query(User).only_return_tuples(True).first()
+        eq_(row._mapping[User], row[0])
+
+        row = s.query(User, Address).join(User.addresses).first()
+        eq_(row._mapping[User], row[0])
+        eq_(row._mapping[Address], row[1])
+        eq_(row._mapping["User"], row[0])
+        eq_(row._mapping["Address"], row[1])
+
+        u1 = aliased(User)
+        row = s.query(u1).only_return_tuples(True).first()
+        eq_(row._mapping[u1], row[0])
+        assert_raises(KeyError, lambda: row._mapping[User])
+
+        row = (
+            s.query(User.id, Address.email_address)
+            .join(User.addresses)
+            .first()
+        )
+
+        eq_(row._mapping[User.id], row[0])
+        eq_(row._mapping[User.id], row[0])
+        eq_(row._mapping["id"], row[0])
+        eq_(row._mapping[Address.email_address], row[1])
+        eq_(row._mapping["email_address"], row[1])
+        eq_(row._mapping[users.c.id], row[0])
+        eq_(row._mapping[addresses.c.email_address], row[1])
+        assert_raises(KeyError, lambda: row._mapping[User.name])
+        assert_raises(KeyError, lambda: row._mapping[users.c.name])
 
     def test_deep_entity(self):
         users, User = (self.tables.users, self.classes.User)
@@ -3908,7 +3951,7 @@ class DistinctTest(QueryTest, AssertsCompiledSQL):
             ],
         )
         for row in q:
-            eq_(row.keys(), ["id", "foo", "id"])
+            eq_(row._mapping.keys(), ["id", "foo", "id"])
 
     def test_columns_augmented_sql_one(self):
         User, Address = self.classes.User, self.classes.Address
index f5cc4836f2af5f07876de026db0eba45dbab442d..58eb6233925894d8a0f141b01a54076b77f390f5 100644 (file)
@@ -1504,7 +1504,7 @@ class OneToManyTest(_fixtures.FixtureTest):
         session.flush()
 
         user_rows = users.select(users.c.id.in_([u.id])).execute().fetchall()
-        eq_(list(user_rows[0].values()), [u.id, "one2manytester"])
+        eq_(list(user_rows[0]), [u.id, "one2manytester"])
 
         address_rows = (
             addresses.select(
@@ -1514,8 +1514,8 @@ class OneToManyTest(_fixtures.FixtureTest):
             .execute()
             .fetchall()
         )
-        eq_(list(address_rows[0].values()), [a2.id, u.id, "lala@test.org"])
-        eq_(list(address_rows[1].values()), [a.id, u.id, "one2many@test.org"])
+        eq_(list(address_rows[0]), [a2.id, u.id, "lala@test.org"])
+        eq_(list(address_rows[1]), [a.id, u.id, "one2many@test.org"])
 
         userid = u.id
         addressid = a2.id
@@ -1527,10 +1527,7 @@ class OneToManyTest(_fixtures.FixtureTest):
         address_rows = (
             addresses.select(addresses.c.id == addressid).execute().fetchall()
         )
-        eq_(
-            list(address_rows[0].values()),
-            [addressid, userid, "somethingnew@foo.com"],
-        )
+        eq_(list(address_rows[0]), [addressid, userid, "somethingnew@foo.com"])
         self.assert_(u.id == userid and a2.id == addressid)
 
     def test_one_to_many_2(self):
@@ -2065,11 +2062,11 @@ class SaveTest(_fixtures.FixtureTest):
         user_rows = (
             users.select(users.c.id.in_([u.foo_id])).execute().fetchall()
         )
-        eq_(list(user_rows[0].values()), [u.foo_id, "multitester"])
+        eq_(list(user_rows[0]), [u.foo_id, "multitester"])
         address_rows = (
             addresses.select(addresses.c.id.in_([u.id])).execute().fetchall()
         )
-        eq_(list(address_rows[0].values()), [u.id, u.foo_id, "multi@test.org"])
+        eq_(list(address_rows[0]), [u.id, u.foo_id, "multi@test.org"])
 
         u.email = "lala@hey.com"
         u.name = "imnew"
@@ -2078,11 +2075,11 @@ class SaveTest(_fixtures.FixtureTest):
         user_rows = (
             users.select(users.c.id.in_([u.foo_id])).execute().fetchall()
         )
-        eq_(list(user_rows[0].values()), [u.foo_id, "imnew"])
+        eq_(list(user_rows[0]), [u.foo_id, "imnew"])
         address_rows = (
             addresses.select(addresses.c.id.in_([u.id])).execute().fetchall()
         )
-        eq_(list(address_rows[0].values()), [u.id, u.foo_id, "lala@hey.com"])
+        eq_(list(address_rows[0]), [u.id, u.foo_id, "lala@hey.com"])
 
         session.expunge_all()
         u = session.query(User).get(id_)
@@ -2252,7 +2249,7 @@ class ManyToOneTest(_fixtures.FixtureTest):
             sa.and_(users.c.id == addresses.c.user_id, addresses.c.id == a.id),
         ).execute()
         eq_(
-            list(result.first().values()),
+            list(result.first()),
             [a.user.id, "asdf8d", a.id, a.user_id, "theater@foo.com"],
         )
 
index 501ea699e5781f309bb1d600eb7a261933e0e422..4244e126636a1b1cb2080dc0e21061e284dd6793 100644 (file)
 
 # TEST: test.aaa_profiling.test_compiler.CompileTest.test_insert
 
-test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_mssql_pyodbc_dbapiunicode_cextensions 66
-test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_mssql_pyodbc_dbapiunicode_nocextensions 66
-test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_mysql_pymysql_dbapiunicode_cextensions 66
-test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_mysql_pymysql_dbapiunicode_nocextensions 66
-test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_oracle_cx_oracle_dbapiunicode_cextensions 66
-test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_oracle_cx_oracle_dbapiunicode_nocextensions 66
-test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_postgresql_psycopg2_dbapiunicode_cextensions 65
-test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 66
-test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_sqlite_pysqlite_dbapiunicode_cextensions 65
-test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 65
-test.aaa_profiling.test_compiler.CompileTest.test_insert 3.7_mssql_pyodbc_dbapiunicode_cextensions 71
-test.aaa_profiling.test_compiler.CompileTest.test_insert 3.7_mssql_pyodbc_dbapiunicode_nocextensions 71
-test.aaa_profiling.test_compiler.CompileTest.test_insert 3.7_mysql_mysqldb_dbapiunicode_cextensions 71
-test.aaa_profiling.test_compiler.CompileTest.test_insert 3.7_mysql_mysqldb_dbapiunicode_nocextensions 71
-test.aaa_profiling.test_compiler.CompileTest.test_insert 3.7_mysql_pymysql_dbapiunicode_cextensions 71
-test.aaa_profiling.test_compiler.CompileTest.test_insert 3.7_mysql_pymysql_dbapiunicode_nocextensions 71
-test.aaa_profiling.test_compiler.CompileTest.test_insert 3.7_oracle_cx_oracle_dbapiunicode_cextensions 71
-test.aaa_profiling.test_compiler.CompileTest.test_insert 3.7_oracle_cx_oracle_dbapiunicode_nocextensions 71
-test.aaa_profiling.test_compiler.CompileTest.test_insert 3.7_postgresql_psycopg2_dbapiunicode_cextensions 71
-test.aaa_profiling.test_compiler.CompileTest.test_insert 3.7_postgresql_psycopg2_dbapiunicode_nocextensions 71
-test.aaa_profiling.test_compiler.CompileTest.test_insert 3.7_sqlite_pysqlite_dbapiunicode_cextensions 70
-test.aaa_profiling.test_compiler.CompileTest.test_insert 3.7_sqlite_pysqlite_dbapiunicode_nocextensions 70
+test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_mssql_pyodbc_dbapiunicode_cextensions 67
+test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_mssql_pyodbc_dbapiunicode_nocextensions 67
+test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_mysql_mysqldb_dbapiunicode_cextensions 67
+test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_mysql_mysqldb_dbapiunicode_nocextensions 67
+test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_mysql_pymysql_dbapiunicode_cextensions 67
+test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_mysql_pymysql_dbapiunicode_nocextensions 67
+test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_oracle_cx_oracle_dbapiunicode_cextensions 67
+test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_oracle_cx_oracle_dbapiunicode_nocextensions 67
+test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_postgresql_psycopg2_dbapiunicode_cextensions 67
+test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 67
+test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_sqlite_pysqlite_dbapiunicode_cextensions 67
+test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 67
+test.aaa_profiling.test_compiler.CompileTest.test_insert 3.7_mssql_pyodbc_dbapiunicode_cextensions 72
+test.aaa_profiling.test_compiler.CompileTest.test_insert 3.7_mssql_pyodbc_dbapiunicode_nocextensions 72
+test.aaa_profiling.test_compiler.CompileTest.test_insert 3.7_mysql_mysqldb_dbapiunicode_cextensions 72
+test.aaa_profiling.test_compiler.CompileTest.test_insert 3.7_mysql_mysqldb_dbapiunicode_nocextensions 72
+test.aaa_profiling.test_compiler.CompileTest.test_insert 3.7_mysql_pymysql_dbapiunicode_cextensions 72
+test.aaa_profiling.test_compiler.CompileTest.test_insert 3.7_mysql_pymysql_dbapiunicode_nocextensions 72
+test.aaa_profiling.test_compiler.CompileTest.test_insert 3.7_oracle_cx_oracle_dbapiunicode_cextensions 72
+test.aaa_profiling.test_compiler.CompileTest.test_insert 3.7_oracle_cx_oracle_dbapiunicode_nocextensions 72
+test.aaa_profiling.test_compiler.CompileTest.test_insert 3.7_postgresql_psycopg2_dbapiunicode_cextensions 72
+test.aaa_profiling.test_compiler.CompileTest.test_insert 3.7_postgresql_psycopg2_dbapiunicode_nocextensions 72
+test.aaa_profiling.test_compiler.CompileTest.test_insert 3.7_sqlite_pysqlite_dbapiunicode_cextensions 72
+test.aaa_profiling.test_compiler.CompileTest.test_insert 3.7_sqlite_pysqlite_dbapiunicode_nocextensions 72
 
 # TEST: test.aaa_profiling.test_compiler.CompileTest.test_select
 
-test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_mssql_pyodbc_dbapiunicode_cextensions 161,161
-test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_mssql_pyodbc_dbapiunicode_nocextensions 161
-test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_mysql_pymysql_dbapiunicode_cextensions 163,163
-test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_mysql_pymysql_dbapiunicode_nocextensions 161
-test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_oracle_cx_oracle_dbapiunicode_cextensions 163,161
-test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_oracle_cx_oracle_dbapiunicode_nocextensions 161
-test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_postgresql_psycopg2_dbapiunicode_cextensions 163
-test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 161
-test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_sqlite_pysqlite_dbapiunicode_cextensions 163
-test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 163
-test.aaa_profiling.test_compiler.CompileTest.test_select 3.7_mssql_pyodbc_dbapiunicode_cextensions 174
-test.aaa_profiling.test_compiler.CompileTest.test_select 3.7_mssql_pyodbc_dbapiunicode_nocextensions 174
-test.aaa_profiling.test_compiler.CompileTest.test_select 3.7_mysql_mysqldb_dbapiunicode_cextensions 174
-test.aaa_profiling.test_compiler.CompileTest.test_select 3.7_mysql_mysqldb_dbapiunicode_nocextensions 174
-test.aaa_profiling.test_compiler.CompileTest.test_select 3.7_mysql_pymysql_dbapiunicode_cextensions 174
-test.aaa_profiling.test_compiler.CompileTest.test_select 3.7_mysql_pymysql_dbapiunicode_nocextensions 174
-test.aaa_profiling.test_compiler.CompileTest.test_select 3.7_oracle_cx_oracle_dbapiunicode_cextensions 174
-test.aaa_profiling.test_compiler.CompileTest.test_select 3.7_oracle_cx_oracle_dbapiunicode_nocextensions 174
-test.aaa_profiling.test_compiler.CompileTest.test_select 3.7_postgresql_psycopg2_dbapiunicode_cextensions 174
-test.aaa_profiling.test_compiler.CompileTest.test_select 3.7_postgresql_psycopg2_dbapiunicode_nocextensions 174
-test.aaa_profiling.test_compiler.CompileTest.test_select 3.7_sqlite_pysqlite_dbapiunicode_cextensions 176
-test.aaa_profiling.test_compiler.CompileTest.test_select 3.7_sqlite_pysqlite_dbapiunicode_nocextensions 176
+test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_mssql_pyodbc_dbapiunicode_cextensions 164
+test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_mssql_pyodbc_dbapiunicode_nocextensions 164
+test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_mysql_mysqldb_dbapiunicode_cextensions 164
+test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_mysql_mysqldb_dbapiunicode_nocextensions 164
+test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_mysql_pymysql_dbapiunicode_cextensions 164
+test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_mysql_pymysql_dbapiunicode_nocextensions 164
+test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_oracle_cx_oracle_dbapiunicode_cextensions 164
+test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_oracle_cx_oracle_dbapiunicode_nocextensions 164
+test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_postgresql_psycopg2_dbapiunicode_cextensions 164
+test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 164
+test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_sqlite_pysqlite_dbapiunicode_cextensions 164
+test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 164
+test.aaa_profiling.test_compiler.CompileTest.test_select 3.7_mssql_pyodbc_dbapiunicode_cextensions 177
+test.aaa_profiling.test_compiler.CompileTest.test_select 3.7_mssql_pyodbc_dbapiunicode_nocextensions 177
+test.aaa_profiling.test_compiler.CompileTest.test_select 3.7_mysql_mysqldb_dbapiunicode_cextensions 177
+test.aaa_profiling.test_compiler.CompileTest.test_select 3.7_mysql_mysqldb_dbapiunicode_nocextensions 177
+test.aaa_profiling.test_compiler.CompileTest.test_select 3.7_mysql_pymysql_dbapiunicode_cextensions 177
+test.aaa_profiling.test_compiler.CompileTest.test_select 3.7_mysql_pymysql_dbapiunicode_nocextensions 177
+test.aaa_profiling.test_compiler.CompileTest.test_select 3.7_oracle_cx_oracle_dbapiunicode_cextensions 177
+test.aaa_profiling.test_compiler.CompileTest.test_select 3.7_oracle_cx_oracle_dbapiunicode_nocextensions 177
+test.aaa_profiling.test_compiler.CompileTest.test_select 3.7_postgresql_psycopg2_dbapiunicode_cextensions 177
+test.aaa_profiling.test_compiler.CompileTest.test_select 3.7_postgresql_psycopg2_dbapiunicode_nocextensions 177
+test.aaa_profiling.test_compiler.CompileTest.test_select 3.7_sqlite_pysqlite_dbapiunicode_cextensions 177
+test.aaa_profiling.test_compiler.CompileTest.test_select 3.7_sqlite_pysqlite_dbapiunicode_nocextensions 177
 
 # TEST: test.aaa_profiling.test_compiler.CompileTest.test_select_labels
 
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_mssql_pyodbc_dbapiunicode_cextensions 194,194
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_mssql_pyodbc_dbapiunicode_nocextensions 194
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_mysql_pymysql_dbapiunicode_cextensions 196,196
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_mysql_pymysql_dbapiunicode_nocextensions 194
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_oracle_cx_oracle_dbapiunicode_cextensions 196,194
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_oracle_cx_oracle_dbapiunicode_nocextensions 194
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_postgresql_psycopg2_dbapiunicode_cextensions 194
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 194
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_sqlite_pysqlite_dbapiunicode_cextensions 194
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 194
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.7_mssql_pyodbc_dbapiunicode_cextensions 207
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.7_mssql_pyodbc_dbapiunicode_nocextensions 207
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.7_mysql_mysqldb_dbapiunicode_cextensions 207
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.7_mysql_mysqldb_dbapiunicode_nocextensions 207
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.7_mysql_pymysql_dbapiunicode_cextensions 207
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.7_mysql_pymysql_dbapiunicode_nocextensions 207
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.7_oracle_cx_oracle_dbapiunicode_cextensions 207
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.7_oracle_cx_oracle_dbapiunicode_nocextensions 207
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.7_postgresql_psycopg2_dbapiunicode_cextensions 207
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.7_postgresql_psycopg2_dbapiunicode_nocextensions 207
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.7_sqlite_pysqlite_dbapiunicode_cextensions 207
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.7_sqlite_pysqlite_dbapiunicode_nocextensions 207
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_mssql_pyodbc_dbapiunicode_cextensions 195
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_mssql_pyodbc_dbapiunicode_nocextensions 195
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_mysql_mysqldb_dbapiunicode_cextensions 195
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_mysql_mysqldb_dbapiunicode_nocextensions 195
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_mysql_pymysql_dbapiunicode_cextensions 195
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_mysql_pymysql_dbapiunicode_nocextensions 195
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_oracle_cx_oracle_dbapiunicode_cextensions 195
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_oracle_cx_oracle_dbapiunicode_nocextensions 195
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_postgresql_psycopg2_dbapiunicode_cextensions 195
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 195
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_sqlite_pysqlite_dbapiunicode_cextensions 195
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 195
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.7_mssql_pyodbc_dbapiunicode_cextensions 208
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.7_mssql_pyodbc_dbapiunicode_nocextensions 208
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.7_mysql_mysqldb_dbapiunicode_cextensions 208
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.7_mysql_mysqldb_dbapiunicode_nocextensions 208
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.7_mysql_pymysql_dbapiunicode_cextensions 208
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.7_mysql_pymysql_dbapiunicode_nocextensions 208
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.7_oracle_cx_oracle_dbapiunicode_cextensions 208
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.7_oracle_cx_oracle_dbapiunicode_nocextensions 208
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.7_postgresql_psycopg2_dbapiunicode_cextensions 208
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.7_postgresql_psycopg2_dbapiunicode_nocextensions 208
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.7_sqlite_pysqlite_dbapiunicode_cextensions 208
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.7_sqlite_pysqlite_dbapiunicode_nocextensions 208
 
 # TEST: test.aaa_profiling.test_compiler.CompileTest.test_update
 
-test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_mssql_pyodbc_dbapiunicode_cextensions 82,80
-test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_mssql_pyodbc_dbapiunicode_nocextensions 78
-test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_mysql_pymysql_dbapiunicode_cextensions 80,80
-test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_mysql_pymysql_dbapiunicode_nocextensions 76
-test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_oracle_cx_oracle_dbapiunicode_cextensions 82,80
-test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_oracle_cx_oracle_dbapiunicode_nocextensions 78
-test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_postgresql_psycopg2_dbapiunicode_cextensions 77
-test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 78
-test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_sqlite_pysqlite_dbapiunicode_cextensions 77
-test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 77
-test.aaa_profiling.test_compiler.CompileTest.test_update 3.7_mssql_pyodbc_dbapiunicode_cextensions 81
-test.aaa_profiling.test_compiler.CompileTest.test_update 3.7_mssql_pyodbc_dbapiunicode_nocextensions 81
-test.aaa_profiling.test_compiler.CompileTest.test_update 3.7_mysql_mysqldb_dbapiunicode_cextensions 81
-test.aaa_profiling.test_compiler.CompileTest.test_update 3.7_mysql_mysqldb_dbapiunicode_nocextensions 81
-test.aaa_profiling.test_compiler.CompileTest.test_update 3.7_mysql_pymysql_dbapiunicode_cextensions 79
-test.aaa_profiling.test_compiler.CompileTest.test_update 3.7_mysql_pymysql_dbapiunicode_nocextensions 79
-test.aaa_profiling.test_compiler.CompileTest.test_update 3.7_oracle_cx_oracle_dbapiunicode_cextensions 81
-test.aaa_profiling.test_compiler.CompileTest.test_update 3.7_oracle_cx_oracle_dbapiunicode_nocextensions 81
-test.aaa_profiling.test_compiler.CompileTest.test_update 3.7_postgresql_psycopg2_dbapiunicode_cextensions 81
-test.aaa_profiling.test_compiler.CompileTest.test_update 3.7_postgresql_psycopg2_dbapiunicode_nocextensions 81
-test.aaa_profiling.test_compiler.CompileTest.test_update 3.7_sqlite_pysqlite_dbapiunicode_cextensions 80
-test.aaa_profiling.test_compiler.CompileTest.test_update 3.7_sqlite_pysqlite_dbapiunicode_nocextensions 80
+test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_mssql_pyodbc_dbapiunicode_cextensions 79
+test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_mssql_pyodbc_dbapiunicode_nocextensions 79
+test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_mysql_mysqldb_dbapiunicode_cextensions 79
+test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_mysql_mysqldb_dbapiunicode_nocextensions 79
+test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_mysql_pymysql_dbapiunicode_cextensions 77
+test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_mysql_pymysql_dbapiunicode_nocextensions 77
+test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_oracle_cx_oracle_dbapiunicode_cextensions 79
+test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_oracle_cx_oracle_dbapiunicode_nocextensions 79
+test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_postgresql_psycopg2_dbapiunicode_cextensions 79
+test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 79
+test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_sqlite_pysqlite_dbapiunicode_cextensions 79
+test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 79
+test.aaa_profiling.test_compiler.CompileTest.test_update 3.7_mssql_pyodbc_dbapiunicode_cextensions 82
+test.aaa_profiling.test_compiler.CompileTest.test_update 3.7_mssql_pyodbc_dbapiunicode_nocextensions 82
+test.aaa_profiling.test_compiler.CompileTest.test_update 3.7_mysql_mysqldb_dbapiunicode_cextensions 82
+test.aaa_profiling.test_compiler.CompileTest.test_update 3.7_mysql_mysqldb_dbapiunicode_nocextensions 82
+test.aaa_profiling.test_compiler.CompileTest.test_update 3.7_mysql_pymysql_dbapiunicode_cextensions 80
+test.aaa_profiling.test_compiler.CompileTest.test_update 3.7_mysql_pymysql_dbapiunicode_nocextensions 80
+test.aaa_profiling.test_compiler.CompileTest.test_update 3.7_oracle_cx_oracle_dbapiunicode_cextensions 82
+test.aaa_profiling.test_compiler.CompileTest.test_update 3.7_oracle_cx_oracle_dbapiunicode_nocextensions 82
+test.aaa_profiling.test_compiler.CompileTest.test_update 3.7_postgresql_psycopg2_dbapiunicode_cextensions 82
+test.aaa_profiling.test_compiler.CompileTest.test_update 3.7_postgresql_psycopg2_dbapiunicode_nocextensions 82
+test.aaa_profiling.test_compiler.CompileTest.test_update 3.7_sqlite_pysqlite_dbapiunicode_cextensions 82
+test.aaa_profiling.test_compiler.CompileTest.test_update 3.7_sqlite_pysqlite_dbapiunicode_nocextensions 82
 
 # TEST: test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause
 
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_mssql_pyodbc_dbapiunicode_cextensions 156,156
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_mssql_pyodbc_dbapiunicode_nocextensions 157
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_mysql_pymysql_dbapiunicode_cextensions 158,158
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_mysql_pymysql_dbapiunicode_nocextensions 157
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_oracle_cx_oracle_dbapiunicode_cextensions 158,156
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_oracle_cx_oracle_dbapiunicode_nocextensions 157
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_postgresql_psycopg2_dbapiunicode_cextensions 157
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 157
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_sqlite_pysqlite_dbapiunicode_cextensions 157
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 157
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.7_mssql_pyodbc_dbapiunicode_cextensions 162
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.7_mssql_pyodbc_dbapiunicode_nocextensions 162
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.7_mysql_mysqldb_dbapiunicode_cextensions 162
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.7_mysql_mysqldb_dbapiunicode_nocextensions 162
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.7_mysql_pymysql_dbapiunicode_cextensions 162
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.7_mysql_pymysql_dbapiunicode_nocextensions 162
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.7_oracle_cx_oracle_dbapiunicode_cextensions 162
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.7_oracle_cx_oracle_dbapiunicode_nocextensions 162
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.7_postgresql_psycopg2_dbapiunicode_cextensions 162
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.7_postgresql_psycopg2_dbapiunicode_nocextensions 162
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.7_sqlite_pysqlite_dbapiunicode_cextensions 162
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.7_sqlite_pysqlite_dbapiunicode_nocextensions 162
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_mssql_pyodbc_dbapiunicode_cextensions 159
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_mssql_pyodbc_dbapiunicode_nocextensions 159
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_mysql_mysqldb_dbapiunicode_cextensions 159
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_mysql_mysqldb_dbapiunicode_nocextensions 159
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_mysql_pymysql_dbapiunicode_cextensions 159
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_mysql_pymysql_dbapiunicode_nocextensions 159
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_oracle_cx_oracle_dbapiunicode_cextensions 159
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_oracle_cx_oracle_dbapiunicode_nocextensions 159
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_postgresql_psycopg2_dbapiunicode_cextensions 159
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 159
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_sqlite_pysqlite_dbapiunicode_cextensions 159
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 159
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.7_mssql_pyodbc_dbapiunicode_cextensions 164
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.7_mssql_pyodbc_dbapiunicode_nocextensions 164
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.7_mysql_mysqldb_dbapiunicode_cextensions 164
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.7_mysql_mysqldb_dbapiunicode_nocextensions 164
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.7_mysql_pymysql_dbapiunicode_cextensions 164
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.7_mysql_pymysql_dbapiunicode_nocextensions 164
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.7_oracle_cx_oracle_dbapiunicode_cextensions 164
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.7_oracle_cx_oracle_dbapiunicode_nocextensions 164
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.7_postgresql_psycopg2_dbapiunicode_cextensions 164
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.7_postgresql_psycopg2_dbapiunicode_nocextensions 164
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.7_sqlite_pysqlite_dbapiunicode_cextensions 164
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.7_sqlite_pysqlite_dbapiunicode_nocextensions 164
 
 # TEST: test.aaa_profiling.test_misc.CacheKeyTest.test_statement_one
 
@@ -152,73 +162,73 @@ test.aaa_profiling.test_misc.EnumTest.test_create_enum_from_pep_435_w_expensive_
 
 # TEST: test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_bundle_w_annotation
 
-test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_bundle_w_annotation 2.7_sqlite_pysqlite_dbapiunicode_cextensions 49105
-test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_bundle_w_annotation 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 54205
-test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_bundle_w_annotation 3.7_sqlite_pysqlite_dbapiunicode_cextensions 51005
-test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_bundle_w_annotation 3.7_sqlite_pysqlite_dbapiunicode_nocextensions 56405
+test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_bundle_w_annotation 2.7_sqlite_pysqlite_dbapiunicode_cextensions 49005
+test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_bundle_w_annotation 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 64705
+test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_bundle_w_annotation 3.7_sqlite_pysqlite_dbapiunicode_cextensions 50605
+test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_bundle_w_annotation 3.7_sqlite_pysqlite_dbapiunicode_nocextensions 64405
 
 # TEST: test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_bundle_wo_annotation
 
-test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_bundle_wo_annotation 2.7_sqlite_pysqlite_dbapiunicode_cextensions 48605
-test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_bundle_wo_annotation 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 53705
-test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_bundle_wo_annotation 3.7_sqlite_pysqlite_dbapiunicode_cextensions 50505
-test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_bundle_wo_annotation 3.7_sqlite_pysqlite_dbapiunicode_nocextensions 55905
+test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_bundle_wo_annotation 2.7_sqlite_pysqlite_dbapiunicode_cextensions 48505
+test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_bundle_wo_annotation 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 64205
+test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_bundle_wo_annotation 3.7_sqlite_pysqlite_dbapiunicode_cextensions 50105
+test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_bundle_wo_annotation 3.7_sqlite_pysqlite_dbapiunicode_nocextensions 63905
 
 # TEST: test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_entity_w_annotations
 
-test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_entity_w_annotations 2.7_sqlite_pysqlite_dbapiunicode_cextensions 46905
-test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_entity_w_annotations 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 52005
-test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_entity_w_annotations 3.7_sqlite_pysqlite_dbapiunicode_cextensions 48205
-test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_entity_w_annotations 3.7_sqlite_pysqlite_dbapiunicode_nocextensions 53605
+test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_entity_w_annotations 2.7_sqlite_pysqlite_dbapiunicode_cextensions 47405
+test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_entity_w_annotations 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 59805
+test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_entity_w_annotations 3.7_sqlite_pysqlite_dbapiunicode_cextensions 48505
+test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_entity_w_annotations 3.7_sqlite_pysqlite_dbapiunicode_nocextensions 59105
 
 # TEST: test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_entity_wo_annotations
 
-test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_entity_wo_annotations 2.7_sqlite_pysqlite_dbapiunicode_cextensions 46405
-test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_entity_wo_annotations 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 51505
-test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_entity_wo_annotations 3.7_sqlite_pysqlite_dbapiunicode_cextensions 47705
-test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_entity_wo_annotations 3.7_sqlite_pysqlite_dbapiunicode_nocextensions 53105
+test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_entity_wo_annotations 2.7_sqlite_pysqlite_dbapiunicode_cextensions 46805
+test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_entity_wo_annotations 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 59205
+test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_entity_wo_annotations 3.7_sqlite_pysqlite_dbapiunicode_cextensions 47905
+test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_entity_wo_annotations 3.7_sqlite_pysqlite_dbapiunicode_nocextensions 58505
 
 # TEST: test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_bundle
 
-test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_bundle 2.7_sqlite_pysqlite_dbapiunicode_cextensions 39505
-test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_bundle 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 44105
-test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_bundle 3.7_sqlite_pysqlite_dbapiunicode_cextensions 41705
-test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_bundle 3.7_sqlite_pysqlite_dbapiunicode_nocextensions 46605
+test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_bundle 2.7_sqlite_pysqlite_dbapiunicode_cextensions 40205
+test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_bundle 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 46305
+test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_bundle 3.7_sqlite_pysqlite_dbapiunicode_cextensions 42405
+test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_bundle 3.7_sqlite_pysqlite_dbapiunicode_nocextensions 48805
 
 # TEST: test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_bundle_w_annotations
 
-test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_bundle_w_annotations 2.7_sqlite_pysqlite_dbapiunicode_cextensions 46905
-test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_bundle_w_annotations 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 52005
-test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_bundle_w_annotations 3.7_sqlite_pysqlite_dbapiunicode_cextensions 48205
-test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_bundle_w_annotations 3.7_sqlite_pysqlite_dbapiunicode_nocextensions 53605
+test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_bundle_w_annotations 2.7_sqlite_pysqlite_dbapiunicode_cextensions 47405
+test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_bundle_w_annotations 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 59805
+test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_bundle_w_annotations 3.7_sqlite_pysqlite_dbapiunicode_cextensions 48505
+test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_bundle_w_annotations 3.7_sqlite_pysqlite_dbapiunicode_nocextensions 59105
 
 # TEST: test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_bundle_wo_annotations
 
-test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_bundle_wo_annotations 2.7_sqlite_pysqlite_dbapiunicode_cextensions 46405
-test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_bundle_wo_annotations 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 51505
-test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_bundle_wo_annotations 3.7_sqlite_pysqlite_dbapiunicode_cextensions 47705
-test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_bundle_wo_annotations 3.7_sqlite_pysqlite_dbapiunicode_nocextensions 53105
+test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_bundle_wo_annotations 2.7_sqlite_pysqlite_dbapiunicode_cextensions 46805
+test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_bundle_wo_annotations 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 59205
+test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_bundle_wo_annotations 3.7_sqlite_pysqlite_dbapiunicode_cextensions 47905
+test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_bundle_wo_annotations 3.7_sqlite_pysqlite_dbapiunicode_nocextensions 58505
 
 # TEST: test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_entity_w_annotations
 
-test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_entity_w_annotations 2.7_sqlite_pysqlite_dbapiunicode_cextensions 26505
-test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_entity_w_annotations 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 28505
-test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_entity_w_annotations 3.7_sqlite_pysqlite_dbapiunicode_cextensions 28605
-test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_entity_w_annotations 3.7_sqlite_pysqlite_dbapiunicode_nocextensions 30805
+test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_entity_w_annotations 2.7_sqlite_pysqlite_dbapiunicode_cextensions 26905
+test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_entity_w_annotations 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 30705
+test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_entity_w_annotations 3.7_sqlite_pysqlite_dbapiunicode_cextensions 28805
+test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_entity_w_annotations 3.7_sqlite_pysqlite_dbapiunicode_nocextensions 32705
 
 # TEST: test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_entity_wo_annotations
 
-test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_entity_wo_annotations 2.7_sqlite_pysqlite_dbapiunicode_cextensions 26005
-test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_entity_wo_annotations 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 28005
-test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_entity_wo_annotations 3.7_sqlite_pysqlite_dbapiunicode_cextensions 28105
-test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_entity_wo_annotations 3.7_sqlite_pysqlite_dbapiunicode_nocextensions 30305
+test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_entity_wo_annotations 2.7_sqlite_pysqlite_dbapiunicode_cextensions 26305
+test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_entity_wo_annotations 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 30105
+test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_entity_wo_annotations 3.7_sqlite_pysqlite_dbapiunicode_cextensions 28205
+test.aaa_profiling.test_orm.AnnotatedOverheadTest.test_no_entity_wo_annotations 3.7_sqlite_pysqlite_dbapiunicode_nocextensions 32105
 
 # TEST: test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set
 
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 2.7_sqlite_pysqlite_dbapiunicode_cextensions 3807
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 3807
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.7_sqlite_pysqlite_dbapiunicode_cextensions 3928
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.7_sqlite_pysqlite_dbapiunicode_nocextensions 3928
+test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 2.7_sqlite_pysqlite_dbapiunicode_cextensions 3812
+test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 3812
+test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.7_sqlite_pysqlite_dbapiunicode_cextensions 3933
+test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.7_sqlite_pysqlite_dbapiunicode_nocextensions 3933
 
 # TEST: test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove
 
@@ -257,17 +267,17 @@ test.aaa_profiling.test_orm.BranchedOptionTest.test_query_opts_unbound_branching
 
 # TEST: test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline
 
-test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 2.7_sqlite_pysqlite_dbapiunicode_cextensions 17102
-test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 30108
-test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.7_sqlite_pysqlite_dbapiunicode_cextensions 17129
-test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.7_sqlite_pysqlite_dbapiunicode_nocextensions 30138
+test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 2.7_sqlite_pysqlite_dbapiunicode_cextensions 17182
+test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 38188
+test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.7_sqlite_pysqlite_dbapiunicode_cextensions 17209
+test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.7_sqlite_pysqlite_dbapiunicode_nocextensions 38218
 
 # TEST: test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols
 
-test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 2.7_sqlite_pysqlite_dbapiunicode_cextensions 23249
-test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 30255
-test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.7_sqlite_pysqlite_dbapiunicode_cextensions 23289
-test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.7_sqlite_pysqlite_dbapiunicode_nocextensions 30298
+test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 2.7_sqlite_pysqlite_dbapiunicode_cextensions 23250
+test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 32256
+test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.7_sqlite_pysqlite_dbapiunicode_cextensions 23290
+test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.7_sqlite_pysqlite_dbapiunicode_nocextensions 32299
 
 # TEST: test.aaa_profiling.test_orm.JoinConditionTest.test_a_to_b_aliased
 
@@ -299,17 +309,17 @@ test.aaa_profiling.test_orm.JoinConditionTest.test_a_to_d_aliased 3.7_sqlite_pys
 
 # TEST: test.aaa_profiling.test_orm.JoinedEagerLoadTest.test_build_query
 
-test.aaa_profiling.test_orm.JoinedEagerLoadTest.test_build_query 2.7_sqlite_pysqlite_dbapiunicode_cextensions 466578
-test.aaa_profiling.test_orm.JoinedEagerLoadTest.test_build_query 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 466573
-test.aaa_profiling.test_orm.JoinedEagerLoadTest.test_build_query 3.7_sqlite_pysqlite_dbapiunicode_cextensions 497244
-test.aaa_profiling.test_orm.JoinedEagerLoadTest.test_build_query 3.7_sqlite_pysqlite_dbapiunicode_nocextensions 497244
+test.aaa_profiling.test_orm.JoinedEagerLoadTest.test_build_query 2.7_sqlite_pysqlite_dbapiunicode_cextensions 458478
+test.aaa_profiling.test_orm.JoinedEagerLoadTest.test_build_query 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 458483
+test.aaa_profiling.test_orm.JoinedEagerLoadTest.test_build_query 3.7_sqlite_pysqlite_dbapiunicode_cextensions 489144
+test.aaa_profiling.test_orm.JoinedEagerLoadTest.test_build_query 3.7_sqlite_pysqlite_dbapiunicode_nocextensions 489144
 
 # TEST: test.aaa_profiling.test_orm.JoinedEagerLoadTest.test_fetch_results
 
-test.aaa_profiling.test_orm.JoinedEagerLoadTest.test_fetch_results 2.7_sqlite_pysqlite_dbapiunicode_cextensions 457181
-test.aaa_profiling.test_orm.JoinedEagerLoadTest.test_fetch_results 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 475681
-test.aaa_profiling.test_orm.JoinedEagerLoadTest.test_fetch_results 3.7_sqlite_pysqlite_dbapiunicode_cextensions 461991
-test.aaa_profiling.test_orm.JoinedEagerLoadTest.test_fetch_results 3.7_sqlite_pysqlite_dbapiunicode_nocextensions 480791
+test.aaa_profiling.test_orm.JoinedEagerLoadTest.test_fetch_results 2.7_sqlite_pysqlite_dbapiunicode_cextensions 458245
+test.aaa_profiling.test_orm.JoinedEagerLoadTest.test_fetch_results 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 488845
+test.aaa_profiling.test_orm.JoinedEagerLoadTest.test_fetch_results 3.7_sqlite_pysqlite_dbapiunicode_cextensions 463655
+test.aaa_profiling.test_orm.JoinedEagerLoadTest.test_fetch_results 3.7_sqlite_pysqlite_dbapiunicode_nocextensions 493955
 
 # TEST: test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity
 
@@ -320,24 +330,24 @@ test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_
 
 # TEST: test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity
 
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.7_sqlite_pysqlite_dbapiunicode_cextensions 88227
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 91279
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.7_sqlite_pysqlite_dbapiunicode_cextensions 90052
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.7_sqlite_pysqlite_dbapiunicode_nocextensions 93306
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.7_sqlite_pysqlite_dbapiunicode_cextensions 90023
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 93725
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.7_sqlite_pysqlite_dbapiunicode_cextensions 91798
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.7_sqlite_pysqlite_dbapiunicode_nocextensions 95802
 
 # TEST: test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks
 
-test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.7_sqlite_pysqlite_dbapiunicode_cextensions 18356
-test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 18706
-test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.7_sqlite_pysqlite_dbapiunicode_cextensions 18981
-test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.7_sqlite_pysqlite_dbapiunicode_nocextensions 19417
+test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.7_sqlite_pysqlite_dbapiunicode_cextensions 18540
+test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 19006
+test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.7_sqlite_pysqlite_dbapiunicode_cextensions 19167
+test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.7_sqlite_pysqlite_dbapiunicode_nocextensions 19719
 
 # TEST: test.aaa_profiling.test_orm.MergeTest.test_merge_load
 
-test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_sqlite_pysqlite_dbapiunicode_cextensions 1009
-test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 1036
-test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.7_sqlite_pysqlite_dbapiunicode_cextensions 1044
-test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.7_sqlite_pysqlite_dbapiunicode_nocextensions 1075
+test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_sqlite_pysqlite_dbapiunicode_cextensions 1027
+test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 1061
+test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.7_sqlite_pysqlite_dbapiunicode_cextensions 1062
+test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.7_sqlite_pysqlite_dbapiunicode_nocextensions 1100
 
 # TEST: test.aaa_profiling.test_orm.MergeTest.test_merge_no_load
 
@@ -348,24 +358,24 @@ test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.7_sqlite_pysqlite_dba
 
 # TEST: test.aaa_profiling.test_orm.QueryTest.test_query_cols
 
-test.aaa_profiling.test_orm.QueryTest.test_query_cols 2.7_sqlite_pysqlite_dbapiunicode_cextensions 5764
-test.aaa_profiling.test_orm.QueryTest.test_query_cols 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 6414
-test.aaa_profiling.test_orm.QueryTest.test_query_cols 3.7_sqlite_pysqlite_dbapiunicode_cextensions 5962
-test.aaa_profiling.test_orm.QueryTest.test_query_cols 3.7_sqlite_pysqlite_dbapiunicode_nocextensions 6632
+test.aaa_profiling.test_orm.QueryTest.test_query_cols 2.7_sqlite_pysqlite_dbapiunicode_cextensions 5816
+test.aaa_profiling.test_orm.QueryTest.test_query_cols 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 7096
+test.aaa_profiling.test_orm.QueryTest.test_query_cols 3.7_sqlite_pysqlite_dbapiunicode_cextensions 5994
+test.aaa_profiling.test_orm.QueryTest.test_query_cols 3.7_sqlite_pysqlite_dbapiunicode_nocextensions 7284
 
 # TEST: test.aaa_profiling.test_orm.SelectInEagerLoadTest.test_round_trip_results
 
-test.aaa_profiling.test_orm.SelectInEagerLoadTest.test_round_trip_results 2.7_sqlite_pysqlite_dbapiunicode_cextensions 166153
-test.aaa_profiling.test_orm.SelectInEagerLoadTest.test_round_trip_results 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 172857
-test.aaa_profiling.test_orm.SelectInEagerLoadTest.test_round_trip_results 3.7_sqlite_pysqlite_dbapiunicode_cextensions 171502
-test.aaa_profiling.test_orm.SelectInEagerLoadTest.test_round_trip_results 3.7_sqlite_pysqlite_dbapiunicode_nocextensions 178710
+test.aaa_profiling.test_orm.SelectInEagerLoadTest.test_round_trip_results 2.7_sqlite_pysqlite_dbapiunicode_cextensions 173671
+test.aaa_profiling.test_orm.SelectInEagerLoadTest.test_round_trip_results 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 195075
+test.aaa_profiling.test_orm.SelectInEagerLoadTest.test_round_trip_results 3.7_sqlite_pysqlite_dbapiunicode_cextensions 177890
+test.aaa_profiling.test_orm.SelectInEagerLoadTest.test_round_trip_results 3.7_sqlite_pysqlite_dbapiunicode_nocextensions 197698
 
 # TEST: test.aaa_profiling.test_orm.SessionTest.test_expire_lots
 
-test.aaa_profiling.test_orm.SessionTest.test_expire_lots 2.7_sqlite_pysqlite_dbapiunicode_cextensions 1153
-test.aaa_profiling.test_orm.SessionTest.test_expire_lots 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 1130
-test.aaa_profiling.test_orm.SessionTest.test_expire_lots 3.7_sqlite_pysqlite_dbapiunicode_cextensions 1271
-test.aaa_profiling.test_orm.SessionTest.test_expire_lots 3.7_sqlite_pysqlite_dbapiunicode_nocextensions 1264
+test.aaa_profiling.test_orm.SessionTest.test_expire_lots 2.7_sqlite_pysqlite_dbapiunicode_cextensions 1158
+test.aaa_profiling.test_orm.SessionTest.test_expire_lots 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 1124
+test.aaa_profiling.test_orm.SessionTest.test_expire_lots 3.7_sqlite_pysqlite_dbapiunicode_cextensions 1265
+test.aaa_profiling.test_orm.SessionTest.test_expire_lots 3.7_sqlite_pysqlite_dbapiunicode_nocextensions 1243
 
 # TEST: test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect
 
@@ -383,58 +393,64 @@ test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.7_sqlite_pysqli
 
 # TEST: test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute
 
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_mssql_pyodbc_dbapiunicode_cextensions 47
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_mssql_pyodbc_dbapiunicode_nocextensions 52
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_mysql_pymysql_dbapiunicode_cextensions 47
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_mysql_pymysql_dbapiunicode_nocextensions 52
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_oracle_cx_oracle_dbapiunicode_cextensions 47
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_oracle_cx_oracle_dbapiunicode_nocextensions 52
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_postgresql_psycopg2_dbapiunicode_cextensions 48
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 52
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_sqlite_pysqlite_dbapiunicode_cextensions 48
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 52
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.7_mssql_pyodbc_dbapiunicode_cextensions 52
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.7_mssql_pyodbc_dbapiunicode_nocextensions 56
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.7_mysql_mysqldb_dbapiunicode_cextensions 52
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.7_mysql_mysqldb_dbapiunicode_nocextensions 56
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.7_mysql_pymysql_dbapiunicode_cextensions 52
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.7_mysql_pymysql_dbapiunicode_nocextensions 56
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.7_oracle_cx_oracle_dbapiunicode_cextensions 52
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.7_oracle_cx_oracle_dbapiunicode_nocextensions 56
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.7_postgresql_psycopg2_dbapiunicode_cextensions 52
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.7_postgresql_psycopg2_dbapiunicode_nocextensions 56
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.7_sqlite_pysqlite_dbapiunicode_cextensions 52
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.7_sqlite_pysqlite_dbapiunicode_nocextensions 56
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_mssql_pyodbc_dbapiunicode_cextensions 53
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_mssql_pyodbc_dbapiunicode_nocextensions 57
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_mysql_mysqldb_dbapiunicode_cextensions 53
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_mysql_mysqldb_dbapiunicode_nocextensions 57
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_mysql_pymysql_dbapiunicode_cextensions 53
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_mysql_pymysql_dbapiunicode_nocextensions 57
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_oracle_cx_oracle_dbapiunicode_cextensions 53
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_oracle_cx_oracle_dbapiunicode_nocextensions 57
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_postgresql_psycopg2_dbapiunicode_cextensions 53
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 57
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_sqlite_pysqlite_dbapiunicode_cextensions 53
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 57
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.7_mssql_pyodbc_dbapiunicode_cextensions 57
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.7_mssql_pyodbc_dbapiunicode_nocextensions 61
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.7_mysql_mysqldb_dbapiunicode_cextensions 57
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.7_mysql_mysqldb_dbapiunicode_nocextensions 61
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.7_mysql_pymysql_dbapiunicode_cextensions 57
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.7_mysql_pymysql_dbapiunicode_nocextensions 61
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.7_oracle_cx_oracle_dbapiunicode_cextensions 57
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.7_oracle_cx_oracle_dbapiunicode_nocextensions 61
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.7_postgresql_psycopg2_dbapiunicode_cextensions 57
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.7_postgresql_psycopg2_dbapiunicode_nocextensions 61
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.7_sqlite_pysqlite_dbapiunicode_cextensions 57
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.7_sqlite_pysqlite_dbapiunicode_nocextensions 61
 
 # TEST: test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute
 
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_mssql_pyodbc_dbapiunicode_cextensions 86
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_mssql_pyodbc_dbapiunicode_nocextensions 92
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_mysql_pymysql_dbapiunicode_cextensions 86
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_mysql_pymysql_dbapiunicode_nocextensions 92
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_oracle_cx_oracle_dbapiunicode_cextensions 86
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_oracle_cx_oracle_dbapiunicode_nocextensions 92
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_postgresql_psycopg2_dbapiunicode_cextensions 86
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 92
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_sqlite_pysqlite_dbapiunicode_cextensions 86
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 90
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.7_mssql_pyodbc_dbapiunicode_cextensions 90
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.7_mssql_pyodbc_dbapiunicode_nocextensions 94
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.7_mysql_mysqldb_dbapiunicode_cextensions 90
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.7_mysql_mysqldb_dbapiunicode_nocextensions 94
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.7_mysql_pymysql_dbapiunicode_cextensions 90
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.7_mysql_pymysql_dbapiunicode_nocextensions 94
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.7_oracle_cx_oracle_dbapiunicode_cextensions 90
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.7_oracle_cx_oracle_dbapiunicode_nocextensions 94
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.7_postgresql_psycopg2_dbapiunicode_cextensions 90
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.7_postgresql_psycopg2_dbapiunicode_nocextensions 94
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.7_sqlite_pysqlite_dbapiunicode_cextensions 88
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.7_sqlite_pysqlite_dbapiunicode_nocextensions 92
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_mssql_pyodbc_dbapiunicode_cextensions 91
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_mssql_pyodbc_dbapiunicode_nocextensions 95
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_mysql_mysqldb_dbapiunicode_cextensions 91
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_mysql_mysqldb_dbapiunicode_nocextensions 95
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_mysql_pymysql_dbapiunicode_cextensions 91
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_mysql_pymysql_dbapiunicode_nocextensions 95
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_oracle_cx_oracle_dbapiunicode_cextensions 91
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_oracle_cx_oracle_dbapiunicode_nocextensions 95
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_postgresql_psycopg2_dbapiunicode_cextensions 91
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 95
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_sqlite_pysqlite_dbapiunicode_cextensions 91
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 95
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.7_mssql_pyodbc_dbapiunicode_cextensions 93
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.7_mssql_pyodbc_dbapiunicode_nocextensions 97
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.7_mysql_mysqldb_dbapiunicode_cextensions 93
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.7_mysql_mysqldb_dbapiunicode_nocextensions 97
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.7_mysql_pymysql_dbapiunicode_cextensions 93
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.7_mysql_pymysql_dbapiunicode_nocextensions 97
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.7_oracle_cx_oracle_dbapiunicode_cextensions 93
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.7_oracle_cx_oracle_dbapiunicode_nocextensions 97
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.7_postgresql_psycopg2_dbapiunicode_cextensions 93
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.7_postgresql_psycopg2_dbapiunicode_nocextensions 97
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.7_sqlite_pysqlite_dbapiunicode_cextensions 93
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.7_sqlite_pysqlite_dbapiunicode_nocextensions 97
 
 # TEST: test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile
 
 test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_mssql_pyodbc_dbapiunicode_cextensions 15
 test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_mssql_pyodbc_dbapiunicode_nocextensions 15
+test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_mysql_mysqldb_dbapiunicode_cextensions 15
+test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_mysql_mysqldb_dbapiunicode_nocextensions 15
 test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_mysql_pymysql_dbapiunicode_cextensions 15
 test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_mysql_pymysql_dbapiunicode_nocextensions 15
 test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_oracle_cx_oracle_dbapiunicode_cextensions 15
@@ -458,112 +474,122 @@ test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.7
 
 # TEST: test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string
 
-test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string 2.7_mssql_pyodbc_dbapiunicode_cextensions 256
-test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string 2.7_mssql_pyodbc_dbapiunicode_nocextensions 6260
-test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string 2.7_mysql_pymysql_dbapiunicode_cextensions 122257
-test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string 2.7_mysql_pymysql_dbapiunicode_nocextensions 128259
-test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string 2.7_oracle_cx_oracle_dbapiunicode_cextensions 375
-test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string 2.7_oracle_cx_oracle_dbapiunicode_nocextensions 36419
-test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string 2.7_postgresql_psycopg2_dbapiunicode_cextensions 277
-test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 6293
-test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string 2.7_sqlite_pysqlite_dbapiunicode_cextensions 246
-test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 6268
-test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string 3.7_mssql_pyodbc_dbapiunicode_cextensions 245
-test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string 3.7_mssql_pyodbc_dbapiunicode_nocextensions 6249
-test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string 3.7_mysql_mysqldb_dbapiunicode_cextensions 281
-test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string 3.7_mysql_mysqldb_dbapiunicode_nocextensions 6285
-test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string 3.7_mysql_pymysql_dbapiunicode_cextensions 88034
-test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string 3.7_mysql_pymysql_dbapiunicode_nocextensions 94038
-test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string 3.7_oracle_cx_oracle_dbapiunicode_cextensions 334
-test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string 3.7_oracle_cx_oracle_dbapiunicode_nocextensions 6338
-test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string 3.7_postgresql_psycopg2_dbapiunicode_cextensions 269
-test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string 3.7_postgresql_psycopg2_dbapiunicode_nocextensions 6273
-test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string 3.7_sqlite_pysqlite_dbapiunicode_cextensions 240
-test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string 3.7_sqlite_pysqlite_dbapiunicode_nocextensions 6244
+test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string 2.7_mssql_pyodbc_dbapiunicode_cextensions 272
+test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string 2.7_mssql_pyodbc_dbapiunicode_nocextensions 6274
+test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string 2.7_mysql_mysqldb_dbapiunicode_cextensions 314
+test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string 2.7_mysql_mysqldb_dbapiunicode_nocextensions 6336
+test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string 2.7_mysql_pymysql_dbapiunicode_cextensions 122270
+test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string 2.7_mysql_pymysql_dbapiunicode_nocextensions 128272
+test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string 2.7_oracle_cx_oracle_dbapiunicode_cextensions 381
+test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string 2.7_oracle_cx_oracle_dbapiunicode_nocextensions 36423
+test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string 2.7_postgresql_psycopg2_dbapiunicode_cextensions 285
+test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 6307
+test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string 2.7_sqlite_pysqlite_dbapiunicode_cextensions 253
+test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 6275
+test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string 3.7_mssql_pyodbc_dbapiunicode_cextensions 257
+test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string 3.7_mssql_pyodbc_dbapiunicode_nocextensions 6261
+test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string 3.7_mysql_mysqldb_dbapiunicode_cextensions 290
+test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string 3.7_mysql_mysqldb_dbapiunicode_nocextensions 6294
+test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string 3.7_mysql_pymysql_dbapiunicode_cextensions 88045
+test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string 3.7_mysql_pymysql_dbapiunicode_nocextensions 94049
+test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string 3.7_oracle_cx_oracle_dbapiunicode_cextensions 346
+test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string 3.7_oracle_cx_oracle_dbapiunicode_nocextensions 6350
+test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string 3.7_postgresql_psycopg2_dbapiunicode_cextensions 281
+test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string 3.7_postgresql_psycopg2_dbapiunicode_nocextensions 6285
+test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string 3.7_sqlite_pysqlite_dbapiunicode_cextensions 247
+test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string 3.7_sqlite_pysqlite_dbapiunicode_nocextensions 6251
 
 # TEST: test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode
 
-test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode 2.7_mssql_pyodbc_dbapiunicode_cextensions 256
-test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode 2.7_mssql_pyodbc_dbapiunicode_nocextensions 6260
-test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode 2.7_mysql_pymysql_dbapiunicode_cextensions 122257
-test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode 2.7_mysql_pymysql_dbapiunicode_nocextensions 128259
-test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode 2.7_oracle_cx_oracle_dbapiunicode_cextensions 375
-test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode 2.7_oracle_cx_oracle_dbapiunicode_nocextensions 36419
-test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode 2.7_postgresql_psycopg2_dbapiunicode_cextensions 277
-test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 6293
-test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode 2.7_sqlite_pysqlite_dbapiunicode_cextensions 246
-test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 6268
-test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode 3.7_mssql_pyodbc_dbapiunicode_cextensions 245
-test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode 3.7_mssql_pyodbc_dbapiunicode_nocextensions 6249
-test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode 3.7_mysql_mysqldb_dbapiunicode_cextensions 281
-test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode 3.7_mysql_mysqldb_dbapiunicode_nocextensions 6285
-test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode 3.7_mysql_pymysql_dbapiunicode_cextensions 88034
-test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode 3.7_mysql_pymysql_dbapiunicode_nocextensions 94038
-test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode 3.7_oracle_cx_oracle_dbapiunicode_cextensions 334
-test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode 3.7_oracle_cx_oracle_dbapiunicode_nocextensions 6338
-test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode 3.7_postgresql_psycopg2_dbapiunicode_cextensions 269
-test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode 3.7_postgresql_psycopg2_dbapiunicode_nocextensions 6273
-test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode 3.7_sqlite_pysqlite_dbapiunicode_cextensions 240
-test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode 3.7_sqlite_pysqlite_dbapiunicode_nocextensions 6244
+test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode 2.7_mssql_pyodbc_dbapiunicode_cextensions 272
+test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode 2.7_mssql_pyodbc_dbapiunicode_nocextensions 6274
+test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode 2.7_mysql_mysqldb_dbapiunicode_cextensions 314
+test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode 2.7_mysql_mysqldb_dbapiunicode_nocextensions 6336
+test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode 2.7_mysql_pymysql_dbapiunicode_cextensions 122270
+test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode 2.7_mysql_pymysql_dbapiunicode_nocextensions 128272
+test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode 2.7_oracle_cx_oracle_dbapiunicode_cextensions 381
+test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode 2.7_oracle_cx_oracle_dbapiunicode_nocextensions 36423
+test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode 2.7_postgresql_psycopg2_dbapiunicode_cextensions 285
+test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 6307
+test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode 2.7_sqlite_pysqlite_dbapiunicode_cextensions 253
+test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 6275
+test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode 3.7_mssql_pyodbc_dbapiunicode_cextensions 257
+test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode 3.7_mssql_pyodbc_dbapiunicode_nocextensions 6261
+test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode 3.7_mysql_mysqldb_dbapiunicode_cextensions 290
+test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode 3.7_mysql_mysqldb_dbapiunicode_nocextensions 6294
+test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode 3.7_mysql_pymysql_dbapiunicode_cextensions 88045
+test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode 3.7_mysql_pymysql_dbapiunicode_nocextensions 94049
+test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode 3.7_oracle_cx_oracle_dbapiunicode_cextensions 346
+test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode 3.7_oracle_cx_oracle_dbapiunicode_nocextensions 6350
+test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode 3.7_postgresql_psycopg2_dbapiunicode_cextensions 281
+test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode 3.7_postgresql_psycopg2_dbapiunicode_nocextensions 6285
+test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode 3.7_sqlite_pysqlite_dbapiunicode_cextensions 247
+test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode 3.7_sqlite_pysqlite_dbapiunicode_nocextensions 6251
 
 # TEST: test.aaa_profiling.test_resultset.ResultSetTest.test_string
 
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_mssql_pyodbc_dbapiunicode_cextensions 526
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_mssql_pyodbc_dbapiunicode_nocextensions 6520
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_mysql_pymysql_dbapiunicode_cextensions 122496
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_mysql_pymysql_dbapiunicode_nocextensions 128488
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_oracle_cx_oracle_dbapiunicode_cextensions 555
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_oracle_cx_oracle_dbapiunicode_nocextensions 36569
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_postgresql_psycopg2_dbapiunicode_cextensions 500
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 6491
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_sqlite_pysqlite_dbapiunicode_cextensions 455
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 6457
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.7_mssql_pyodbc_dbapiunicode_cextensions 519
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.7_mssql_pyodbc_dbapiunicode_nocextensions 6523
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.7_mysql_mysqldb_dbapiunicode_cextensions 523
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.7_mysql_mysqldb_dbapiunicode_nocextensions 6527
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.7_mysql_pymysql_dbapiunicode_cextensions 88277
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.7_mysql_pymysql_dbapiunicode_nocextensions 94281
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.7_oracle_cx_oracle_dbapiunicode_cextensions 538
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.7_oracle_cx_oracle_dbapiunicode_nocextensions 6542
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.7_postgresql_psycopg2_dbapiunicode_cextensions 511
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.7_postgresql_psycopg2_dbapiunicode_nocextensions 6515
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.7_sqlite_pysqlite_dbapiunicode_cextensions 472
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.7_sqlite_pysqlite_dbapiunicode_nocextensions 6476
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_mssql_pyodbc_dbapiunicode_cextensions 534
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_mssql_pyodbc_dbapiunicode_nocextensions 6536
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_mysql_mysqldb_dbapiunicode_cextensions 539
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_mysql_mysqldb_dbapiunicode_nocextensions 6541
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_mysql_pymysql_dbapiunicode_cextensions 122505
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_mysql_pymysql_dbapiunicode_nocextensions 128507
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_oracle_cx_oracle_dbapiunicode_cextensions 565
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_oracle_cx_oracle_dbapiunicode_nocextensions 36587
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_postgresql_psycopg2_dbapiunicode_cextensions 509
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 6511
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_sqlite_pysqlite_dbapiunicode_cextensions 463
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 6465
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.7_mssql_pyodbc_dbapiunicode_cextensions 533
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.7_mssql_pyodbc_dbapiunicode_nocextensions 6537
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.7_mysql_mysqldb_dbapiunicode_cextensions 538
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.7_mysql_mysqldb_dbapiunicode_nocextensions 6542
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.7_mysql_pymysql_dbapiunicode_cextensions 88294
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.7_mysql_pymysql_dbapiunicode_nocextensions 94298
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.7_oracle_cx_oracle_dbapiunicode_cextensions 564
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.7_oracle_cx_oracle_dbapiunicode_nocextensions 6568
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.7_postgresql_psycopg2_dbapiunicode_cextensions 529
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.7_postgresql_psycopg2_dbapiunicode_nocextensions 6533
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.7_sqlite_pysqlite_dbapiunicode_cextensions 480
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.7_sqlite_pysqlite_dbapiunicode_nocextensions 6484
 
 # TEST: test.aaa_profiling.test_resultset.ResultSetTest.test_unicode
 
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_mssql_pyodbc_dbapiunicode_cextensions 526
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_mssql_pyodbc_dbapiunicode_nocextensions 6520
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_mysql_pymysql_dbapiunicode_cextensions 122496
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_mysql_pymysql_dbapiunicode_nocextensions 128488
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_oracle_cx_oracle_dbapiunicode_cextensions 555
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_oracle_cx_oracle_dbapiunicode_nocextensions 36569
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_postgresql_psycopg2_dbapiunicode_cextensions 500
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 6491
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_sqlite_pysqlite_dbapiunicode_cextensions 455
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 6457
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.7_mssql_pyodbc_dbapiunicode_cextensions 519
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.7_mssql_pyodbc_dbapiunicode_nocextensions 6523
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.7_mysql_mysqldb_dbapiunicode_cextensions 523
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.7_mysql_mysqldb_dbapiunicode_nocextensions 6527
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.7_mysql_pymysql_dbapiunicode_cextensions 88277
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.7_mysql_pymysql_dbapiunicode_nocextensions 94281
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.7_oracle_cx_oracle_dbapiunicode_cextensions 538
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.7_oracle_cx_oracle_dbapiunicode_nocextensions 6542
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.7_postgresql_psycopg2_dbapiunicode_cextensions 511
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.7_postgresql_psycopg2_dbapiunicode_nocextensions 6515
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.7_sqlite_pysqlite_dbapiunicode_cextensions 472
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.7_sqlite_pysqlite_dbapiunicode_nocextensions 6476
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_mssql_pyodbc_dbapiunicode_cextensions 534
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_mssql_pyodbc_dbapiunicode_nocextensions 6536
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_mysql_mysqldb_dbapiunicode_cextensions 539
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_mysql_mysqldb_dbapiunicode_nocextensions 6541
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_mysql_pymysql_dbapiunicode_cextensions 122505
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_mysql_pymysql_dbapiunicode_nocextensions 128507
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_oracle_cx_oracle_dbapiunicode_cextensions 565
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_oracle_cx_oracle_dbapiunicode_nocextensions 36587
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_postgresql_psycopg2_dbapiunicode_cextensions 509
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 6511
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_sqlite_pysqlite_dbapiunicode_cextensions 463
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 6465
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.7_mssql_pyodbc_dbapiunicode_cextensions 533
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.7_mssql_pyodbc_dbapiunicode_nocextensions 6537
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.7_mysql_mysqldb_dbapiunicode_cextensions 538
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.7_mysql_mysqldb_dbapiunicode_nocextensions 6542
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.7_mysql_pymysql_dbapiunicode_cextensions 88294
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.7_mysql_pymysql_dbapiunicode_nocextensions 94298
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.7_oracle_cx_oracle_dbapiunicode_cextensions 564
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.7_oracle_cx_oracle_dbapiunicode_nocextensions 6568
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.7_postgresql_psycopg2_dbapiunicode_cextensions 529
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.7_postgresql_psycopg2_dbapiunicode_nocextensions 6533
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.7_sqlite_pysqlite_dbapiunicode_cextensions 480
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.7_sqlite_pysqlite_dbapiunicode_nocextensions 6484
 
 # TEST: test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation
 
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation 2.7_postgresql_psycopg2_dbapiunicode_cextensions 6465,325,4295,12719,1286,2222,2793
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation 3.7_postgresql_psycopg2_dbapiunicode_cextensions 6177,306,4162,12597,1233,2133,2852
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation 3.7_postgresql_psycopg2_dbapiunicode_nocextensions 6260,306,4242,13203,1344,2151,3046
+test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation 2.7_postgresql_psycopg2_dbapiunicode_cextensions 6685,336,4367,12924,1322,2274,2821
+test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 6740,336,4431,13486,1445,2290,3005
+test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation 3.7_postgresql_psycopg2_dbapiunicode_cextensions 6474,318,4335,12868,1309,2223,2872
+test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation 3.7_postgresql_psycopg2_dbapiunicode_nocextensions 6551,318,4415,13476,1440,2245,3066
 
 # TEST: test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation
 
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation 2.7_postgresql_psycopg2_dbapiunicode_cextensions 6959,432,7231,18822,1299,2861
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation 3.7_postgresql_psycopg2_dbapiunicode_cextensions 7090,411,7281,19190,1247,2897
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation 3.7_postgresql_psycopg2_dbapiunicode_nocextensions 7186,416,7465,20675,1350,2957
+test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation 2.7_postgresql_psycopg2_dbapiunicode_cextensions 7162,444,7247,18830,1335,2890
+test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 7338,454,7479,21133,1448,2949
+test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation 3.7_postgresql_psycopg2_dbapiunicode_cextensions 7199,435,7459,19425,1323,2974
+test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation 3.7_postgresql_psycopg2_dbapiunicode_nocextensions 7347,443,7715,21791,1444,3042
index 53c1f72e468c44ed0cb4b8b966c3ca4be145afe9..2b61bc6a1bfd818de65426fe589618da25917750 100644 (file)
@@ -792,7 +792,7 @@ class DefaultRequirements(SuiteRequirements):
         return skip_if(
             "mssql+pymssql", "crashes on pymssql"
         ) + fails_on_everything_except(
-            "mysql", "sqlite+pysqlite", "sqlite+pysqlcipher"
+            "mysql", "sqlite+pysqlite", "sqlite+pysqlcipher", "mssql"
         )
 
     @property
index a7e9916cdc1314fda5282c2b351d0bf48b36f76b..6f16f0514959ebf9e88c8f275eb87599b056a5ee 100644 (file)
@@ -4841,7 +4841,7 @@ class ResultMapTest(fixtures.TestBase):
         )
 
     def test_nested_api(self):
-        from sqlalchemy.engine.result import ResultMetaData
+        from sqlalchemy.engine.result import CursorResultMetaData
 
         stmt2 = select([table2]).subquery()
 
@@ -4870,7 +4870,7 @@ class ResultMapTest(fixtures.TestBase):
 
         comp = MyCompiler(default.DefaultDialect(), stmt1)
         eq_(
-            ResultMetaData._create_description_match_map(
+            CursorResultMetaData._create_description_match_map(
                 contexts[stmt2.element][0]
             ),
             {
index 2fded335bd42313880bae67604845fd044f52435..ad27828a4daec90a55db168628fec442dee95987 100644 (file)
@@ -684,7 +684,7 @@ class DefaultTest(fixtures.TestBase):
     def test_insert_values(self):
         t.insert(values={"col3": 50}).execute()
         result = t.select().execute()
-        eq_(50, result.first()["col3"])
+        eq_(50, result.first()._mapping["col3"])
 
     @testing.fails_on("firebird", "Data type unknown")
     def test_updatemany(self):
@@ -793,7 +793,7 @@ class DefaultTest(fixtures.TestBase):
         t.update(t.c.col1 == pk, values={"col3": 55}).execute()
         result = t.select(t.c.col1 == pk).execute()
         result = result.first()
-        eq_(55, result["col3"])
+        eq_(55, result._mapping["col3"])
 
 
 class CTEDefaultTest(fixtures.TablesTest):
index b058cbe1b5930692bebd78aadb1b06a6f906dc3f..ba6048f071de0907837bdcfdbb89455077a4284f 100644 (file)
@@ -34,6 +34,7 @@ from sqlalchemy.sql.selectable import SelectStatementGrouping
 from sqlalchemy.testing import assert_raises
 from sqlalchemy.testing import assert_raises_message
 from sqlalchemy.testing import AssertsCompiledSQL
+from sqlalchemy.testing import engines
 from sqlalchemy.testing import eq_
 from sqlalchemy.testing import fixtures
 from sqlalchemy.testing import in_
@@ -1282,15 +1283,24 @@ class ResultProxyTest(fixtures.TablesTest):
 
         with testing.expect_deprecated(
             "Retreiving row values using Column objects "
-            "with only matching names"
+            "with only matching names",
+            "Using non-integer/slice indices on Row is "
+            "deprecated and will be removed in version 2.0",
         ):
             eq_(r[users.c.user_id], 2)
 
+        r._keymap.pop(users.c.user_id)  # reset lookup
         with testing.expect_deprecated(
             "Retreiving row values using Column objects "
             "with only matching names"
         ):
-            eq_(r[users.c.user_name], "jack")
+            eq_(r._mapping[users.c.user_id], 2)
+
+        with testing.expect_deprecated(
+            "Retreiving row values using Column objects "
+            "with only matching names"
+        ):
+            eq_(r._mapping[users.c.user_name], "jack")
 
     def test_column_accessor_basic_text(self):
         users = self.tables.users
@@ -1300,17 +1310,35 @@ class ResultProxyTest(fixtures.TablesTest):
         ).first()
 
         with testing.expect_deprecated(
+            "Using non-integer/slice indices on Row is deprecated "
+            "and will be removed in version 2.0",
             "Retreiving row values using Column objects "
-            "with only matching names"
+            "with only matching names",
         ):
             eq_(r[users.c.user_id], 2)
 
+        r._keymap.pop(users.c.user_id)
         with testing.expect_deprecated(
             "Retreiving row values using Column objects "
             "with only matching names"
+        ):
+            eq_(r._mapping[users.c.user_id], 2)
+
+        with testing.expect_deprecated(
+            "Using non-integer/slice indices on Row is deprecated "
+            "and will be removed in version 2.0",
+            "Retreiving row values using Column objects "
+            "with only matching names",
         ):
             eq_(r[users.c.user_name], "jack")
 
+        r._keymap.pop(users.c.user_name)
+        with testing.expect_deprecated(
+            "Retreiving row values using Column objects "
+            "with only matching names"
+        ):
+            eq_(r._mapping[users.c.user_name], "jack")
+
     @testing.provide_metadata
     def test_column_label_overlap_fallback(self):
         content = Table("content", self.metadata, Column("type", String(30)))
@@ -1319,7 +1347,7 @@ class ResultProxyTest(fixtures.TablesTest):
         testing.db.execute(content.insert().values(type="t1"))
 
         row = testing.db.execute(content.select(use_labels=True)).first()
-        in_(content.c.type, row)
+        in_(content.c.type, row._mapping)
         not_in_(bar.c.content_type, row)
         with testing.expect_deprecated(
             "Retreiving row values using Column objects "
@@ -1387,18 +1415,38 @@ class ResultProxyTest(fixtures.TablesTest):
                         "Retreiving row values using Column objects "
                         "from a row that was unpickled"
                     ):
-                        eq_(result[0][users.c.user_id], 7)
+                        eq_(result[0]._mapping[users.c.user_id], 7)
+
+                    result[0]._keymap.pop(users.c.user_id)
+                    with testing.expect_deprecated(
+                        "Retreiving row values using Column objects "
+                        "from a row that was unpickled"
+                    ):
+                        eq_(result[0]._mapping[users.c.user_id], 7)
+
                     with testing.expect_deprecated(
                         "Retreiving row values using Column objects "
                         "from a row that was unpickled"
                     ):
-                        eq_(result[0][users.c.user_name], "jack")
+                        eq_(result[0]._mapping[users.c.user_name], "jack")
+
+                    result[0]._keymap.pop(users.c.user_name)
+                    with testing.expect_deprecated(
+                        "Retreiving row values using Column objects "
+                        "from a row that was unpickled"
+                    ):
+                        eq_(result[0]._mapping[users.c.user_name], "jack")
 
                 if not pickle or use_labels:
                     assert_raises(
                         exc.NoSuchColumnError,
                         lambda: result[0][addresses.c.user_id],
                     )
+
+                    assert_raises(
+                        exc.NoSuchColumnError,
+                        lambda: result[0]._mapping[addresses.c.user_id],
+                    )
                 else:
                     # test with a different table.  name resolution is
                     # causing 'user_id' to match when use_labels wasn't used.
@@ -1406,13 +1454,161 @@ class ResultProxyTest(fixtures.TablesTest):
                         "Retreiving row values using Column objects "
                         "from a row that was unpickled"
                     ):
-                        eq_(result[0][addresses.c.user_id], 7)
+                        eq_(result[0]._mapping[addresses.c.user_id], 7)
+
+                    result[0]._keymap.pop(addresses.c.user_id)
+                    with testing.expect_deprecated(
+                        "Retreiving row values using Column objects "
+                        "from a row that was unpickled"
+                    ):
+                        eq_(result[0]._mapping[addresses.c.user_id], 7)
 
                 assert_raises(
                     exc.NoSuchColumnError,
                     lambda: result[0][addresses.c.address_id],
                 )
 
+                assert_raises(
+                    exc.NoSuchColumnError,
+                    lambda: result[0]._mapping[addresses.c.address_id],
+                )
+
+    @testing.requires.duplicate_names_in_cursor_description
+    def test_ambiguous_column_case_sensitive(self):
+        with testing.expect_deprecated(
+            "The create_engine.case_sensitive parameter is deprecated"
+        ):
+            eng = engines.testing_engine(options=dict(case_sensitive=False))
+
+        row = eng.execute(
+            select(
+                [
+                    literal_column("1").label("SOMECOL"),
+                    literal_column("1").label("SOMECOL"),
+                ]
+            )
+        ).first()
+
+        assert_raises_message(
+            exc.InvalidRequestError,
+            "Ambiguous column name",
+            lambda: row._mapping["somecol"],
+        )
+
+    def test_row_getitem_string(self):
+        with testing.db.connect() as conn:
+            col = literal_column("1").label("foo")
+            row = conn.execute(select([col])).first()
+
+            with testing.expect_deprecated(
+                "Using non-integer/slice indices on Row is deprecated "
+                "and will be removed in version 2.0;"
+            ):
+                eq_(row["foo"], 1)
+
+            eq_(row._mapping["foo"], 1)
+
+    def test_row_getitem_column(self):
+        with testing.db.connect() as conn:
+            col = literal_column("1").label("foo")
+            row = conn.execute(select([col])).first()
+
+            with testing.expect_deprecated(
+                "Using non-integer/slice indices on Row is deprecated "
+                "and will be removed in version 2.0;"
+            ):
+                eq_(row[col], 1)
+
+            eq_(row._mapping[col], 1)
+
+    def test_row_case_insensitive(self):
+        with testing.expect_deprecated(
+            "The create_engine.case_sensitive parameter is deprecated"
+        ):
+            ins_db = engines.testing_engine(options={"case_sensitive": False})
+        row = ins_db.execute(
+            select(
+                [
+                    literal_column("1").label("case_insensitive"),
+                    literal_column("2").label("CaseSensitive"),
+                ]
+            )
+        ).first()
+
+        eq_(list(row._mapping.keys()), ["case_insensitive", "CaseSensitive"])
+
+        in_("case_insensitive", row._keymap)
+        in_("CaseSensitive", row._keymap)
+        in_("casesensitive", row._keymap)
+
+        eq_(row._mapping["case_insensitive"], 1)
+        eq_(row._mapping["CaseSensitive"], 2)
+        eq_(row._mapping["Case_insensitive"], 1)
+        eq_(row._mapping["casesensitive"], 2)
+
+    def test_row_case_insensitive_unoptimized(self):
+        with testing.expect_deprecated(
+            "The create_engine.case_sensitive parameter is deprecated"
+        ):
+            ins_db = engines.testing_engine(options={"case_sensitive": False})
+        row = ins_db.execute(
+            select(
+                [
+                    literal_column("1").label("case_insensitive"),
+                    literal_column("2").label("CaseSensitive"),
+                    text("3 AS screw_up_the_cols"),
+                ]
+            )
+        ).first()
+
+        eq_(
+            list(row._mapping.keys()),
+            ["case_insensitive", "CaseSensitive", "screw_up_the_cols"],
+        )
+
+        in_("case_insensitive", row._keymap)
+        in_("CaseSensitive", row._keymap)
+        in_("casesensitive", row._keymap)
+
+        eq_(row._mapping["case_insensitive"], 1)
+        eq_(row._mapping["CaseSensitive"], 2)
+        eq_(row._mapping["screw_up_the_cols"], 3)
+        eq_(row._mapping["Case_insensitive"], 1)
+        eq_(row._mapping["casesensitive"], 2)
+        eq_(row._mapping["screw_UP_the_cols"], 3)
+
+    def test_row_keys_deprecated(self):
+        r = testing.db.execute(
+            text("select * from users where user_id=2")
+        ).first()
+
+        with testing.expect_deprecated(
+            r"The Row.keys\(\) method is deprecated and will be "
+            "removed in a future release."
+        ):
+            eq_(r.keys(), ["user_id", "user_name"])
+
+    def test_row_contains_key_deprecated(self):
+        r = testing.db.execute(
+            text("select * from users where user_id=2")
+        ).first()
+
+        with testing.expect_deprecated(
+            "Using the 'in' operator to test for string or column keys, or "
+            "integer indexes, .* is deprecated"
+        ):
+            in_("user_name", r)
+
+        # no warning if the key is not there
+        not_in_("foobar", r)
+
+        # this seems to happen only with Python BaseRow
+        # with testing.expect_deprecated(
+        #    "Using the 'in' operator to test for string or column keys, or "
+        #   "integer indexes, .* is deprecated"
+        # ):
+        #    in_(1, r)
+
 
 class PositionalTextTest(fixtures.TablesTest):
     run_inserts = "once"
@@ -1452,7 +1648,7 @@ class PositionalTextTest(fixtures.TablesTest):
             "Retreiving row values using Column objects "
             "with only matching names"
         ):
-            eq_(row[text1.c.a], "a1")
+            eq_(row._mapping[text1.c.a], "a1")
 
     def test_anon_aliased_unique(self):
         text1 = self.tables.text1
@@ -1466,10 +1662,10 @@ class PositionalTextTest(fixtures.TablesTest):
         result = testing.db.execute(stmt)
         row = result.first()
 
-        eq_(row[c1], "a1")
-        eq_(row[c2], "b1")
-        eq_(row[c3], "c1")
-        eq_(row[c4], "d1")
+        eq_(row._mapping[c1], "a1")
+        eq_(row._mapping[c2], "b1")
+        eq_(row._mapping[c3], "c1")
+        eq_(row._mapping[c4], "d1")
 
         # key fallback rules still match this to a column
         # unambiguously based on its name
@@ -1477,7 +1673,7 @@ class PositionalTextTest(fixtures.TablesTest):
             "Retreiving row values using Column objects "
             "with only matching names"
         ):
-            eq_(row[text1.c.a], "a1")
+            eq_(row._mapping[text1.c.a], "a1")
 
         # key fallback rules still match this to a column
         # unambiguously based on its name
@@ -1485,7 +1681,7 @@ class PositionalTextTest(fixtures.TablesTest):
             "Retreiving row values using Column objects "
             "with only matching names"
         ):
-            eq_(row[text1.c.d], "d1")
+            eq_(row._mapping[text1.c.d], "d1")
 
         # text1.c.b goes nowhere....because we hit key fallback
         # but the text1.c.b doesn't derive from text1.c.c
@@ -1495,6 +1691,12 @@ class PositionalTextTest(fixtures.TablesTest):
             lambda: row[text1.c.b],
         )
 
+        assert_raises_message(
+            exc.NoSuchColumnError,
+            "Could not locate column in row for column 'text1.b'",
+            lambda: row._mapping[text1.c.b],
+        )
+
 
 class DefaultTest(fixtures.TestBase):
     __backend__ = True
index 6ee8a67b79733704787f0f5faffdaeb462de5003..fd3f557803d39091c789acceeb708d039f4a7968 100644 (file)
@@ -1015,15 +1015,15 @@ class ExecuteTest(fixtures.TestBase):
         )
         meta.create_all()
         t.insert(values=dict(value=func.length("one"))).execute()
-        assert t.select().execute().first()["value"] == 3
+        assert t.select().execute().first().value == 3
         t.update(values=dict(value=func.length("asfda"))).execute()
-        assert t.select().execute().first()["value"] == 5
+        assert t.select().execute().first().value == 5
 
         r = t.insert(values=dict(value=func.length("sfsaafsda"))).execute()
         id_ = r.inserted_primary_key[0]
-        assert t.select(t.c.id == id_).execute().first()["value"] == 9
+        assert t.select(t.c.id == id_).execute().first().value == 9
         t.update(values={t.c.value: func.length("asdf")}).execute()
-        assert t.select().execute().first()["value"] == 4
+        assert t.select().execute().first().value == 4
         t2.insert().execute()
         t2.insert(values=dict(value=func.length("one"))).execute()
         t2.insert(values=dict(value=func.length("asfda") + -19)).execute(
@@ -1045,7 +1045,7 @@ class ExecuteTest(fixtures.TestBase):
         t2.delete().execute()
 
         t2.insert(values=dict(value=func.length("one") + 8)).execute()
-        assert t2.select().execute().first()["value"] == 11
+        assert t2.select().execute().first().value == 11
 
         t2.update(values=dict(value=func.length("asfda"))).execute()
         eq_(
index 85cdeca4b2f1a66e715e08694a9605b33b8f9964..2021c030c4e268e321eee2eb8a026bf231c92460 100644 (file)
@@ -112,7 +112,7 @@ class InsertExecTest(fixtures.TablesTest):
                 )
                 row = engine.execute(table_.select(criterion)).first()
                 for c in table_.c:
-                    ret[c.key] = row[c]
+                    ret[c.key] = row._mapping[c]
             return ret
 
         if testing.against("firebird", "postgresql", "oracle", "mssql"):
index aa5e913d54ff5a9f76fb5c2b953abe36b7749e12..83c5da3427ac6b94661f5486aaeb2292b195ac23 100644 (file)
@@ -265,13 +265,13 @@ class QueryTest(fixtures.TestBase):
         users.insert().execute(user_id=7, user_name="jack")
         s = select([users], users.c.user_id == bindparam("id")).compile()
         c = testing.db.connect()
-        assert c.execute(s, id=7).fetchall()[0]["user_id"] == 7
+        eq_(c.execute(s, id=7).first()._mapping["user_id"], 7)
 
     def test_compiled_insert_execute(self):
         users.insert().compile().execute(user_id=7, user_name="jack")
         s = select([users], users.c.user_id == bindparam("id")).compile()
         c = testing.db.connect()
-        assert c.execute(s, id=7).fetchall()[0]["user_id"] == 7
+        eq_(c.execute(s, id=7).first()._mapping["user_id"], 7)
 
     def test_repeated_bindparams(self):
         """Tests that a BindParam can be used more than once.
index 8aa524d78289cb723fd47795c642a1829c8bd93d..2a6851a99fbfa676aad6a3e6daa1d62b1fe6e553 100644 (file)
@@ -1,3 +1,4 @@
+import collections
 from contextlib import contextmanager
 import csv
 import operator
@@ -134,8 +135,8 @@ class ResultProxyTest(fixtures.TablesTest):
             .scalar_subquery()
         )
         for row in select([sel + 1, sel + 3], bind=users.bind).execute():
-            eq_(row["anon_1"], 8)
-            eq_(row["anon_2"], 10)
+            eq_(row._mapping["anon_1"], 8)
+            eq_(row._mapping["anon_2"], 10)
 
     def test_row_comparison(self):
         users = self.tables.users
@@ -196,15 +197,18 @@ class ResultProxyTest(fixtures.TablesTest):
         testing.db.execute(content.insert().values(type="t1"))
 
         row = testing.db.execute(content.select(use_labels=True)).first()
-        in_(content.c.type, row)
-        not_in_(bar.c.content_type, row)
+        in_(content.c.type, row._mapping)
+        not_in_(bar.c.content_type, row._mapping)
+
+        not_in_(bar.c.content_type, row._mapping)
 
         row = testing.db.execute(
             select([func.now().label("content_type")])
         ).first()
 
-        not_in_(content.c.type, row)
-        not_in_(bar.c.content_type, row)
+        not_in_(content.c.type, row._mapping)
+
+        not_in_(bar.c.content_type, row._mapping)
 
     def test_pickled_rows(self):
         users = self.tables.users
@@ -229,14 +233,14 @@ class ResultProxyTest(fixtures.TablesTest):
 
                 eq_(result, [(7, "jack"), (8, "ed"), (9, "fred")])
                 if use_labels:
-                    eq_(result[0]["users_user_id"], 7)
+                    eq_(result[0]._mapping["users_user_id"], 7)
                     eq_(
-                        list(result[0].keys()),
+                        list(result[0]._fields),
                         ["users_user_id", "users_user_name"],
                     )
                 else:
-                    eq_(result[0]["user_id"], 7)
-                    eq_(list(result[0].keys()), ["user_id", "user_name"])
+                    eq_(result[0]._mapping["user_id"], 7)
+                    eq_(list(result[0]._fields), ["user_id", "user_name"])
 
                 eq_(result[0][0], 7)
 
@@ -244,6 +248,11 @@ class ResultProxyTest(fixtures.TablesTest):
                     exc.NoSuchColumnError, lambda: result[0]["fake key"]
                 )
 
+                assert_raises(
+                    exc.NoSuchColumnError,
+                    lambda: result[0]._mapping["fake key"],
+                )
+
     def test_column_error_printing(self):
         result = testing.db.execute(select([1]))
         row = result.first()
@@ -267,7 +276,9 @@ class ResultProxyTest(fixtures.TablesTest):
             is_(result._getter(accessor, False), None)
 
             assert_raises_message(
-                exc.NoSuchColumnError, msg % repl, lambda: row[accessor]
+                exc.NoSuchColumnError,
+                msg % repl,
+                lambda: row._mapping[accessor],
             )
 
     def test_fetchmany(self):
@@ -297,7 +308,24 @@ class ResultProxyTest(fixtures.TablesTest):
         eq_(r[1:], (2, "foo@bar.com"))
         eq_(r[:-1], (1, 2))
 
-    def test_column_accessor_basic_compiled(self):
+    def test_column_accessor_basic_compiled_mapping(self):
+        users = self.tables.users
+
+        users.insert().execute(
+            dict(user_id=1, user_name="john"),
+            dict(user_id=2, user_name="jack"),
+        )
+
+        r = users.select(users.c.user_id == 2).execute().first()
+        eq_(r.user_id, 2)
+        eq_(r._mapping["user_id"], 2)
+        eq_(r._mapping[users.c.user_id], 2)
+
+        eq_(r.user_name, "jack")
+        eq_(r._mapping["user_name"], "jack")
+        eq_(r._mapping[users.c.user_name], "jack")
+
+    def test_column_accessor_basic_compiled_traditional(self):
         users = self.tables.users
 
         users.insert().execute(
@@ -306,13 +334,28 @@ class ResultProxyTest(fixtures.TablesTest):
         )
 
         r = users.select(users.c.user_id == 2).execute().first()
+
         eq_(r.user_id, 2)
-        eq_(r["user_id"], 2)
-        eq_(r[users.c.user_id], 2)
+        eq_(r._mapping["user_id"], 2)
+        eq_(r._mapping[users.c.user_id], 2)
 
         eq_(r.user_name, "jack")
-        eq_(r["user_name"], "jack")
-        eq_(r[users.c.user_name], "jack")
+        eq_(r._mapping["user_name"], "jack")
+        eq_(r._mapping[users.c.user_name], "jack")
+
+    def test_row_getitem_string(self):
+        users = self.tables.users
+
+        users.insert().execute(
+            dict(user_id=1, user_name="john"),
+            dict(user_id=2, user_name="jack"),
+        )
+
+        r = testing.db.execute(
+            text("select * from users where user_id=2")
+        ).first()
+
+        eq_(r._mapping["user_name"], "jack")
 
     def test_column_accessor_basic_text(self):
         users = self.tables.users
@@ -326,10 +369,34 @@ class ResultProxyTest(fixtures.TablesTest):
         ).first()
 
         eq_(r.user_id, 2)
-        eq_(r["user_id"], 2)
 
         eq_(r.user_name, "jack")
-        eq_(r["user_name"], "jack")
+
+        eq_(r._mapping["user_id"], 2)
+
+        eq_(r.user_name, "jack")
+        eq_(r._mapping["user_name"], "jack")
+
+    def test_column_accessor_text_colexplicit(self):
+        users = self.tables.users
+
+        users.insert().execute(
+            dict(user_id=1, user_name="john"),
+            dict(user_id=2, user_name="jack"),
+        )
+        r = testing.db.execute(
+            text("select * from users where user_id=2").columns(
+                users.c.user_id, users.c.user_name
+            )
+        ).first()
+
+        eq_(r.user_id, 2)
+        eq_(r._mapping["user_id"], 2)
+        eq_(r._mapping[users.c.user_id], 2)
+
+        eq_(r.user_name, "jack")
+        eq_(r._mapping["user_name"], "jack")
+        eq_(r._mapping[users.c.user_name], "jack")
 
     def test_column_accessor_textual_select(self):
         users = self.tables.users
@@ -346,11 +413,12 @@ class ResultProxyTest(fixtures.TablesTest):
             .where(text("user_id=2"))
         ).first()
 
+        # keyed access works in many ways
         eq_(r.user_id, 2)
-        eq_(r["user_id"], 2)
-
         eq_(r.user_name, "jack")
-        eq_(r["user_name"], "jack")
+        eq_(r._mapping["user_id"], 2)
+        eq_(r.user_name, "jack")
+        eq_(r._mapping["user_name"], "jack")
 
     def test_column_accessor_dotted_union(self):
         users = self.tables.users
@@ -367,9 +435,9 @@ class ResultProxyTest(fixtures.TablesTest):
                 "users.user_name from users"
             )
         ).first()
-        eq_(r["user_id"], 1)
-        eq_(r["user_name"], "john")
-        eq_(list(r.keys()), ["user_id", "user_name"])
+        eq_(r._mapping["user_id"], 1)
+        eq_(r._mapping["user_name"], "john")
+        eq_(list(r._fields), ["user_id", "user_name"])
 
     def test_column_accessor_sqlite_raw(self):
         users = self.tables.users
@@ -395,14 +463,14 @@ class ResultProxyTest(fixtures.TablesTest):
             eq_(r["users.user_id"], 1)
             eq_(r["users.user_name"], "john")
 
-            eq_(list(r.keys()), ["users.user_id", "users.user_name"])
+            eq_(list(r._fields), ["users.user_id", "users.user_name"])
         else:
-            not_in_("users.user_id", r)
-            not_in_("users.user_name", r)
-            eq_(r["user_id"], 1)
-            eq_(r["user_name"], "john")
+            not_in_("users.user_id", r._mapping)
+            not_in_("users.user_name", r._mapping)
+            eq_(r._mapping["user_id"], 1)
+            eq_(r._mapping["user_name"], "john")
 
-            eq_(list(r.keys()), ["user_id", "user_name"])
+            eq_(list(r._fields), ["user_id", "user_name"])
 
     def test_column_accessor_sqlite_translated(self):
         users = self.tables.users
@@ -420,17 +488,17 @@ class ResultProxyTest(fixtures.TablesTest):
             .execute()
             .first()
         )
-        eq_(r["user_id"], 1)
-        eq_(r["user_name"], "john")
+        eq_(r._mapping["user_id"], 1)
+        eq_(r._mapping["user_name"], "john")
 
         if testing.against("sqlite < 3.10.0"):
-            eq_(r["users.user_id"], 1)
-            eq_(r["users.user_name"], "john")
+            eq_(r._mapping["users.user_id"], 1)
+            eq_(r._mapping["users.user_name"], "john")
         else:
-            not_in_("users.user_id", r)
-            not_in_("users.user_name", r)
+            not_in_("users.user_id", r._mapping)
+            not_in_("users.user_name", r._mapping)
 
-        eq_(list(r.keys()), ["user_id", "user_name"])
+        eq_(list(r._fields), ["user_id", "user_name"])
 
     def test_column_accessor_labels_w_dots(self):
         users = self.tables.users
@@ -448,10 +516,10 @@ class ResultProxyTest(fixtures.TablesTest):
             .execute()
             .first()
         )
-        eq_(r["users.user_id"], 1)
-        eq_(r["users.user_name"], "john")
-        not_in_("user_name", r)
-        eq_(list(r.keys()), ["users.user_id", "users.user_name"])
+        eq_(r._mapping["users.user_id"], 1)
+        eq_(r._mapping["users.user_name"], "john")
+        not_in_("user_name", r._mapping)
+        eq_(list(r._fields), ["users.user_id", "users.user_name"])
 
     def test_column_accessor_unary(self):
         users = self.tables.users
@@ -465,7 +533,7 @@ class ResultProxyTest(fixtures.TablesTest):
             .execute()
             .first()
         )
-        eq_(r[users.c.user_name], "john")
+        eq_(r._mapping[users.c.user_name], "john")
         eq_(r.user_name, "john")
 
     def test_column_accessor_err(self):
@@ -480,7 +548,7 @@ class ResultProxyTest(fixtures.TablesTest):
         assert_raises_message(
             KeyError,
             "Could not locate column in row for column 'foo'",
-            lambda: r["foo"],
+            lambda: r._mapping["foo"],
         )
 
     def test_graceful_fetch_on_non_rows(self):
@@ -586,20 +654,20 @@ class ResultProxyTest(fixtures.TablesTest):
             )
         ).first()
 
-        eq_(list(row.keys()), ["case_insensitive", "CaseSensitive"])
+        eq_(list(row._fields), ["case_insensitive", "CaseSensitive"])
 
         in_("case_insensitive", row._keymap)
         in_("CaseSensitive", row._keymap)
         not_in_("casesensitive", row._keymap)
 
-        eq_(row["case_insensitive"], 1)
-        eq_(row["CaseSensitive"], 2)
+        eq_(row._mapping["case_insensitive"], 1)
+        eq_(row._mapping["CaseSensitive"], 2)
 
-        assert_raises(KeyError, lambda: row["Case_insensitive"])
-        assert_raises(KeyError, lambda: row["casesensitive"])
+        assert_raises(KeyError, lambda: row._mapping["Case_insensitive"])
+        assert_raises(KeyError, lambda: row._mapping["casesensitive"])
 
     def test_row_case_sensitive_unoptimized(self):
-        ins_db = engines.testing_engine(options={"case_sensitive": True})
+        ins_db = engines.testing_engine()
         row = ins_db.execute(
             select(
                 [
@@ -611,7 +679,7 @@ class ResultProxyTest(fixtures.TablesTest):
         ).first()
 
         eq_(
-            list(row.keys()),
+            list(row._fields),
             ["case_insensitive", "CaseSensitive", "screw_up_the_cols"],
         )
 
@@ -619,63 +687,13 @@ class ResultProxyTest(fixtures.TablesTest):
         in_("CaseSensitive", row._keymap)
         not_in_("casesensitive", row._keymap)
 
-        eq_(row["case_insensitive"], 1)
-        eq_(row["CaseSensitive"], 2)
-        eq_(row["screw_up_the_cols"], 3)
-
-        assert_raises(KeyError, lambda: row["Case_insensitive"])
-        assert_raises(KeyError, lambda: row["casesensitive"])
-        assert_raises(KeyError, lambda: row["screw_UP_the_cols"])
-
-    def test_row_case_insensitive(self):
-        ins_db = engines.testing_engine(options={"case_sensitive": False})
-        row = ins_db.execute(
-            select(
-                [
-                    literal_column("1").label("case_insensitive"),
-                    literal_column("2").label("CaseSensitive"),
-                ]
-            )
-        ).first()
-
-        eq_(list(row.keys()), ["case_insensitive", "CaseSensitive"])
-
-        in_("case_insensitive", row._keymap)
-        in_("CaseSensitive", row._keymap)
-        in_("casesensitive", row._keymap)
-
-        eq_(row["case_insensitive"], 1)
-        eq_(row["CaseSensitive"], 2)
-        eq_(row["Case_insensitive"], 1)
-        eq_(row["casesensitive"], 2)
-
-    def test_row_case_insensitive_unoptimized(self):
-        ins_db = engines.testing_engine(options={"case_sensitive": False})
-        row = ins_db.execute(
-            select(
-                [
-                    literal_column("1").label("case_insensitive"),
-                    literal_column("2").label("CaseSensitive"),
-                    text("3 AS screw_up_the_cols"),
-                ]
-            )
-        ).first()
-
-        eq_(
-            list(row.keys()),
-            ["case_insensitive", "CaseSensitive", "screw_up_the_cols"],
-        )
-
-        in_("case_insensitive", row._keymap)
-        in_("CaseSensitive", row._keymap)
-        in_("casesensitive", row._keymap)
+        eq_(row._mapping["case_insensitive"], 1)
+        eq_(row._mapping["CaseSensitive"], 2)
+        eq_(row._mapping["screw_up_the_cols"], 3)
 
-        eq_(row["case_insensitive"], 1)
-        eq_(row["CaseSensitive"], 2)
-        eq_(row["screw_up_the_cols"], 3)
-        eq_(row["Case_insensitive"], 1)
-        eq_(row["casesensitive"], 2)
-        eq_(row["screw_UP_the_cols"], 3)
+        assert_raises(KeyError, lambda: row._mapping["Case_insensitive"])
+        assert_raises(KeyError, lambda: row._mapping["casesensitive"])
+        assert_raises(KeyError, lambda: row._mapping["screw_UP_the_cols"])
 
     def test_row_as_args(self):
         users = self.tables.users
@@ -683,7 +701,7 @@ class ResultProxyTest(fixtures.TablesTest):
         users.insert().execute(user_id=1, user_name="john")
         r = users.select(users.c.user_id == 1).execute().first()
         users.delete().execute()
-        users.insert().execute(r)
+        users.insert().execute(r._mapping)
         eq_(users.select().execute().fetchall(), [(1, "john")])
 
     def test_result_as_args(self):
@@ -697,7 +715,7 @@ class ResultProxyTest(fixtures.TablesTest):
             ]
         )
         r = users.select().execute()
-        users2.insert().execute(list(r))
+        users2.insert().execute([row._mapping for row in r])
         eq_(
             users2.select().order_by(users2.c.user_id).execute().fetchall(),
             [(1, "john"), (2, "ed")],
@@ -705,7 +723,7 @@ class ResultProxyTest(fixtures.TablesTest):
 
         users2.delete().execute()
         r = users.select().execute()
-        users2.insert().execute(*list(r))
+        users2.insert().execute(*[row._mapping for row in r])
         eq_(
             users2.select().order_by(users2.c.user_id).execute().fetchall(),
             [(1, "john"), (2, "ed")],
@@ -723,7 +741,7 @@ class ResultProxyTest(fixtures.TablesTest):
         assert_raises_message(
             exc.InvalidRequestError,
             "Ambiguous column name",
-            lambda: r["user_id"],
+            lambda: r._mapping["user_id"],
         )
 
         assert_raises_message(
@@ -736,8 +754,8 @@ class ResultProxyTest(fixtures.TablesTest):
         # pure positional targeting; users.c.user_id
         # and addresses.c.user_id are known!
         # works as of 1.1 issue #3501
-        eq_(r[users.c.user_id], 1)
-        eq_(r[addresses.c.user_id], None)
+        eq_(r._mapping[users.c.user_id], 1)
+        eq_(r._mapping[addresses.c.user_id], None)
 
         # try to trick it - fake_table isn't in the result!
         # we get the correct error
@@ -745,14 +763,14 @@ class ResultProxyTest(fixtures.TablesTest):
         assert_raises_message(
             exc.InvalidRequestError,
             "Could not locate column in row for column 'fake.user_id'",
-            lambda: r[fake_table.c.user_id],
+            lambda: r._mapping[fake_table.c.user_id],
         )
 
         r = util.pickle.loads(util.pickle.dumps(r))
         assert_raises_message(
             exc.InvalidRequestError,
             "Ambiguous column name",
-            lambda: r["user_id"],
+            lambda: r._mapping["user_id"],
         )
 
         result = users.outerjoin(addresses).select().execute()
@@ -762,7 +780,7 @@ class ResultProxyTest(fixtures.TablesTest):
         assert_raises_message(
             exc.InvalidRequestError,
             "Ambiguous column name",
-            lambda: r["user_id"],
+            lambda: r._mapping["user_id"],
         )
 
     @testing.requires.duplicate_names_in_cursor_description
@@ -781,35 +799,16 @@ class ResultProxyTest(fixtures.TablesTest):
 
         # as of 1.1 issue #3501, we use pure positional
         # targeting for the column objects here
-        eq_(row[users.c.user_id], 1)
+        eq_(row._mapping[users.c.user_id], 1)
 
-        eq_(row[ua.c.user_id], 1)
+        eq_(row._mapping[ua.c.user_id], 1)
 
         # this now works as of 1.1 issue #3501;
         # previously this was stuck on "ambiguous column name"
         assert_raises_message(
             exc.InvalidRequestError,
             "Could not locate column in row",
-            lambda: row[u2.c.user_id],
-        )
-
-    @testing.requires.duplicate_names_in_cursor_description
-    def test_ambiguous_column_case_sensitive(self):
-        eng = engines.testing_engine(options=dict(case_sensitive=False))
-
-        row = eng.execute(
-            select(
-                [
-                    literal_column("1").label("SOMECOL"),
-                    literal_column("1").label("SOMECOL"),
-                ]
-            )
-        ).first()
-
-        assert_raises_message(
-            exc.InvalidRequestError,
-            "Ambiguous column name",
-            lambda: row["somecol"],
+            lambda: row._mapping[u2.c.user_id],
         )
 
     @testing.requires.duplicate_names_in_cursor_description
@@ -829,7 +828,12 @@ class ResultProxyTest(fixtures.TablesTest):
         row = result.first()
 
         eq_(
-            set([users.c.user_id in row, addresses.c.user_id in row]),
+            set(
+                [
+                    users.c.user_id in row._mapping,
+                    addresses.c.user_id in row._mapping,
+                ]
+            ),
             set([True]),
         )
 
@@ -864,8 +868,8 @@ class ResultProxyTest(fixtures.TablesTest):
                 )
             )
             row = result.first()
-            eq_(row[users.c.user_id], 1)
-            eq_(row[users.c.user_name], "john")
+            eq_(row._mapping[users.c.user_id], 1)
+            eq_(row._mapping[users.c.user_name], "john")
 
     def test_loose_matching_two(self):
         users = self.tables.users
@@ -898,14 +902,14 @@ class ResultProxyTest(fixtures.TablesTest):
             assert_raises_message(
                 exc.InvalidRequestError,
                 "Ambiguous column name",
-                lambda: row[users.c.user_id],
+                lambda: row._mapping[users.c.user_id],
             )
             assert_raises_message(
                 exc.InvalidRequestError,
                 "Ambiguous column name",
-                lambda: row[addresses.c.user_id],
+                lambda: row._mapping[addresses.c.user_id],
             )
-            eq_(row[users.c.user_name], "john")
+            eq_(row._mapping[users.c.user_name], "john")
 
     def test_ambiguous_column_by_col_plus_label(self):
         users = self.tables.users
@@ -918,7 +922,7 @@ class ResultProxyTest(fixtures.TablesTest):
             ]
         ).execute()
         row = result.first()
-        eq_(row[users.c.user_id], 1)
+        eq_(row._mapping[users.c.user_id], 1)
         eq_(row[1], 1)
 
     def test_fetch_partial_result_map(self):
@@ -969,8 +973,52 @@ class ResultProxyTest(fixtures.TablesTest):
             users.select().alias(users.name),
         ):
             row = s.select(use_labels=True).execute().first()
-            eq_(row[s.c.user_id], 7)
-            eq_(row[s.c.user_name], "ed")
+            eq_(row._mapping[s.c.user_id], 7)
+            eq_(row._mapping[s.c.user_name], "ed")
+
+    @testing.requires.python3
+    def test_ro_mapping_py3k(self):
+        users = self.tables.users
+
+        users.insert().execute(user_id=1, user_name="foo")
+        result = users.select().execute()
+
+        row = result.first()
+        dict_row = row._asdict()
+
+        # dictionaries aren't ordered in Python 3 until 3.7
+        odict_row = collections.OrderedDict(
+            [("user_id", 1), ("user_name", "foo")]
+        )
+        eq_(dict_row, odict_row)
+
+        mapping_row = row._mapping
+
+        eq_(list(mapping_row), list(mapping_row.keys()))
+        eq_(odict_row.keys(), mapping_row.keys())
+        eq_(odict_row.values(), mapping_row.values())
+        eq_(odict_row.items(), mapping_row.items())
+
+    @testing.requires.python2
+    def test_ro_mapping_py2k(self):
+        users = self.tables.users
+
+        users.insert().execute(user_id=1, user_name="foo")
+        result = users.select().execute()
+
+        row = result.first()
+        dict_row = row._asdict()
+
+        odict_row = collections.OrderedDict(
+            [("user_id", 1), ("user_name", "foo")]
+        )
+        eq_(dict_row, odict_row)
+        mapping_row = row._mapping
+
+        eq_(list(mapping_row), list(mapping_row.keys()))
+        eq_(odict_row.keys(), list(mapping_row.keys()))
+        eq_(odict_row.values(), list(mapping_row.values()))
+        eq_(odict_row.items(), list(mapping_row.items()))
 
     def test_keys(self):
         users = self.tables.users
@@ -979,7 +1027,8 @@ class ResultProxyTest(fixtures.TablesTest):
         result = users.select().execute()
         eq_(result.keys(), ["user_id", "user_name"])
         row = result.first()
-        eq_(row.keys(), ["user_id", "user_name"])
+        eq_(list(row._mapping.keys()), ["user_id", "user_name"])
+        eq_(row._fields, ("user_id", "user_name"))
 
     def test_keys_anon_labels(self):
         """test [ticket:3483]"""
@@ -999,7 +1048,8 @@ class ResultProxyTest(fixtures.TablesTest):
 
         eq_(result.keys(), ["user_id", "user_name_1", "count_1"])
         row = result.first()
-        eq_(row.keys(), ["user_id", "user_name_1", "count_1"])
+        eq_(row._fields, ("user_id", "user_name_1", "count_1"))
+        eq_(list(row._mapping.keys()), ["user_id", "user_name_1", "count_1"])
 
     def test_items(self):
         users = self.tables.users
@@ -1007,7 +1057,7 @@ class ResultProxyTest(fixtures.TablesTest):
         users.insert().execute(user_id=1, user_name="foo")
         r = users.select().execute().first()
         eq_(
-            [(x[0].lower(), x[1]) for x in list(r.items())],
+            [(x[0].lower(), x[1]) for x in list(r._mapping.items())],
             [("user_id", 1), ("user_name", "foo")],
         )
 
@@ -1046,8 +1096,8 @@ class ResultProxyTest(fixtures.TablesTest):
         r = users.select(users.c.user_id == 1).execute().first()
         eq_(r[0], 1)
         eq_(r[1], "foo")
-        eq_([x.lower() for x in list(r.keys())], ["user_id", "user_name"])
-        eq_(list(r.values()), [1, "foo"])
+        eq_([x.lower() for x in r._fields], ["user_id", "user_name"])
+        eq_(list(r._mapping.values()), [1, "foo"])
 
     def test_column_order_with_text_query(self):
         # should return values in query order
@@ -1057,8 +1107,8 @@ class ResultProxyTest(fixtures.TablesTest):
         r = testing.db.execute("select user_name, user_id from users").first()
         eq_(r[0], "foo")
         eq_(r[1], 1)
-        eq_([x.lower() for x in list(r.keys())], ["user_name", "user_id"])
-        eq_(list(r.values()), ["foo", 1])
+        eq_([x.lower() for x in r._fields], ["user_name", "user_id"])
+        eq_(list(r._mapping.values()), ["foo", 1])
 
     @testing.crashes("oracle", "FIXME: unknown, varify not fails_on()")
     @testing.crashes("firebird", "An identifier must begin with a letter")
@@ -1086,23 +1136,23 @@ class ResultProxyTest(fixtures.TablesTest):
         r = shadowed.select(shadowed.c.shadow_id == 1).execute().first()
 
         eq_(r.shadow_id, 1)
-        eq_(r["shadow_id"], 1)
-        eq_(r[shadowed.c.shadow_id], 1)
+        eq_(r._mapping["shadow_id"], 1)
+        eq_(r._mapping[shadowed.c.shadow_id], 1)
 
         eq_(r.shadow_name, "The Shadow")
-        eq_(r["shadow_name"], "The Shadow")
-        eq_(r[shadowed.c.shadow_name], "The Shadow")
+        eq_(r._mapping["shadow_name"], "The Shadow")
+        eq_(r._mapping[shadowed.c.shadow_name], "The Shadow")
 
         eq_(r.parent, "The Light")
-        eq_(r["parent"], "The Light")
-        eq_(r[shadowed.c.parent], "The Light")
+        eq_(r._mapping["parent"], "The Light")
+        eq_(r._mapping[shadowed.c.parent], "The Light")
 
         eq_(r.row, "Without light there is no shadow")
-        eq_(r["row"], "Without light there is no shadow")
-        eq_(r[shadowed.c.row], "Without light there is no shadow")
+        eq_(r._mapping["row"], "Without light there is no shadow")
+        eq_(r._mapping[shadowed.c.row], "Without light there is no shadow")
 
-        eq_(r["_parent"], "Hidden parent")
-        eq_(r["_row"], "Hidden row")
+        eq_(r._mapping["_parent"], "Hidden parent")
+        eq_(r._mapping["_row"], "Hidden row")
 
     def test_nontuple_row(self):
         """ensure the C version of BaseRow handles
@@ -1131,7 +1181,7 @@ class ResultProxyTest(fixtures.TablesTest):
         )
         eq_(list(proxy), ["value"])
         eq_(proxy[0], "value")
-        eq_(proxy["key"], "value")
+        eq_(proxy._mapping["key"], "value")
 
     @testing.provide_metadata
     def test_no_rowcount_on_selects_inserts(self):
@@ -1202,8 +1252,8 @@ class ResultProxyTest(fixtures.TablesTest):
 
         testing.db.execute(values.insert(), dict(key="One", value="Uno"))
         row = testing.db.execute(values.select()).first()
-        eq_(row["key"], "One")
-        eq_(row["value"], "Uno")
+        eq_(row._mapping["key"], "One")
+        eq_(row._mapping["value"], "Uno")
         eq_(row[0], "One")
         eq_(row[1], "Uno")
         eq_(row[-2], "One")
@@ -1213,8 +1263,8 @@ class ResultProxyTest(fixtures.TablesTest):
     @testing.only_on("sqlite")
     def test_row_getitem_indexes_raw(self):
         row = testing.db.execute("select 'One' as key, 'Uno' as value").first()
-        eq_(row["key"], "One")
-        eq_(row["value"], "Uno")
+        eq_(row._mapping["key"], "One")
+        eq_(row._mapping["value"], "Uno")
         eq_(row[0], "One")
         eq_(row[1], "Uno")
         eq_(row[-2], "One")
@@ -1366,12 +1416,12 @@ class KeyTargetingTest(fixtures.TablesTest):
         )
         row = testing.db.execute(stmt).first()
 
-        eq_(row[expression], "a1")
-        eq_(row[lt], 2)
+        eq_(row._mapping[expression], "a1")
+        eq_(row._mapping[lt], 2)
 
         # Postgresql for example has the key as "?column?", which dupes
         # easily.  we get around that because we know that "2" is unique
-        eq_(row["2"], 2)
+        eq_(row._mapping["2"], 2)
 
     def test_keyed_targeting_no_label_at_all_one(self):
         class not_named_max(expression.ColumnElement):
@@ -1411,8 +1461,8 @@ class KeyTargetingTest(fixtures.TablesTest):
         stmt = select([t1, t2]).select_from(self.tables.keyed1)
         row = testing.db.execute(stmt).first()
 
-        eq_(row[t1], "a1")
-        eq_(row[t2], "a1")
+        eq_(row._mapping[t1], "a1")
+        eq_(row._mapping[t2], "a1")
 
     @testing.requires.duplicate_names_in_cursor_description
     def test_keyed_accessor_composite_conflict_2(self):
@@ -1424,7 +1474,7 @@ class KeyTargetingTest(fixtures.TablesTest):
         ).first()
 
         # column access is unambiguous
-        eq_(row[self.tables.keyed2.c.b], "b2")
+        eq_(row._mapping[self.tables.keyed2.c.b], "b2")
 
         # row.a is ambiguous
         assert_raises_message(
@@ -1459,10 +1509,10 @@ class KeyTargetingTest(fixtures.TablesTest):
         ).first()
 
         # column access is unambiguous
-        eq_(row[self.tables.keyed2.c.b], "b2")
+        eq_(row._mapping[self.tables.keyed2.c.b], "b2")
 
-        eq_(row["keyed2_b"], "b2")
-        eq_(row["keyed1_a"], "a1")
+        eq_(row._mapping["keyed2_b"], "b2")
+        eq_(row._mapping["keyed1_a"], "a1")
 
     def test_keyed_accessor_composite_names_precedent(self):
         keyed1 = self.tables.keyed1
@@ -1516,8 +1566,11 @@ class KeyTargetingTest(fixtures.TablesTest):
         eq_(row.keyed1_c, "c1")
         eq_(row.keyed2_a, "a2")
         eq_(row.keyed2_b, "b2")
+
         assert_raises(KeyError, lambda: row["keyed2_c"])
         assert_raises(KeyError, lambda: row["keyed2_q"])
+        assert_raises(KeyError, lambda: row._mapping["keyed2_c"])
+        assert_raises(KeyError, lambda: row._mapping["keyed2_q"])
 
     def test_keyed_accessor_column_is_repeated_multiple_times(self):
         # test new logic added as a result of the combination of #4892 and
@@ -1568,10 +1621,10 @@ class KeyTargetingTest(fixtures.TablesTest):
         row = result.first()
 
         # keyed access will ignore the dupe cols
-        eq_(row[keyed2.c.a], "a2")
-        eq_(row[keyed3.c.a], "a3")
+        eq_(row._mapping[keyed2.c.a], "a2")
+        eq_(row._mapping[keyed3.c.a], "a3")
         eq_(result._getter(keyed3.c.a)(row), "a3")
-        eq_(row[keyed3.c.d], "d3")
+        eq_(row._mapping[keyed3.c.d], "d3")
 
         # however we can get everything positionally
         eq_(row, ("a2", "a3", "a2", "a2", "a3", "a3", "d3", "d3"))
@@ -1591,8 +1644,8 @@ class KeyTargetingTest(fixtures.TablesTest):
         stmt = select([a, b]).select_from(table("keyed2"))
         row = testing.db.execute(stmt).first()
 
-        in_(a, row)
-        in_(b, row)
+        in_(a, row._mapping)
+        in_(b, row._mapping)
 
     def test_columnclause_schema_column_two(self):
         keyed2 = self.tables.keyed2
@@ -1600,17 +1653,16 @@ class KeyTargetingTest(fixtures.TablesTest):
         stmt = select([keyed2.c.a, keyed2.c.b])
         row = testing.db.execute(stmt).first()
 
-        in_(keyed2.c.a, row)
-        in_(keyed2.c.b, row)
+        in_(keyed2.c.a, row._mapping)
+        in_(keyed2.c.b, row._mapping)
 
     def test_columnclause_schema_column_three(self):
         # this is also addressed by [ticket:2932]
-
         stmt = text("select a, b from keyed2").columns(a=CHAR, b=CHAR)
         row = testing.db.execute(stmt).first()
 
-        in_(stmt.selected_columns.a, row)
-        in_(stmt.selected_columns.b, row)
+        in_(stmt.selected_columns.a, row._mapping)
+        in_(stmt.selected_columns.b, row._mapping)
 
     def test_columnclause_schema_column_four(self):
         # originally addressed by [ticket:2932], however liberalized
@@ -1622,10 +1674,11 @@ class KeyTargetingTest(fixtures.TablesTest):
         )
         row = testing.db.execute(stmt).first()
 
-        in_(a, row)
-        in_(b, row)
-        in_(stmt.selected_columns.keyed2_a, row)
-        in_(stmt.selected_columns.keyed2_b, row)
+        in_(a, row._mapping)
+        in_(b, row._mapping)
+
+        in_(stmt.selected_columns.keyed2_a, row._mapping)
+        in_(stmt.selected_columns.keyed2_b, row._mapping)
 
     def test_columnclause_schema_column_five(self):
         # this is also addressed by [ticket:2932]
@@ -1635,8 +1688,8 @@ class KeyTargetingTest(fixtures.TablesTest):
         )
         row = testing.db.execute(stmt).first()
 
-        in_(stmt.selected_columns.keyed2_a, row)
-        in_(stmt.selected_columns.keyed2_b, row)
+        in_(stmt.selected_columns.keyed2_a, row._mapping)
+        in_(stmt.selected_columns.keyed2_b, row._mapping)
 
 
 class PositionalTextTest(fixtures.TablesTest):
@@ -1668,13 +1721,14 @@ class PositionalTextTest(fixtures.TablesTest):
         result = testing.db.execute(stmt)
         row = result.first()
 
-        eq_(row[c2], "b1")
-        eq_(row[c4], "d1")
+        eq_(row._mapping[c2], "b1")
+        eq_(row._mapping[c4], "d1")
         eq_(row[1], "b1")
-        eq_(row["b"], "b1")
-        eq_(row.keys(), ["a", "b", "c", "d"])
-        eq_(row["r"], "c1")
-        eq_(row["d"], "d1")
+        eq_(row._mapping["b"], "b1")
+        eq_(list(row._mapping.keys()), ["a", "b", "c", "d"])
+        eq_(row._fields, ("a", "b", "c", "d"))
+        eq_(row._mapping["r"], "c1")
+        eq_(row._mapping["d"], "d1")
 
     def test_fewer_cols_than_sql_positional(self):
         c1, c2 = column("q"), column("p")
@@ -1684,8 +1738,8 @@ class PositionalTextTest(fixtures.TablesTest):
         result = testing.db.execute(stmt)
         row = result.first()
 
-        eq_(row[c1], "a1")
-        eq_(row["c"], "c1")
+        eq_(row._mapping[c1], "a1")
+        eq_(row._mapping["c"], "c1")
 
     def test_fewer_cols_than_sql_non_positional(self):
         c1, c2 = column("a"), column("p")
@@ -1696,15 +1750,17 @@ class PositionalTextTest(fixtures.TablesTest):
         row = result.first()
 
         # c1 name matches, locates
-        eq_(row[c1], "a1")
-        eq_(row["c"], "c1")
+        eq_(row._mapping[c1], "a1")
+        eq_(row._mapping["c"], "c1")
 
         # c2 name does not match, doesn't locate
         assert_raises_message(
-            exc.NoSuchColumnError, "in row for column 'p'", lambda: row[c2]
+            exc.NoSuchColumnError,
+            "in row for column 'p'",
+            lambda: row._mapping[c2],
         )
 
-    def test_more_cols_than_sql(self):
+    def test_more_cols_than_sql_positional(self):
         c1, c2, c3, c4 = column("q"), column("p"), column("r"), column("d")
         stmt = text("select a, b from text1").columns(c1, c2, c3, c4)
 
@@ -1715,10 +1771,56 @@ class PositionalTextTest(fixtures.TablesTest):
             result = testing.db.execute(stmt)
 
         row = result.first()
-        eq_(row[c2], "b1")
+        eq_(row._mapping[c2], "b1")
+
+        assert_raises_message(
+            exc.NoSuchColumnError,
+            "in row for column 'r'",
+            lambda: row._mapping[c3],
+        )
+
+    def test_more_cols_than_sql_nonpositional(self):
+        c1, c2, c3, c4 = column("b"), column("a"), column("r"), column("d")
+        stmt = TextualSelect(
+            text("select a, b from text1"), [c1, c2, c3, c4], positional=False
+        )
+
+        # no warning for non-positional
+        result = testing.db.execute(stmt)
+
+        row = result.first()
+        eq_(row._mapping[c1], "b1")
+        eq_(row._mapping[c2], "a1")
+
+        assert_raises_message(
+            exc.NoSuchColumnError,
+            "in row for column 'r'",
+            lambda: row._mapping[c3],
+        )
+
+    def test_more_cols_than_sql_nonpositional_labeled_cols(self):
+        text1 = self.tables.text1
+        c1, c2, c3, c4 = text1.c.b, text1.c.a, column("r"), column("d")
+
+        # the compiler will enable loose matching for this statement
+        # so that column._label is taken into account
+        stmt = TextualSelect(
+            text("select a, b AS text1_b from text1"),
+            [c1, c2, c3, c4],
+            positional=False,
+        )
+
+        # no warning for non-positional
+        result = testing.db.execute(stmt)
+
+        row = result.first()
+        eq_(row._mapping[c1], "b1")
+        eq_(row._mapping[c2], "a1")
 
         assert_raises_message(
-            exc.NoSuchColumnError, "in row for column 'r'", lambda: row[c3]
+            exc.NoSuchColumnError,
+            "in row for column 'r'",
+            lambda: row._mapping[c3],
         )
 
     def test_dupe_col_obj(self):
@@ -1745,17 +1847,17 @@ class PositionalTextTest(fixtures.TablesTest):
         result = testing.db.execute(stmt)
         row = result.first()
 
-        eq_(row[c1], "a1")
-        eq_(row[c2], "b1")
-        eq_(row[c3], "c1")
-        eq_(row[c4], "d1")
+        eq_(row._mapping[c1], "a1")
+        eq_(row._mapping[c2], "b1")
+        eq_(row._mapping[c3], "c1")
+        eq_(row._mapping[c4], "d1")
 
         # text1.c.b goes nowhere....because we hit key fallback
         # but the text1.c.b doesn't derive from text1.c.c
         assert_raises_message(
             exc.NoSuchColumnError,
             "Could not locate column in row for column 'text1.b'",
-            lambda: row[text1.c.b],
+            lambda: row._mapping[text1.c.b],
         )
 
     def test_anon_aliased_overlapping(self):
@@ -1770,10 +1872,10 @@ class PositionalTextTest(fixtures.TablesTest):
         result = testing.db.execute(stmt)
         row = result.first()
 
-        eq_(row[c1], "a1")
-        eq_(row[c2], "b1")
-        eq_(row[c3], "c1")
-        eq_(row[c4], "d1")
+        eq_(row._mapping[c1], "a1")
+        eq_(row._mapping[c2], "b1")
+        eq_(row._mapping[c3], "c1")
+        eq_(row._mapping[c4], "d1")
 
     def test_anon_aliased_name_conflict(self):
         text1 = self.tables.text1
@@ -1791,17 +1893,17 @@ class PositionalTextTest(fixtures.TablesTest):
         result = testing.db.execute(stmt)
         row = result.first()
 
-        eq_(row[c1], "a1")
-        eq_(row[c2], "b1")
-        eq_(row[c3], "c1")
-        eq_(row[c4], "d1")
+        eq_(row._mapping[c1], "a1")
+        eq_(row._mapping[c2], "b1")
+        eq_(row._mapping[c3], "c1")
+        eq_(row._mapping[c4], "d1")
 
         # fails, because we hit key fallback and find conflicts
         # in columns that are presnet
         assert_raises_message(
             exc.NoSuchColumnError,
             "Could not locate column in row for column 'text1.a'",
-            lambda: row[text1.c.a],
+            lambda: row._mapping[text1.c.a],
         )
 
 
@@ -1834,8 +1936,11 @@ class AlternateResultProxyTest(fixtures.TablesTest):
         self.table = self.tables.test
 
         class ExcCtx(default.DefaultExecutionContext):
+            def get_result_cursor_strategy(self, result):
+                return cls.create(result)
+
             def get_result_proxy(self):
-                return cls(self)
+                raise NotImplementedError()
 
         self.patcher = patch.object(
             self.engine.dialect, "execution_ctx_cls", ExcCtx
@@ -1847,7 +1952,7 @@ class AlternateResultProxyTest(fixtures.TablesTest):
         with self._proxy_fixture(cls):
             rows = []
             r = self.engine.execute(select([self.table]))
-            assert isinstance(r, cls)
+            assert isinstance(r.cursor_strategy, cls)
             for i in range(5):
                 rows.append(r.fetchone())
             eq_(rows, [(i, "t_%d" % i) for i in range(1, 6)])
@@ -1908,40 +2013,42 @@ class AlternateResultProxyTest(fixtures.TablesTest):
         )
 
     def test_basic_plain(self):
-        self._test_proxy(_result.ResultProxy)
+        self._test_proxy(_result.DefaultCursorFetchStrategy)
 
     def test_basic_buffered_row_result_proxy(self):
-        self._test_proxy(_result.BufferedRowResultProxy)
+        self._test_proxy(_result.BufferedRowCursorFetchStrategy)
 
     def test_basic_fully_buffered_result_proxy(self):
-        self._test_proxy(_result.FullyBufferedResultProxy)
+        self._test_proxy(_result.FullyBufferedCursorFetchStrategy)
 
     def test_basic_buffered_column_result_proxy(self):
-        self._test_proxy(_result.BufferedColumnResultProxy)
+        self._test_proxy(_result.DefaultCursorFetchStrategy)
 
     def test_resultprocessor_plain(self):
-        self._test_result_processor(_result.ResultProxy, False)
+        self._test_result_processor(_result.DefaultCursorFetchStrategy, False)
 
     def test_resultprocessor_plain_cached(self):
-        self._test_result_processor(_result.ResultProxy, True)
-
-    def test_resultprocessor_buffered_column(self):
-        self._test_result_processor(_result.BufferedColumnResultProxy, False)
-
-    def test_resultprocessor_buffered_column_cached(self):
-        self._test_result_processor(_result.BufferedColumnResultProxy, True)
+        self._test_result_processor(_result.DefaultCursorFetchStrategy, True)
 
     def test_resultprocessor_buffered_row(self):
-        self._test_result_processor(_result.BufferedRowResultProxy, False)
+        self._test_result_processor(
+            _result.BufferedRowCursorFetchStrategy, False
+        )
 
     def test_resultprocessor_buffered_row_cached(self):
-        self._test_result_processor(_result.BufferedRowResultProxy, True)
+        self._test_result_processor(
+            _result.BufferedRowCursorFetchStrategy, True
+        )
 
     def test_resultprocessor_fully_buffered(self):
-        self._test_result_processor(_result.FullyBufferedResultProxy, False)
+        self._test_result_processor(
+            _result.FullyBufferedCursorFetchStrategy, False
+        )
 
     def test_resultprocessor_fully_buffered_cached(self):
-        self._test_result_processor(_result.FullyBufferedResultProxy, True)
+        self._test_result_processor(
+            _result.FullyBufferedCursorFetchStrategy, True
+        )
 
     def _test_result_processor(self, cls, use_cache):
         class MyType(TypeDecorator):
@@ -1963,7 +2070,7 @@ class AlternateResultProxyTest(fixtures.TablesTest):
 
     @testing.fixture
     def row_growth_fixture(self):
-        with self._proxy_fixture(_result.BufferedRowResultProxy):
+        with self._proxy_fixture(_result.BufferedRowCursorFetchStrategy):
             with self.engine.connect() as conn:
                 conn.execute(
                     self.table.insert(),
@@ -2001,7 +2108,7 @@ class AlternateResultProxyTest(fixtures.TablesTest):
         max_size = max(checks.values())
         for idx, row in enumerate(result, 0):
             if idx in checks:
-                assertion[idx] = result._bufsize
-            le_(len(result._BufferedRowResultProxy__rowbuffer), max_size)
+                assertion[idx] = result.cursor_strategy._bufsize
+            le_(len(result.cursor_strategy._rowbuffer), max_size)
 
         eq_(checks, assertion)
index b8a3cce7cbf0355ce0e44d8c55910fde21f1372b..4cfb3f0d66de7b361e757a6de9e109cf95122ad4 100644 (file)
@@ -65,7 +65,7 @@ class ReturningTest(fixtures.TestBase, AssertsExecutionResults):
             .execute({"persons": 1, "full": False})
         )
 
-        row = result.first()
+        row = result.first()._mapping
         assert row[table.c.id] == row["id"] == 1
         assert row[table.c.full] == row["full"]
         assert row["full"] is False
@@ -76,7 +76,7 @@ class ReturningTest(fixtures.TestBase, AssertsExecutionResults):
             .returning(table.c.persons, table.c.full, table.c.goofy)
             .execute()
         )
-        row = result.first()
+        row = result.first()._mapping
         assert row[table.c.persons] == row["persons"] == 5
         assert row[table.c.full] == row["full"]
 
@@ -91,7 +91,7 @@ class ReturningTest(fixtures.TestBase, AssertsExecutionResults):
             .returning(table.c.persons.label("lala"))
             .execute()
         )
-        row = result.first()
+        row = result.first()._mapping
         assert row["lala"] == 6
 
     @testing.fails_on(
@@ -181,7 +181,7 @@ class ReturningTest(fixtures.TestBase, AssertsExecutionResults):
             'insert into tables (id, persons, "full") '
             "values (5, 10, %s) returning persons" % literal_true
         )
-        eq_([dict(row) for row in result4], [{"persons": 10}])
+        eq_([dict(row._mapping) for row in result4], [{"persons": 10}])
 
     def test_delete_returning(self):
         table.insert().execute(
@@ -288,10 +288,10 @@ class KeyReturningTest(fixtures.TestBase, AssertsExecutionResults):
         result = (
             table.insert().returning(table.c.foo_id).execute(data="somedata")
         )
-        row = result.first()
+        row = result.first()._mapping
         assert row[table.c.foo_id] == row["id"] == 1
 
-        result = table.select().execute().first()
+        result = table.select().execute().first()._mapping
         assert row[table.c.foo_id] == row["id"] == 1
 
 
@@ -331,7 +331,10 @@ class ReturnDefaultsTest(fixtures.TablesTest):
             t1.insert().values(upddef=1).return_defaults(t1.c.insdef)
         )
         eq_(
-            [result.returned_defaults[k] for k in (t1.c.id, t1.c.insdef)],
+            [
+                result.returned_defaults._mapping[k]
+                for k in (t1.c.id, t1.c.insdef)
+            ],
             [1, 0],
         )
 
@@ -341,7 +344,10 @@ class ReturnDefaultsTest(fixtures.TablesTest):
             t1.insert(return_defaults=[t1.c.insdef]).values(upddef=1)
         )
         eq_(
-            [result.returned_defaults[k] for k in (t1.c.id, t1.c.insdef)],
+            [
+                result.returned_defaults._mapping[k]
+                for k in (t1.c.id, t1.c.insdef)
+            ],
             [1, 0],
         )
 
@@ -351,7 +357,9 @@ class ReturnDefaultsTest(fixtures.TablesTest):
         result = testing.db.execute(
             t1.update().values(data="d1").return_defaults(t1.c.upddef)
         )
-        eq_([result.returned_defaults[k] for k in (t1.c.upddef,)], [1])
+        eq_(
+            [result.returned_defaults._mapping[k] for k in (t1.c.upddef,)], [1]
+        )
 
     def test_arg_update_pk(self):
         t1 = self.tables.t1
@@ -359,7 +367,9 @@ class ReturnDefaultsTest(fixtures.TablesTest):
         result = testing.db.execute(
             t1.update(return_defaults=[t1.c.upddef]).values(data="d1")
         )
-        eq_([result.returned_defaults[k] for k in (t1.c.upddef,)], [1])
+        eq_(
+            [result.returned_defaults._mapping[k] for k in (t1.c.upddef,)], [1]
+        )
 
     def test_insert_non_default(self):
         """test that a column not marked at all as a
@@ -370,7 +380,10 @@ class ReturnDefaultsTest(fixtures.TablesTest):
             t1.insert().values(upddef=1).return_defaults(t1.c.data)
         )
         eq_(
-            [result.returned_defaults[k] for k in (t1.c.id, t1.c.data)],
+            [
+                result.returned_defaults._mapping[k]
+                for k in (t1.c.id, t1.c.data)
+            ],
             [1, None],
         )
 
@@ -383,7 +396,10 @@ class ReturnDefaultsTest(fixtures.TablesTest):
         result = testing.db.execute(
             t1.update().values(upddef=2).return_defaults(t1.c.data)
         )
-        eq_([result.returned_defaults[k] for k in (t1.c.data,)], [None])
+        eq_(
+            [result.returned_defaults._mapping[k] for k in (t1.c.data,)],
+            [None],
+        )
 
     def test_insert_non_default_plus_default(self):
         t1 = self.tables.t1
@@ -393,7 +409,7 @@ class ReturnDefaultsTest(fixtures.TablesTest):
             .return_defaults(t1.c.data, t1.c.insdef)
         )
         eq_(
-            dict(result.returned_defaults),
+            dict(result.returned_defaults._mapping),
             {"id": 1, "data": None, "insdef": 0},
         )
 
@@ -405,7 +421,10 @@ class ReturnDefaultsTest(fixtures.TablesTest):
             .values(insdef=2)
             .return_defaults(t1.c.data, t1.c.upddef)
         )
-        eq_(dict(result.returned_defaults), {"data": None, "upddef": 1})
+        eq_(
+            dict(result.returned_defaults._mapping),
+            {"data": None, "upddef": 1},
+        )
 
     def test_insert_all(self):
         t1 = self.tables.t1
@@ -413,7 +432,7 @@ class ReturnDefaultsTest(fixtures.TablesTest):
             t1.insert().values(upddef=1).return_defaults()
         )
         eq_(
-            dict(result.returned_defaults),
+            dict(result.returned_defaults._mapping),
             {"id": 1, "data": None, "insdef": 0},
         )
 
@@ -423,7 +442,7 @@ class ReturnDefaultsTest(fixtures.TablesTest):
         result = testing.db.execute(
             t1.update().values(insdef=2).return_defaults()
         )
-        eq_(dict(result.returned_defaults), {"upddef": 1})
+        eq_(dict(result.returned_defaults._mapping), {"upddef": 1})
 
 
 class ImplicitReturningFlag(fixtures.TestBase):
index 2d45231cd6663fd9c0499e29da839169b55f7fca..8c3e8c5d4678fbe683f7f22ee20fa1a695620387 100644 (file)
@@ -364,14 +364,14 @@ class RoundTripTestBase(object):
             self.tables.test_table.insert(), {"x": "X1", "y": "Y1"}
         )
         row = testing.db.execute(select([self.tables.test_table])).first()
-        eq_(row[self.tables.test_table.c.y], "Y1")
+        eq_(row._mapping[self.tables.test_table.c.y], "Y1")
 
     def test_targeting_by_string(self):
         testing.db.execute(
             self.tables.test_table.insert(), {"x": "X1", "y": "Y1"}
         )
         row = testing.db.execute(select([self.tables.test_table])).first()
-        eq_(row["y"], "Y1")
+        eq_(row._mapping["y"], "Y1")
 
     def test_targeting_apply_labels(self):
         testing.db.execute(
@@ -380,7 +380,7 @@ class RoundTripTestBase(object):
         row = testing.db.execute(
             select([self.tables.test_table]).apply_labels()
         ).first()
-        eq_(row[self.tables.test_table.c.y], "Y1")
+        eq_(row._mapping[self.tables.test_table.c.y], "Y1")
 
     def test_targeting_individual_labels(self):
         testing.db.execute(
@@ -394,7 +394,7 @@ class RoundTripTestBase(object):
                 ]
             )
         ).first()
-        eq_(row[self.tables.test_table.c.y], "Y1")
+        eq_(row._mapping[self.tables.test_table.c.y], "Y1")
 
 
 class StringRoundTripTest(fixtures.TablesTest, RoundTripTestBase):
index d40d0902f35d6ff048599d604a9772965f00ee88..356470dd3a91396467e916857d98b1a2186e5f20 100644 (file)
@@ -1970,13 +1970,15 @@ class BinaryTest(fixtures.TestBase, AssertsExecutionResults):
             ),
         ):
             result = stmt.execute().fetchall()
-            eq_(stream1, result[0]["data"])
-            eq_(stream1[0:100], result[0]["data_slice"])
-            eq_(stream2, result[1]["data"])
-            eq_(testobj1, result[0]["pickled"])
-            eq_(testobj2, result[1]["pickled"])
-            eq_(testobj3.moredata, result[0]["mypickle"].moredata)
-            eq_(result[0]["mypickle"].stuff, "this is the right stuff")
+            eq_(stream1, result[0]._mapping["data"])
+            eq_(stream1[0:100], result[0]._mapping["data_slice"])
+            eq_(stream2, result[1]._mapping["data"])
+            eq_(testobj1, result[0]._mapping["pickled"])
+            eq_(testobj2, result[1]._mapping["pickled"])
+            eq_(testobj3.moredata, result[0]._mapping["mypickle"].moredata)
+            eq_(
+                result[0]._mapping["mypickle"].stuff, "this is the right stuff"
+            )
 
     @testing.requires.binary_comparisons
     def test_comparison(self):
@@ -2875,9 +2877,9 @@ class IntervalTest(fixtures.TestBase, AssertsExecutionResults):
                 non_native_interval=delta,
             )
             row = conn.execute(interval_table.select()).first()
-        eq_(row["native_interval"], small_delta)
-        eq_(row["native_interval_args"], delta)
-        eq_(row["non_native_interval"], delta)
+        eq_(row.native_interval, small_delta)
+        eq_(row.native_interval_args, delta)
+        eq_(row.non_native_interval, delta)
 
     def test_null(self):
         with testing.db.begin() as conn:
@@ -2888,9 +2890,9 @@ class IntervalTest(fixtures.TestBase, AssertsExecutionResults):
                 non_native_interval=None,
             )
             row = conn.execute(interval_table.select()).first()
-        eq_(row["native_interval"], None)
-        eq_(row["native_interval_args"], None)
-        eq_(row["non_native_interval"], None)
+        eq_(row.native_interval, None)
+        eq_(row.native_interval_args, None)
+        eq_(row.non_native_interval, None)
 
 
 class IntegerTest(fixtures.TestBase):
index dd7cad6b2688db71c91ab9f917dc8086e88f1470..195a699bbf9b4750e982f0dfa45803092cc471ab 100644 (file)
@@ -128,18 +128,18 @@ class UnicodeSchemaTest(fixtures.TestBase):
         )
 
         row = t1.select().execute().first()
-        eq_(row[t1.c[u("méil")]], 1)
-        eq_(row[t1.c[ue("\u6e2c\u8a66")]], 5)
+        eq_(row._mapping[t1.c[u("méil")]], 1)
+        eq_(row._mapping[t1.c[ue("\u6e2c\u8a66")]], 5)
 
         row = t2.select().execute().first()
-        eq_(row[t2.c[u("a")]], 1)
-        eq_(row[t2.c[u("b")]], 1)
+        eq_(row._mapping[t2.c[u("a")]], 1)
+        eq_(row._mapping[t2.c[u("b")]], 1)
 
         row = t3.select().execute().first()
-        eq_(row[t3.c[ue("\u6e2c\u8a66_id")]], 1)
-        eq_(row[t3.c[ue("unitable1_\u6e2c\u8a66")]], 5)
-        eq_(row[t3.c[u("Unitéble2_b")]], 1)
-        eq_(row[t3.c[ue("\u6e2c\u8a66_self")]], 1)
+        eq_(row._mapping[t3.c[ue("\u6e2c\u8a66_id")]], 1)
+        eq_(row._mapping[t3.c[ue("unitable1_\u6e2c\u8a66")]], 5)
+        eq_(row._mapping[t3.c[u("Unitéble2_b")]], 1)
+        eq_(row._mapping[t3.c[ue("\u6e2c\u8a66_self")]], 1)
 
     def test_reflect(self):
         t1.insert().execute({u("méil"): 2, ue("\u6e2c\u8a66"): 7})