]> git.ipfire.org Git - thirdparty/sqlalchemy/sqlalchemy.git/commitdiff
Add deprecation warnings to all deprecated APIs
authorMike Bayer <mike_mp@zzzcomputing.com>
Fri, 21 Dec 2018 03:05:36 +0000 (22:05 -0500)
committerMike Bayer <mike_mp@zzzcomputing.com>
Wed, 23 Jan 2019 23:10:06 +0000 (18:10 -0500)
A large change throughout the library has ensured that all objects, parameters,
and behaviors which have been noted as deprecated or legacy now emit
``DeprecationWarning`` warnings when invoked.   As the Python 3 interpreter now
defaults to displaying deprecation warnings, as well as that modern test suites
based on tools like tox and pytest tend to display deprecation warnings,
this change should make it easier to note what API features are obsolete.

See the notes added to the changelog and migration notes for further
details.

Fixes: #4393
Change-Id: If0ea11a1fc24f9a8029352eeadfc49a7a54c0a1b

101 files changed:
doc/build/changelog/migration_13.rst
doc/build/changelog/unreleased_13/4393.rst [new file with mode: 0644]
doc/build/core/connections.rst
lib/sqlalchemy/dialects/mssql/base.py
lib/sqlalchemy/dialects/mssql/information_schema.py
lib/sqlalchemy/dialects/mysql/oursql.py
lib/sqlalchemy/dialects/oracle/cx_oracle.py
lib/sqlalchemy/dialects/postgresql/psycopg2.py
lib/sqlalchemy/engine/__init__.py
lib/sqlalchemy/engine/base.py
lib/sqlalchemy/engine/default.py
lib/sqlalchemy/engine/interfaces.py
lib/sqlalchemy/engine/reflection.py
lib/sqlalchemy/engine/result.py
lib/sqlalchemy/engine/strategies.py
lib/sqlalchemy/engine/threadlocal.py
lib/sqlalchemy/event/base.py
lib/sqlalchemy/event/registry.py
lib/sqlalchemy/events.py
lib/sqlalchemy/ext/horizontal_shard.py
lib/sqlalchemy/interfaces.py
lib/sqlalchemy/orm/attributes.py
lib/sqlalchemy/orm/collections.py
lib/sqlalchemy/orm/deprecated_interfaces.py
lib/sqlalchemy/orm/descriptor_props.py
lib/sqlalchemy/orm/mapper.py
lib/sqlalchemy/orm/properties.py
lib/sqlalchemy/orm/query.py
lib/sqlalchemy/orm/relationships.py
lib/sqlalchemy/orm/session.py
lib/sqlalchemy/orm/strategy_options.py
lib/sqlalchemy/pool/base.py
lib/sqlalchemy/pool/dbapi_proxy.py
lib/sqlalchemy/pool/impl.py
lib/sqlalchemy/sql/compiler.py
lib/sqlalchemy/sql/ddl.py
lib/sqlalchemy/sql/elements.py
lib/sqlalchemy/sql/schema.py
lib/sqlalchemy/sql/selectable.py
lib/sqlalchemy/sql/sqltypes.py
lib/sqlalchemy/testing/engines.py
lib/sqlalchemy/testing/suite/test_reflection.py
lib/sqlalchemy/testing/suite/test_results.py
lib/sqlalchemy/testing/suite/test_types.py
lib/sqlalchemy/util/__init__.py
lib/sqlalchemy/util/compat.py
lib/sqlalchemy/util/deprecations.py
test/aaa_profiling/test_pool.py
test/dialect/mssql/test_query.py
test/dialect/mysql/test_types.py
test/dialect/oracle/test_compiler.py
test/dialect/oracle/test_dialect.py
test/dialect/oracle/test_types.py
test/dialect/postgresql/test_dialect.py
test/dialect/postgresql/test_types.py
test/dialect/test_sqlite.py
test/engine/test_bind.py
test/engine/test_ddlevents.py
test/engine/test_deprecations.py [new file with mode: 0644]
test/engine/test_execute.py
test/engine/test_parseconnect.py
test/engine/test_pool.py
test/engine/test_reconnect.py
test/engine/test_transaction.py
test/ext/declarative/test_basic.py
test/ext/test_associationproxy.py
test/ext/test_horizontal_shard.py
test/orm/inheritance/test_assorted_poly.py
test/orm/inheritance/test_basic.py
test/orm/inheritance/test_polymorphic_rel.py
test/orm/inheritance/test_relationship.py
test/orm/test_assorted_eager.py
test/orm/test_attributes.py
test/orm/test_collection.py
test/orm/test_defaults.py
test/orm/test_deferred.py
test/orm/test_deprecations.py
test/orm/test_eager_relations.py
test/orm/test_events.py
test/orm/test_froms.py
test/orm/test_generative.py
test/orm/test_lockmode.py
test/orm/test_mapper.py
test/orm/test_merge.py
test/orm/test_of_type.py
test/orm/test_options.py
test/orm/test_query.py
test/orm/test_selectin_relations.py
test/orm/test_session.py
test/orm/test_subquery_relations.py
test/orm/test_transaction.py
test/orm/test_versioning.py
test/sql/test_compiler.py
test/sql/test_defaults.py
test/sql/test_deprecations.py [new file with mode: 0644]
test/sql/test_generative.py
test/sql/test_metadata.py
test/sql/test_resultset.py
test/sql/test_selectable.py
test/sql/test_text.py
test/sql/test_types.py

index dd1f9009e03a9ec988fcbfd4c5f9f1ddf169e1b4..94e7c857a762f4dfedafec4ca18b617f0bd7b0be 100644 (file)
@@ -17,6 +17,65 @@ their applications from the 1.2 series of SQLAlchemy to 1.3.
 Please carefully review the sections on behavioral changes for
 potentially backwards-incompatible changes in behavior.
 
+General
+=======
+
+.. _change_4393_general:
+
+Deprecation warnings are emitted for all deprecated elements; new deprecations added
+------------------------------------------------------------------------------------
+
+Release 1.3 ensures that all behaviors and APIs that are deprecated, including
+all those that have been long listed as "legacy" for years, are emitting
+``DeprecationWarning`` warnings. This includes when making use of parameters
+such as :paramref:`.Session.weak_identity_map` and classes such as
+:class:`.MapperExtension`.     While all deprecations have been noted in the
+documentation, often they did not use a proper restructured text directive, or
+include in what version they were deprecated.  Whether or not a particular API
+feature actually emitted a deprecation warning was not consistent.  The general
+attitude was that most or all of these deprecated features were treated as
+long-term legacy features with no plans to remove them.
+
+The change includes that all documented deprecations now use a proper
+restructured text directive in the documentation with a version number, the
+verbiage that the feature or use case will be removed in a future release is
+made explicit (e.g., no more legacy forever use cases), and that use of any
+such feature or use case will definitely emit a ``DeprecationWarning``, which
+in Python 3 as well as when using modern testing tools like Pytest are now made
+more explicit in the standard error stream.  The goal is that these long
+deprecated features, going back as far as version 0.7 or 0.6, should start
+being removed entirely, rather than keeping them around as "legacy" features.
+Additionally, some major new deprecations are being added as of version 1.3.
+As SQLAlchemy has 14 years of real world use by thousands of developers, it's
+possible to point to a single stream of use cases that blend together well, and
+to trim away features and patterns that work against this single way of
+working.
+
+The larger context is that SQLAlchemy seeks to adjust to the coming Python
+3-only world, as well as a type-annotated world, and towards this goal there
+are **tentative** plans for a major rework of  SQLAlchemy which would hopefully
+greatly reduce the cognitive load of the API as well as perform a major pass
+over the great many differences in implementation and use between Core and ORM.
+As these two systems evolved dramatically after SQLAlchemy's first release, in
+particular the ORM still retains lots of "bolted on" behaviors that keep the
+wall of separation between Core and  ORM too high.  By focusing the API
+ahead of time on a single pattern for each supported use case, the eventual
+job of migrating to a significantly altered API becomes simpler.
+
+For the most major deprecations being added in 1.3, see the linked sections
+below.
+
+
+.. seealso::
+
+    :ref:`change_4393_threadlocal`
+
+    :ref:`change_4393_convertunicode`
+
+    :ref:`FIXME` - FIXME - link to non-primary mapper deprecation
+
+:ticket:`4393`
+
 New Features and Improvements - ORM
 ===================================
 
@@ -1055,6 +1114,125 @@ considered, however this was too much verbosity).
 
 
 
+Key Changes - Core
+==================
+
+.. _change_4393_threadlocal:
+
+"threadlocal" engine strategy deprecated
+-----------------------------------------
+
+The :ref:`"threadlocal" engine strategy <threadlocal_strategy>` was added
+around SQLAlchemy 0.2, as a solution to the problem that the standard way of
+operating in SQLAlchemy 0.1, which can be summed up as "threadlocal
+everything",  was found to be lacking. In retrospect, it seems fairly absurd
+that by SQLAlchemy's first releases which were in every regard "alpha", that
+there was concern that too many users had already settled on the existing API
+to simply change it.
+
+The original usage model for SQLAlchemy looked like this::
+
+    engine.begin()
+
+    table.insert().execute(<params>)
+    result = table.select().execute()
+
+    table.update().execute(<params>)
+
+    engine.commit()
+
+After a few months of real world use, it was clear that trying to pretend a
+"connection" or a "transaction" was a hidden implementation detail was a bad
+idea, particularly the moment someone needed to deal with more than one
+database connection at a time.   So the usage paradigm we see today was
+introduced, minus the context managers since they didn't yet exist in Python::
+
+    conn = engine.connect()
+    try:
+        trans = conn.begin()
+
+        conn.execute(table.insert(), <params>)
+        result = conn.execute(table.select())
+
+        conn.execute(table.update(), <params>)
+
+        trans.commit()
+    except:
+        trans.rollback()
+        raise
+    finally:
+        conn.close()
+
+The above paradigm was what people needed, but since it was still kind of
+verbose (because no context managers), the old way of working was kept around
+as well and it became the threadlocal engine strategy.
+
+Today, working with Core is much more succinct, and even more succinct than
+the original pattern, thanks to context managers::
+
+    with engine.begin() as conn:
+        conn.execute(table.insert(), <params>)
+        result = conn.execute(table.select())
+
+        conn.execute(table.update(), <params>)
+
+At this point, any remaining code that is still relying upon the "threadlocal"
+style will be encouraged via this deprecation to modernize - the feature should
+be removed totally by the next major series of SQLAlchemy, e.g. 1.4.  The
+connection pool parameter :paramref:`.Pool.use_threadlocal` is also deprecated
+as it does not actually have any effect in most cases, as is the
+:meth:`.Engine.contextual_connect` method, which is normally synonymous with
+the :meth:`.Engine.connect` method except in the case where the threadlocal
+engine is in use.
+
+.. seealso::
+
+    :ref:`threadlocal_strategy`
+
+
+:ticket:`4393`
+
+
+.. _change_4393_convertunicode:
+
+convert_unicode parameters deprecated
+--------------------------------------
+
+The parameters :paramref:`.String.convert_unicode` and
+:paramref:`.create_engine.convert_unicode` are deprecated.    The purpose of
+these parameters was to instruct SQLAlchemy to ensure that incoming Python
+Unicode objects under Python 2 were encoded to bytestrings before passing to
+the database, and to expect bytestrings from the database to be converted back
+to Python Unicode objects.   In the pre-Python 3 era, this was an enormous
+ordeal to get right, as virtually all Python DBAPIs had no Unicode support
+enabled by default, and most had major issues with the Unicode extensions that
+they did provide.    Eventually, SQLAlchemy added C extensions, one of the
+primary purposes of these extensions was to speed up the Unicode decode process
+within result sets.
+
+Once Python 3 was introduced, DBAPIs began to start supporting Unicode more
+fully, and more importantly, by default.  However, the conditions under which a
+particular DBAPI would or would not return Unicode data from a result, as well
+as accept Python Unicode values as parameters, remained extremely complicated.
+This was the beginning of the obsolescence of the "convert_unicode" flags,
+because they were no longer sufficient as a means of ensuring that
+encode/decode was occurring only where needed and not where it wasn't needed.
+Instead, "convert_unicode" started to be automatically detected by dialects.
+Part of this can be seen in the "SELECT 'test plain returns'" and "SELECT
+'test_unicode_returns'" SQL emitted by an engine the first time it connects;
+the dialect is testing that the current DBAPI with its current settings and
+backend database connection is returning Unicode by default or not.
+
+The end result is that end-user use of the "convert_unicode" flags should no
+longer be needed in any circumstances, and if they are, the SQLAlchemy project
+needs to know what those cases are and why.   Currently, hundreds of Unicode
+round trip tests pass across all major databases without the use of this flag
+so there is a fairly high level of confidence that they are no longer needed
+except in arguable non use cases such as accessing mis-encoded data from a
+legacy database, which would be better suited using custom types.
+
+
+:ticket:`4393`
 
 
 Dialect Improvements and Changes - PostgreSQL
diff --git a/doc/build/changelog/unreleased_13/4393.rst b/doc/build/changelog/unreleased_13/4393.rst
new file mode 100644 (file)
index 0000000..fd06fc5
--- /dev/null
@@ -0,0 +1,59 @@
+.. change::
+   :tags: change, general
+   :tickets: 4393
+
+   A large change throughout the library has ensured that all objects,
+   parameters, and behaviors which have been noted as deprecated or legacy now
+   emit ``DeprecationWarning`` warnings when invoked.  As the Python 3
+   interpreter now defaults to displaying deprecation warnings, as well as that
+   modern test suites based on tools like tox and pytest tend to display
+   deprecation warnings, this change should make it easier to note what API
+   features are obsolete. A major rationale for this change is so that long-
+   deprecated features that nonetheless continue to see real world
+   use can finally be removed in the near future; the biggest example of this
+   are the :class:`.SessionExtension` and :class:`.MapperExtension` classes as
+   well as a handful of other pre-event extension hooks, which have been
+   deprecated since version 0.7 but still remain in the library.  Another is
+   that several major longstanding behaviors are to be deprecated as well,
+   including the threadlocal engine strategy, the convert_unicode flag, and non
+   primary mappers.
+
+   .. seealso::
+
+      :ref:`change_4393_general`
+
+
+.. change::
+   :tags: change, engine
+   :tickets: 4393
+
+   The "threadlocal" engine strategy which has been a legacy feature of
+   SQLAlchemy since around version 0.2 is now deprecated, along with the
+   :paramref:`.Pool.use_threadlocal` parameter of :class:`.Pool` which has no
+   effect in most modern use cases.
+
+   .. seealso::
+
+      :ref:`change_4393_threadlocal`
+
+.. change::
+   :tags: change, sql
+   :tickets: 4393
+
+   The :paramref:`.create_engine.convert_unicode` and
+   :paramref:`.String.convert_unicode` parameters have been deprecated.  These
+   parameters were built back when most Python DBAPIs had little to no support
+   for Python Unicode objects, and SQLAlchemy needed to take on the very
+   complex task of marshalling data and SQL strings between Unicode and
+   bytestrings throughout the system in a performant way.  Thanks to Python 3,
+   DBAPIs were compelled to adapt to Unicode-aware APIs and today all DBAPIs
+   supported by SQLAlchemy support Unicode natively, including on Python 2,
+   allowing this long-lived and very complicated feature to finally be (mostly)
+   removed.  There are still of course a few Python 2 edge cases where
+   SQLAlchemy has to deal with Unicode however these are handled automatically;
+   in modern use, there should be no need for end-user interaction with these
+   flags.
+
+   .. seealso::
+
+      :ref:`change_4393_convertunicode`
index aacb537f5db7c551e16fc4c82f0cc11f87197733..4612da7cd882d207377dd0145c5df7b8f820f88e 100644 (file)
@@ -510,18 +510,24 @@ with the current thread, such that all parts of the
 application can participate in that transaction implicitly without the need to
 explicitly reference a :class:`.Connection`.
 
-.. note::
+.. deprecated:: 1.3
+
+    The "threadlocal" engine strategy is deprecated, and will be removed
+    in a future release.
 
-    The "threadlocal" feature is generally discouraged.   It's
-    designed for a particular pattern of usage which is generally
-    considered as a legacy pattern.  It has **no impact** on the "thread safety"
-    of SQLAlchemy components
-    or one's application. It also should not be used when using an ORM
+    This strategy is designed for a particular pattern of usage which is
+    generally considered as a legacy pattern.  It has **no impact** on the
+    "thread safety" of SQLAlchemy components or one's application. It also
+    should not be used when using an ORM
     :class:`~sqlalchemy.orm.session.Session` object, as the
     :class:`~sqlalchemy.orm.session.Session` itself represents an ongoing
     transaction and itself handles the job of maintaining connection and
     transactional resources.
 
+    .. seealso::
+
+        :ref:`change_4393_threadlocal`
+
 Enabling ``threadlocal`` is achieved as follows::
 
     db = create_engine('mysql://localhost/test', strategy='threadlocal')
index d0f02bb23098e05066343a01c2a6640e60ecfdf1..5c446341c3b4fc4c66bef2a3ef32c70d72578b5c 100644 (file)
@@ -2377,12 +2377,8 @@ class MSDialect(default.DefaultDialect):
                 "and ind.is_primary_key=0 and ind.type != 0"
             )
             .bindparams(
-                sql.bindparam(
-                    "tabname", tablename, sqltypes.String(convert_unicode=True)
-                ),
-                sql.bindparam(
-                    "schname", owner, sqltypes.String(convert_unicode=True)
-                ),
+                sql.bindparam("tabname", tablename, sqltypes.String()),
+                sql.bindparam("schname", owner, sqltypes.String()),
             )
             .columns(name=sqltypes.Unicode())
         )
@@ -2406,12 +2402,8 @@ class MSDialect(default.DefaultDialect):
                 "and sch.name=:schname"
             )
             .bindparams(
-                sql.bindparam(
-                    "tabname", tablename, sqltypes.String(convert_unicode=True)
-                ),
-                sql.bindparam(
-                    "schname", owner, sqltypes.String(convert_unicode=True)
-                ),
+                sql.bindparam("tabname", tablename, sqltypes.String()),
+                sql.bindparam("schname", owner, sqltypes.String()),
             )
             .columns(name=sqltypes.Unicode())
         )
@@ -2436,12 +2428,8 @@ class MSDialect(default.DefaultDialect):
                 "views.schema_id=sch.schema_id and "
                 "views.name=:viewname and sch.name=:schname"
             ).bindparams(
-                sql.bindparam(
-                    "viewname", viewname, sqltypes.String(convert_unicode=True)
-                ),
-                sql.bindparam(
-                    "schname", owner, sqltypes.String(convert_unicode=True)
-                ),
+                sql.bindparam("viewname", viewname, sqltypes.String()),
+                sql.bindparam("schname", owner, sqltypes.String()),
             )
         )
 
index 88628e6a7a09a26168a0c6cfd2c02cd9c1c62cdb..b72dbfe930c9b3f219872c3c2cbea5bd0752e631 100644 (file)
@@ -69,7 +69,7 @@ tables = Table(
     Column("TABLE_CATALOG", CoerceUnicode, key="table_catalog"),
     Column("TABLE_SCHEMA", CoerceUnicode, key="table_schema"),
     Column("TABLE_NAME", CoerceUnicode, key="table_name"),
-    Column("TABLE_TYPE", String(convert_unicode=True), key="table_type"),
+    Column("TABLE_TYPE", CoerceUnicode, key="table_type"),
     schema="INFORMATION_SCHEMA",
 )
 
@@ -98,9 +98,7 @@ constraints = Table(
     Column("TABLE_SCHEMA", CoerceUnicode, key="table_schema"),
     Column("TABLE_NAME", CoerceUnicode, key="table_name"),
     Column("CONSTRAINT_NAME", CoerceUnicode, key="constraint_name"),
-    Column(
-        "CONSTRAINT_TYPE", String(convert_unicode=True), key="constraint_type"
-    ),
+    Column("CONSTRAINT_TYPE", CoerceUnicode, key="constraint_type"),
     schema="INFORMATION_SCHEMA",
 )
 
index 165a8f4e20283d02d1af7a4da90872c39abb6b0d..80313a2fc9847a7c8137760ae3e4865ecbe45c7c 100644 (file)
@@ -175,7 +175,7 @@ class MySQLDialect_oursql(MySQLDialect):
     ):
         return MySQLDialect._show_create_table(
             self,
-            connection.contextual_connect(
+            connection._contextual_connect(
                 close_with_result=True
             ).execution_options(_oursql_plain_query=True),
             table,
index 1164c09f77785f61075f53789ba88aa9b314b96e..abeef39d2c90a5a2bc6b06efada53bbb8ebc8022 100644 (file)
@@ -736,6 +736,17 @@ class OracleDialect_cx_oracle(OracleDialect):
 
     _cx_oracle_threaded = None
 
+    @util.deprecated_params(
+        threaded=(
+            "1.3",
+            "The 'threaded' parameter to the cx_oracle dialect "
+            "is deprecated as a dialect-level argument, and will be removed "
+            "in a future release.  As of version 1.3, it defaults to False "
+            "rather than True.  The 'threaded' option can be passed to "
+            "cx_Oracle directly in the URL query string passed to "
+            ":func:`.create_engine`.",
+        )
+    )
     def __init__(
         self,
         auto_convert_lobs=True,
@@ -749,13 +760,6 @@ class OracleDialect_cx_oracle(OracleDialect):
         OracleDialect.__init__(self, **kwargs)
         self.arraysize = arraysize
         if threaded is not None:
-            util.warn_deprecated(
-                "The 'threaded' parameter to the cx_oracle dialect "
-                "itself is deprecated.  The value now defaults to False in "
-                "any case.  To pass an explicit True value, use the "
-                "create_engine connect_args dictionary or add ?threaded=true "
-                "to the URL string."
-            )
             self._cx_oracle_threaded = threaded
         self.auto_convert_lobs = auto_convert_lobs
         self.coerce_to_unicode = coerce_to_unicode
@@ -811,23 +815,6 @@ class OracleDialect_cx_oracle(OracleDialect):
 
         self._is_cx_oracle_6 = self.cx_oracle_ver >= (6,)
 
-    def _pop_deprecated_kwargs(self, kwargs):
-        auto_setinputsizes = kwargs.pop("auto_setinputsizes", None)
-        exclude_setinputsizes = kwargs.pop("exclude_setinputsizes", None)
-        if auto_setinputsizes or exclude_setinputsizes:
-            util.warn_deprecated(
-                "auto_setinputsizes and exclude_setinputsizes are deprecated. "
-                "Modern cx_Oracle only requires that LOB types are part "
-                "of this behavior, and these parameters no longer have any "
-                "effect."
-            )
-        allow_twophase = kwargs.pop("allow_twophase", None)
-        if allow_twophase is not None:
-            util.warn.deprecated(
-                "allow_twophase is deprecated.  The cx_Oracle dialect no "
-                "longer supports two-phase transaction mode."
-            )
-
     def _parse_cx_oracle_ver(self, version):
         m = re.match(r"(\d+)\.(\d+)(?:\.(\d+))?", version)
         if m:
@@ -982,6 +969,7 @@ class OracleDialect_cx_oracle(OracleDialect):
     def create_connect_args(self, url):
         opts = dict(url.query)
 
+        # deprecated in 1.3
         for opt in ("use_ansi", "auto_convert_lobs"):
             if opt in opts:
                 util.warn_deprecated(
@@ -1067,15 +1055,20 @@ class OracleDialect_cx_oracle(OracleDialect):
         else:
             return False
 
+    @util.deprecated(
+        "1.2",
+        "The create_xid() method of the cx_Oracle dialect is deprecated and "
+        "will be removed in a future release.  "
+        "Two-phase transaction support is no longer functional "
+        "in SQLAlchemy's cx_Oracle dialect as of cx_Oracle 6.0b1, which no "
+        "longer supports the API that SQLAlchemy relied upon.",
+    )
     def create_xid(self):
         """create a two-phase transaction ID.
 
         this id will be passed to do_begin_twophase(), do_rollback_twophase(),
         do_commit_twophase().  its format is unspecified.
 
-        .. deprecated:: two-phase transaction support is no longer functional
-           in SQLAlchemy's cx_Oracle dialect as of cx_Oracle 6.0b1
-
         """
 
         id_ = random.randint(0, 2 ** 128)
index 98e703fb520331d9fc5240f66a7b1abf187af64c..8a15a8559308b598a98678786ee3641214465f5f 100644 (file)
@@ -407,11 +407,17 @@ class _PGNumeric(sqltypes.Numeric):
 
 class _PGEnum(ENUM):
     def result_processor(self, dialect, coltype):
-        if util.py2k and self.convert_unicode is True:
-            # we can't easily use PG's extensions here because
-            # the OID is on the fly, and we need to give it a python
-            # function anyway - not really worth it.
-            self.convert_unicode = "force_nocheck"
+        if util.py2k and self._expect_unicode is True:
+            # for py2k, if the enum type needs unicode data (which is set up as
+            # part of the Enum() constructor based on values passed as py2k
+            # unicode objects) we have to use our own converters since
+            # psycopg2's don't work, a rare exception to the "modern DBAPIs
+            # support unicode everywhere" theme of deprecating
+            # convert_unicode=True. Use the special "force_nocheck" directive
+            # which forces unicode conversion to happen on the Python side
+            # without an isinstance() check.   in py3k psycopg2 does the right
+            # thing automatically.
+            self._expect_unicode = "force_nocheck"
         return super(_PGEnum, self).result_processor(dialect, coltype)
 
 
index 5f77d17a1e5a889e58b9d1c033c0933a1e6edc26..fbf346cec4bde7acce363c797f2e04015113d6ec 100644 (file)
@@ -147,6 +147,13 @@ def create_engine(*args, **kwargs):
         columns to accommodate Python Unicode objects directly as though the
         datatype were the :class:`.Unicode` type.
 
+          .. deprecated:: The :paramref:`.create_engine.convert_unicode` flag
+             and related Unicode conversion features are legacy Python 2
+             mechanisms which no longer have relevance under Python 3.
+             As all modern DBAPIs now support Python Unicode fully even
+             under Python 2, these flags will be removed in an upcoming
+             release.
+
         .. note::
 
             SQLAlchemy's unicode-conversion flags and features only apply
index ebf8dd28a5b1aa04e70024cfde7ec03acbff607e..64303f29058169cdf32d6f75e2e4a1b974ffe077 100644 (file)
@@ -493,19 +493,7 @@ class Connection(Connectable):
 
         return self._branch()
 
-    def contextual_connect(self, **kwargs):
-        """Returns a branched version of this :class:`.Connection`.
-
-        The :meth:`.Connection.close` method on the returned
-        :class:`.Connection` can be called and this
-        :class:`.Connection` will remain open.
-
-        This method provides usage symmetry with
-        :meth:`.Engine.contextual_connect`, including for usage
-        with context managers.
-
-        """
-
+    def _contextual_connect(self, **kwargs):
         return self._branch()
 
     def invalidate(self, exception=None):
@@ -1993,13 +1981,13 @@ class Engine(Connectable, log.Identified):
         self.dispatch.engine_disposed(self)
 
     def _execute_default(self, default):
-        with self.contextual_connect() as conn:
+        with self._contextual_connect() as conn:
             return conn._execute_default(default, (), {})
 
     @contextlib.contextmanager
     def _optional_conn_ctx_manager(self, connection=None):
         if connection is None:
-            with self.contextual_connect() as conn:
+            with self._contextual_connect() as conn:
                 yield conn
         else:
             yield connection
@@ -2058,7 +2046,7 @@ class Engine(Connectable, log.Identified):
             for a particular :class:`.Connection`.
 
         """
-        conn = self.contextual_connect(close_with_result=close_with_result)
+        conn = self._contextual_connect(close_with_result=close_with_result)
         try:
             trans = conn.begin()
         except:
@@ -2105,7 +2093,7 @@ class Engine(Connectable, log.Identified):
 
         """
 
-        with self.contextual_connect() as conn:
+        with self._contextual_connect() as conn:
             return conn.transaction(callable_, *args, **kwargs)
 
     def run_callable(self, callable_, *args, **kwargs):
@@ -2121,7 +2109,7 @@ class Engine(Connectable, log.Identified):
         which one is being dealt with.
 
         """
-        with self.contextual_connect() as conn:
+        with self._contextual_connect() as conn:
             return conn.run_callable(callable_, *args, **kwargs)
 
     def execute(self, statement, *multiparams, **params):
@@ -2140,18 +2128,18 @@ class Engine(Connectable, log.Identified):
 
         """
 
-        connection = self.contextual_connect(close_with_result=True)
+        connection = self._contextual_connect(close_with_result=True)
         return connection.execute(statement, *multiparams, **params)
 
     def scalar(self, statement, *multiparams, **params):
         return self.execute(statement, *multiparams, **params).scalar()
 
     def _execute_clauseelement(self, elem, multiparams=None, params=None):
-        connection = self.contextual_connect(close_with_result=True)
+        connection = self._contextual_connect(close_with_result=True)
         return connection._execute_clauseelement(elem, multiparams, params)
 
     def _execute_compiled(self, compiled, multiparams, params):
-        connection = self.contextual_connect(close_with_result=True)
+        connection = self._contextual_connect(close_with_result=True)
         return connection._execute_compiled(compiled, multiparams, params)
 
     def connect(self, **kwargs):
@@ -2170,6 +2158,13 @@ class Engine(Connectable, log.Identified):
 
         return self._connection_cls(self, **kwargs)
 
+    @util.deprecated(
+        "1.3",
+        "The :meth:`.Engine.contextual_connect` method is deprecated.  This "
+        "method is an artifact of the threadlocal engine strategy which is "
+        "also to be deprecated.   For explicit connections from an "
+        ":class:`.Engine`, use the :meth:`.Engine.connect` method.",
+    )
     def contextual_connect(self, close_with_result=False, **kwargs):
         """Return a :class:`.Connection` object which may be part of some
         ongoing context.
@@ -2187,6 +2182,11 @@ class Engine(Connectable, log.Identified):
 
         """
 
+        return self._contextual_connect(
+            close_with_result=close_with_result, **kwargs
+        )
+
+    def _contextual_connect(self, close_with_result=False, **kwargs):
         return self._connection_cls(
             self,
             self._wrap_pool_connect(self.pool.connect, None),
index 9c8069ff12fa6626e40ef73ef09a01931c1300b9..e54e99b75db52c97b9e3f0cac078399e646849ff 100644 (file)
@@ -184,6 +184,15 @@ class DefaultDialect(interfaces.Dialect):
 
     """
 
+    @util.deprecated_params(
+        convert_unicode=(
+            "1.3",
+            "The :paramref:`.create_engine.convert_unicode` parameter "
+            "and corresponding dialect-level parameters are deprecated, "
+            "and will be removed in a future release.  Modern DBAPIs support "
+            "Python Unicode natively and this parameter is unnecessary.",
+        )
+    )
     def __init__(
         self,
         convert_unicode=False,
index d4cd55b2fc63a78ccedd9364a5f398725ec60413..d579e6fdbe92902dd91655e48f34174addb9296b 100644 (file)
@@ -251,15 +251,15 @@ class Dialect(object):
 
         raise NotImplementedError()
 
+    @util.deprecated(
+        "0.8",
+        "The :meth:`.Dialect.get_primary_keys` method is deprecated and "
+        "will be removed in a future release.   Please refer to the "
+        ":meth:`.Dialect.get_pk_constraint` method. ",
+    )
     def get_primary_keys(self, connection, table_name, schema=None, **kw):
         """Return information about primary keys in `table_name`.
 
-        .. deprecated:: 0.8
-
-            The :meth:`.Dialect.get_primary_keys` method is deprecated and
-            will be removed in a future release.   Please refer to the
-            :meth:`.Dialect.get_pk_constraint` method.
-
         """
 
         raise NotImplementedError()
@@ -1117,7 +1117,15 @@ class Connectable(object):
 
         """
 
-    def contextual_connect(self):
+    @util.deprecated(
+        "1.3",
+        "The :meth:`.Engine.contextual_connect` and "
+        ":meth:`.Connection.contextual_connect` methods are deprecated.  This "
+        "method is an artifact of the threadlocal engine strategy which is "
+        "also to be deprecated.   For explicit connections from an "
+        ":class:`.Engine`, use the :meth:`.Engine.connect` method.",
+    )
+    def contextual_connect(self, *arg, **kw):
         """Return a :class:`.Connection` object which may be part of an ongoing
         context.
 
@@ -1128,6 +1136,9 @@ class Connectable(object):
 
         """
 
+        return self._contextual_connect(*arg, **kw)
+
+    def _contextual_connect(self):
         raise NotImplementedError()
 
     @util.deprecated(
@@ -1136,7 +1147,7 @@ class Connectable(object):
         "removed in a future release.  Please use the ``.create()`` method "
         "on specific schema objects to emit DDL sequences, including "
         ":meth:`.Table.create`, :meth:`.Index.create`, and "
-        ":meth:`.MetaData.create_all`."
+        ":meth:`.MetaData.create_all`.",
     )
     def create(self, entity, **kwargs):
         """Emit CREATE statements for the given schema entity.
@@ -1150,7 +1161,8 @@ class Connectable(object):
         "removed in a future release.  Please use the ``.drop()`` method "
         "on specific schema objects to emit DDL sequences, including "
         ":meth:`.Table.drop`, :meth:`.Index.drop`, and "
-        ":meth:`.MetaData.drop_all`.")
+        ":meth:`.MetaData.drop_all`.",
+    )
     def drop(self, entity, **kwargs):
         """Emit DROP statements for the given schema entity.
         """
index 4e4ddab6d57324a62758b6f343a03d04e51e576a..14d647b9a200bca0733fcc408404e4fca56e670f 100644 (file)
@@ -160,6 +160,16 @@ class Inspector(object):
             )
         return []
 
+    @util.deprecated_params(
+        order_by=(
+            "1.0",
+            "The :paramref:`get_table_names.order_by` parameter is deprecated "
+            "and will be removed in a future release.  Please refer to "
+            ":meth:`.Inspector.get_sorted_table_and_fkc_names` for a "
+            "more comprehensive solution to resolving foreign key cycles "
+            "between tables.",
+        )
+    )
     def get_table_names(self, schema=None, order_by=None):
        """Return all table names referred to within a particular schema.
 
@@ -179,14 +189,6 @@ class Inspector(object):
          resolve cycles, and will raise :class:`.CircularDependencyError`
          if cycles exist.
 
-         .. deprecated:: 1.0
-
-            The :paramref:`get_table_names.order_by` parameter is deprecated
-            and will be removed in a future release.  Please refer to
-            :meth:`.Inspector.get_sorted_table_and_fkc_names` for a
-            more comprehensive solution to resolving foreign key cycles
-            between tables.
-
         .. seealso::
 
             :meth:`.Inspector.get_sorted_table_and_fkc_names`
@@ -380,7 +382,7 @@ class Inspector(object):
         "0.7",
         "The :meth:`.Inspector.get_primary_keys` method is deprecated and "
         "will be removed in a future release.  Please refer to the "
-        ":meth:`.Inspector.get_pk_constraint` method."
+        ":meth:`.Inspector.get_pk_constraint` method.",
     )
     def get_primary_keys(self, table_name, schema=None, **kw):
         """Return information about primary keys in `table_name`.
index 55310d8b051d320b6016713e947ceb938828c108..9255343e1dea4d4fdbf2ff4be15fe8090192133b 100644 (file)
@@ -512,7 +512,6 @@ class ResultMetaData(object):
                 "smaller than number of columns requested (%d)"
                 % (num_ctx_cols, len(cursor_description))
             )
-
         seen = set()
         for (
             idx,
index 2ae48acbda44c3d83bcec31b59164d77f2580088..e367ef890423f2ad554d09f3bbae4b86c0ed7298 100644 (file)
@@ -271,6 +271,9 @@ class MockEngineStrategy(EngineStrategy):
         def contextual_connect(self, **kwargs):
             return self
 
+        def connect(self, **kwargs):
+            return self
+
         def execution_options(self, **kw):
             return self
 
index 01d6ecbb290aa8272ab8b8dde07e59076fa01058..8e8663ccc098f30bcc42ab6fbd4960f99e6a77e1 100644 (file)
@@ -46,11 +46,22 @@ class TLEngine(base.Engine):
 
     _tl_connection_cls = TLConnection
 
+    @util.deprecated(
+        "1.3",
+        "The 'threadlocal' engine strategy is deprecated, and will be "
+        "removed in a future release.  The strategy is no longer relevant "
+        "to modern usage patterns (including that of the ORM "
+        ":class:`.Session` object) which make use of a :class:`.Connection` "
+        "object in order to invoke statements.",
+    )
     def __init__(self, *args, **kwargs):
         super(TLEngine, self).__init__(*args, **kwargs)
         self._connections = util.threading.local()
 
     def contextual_connect(self, **kw):
+        return self._contextual_connect(**kw)
+
+    def _contextual_connect(self, **kw):
         if not hasattr(self._connections, "conn"):
             connection = None
         else:
@@ -72,7 +83,7 @@ class TLEngine(base.Engine):
         if not hasattr(self._connections, "trans"):
             self._connections.trans = []
         self._connections.trans.append(
-            self.contextual_connect().begin_twophase(xid=xid)
+            self._contextual_connect().begin_twophase(xid=xid)
         )
         return self
 
@@ -80,14 +91,14 @@ class TLEngine(base.Engine):
         if not hasattr(self._connections, "trans"):
             self._connections.trans = []
         self._connections.trans.append(
-            self.contextual_connect().begin_nested()
+            self._contextual_connect().begin_nested()
         )
         return self
 
     def begin(self):
         if not hasattr(self._connections, "trans"):
             self._connections.trans = []
-        self._connections.trans.append(self.contextual_connect().begin())
+        self._connections.trans.append(self._contextual_connect().begin())
         return self
 
     def __enter__(self):
@@ -139,7 +150,7 @@ class TLEngine(base.Engine):
 
     def close(self):
         if not self.closed:
-            self.contextual_connect().close()
+            self._contextual_connect().close()
             connection = self._connections.conn()
             connection._force_close()
             del self._connections.conn
index aa5de7af0e44ff0d88f5a2ca073f87052e26242c..9364714ab65ed5c847b928c26598f6232fb86937 100644 (file)
@@ -275,6 +275,10 @@ class _JoinedDispatcher(object):
     def _listen(self):
         return self.parent._listen
 
+    @property
+    def _events(self):
+        return self.parent._events
+
 
 class dispatcher(object):
     """Descriptor used by target classes to
index 382e640eb13d633fd1d5ae56e6722d316b35e2a5..07b961c012d781f217a39fca543a690e15df9dda 100644 (file)
@@ -206,6 +206,12 @@ class _EventKey(object):
 
         self = self.with_wrapper(adjusted_fn)
 
+        stub_function = getattr(
+            self.dispatch_target.dispatch._events, self.identifier
+        )
+        if hasattr(stub_function, "_sa_warn"):
+            stub_function._sa_warn()
+
         if once:
             self.with_wrapper(util.only_once(self._listen_fn)).listen(
                 *args, **kw
index 5eb84a1564cad729b638dd4294cb0f7a85ac5f38..11d3402c5bedb543cd2bbd42676ae1ed61314093 100644 (file)
@@ -9,6 +9,7 @@
 
 from . import event
 from . import exc
+from . import util
 from .engine import Connectable
 from .engine import Dialect
 from .engine import Engine
@@ -749,6 +750,13 @@ class ConnectionEvents(event.Events):
 
         """
 
+    @util.deprecated(
+        "0.9",
+        "The :meth:`.ConnectionEvents.dbapi_error` "
+        "event is deprecated and will be removed in a future release. "
+        "Please refer to the :meth:`.ConnectionEvents.handle_error` "
+        "event.",
+    )
     def dbapi_error(
         self, conn, cursor, statement, parameters, context, exception
     ):
@@ -792,11 +800,6 @@ class ConnectionEvents(event.Events):
         :param exception: The **unwrapped** exception emitted directly from the
          DBAPI.  The class here is specific to the DBAPI module in use.
 
-        .. deprecated:: 0.9 - The :meth:`.ConnectionEvents.dbapi_error`
-           event is deprecated and will be removed in a future release.
-           Please refer to the :meth:`.ConnectionEvents.handle_error`
-           event.
-
         """
 
     def handle_error(self, exception_context):
index 0628415ae786d4499076f357f764c557c58f464d..c263b17346cf9a44880bce3484ea4d7d88d97f51 100644 (file)
@@ -242,7 +242,7 @@ class ShardedSession(Session):
         else:
             return self.get_bind(
                 mapper, shard_id=shard_id, instance=instance
-            ).contextual_connect(**kwargs)
+            )._contextual_connect(**kwargs)
 
     def get_bind(
         self, mapper, shard_id=None, instance=None, clause=None, **kw
index 0caf85a2317628420162c57ee426e63cc703f07e..374199143ad8f0dc173d36158efd00f94a5a66ee 100644 (file)
@@ -86,10 +86,23 @@ class PoolListener(object):
 
         """
 
-        listener = util.as_interface(
-            listener,
-            methods=("connect", "first_connect", "checkout", "checkin"),
-        )
+        methods = ["connect", "first_connect", "checkout", "checkin"]
+        listener = util.as_interface(listener, methods=methods)
+
+        for meth in methods:
+            me_meth = getattr(PoolListener, meth)
+            ls_meth = getattr(listener, meth, None)
+
+            if ls_meth is not None and not util.methods_equivalent(
+                me_meth, ls_meth
+            ):
+                util.warn_deprecated(
+                    "PoolListener.%s is deprecated.  The "
+                    "PoolListener class will be removed in a future "
+                    "release.  Please transition to the @event interface, "
+                    "using @event.listens_for(Engine, '%s')." % (meth, meth)
+                )
+
         if hasattr(listener, "connect"):
             event.listen(self, "connect", listener.connect)
         if hasattr(listener, "first_connect"):
@@ -195,6 +208,33 @@ class ConnectionProxy(object):
 
     @classmethod
     def _adapt_listener(cls, self, listener):
+
+        methods = [
+            "execute",
+            "cursor_execute",
+            "begin",
+            "rollback",
+            "commit",
+            "savepoint",
+            "rollback_savepoint",
+            "release_savepoint",
+            "begin_twophase",
+            "prepare_twophase",
+            "rollback_twophase",
+            "commit_twophase",
+        ]
+        for meth in methods:
+            me_meth = getattr(ConnectionProxy, meth)
+            ls_meth = getattr(listener, meth)
+
+            if not util.methods_equivalent(me_meth, ls_meth):
+                util.warn_deprecated(
+                    "ConnectionProxy.%s is deprecated.  The "
+                    "ConnectionProxy class will be removed in a future "
+                    "release.  Please transition to the @event interface, "
+                    "using @event.listens_for(Engine, '%s')." % (meth, meth)
+                )
+
         def adapt_execute(conn, clauseelement, multiparams, params):
             def execute_wrapper(clauseelement, *multiparams, **params):
                 return clauseelement, multiparams, params
index c7c2425010dcd5467b322e7d4ce1daefa67667d5..cd81d759d7c36388cf5ae113b97c8610aec016bb 100644 (file)
@@ -452,50 +452,57 @@ class AttributeImpl(object):
     ):
         r"""Construct an AttributeImpl.
 
-        \class_
-          associated class
+        :param \class_: associated class
 
-        key
-          string name of the attribute
+        :param key: string name of the attribute
 
-        \callable_
+        :param \callable_:
           optional function which generates a callable based on a parent
           instance, which produces the "default" values for a scalar or
           collection attribute when it's first accessed, if not present
           already.
 
-        trackparent
+        :param trackparent:
           if True, attempt to track if an instance has a parent attached
           to it via this attribute.
 
-        extension
+        :param extension:
           a single or list of AttributeExtension object(s) which will
-          receive set/delete/append/remove/etc. events.  Deprecated.
+          receive set/delete/append/remove/etc. events.
           The event package is now used.
 
-        compare_function
+          .. deprecated::  1.3
+
+            The :paramref:`.AttributeImpl.extension` parameter is deprecated
+            and will be removed in a future release, corresponding to the
+            "extension" parameter on the :class:`.MapperProperty` classes
+            like :func:`.column_property` and :func:`.relationship`.  The
+            events system is now used.
+
+        :param compare_function:
           a function that compares two values which are normally
           assignable to this attribute.
 
-        active_history
+        :param active_history:
           indicates that get_history() should always return the "old" value,
           even if it means executing a lazy callable upon attribute change.
 
-        parent_token
+        :param parent_token:
           Usually references the MapperProperty, used as a key for
           the hasparent() function to identify an "owning" attribute.
           Allows multiple AttributeImpls to all match a single
           owner attribute.
 
-        expire_missing
+        :param expire_missing:
           if False, don't add an "expiry" callable to this attribute
           during state.expire_attributes(None), if no value is present
           for this key.
 
-        send_modified_events
+        :param send_modified_events:
           if False, the InstanceState._modified_event method will have no
           effect; this means the attribute will never show up as changed in a
           history entry.
+
         """
         self.class_ = class_
         self.key = key
@@ -1841,9 +1848,9 @@ def init_collection(obj, key):
     For an easier way to do the above, see
     :func:`~sqlalchemy.orm.attributes.set_committed_value`.
 
-    obj is an instrumented object instance.  An InstanceState
-    is accepted directly for backwards compatibility but
-    this usage is deprecated.
+    :param obj: a mapped object
+
+    :param key: string attribute name where the collection is located.
 
     """
     state = instance_state(obj)
index 1e561369fb97d2d6438150fe310d8afb945e7162..b9297e15c9b041c8125a6295696f1cf7a4f8e1ef 100644 (file)
@@ -460,7 +460,7 @@ class collection(object):
     @staticmethod
     @util.deprecated(
         "1.3",
-        "The :meth:`.collection.converter` method is deprecated and will "
+        "The :meth:`.collection.converter` handler is deprecated and will "
         "be removed in a future release.  Please refer to the "
         ":class:`.AttributeEvents.bulk_replace` listener interface in "
         "conjunction with the :func:`.event.listen` function.",
index 50ef8448a69d55989bb8cfdcee5b4b47328cd2f7..4069b43a5ff20bcdd37c5d33b62a3055506d0763 100644 (file)
@@ -87,6 +87,14 @@ class MapperExtension(object):
             ls_meth = getattr(listener, meth)
 
             if not util.methods_equivalent(me_meth, ls_meth):
+                util.warn_deprecated(
+                    "MapperExtension.%s is deprecated.  The "
+                    "MapperExtension class will be removed in a future "
+                    "release.  Please transition to the @event interface, "
+                    "using @event.listens_for(mapped_class, '%s')."
+                    % (meth, meth)
+                )
+
                 if meth == "reconstruct_instance":
 
                     def go(ls_meth):
@@ -359,6 +367,13 @@ class SessionExtension(object):
             ls_meth = getattr(listener, meth)
 
             if not util.methods_equivalent(me_meth, ls_meth):
+                util.warn_deprecated(
+                    "SessionExtension.%s is deprecated.  The "
+                    "SessionExtension class will be removed in a future "
+                    "release.  Please transition to the @event interface, "
+                    "using @event.listens_for(Session, '%s')." % (meth, meth)
+                )
+
                 event.listen(self, meth, getattr(listener, meth))
 
     def before_commit(self, session):
@@ -492,6 +507,19 @@ class AttributeExtension(object):
 
     @classmethod
     def _adapt_listener(cls, self, listener):
+        for meth in ["append", "remove", "set"]:
+            me_meth = getattr(AttributeExtension, meth)
+            ls_meth = getattr(listener, meth)
+
+            if not util.methods_equivalent(me_meth, ls_meth):
+                util.warn_deprecated(
+                    "AttributeExtension.%s is deprecated.  The "
+                    "AttributeExtension class will be removed in a future "
+                    "release.  Please transition to the @event interface, "
+                    "using @event.listens_for(Class.attribute, '%s')."
+                    % (meth, meth)
+                )
+
         event.listen(
             self,
             "append",
index 45600928fa879fc41d995a24757668ff207075cc..c1e5866b5fcbcea66a341e9c1112c25df2f0d75e 100644 (file)
@@ -93,6 +93,15 @@ class CompositeProperty(DescriptorProperty):
 
     """
 
+    @util.deprecated_params(
+        extension=(
+            "0.7",
+            ":class:`.AttributeExtension` is deprecated in favor of the "
+            ":class:`.AttributeEvents` listener interface.  The "
+            ":paramref:`.composite.extension` parameter will be "
+            "removed in a future release.",
+        )
+    )
     def __init__(self, class_, *attrs, **kwargs):
         r"""Return a composite column-based property for use with a Mapper.
 
@@ -141,13 +150,6 @@ class CompositeProperty(DescriptorProperty):
           attribute listeners for the resulting descriptor placed on the
           class.
 
-           .. deprecated:: 0.7
-
-                :class:`.AttributeExtension` is deprecated in favor of the
-                :class:`.AttributeEvents` listener interface.  The
-                :paramref:`.composite.extension` parameter will be
-                removed in a future release.
-
         """
         super(CompositeProperty, self).__init__()
 
@@ -698,6 +700,12 @@ class SynonymProperty(DescriptorProperty):
 
 
 @util.langhelpers.dependency_for("sqlalchemy.orm.properties", add_to_all=True)
+@util.deprecated_cls(
+    "0.7",
+    ":func:`.comparable_property` is deprecated and will be removed in a "
+    "future release.  Please refer to the :mod:`~sqlalchemy.ext.hybrid` "
+    "extension.",
+)
 class ComparableProperty(DescriptorProperty):
     """Instruments a Python property for use in query expressions."""
 
@@ -707,10 +715,6 @@ class ComparableProperty(DescriptorProperty):
         """Provides a method of applying a :class:`.PropComparator`
         to any Python descriptor attribute.
 
-        .. deprecated:: 0.7
-            :func:`.comparable_property` is superseded by
-            the :mod:`~sqlalchemy.ext.hybrid` extension.  See the example
-            at :ref:`hybrid_custom_comparators`.
 
         Allows any Python descriptor to behave like a SQL-enabled
         attribute when used at the class level in queries, allowing
index a394ec06e4924d7e4e66b25f8b117a99057118d7..0c8ab0b10eed616187683049b424bb4c8887dcf0 100644 (file)
@@ -104,6 +104,22 @@ class Mapper(InspectionAttr):
     _new_mappers = False
     _dispose_called = False
 
+    @util.deprecated_params(
+        extension=(
+            "0.7",
+            ":class:`.MapperExtension` is deprecated in favor of the "
+            ":class:`.MapperEvents` listener interface.  The "
+            ":paramref:`.mapper.extension` parameter will be "
+            "removed in a future release.",
+        ),
+        order_by=(
+            "1.1",
+            "The :paramref:`.Mapper.order_by` parameter "
+            "is deprecated, and will be removed in a future release. "
+            "Use :meth:`.Query.order_by` to determine the ordering of a "
+            "result set.",
+        ),
+    )
     def __init__(
         self,
         class_,
@@ -272,13 +288,6 @@ class Mapper(InspectionAttr):
            list of :class:`.MapperExtension` instances which will be applied
            to all operations by this :class:`.Mapper`.
 
-           .. deprecated:: 0.7
-
-                :class:`.MapperExtension` is deprecated in favor of the
-                :class:`.MapperEvents` listener interface.  The
-                :paramref:`.mapper.extension` parameter will be
-                removed in a future release.
-
         :param include_properties: An inclusive list or set of string column
           names to map.
 
@@ -339,11 +348,6 @@ class Mapper(InspectionAttr):
            ordering for entities.  By default mappers have no pre-defined
            ordering.
 
-           .. deprecated:: 1.1 The :paramref:`.Mapper.order_by` parameter
-              is deprecated, and will be removed in a future release.
-              Use :meth:`.Query.order_by` to determine the ordering of a
-              result set.
-
         :param passive_deletes: Indicates DELETE behavior of foreign key
            columns when a joined-table inheritance entity is being deleted.
            Defaults to ``False`` for a base mapper; for an inheriting mapper,
@@ -604,12 +608,6 @@ class Mapper(InspectionAttr):
 
         if order_by is not False:
             self.order_by = util.to_list(order_by)
-            util.warn_deprecated(
-                "Mapper.order_by is deprecated."
-                "Use Query.order_by() in order to affect the ordering of ORM "
-                "result sets."
-            )
-
         else:
             self.order_by = order_by
 
index 328c9b1b4e84df0f64335aefb3e4d89b90b894cf..530eadb6b722700cabbc4ca74e2eca92eda468f1 100644 (file)
@@ -55,6 +55,15 @@ class ColumnProperty(StrategizedProperty):
         "_deferred_column_loader",
     )
 
+    @util.deprecated_params(
+        extension=(
+            "0.7",
+            ":class:`.AttributeExtension` is deprecated in favor of the "
+            ":class:`.AttributeEvents` listener interface.  The "
+            ":paramref:`.column_property.extension` parameter will be "
+            "removed in a future release.",
+        )
+    )
     def __init__(self, *columns, **kwargs):
         r"""Provide a column-level property for use with a Mapper.
 
@@ -120,13 +129,6 @@ class ColumnProperty(StrategizedProperty):
             which will be prepended to the list of attribute listeners for the
             resulting descriptor placed on the class.
 
-           .. deprecated:: 0.7
-
-                :class:`.AttributeExtension` is deprecated in favor of the
-                :class:`.AttributeEvents` listener interface.   The
-                :paramref:`.column_property.extension` parameter will be
-                removed in a future release.
-
         """
         super(ColumnProperty, self).__init__()
         self._orig_columns = [expression._labeled(c) for c in columns]
index 387a72d0b981b2e39ea86cccaa126c349a863065..150347995178cc7f4cb0b843f88ac590d053ace7 100644 (file)
@@ -1569,16 +1569,16 @@ class Query(object):
         self._execution_options = self._execution_options.union(kwargs)
 
     @_generative()
+    @util.deprecated(
+        "0.9",
+        "The :meth:`.Query.with_lockmode` method is deprecated and will "
+        "be removed in a future release.  Please refer to "
+        ":meth:`.Query.with_for_update`. ",
+    )
     def with_lockmode(self, mode):
         """Return a new :class:`.Query` object with the specified "locking mode",
         which essentially refers to the ``FOR UPDATE`` clause.
 
-        .. deprecated:: 0.9
-
-            The :meth:`.Query.with_lockmode` method is deprecated and will
-            be removed in a future release.  Please refer to
-            :meth:`.Query.with_for_update`.
-
         :param mode: a string representing the desired locking mode.
          Valid values are:
 
index af4d9a7825bbd6911253316a567eaadb3e4fe541..be2093fb9afa13082970c63dedc9f4551e74930f 100644 (file)
@@ -105,6 +105,15 @@ class RelationshipProperty(StrategizedProperty):
 
     _dependency_processor = None
 
+    @util.deprecated_params(
+        extension=(
+            "0.7",
+            ":class:`.AttributeExtension` is deprecated in favor of the "
+            ":class:`.AttributeEvents` listener interface.  The "
+            ":paramref:`.relationship.extension` parameter will be "
+            "removed in a future release.",
+        )
+    )
     def __init__(
         self,
         argument,
@@ -402,13 +411,6 @@ class RelationshipProperty(StrategizedProperty):
           which will be prepended to the list of attribute listeners for
           the resulting descriptor placed on the class.
 
-           .. deprecated:: 0.7
-
-                :class:`.AttributeExtension` is deprecated in favor of the
-                :class:`.AttributeEvents` listener interface. The
-                :paramref:`.relationship.extension` parameter will be
-                removed in a future release.
-
         :param foreign_keys:
 
           a list of columns which are to be used as "foreign key"
index 5993e91b82b2d95a3affa101c9b28147910140f8..53f99b99d04ef4d6d2dc3bf2d0566dd3a97e49be 100644 (file)
@@ -427,7 +427,7 @@ class SessionTransaction(object):
                         "given Connection's Engine"
                     )
             else:
-                conn = bind.contextual_connect()
+                conn = bind._contextual_connect()
 
         if execution_options:
             conn = conn.execution_options(**execution_options)
@@ -642,6 +642,30 @@ class Session(_SessionClassMethods):
         "scalar",
     )
 
+    @util.deprecated_params(
+        weak_identity_map=(
+            "1.0",
+            "The :paramref:`.Session.weak_identity_map` parameter as well as "
+            "the strong-referencing identity map are deprecated, and will be "
+            "removed in a future release.  For the use case where objects "
+            "present in a :class:`.Session` need to be automatically strong "
+            "referenced, see the recipe at "
+            ":ref:`session_referencing_behavior` for an event-based approach "
+            "to maintaining strong identity references. ",
+        ),
+        _enable_transaction_accounting=(
+            "0.7",
+            "The :paramref:`.Session._enable_transaction_accounting` "
+            "parameter is deprecated and will be removed in a future release.",
+        ),
+        extension=(
+            "0.7",
+            ":class:`.SessionExtension` is deprecated in favor of the "
+            ":class:`.SessionEvents` listener interface.  The "
+            ":paramref:`.Session.extension` parameter will be "
+            "removed in a future release.",
+        ),
+    )
     def __init__(
         self,
         bind=None,
@@ -650,12 +674,12 @@ class Session(_SessionClassMethods):
         _enable_transaction_accounting=True,
         autocommit=False,
         twophase=False,
-        weak_identity_map=True,
+        weak_identity_map=None,
         binds=None,
         extension=None,
         enable_baked_queries=True,
         info=None,
-        query_cls=query.Query,
+        query_cls=None,
     ):
         r"""Construct a new Session.
 
@@ -754,15 +778,10 @@ class Session(_SessionClassMethods):
 
            .. versionadded:: 1.2
 
-        :param _enable_transaction_accounting:  Defaults to ``True``.  A
+        :param _enable_transaction_accounting:   A
            legacy-only flag which when ``False`` disables *all* 0.5-style
            object accounting on transaction boundaries.
 
-           .. deprecated::  0.7
-
-                the :paramref:`.Session._enable_transaction_accounting`
-                parameter will be removed in a future release.
-
         :param expire_on_commit:  Defaults to ``True``. When ``True``, all
            instances will be fully expired after each :meth:`~.commit`,
            so that all attribute/object access subsequent to a completed
@@ -773,13 +792,6 @@ class Session(_SessionClassMethods):
            of such instances, which will receive pre- and post- commit and
            flush events, as well as a post-rollback event.
 
-           .. deprecated:: 0.7
-
-                :class:`.SessionExtension` is deprecated in favor of the
-                :class:`.SessionEvents` listener interface.  The
-                :paramref:`.Session.extension` parameter will be
-                removed in a future release.
-
         :param info: optional dictionary of arbitrary data to be associated
            with this :class:`.Session`.  Is available via the
            :attr:`.Session.info` attribute.  Note the dictionary is copied at
@@ -807,30 +819,14 @@ class Session(_SessionClassMethods):
            strongly referenced until explicitly removed or the
            :class:`.Session` is closed.
 
-           .. deprecated:: 1.0
-
-               The :paramref:`.Session.weak_identity_map` parameter as well as
-               the strong-referencing identity map are deprecated, and will be
-               removed in a future release.  For the use case where objects
-               present in a :class:`.Session` need to be automatically strong
-               referenced, see the recipe at
-               :ref:`session_referencing_behavior` for an event-based approach
-               to maintaining strong identity references.
-
 
         """
 
-        if weak_identity_map:
+        if weak_identity_map in (True, None):
             self._identity_cls = identity.WeakInstanceDict
         else:
-            util.warn_deprecated(
-                "weak_identity_map=False is deprecated.  "
-                "See the documentation on 'Session Referencing Behavior' "
-                "for an event-based approach to maintaining strong identity "
-                "references."
-            )
-
             self._identity_cls = identity.StrongInstanceDict
+
         self.identity_map = self._identity_cls()
 
         self._new = {}  # InstanceState->object, strong refs object
@@ -846,8 +842,9 @@ class Session(_SessionClassMethods):
         self.expire_on_commit = expire_on_commit
         self.enable_baked_queries = enable_baked_queries
         self._enable_transaction_accounting = _enable_transaction_accounting
+
         self.twophase = twophase
-        self._query_cls = query_cls
+        self._query_cls = query_cls if query_cls else query.Query
         if info:
             self.info.update(info)
 
@@ -1068,7 +1065,7 @@ class Session(_SessionClassMethods):
 
         Alternatively, if this :class:`.Session` is configured with
         ``autocommit=True``, an ad-hoc :class:`.Connection` is returned
-        using :meth:`.Engine.contextual_connect` on the underlying
+        using :meth:`.Engine.connect` on the underlying
         :class:`.Engine`.
 
         Ambiguity in multi-bind or unbound :class:`.Session` objects can be
@@ -1132,7 +1129,7 @@ class Session(_SessionClassMethods):
                 engine, execution_options
             )
         else:
-            conn = engine.contextual_connect(**kw)
+            conn = engine._contextual_connect(**kw)
             if execution_options:
                 conn = conn.execution_options(**execution_options)
             return conn
@@ -2872,7 +2869,15 @@ class Session(_SessionClassMethods):
         finally:
             self._flushing = False
 
-    def is_modified(self, instance, include_collections=True, passive=True):
+    @util.deprecated_params(
+        passive=(
+            "0.8",
+            "The :paramref:`.Session.is_modified.passive` flag is deprecated "
+            "and will be removed in a future release.  The flag is no longer "
+            "used and is ignored.",
+        )
+    )
+    def is_modified(self, instance, include_collections=True, passive=None):
         r"""Return ``True`` if the given instance has locally
         modified attributes.
 
@@ -2921,11 +2926,7 @@ class Session(_SessionClassMethods):
          way to detect only local-column based properties (i.e. scalar columns
          or many-to-one foreign keys) that would result in an UPDATE for this
          instance upon flush.
-        :param passive:
-
-         .. deprecated:: 0.8
-             The ``passive`` flag is deprecated and will be removed
-             in a future release.  The flag is no longer used and is ignored.
+        :param passive: not used
 
         """
         state = object_state(instance)
index b3f52a2f7414350b6a5b04dd524e05afa4280cd9..6f9746daaf1612550c70d7b804a013a2dbccaac3 100644 (file)
@@ -818,7 +818,6 @@ See :func:`.orm.%(name)s` for usage examples.
         return self
 
     def _add_unbound_all_fn(self, fn):
-        self._unbound_all_fn = fn
         fn.__doc__ = """Produce a standalone "all" option for :func:`.orm.%(name)s`.
 
 .. deprecated:: 0.9
@@ -834,6 +833,15 @@ See :func:`.orm.%(name)s` for usage examples.
 """ % {
             "name": self.name
         }
+        fn = util.deprecated(
+            "0.9",
+            "The :func:`.%(name)s_all` function is deprecated, and will be "
+            "removed in a future release.  Please use method chaining with "
+            ":func:`.%(name)s` instead" % {"name": self.name},
+            add_deprecation_to_docstring=False,
+        )(fn)
+
+        self._unbound_all_fn = fn
         return self
 
 
@@ -1307,8 +1315,8 @@ def defaultload(*keys):
 
 @loader_option()
 def defer(loadopt, key):
-    r"""Indicate that the given column-oriented attribute should be deferred, e.g.
-    not loaded until accessed.
+    r"""Indicate that the given column-oriented attribute should be deferred,
+    e.g. not loaded until accessed.
 
     This function is part of the :class:`.Load` interface and supports
     both method-chained and standalone operation.
@@ -1346,10 +1354,16 @@ def defer(loadopt, key):
 
     :param key: Attribute to be deferred.
 
-    :param \*addl_attrs: Deprecated; this option supports the old 0.8 style
+    :param \*addl_attrs: This option supports the old 0.8 style
      of specifying a path as a series of attributes, which is now superseded
      by the method-chained style.
 
+        .. deprecated:: 0.9  The \*addl_attrs on :func:`.orm.defer` is
+           deprecated and will be removed in a future release.   Please
+           use method chaining in conjunction with defaultload() to
+           indicate a path.
+
+
     .. seealso::
 
         :ref:`deferred`
@@ -1364,6 +1378,12 @@ def defer(loadopt, key):
 
 @defer._add_unbound_fn
 def defer(key, *addl_attrs):
+    if addl_attrs:
+        util.warn_deprecated(
+            "The *addl_attrs on orm.defer is deprecated.  Please use "
+            "method chaining in conjunction with defaultload() to "
+            "indicate a path."
+        )
     return _UnboundLoad._from_keys(
         _UnboundLoad.defer, (key,) + addl_attrs, False, {}
     )
@@ -1389,12 +1409,21 @@ def undefer(loadopt, key):
         session.query(MyClass, MyOtherClass).options(
             Load(MyClass).undefer("*"))
 
+        # undefer a column on a related object
+        session.query(MyClass).options(
+            defaultload(MyClass.items).undefer('text'))
+
     :param key: Attribute to be undeferred.
 
-    :param \*addl_attrs: Deprecated; this option supports the old 0.8 style
+    :param \*addl_attrs: This option supports the old 0.8 style
      of specifying a path as a series of attributes, which is now superseded
      by the method-chained style.
 
+        .. deprecated:: 0.9  The \*addl_attrs on :func:`.orm.undefer` is
+           deprecated and will be removed in a future release.   Please
+           use method chaining in conjunction with defaultload() to
+           indicate a path.
+
     .. seealso::
 
         :ref:`deferred`
@@ -1411,6 +1440,12 @@ def undefer(loadopt, key):
 
 @undefer._add_unbound_fn
 def undefer(key, *addl_attrs):
+    if addl_attrs:
+        util.warn_deprecated(
+            "The *addl_attrs on orm.undefer is deprecated.  Please use "
+            "method chaining in conjunction with defaultload() to "
+            "indicate a path."
+        )
     return _UnboundLoad._from_keys(
         _UnboundLoad.undefer, (key,) + addl_attrs, False, {}
     )
index 15ac495617fb7700469d0f4d5d0a87d576fc753f..40d97515e7087588ac925935898fd077a079fb7c 100644 (file)
@@ -60,6 +60,20 @@ class Pool(log.Identified):
 
     _dialect = _ConnDialect()
 
+    @util.deprecated_params(
+        use_threadlocal=(
+            "1.3",
+            "The :paramref:`.Pool.use_threadlocal` parameter is "
+            "deprecated and will be removed in a future release.",
+        ),
+        listeners=(
+            "0.7",
+            ":class:`.PoolListener` is deprecated in favor of the "
+            ":class:`.PoolEvents` listener interface.  The "
+            ":paramref:`.Pool.listeners` parameter will be removed in a "
+            "future release.",
+        ),
+    )
     def __init__(
         self,
         creator,
@@ -99,35 +113,9 @@ class Pool(log.Identified):
 
         :param use_threadlocal: If set to True, repeated calls to
           :meth:`connect` within the same application thread will be
-          guaranteed to return the same connection object, if one has
-          already been retrieved from the pool and has not been
-          returned yet.  Offers a slight performance advantage at the
-          cost of individual transactions by default.  The
-          :meth:`.Pool.unique_connection` method is provided to return
-          a consistently unique connection to bypass this behavior
-          when the flag is set.
-
-          .. warning::  The :paramref:`.Pool.use_threadlocal` flag
-             **does not affect the behavior** of :meth:`.Engine.connect`.
-             :meth:`.Engine.connect` makes use of the
-             :meth:`.Pool.unique_connection` method which **does not use thread
-             local context**.  To produce a :class:`.Connection` which refers
-             to the :meth:`.Pool.connect` method, use
-             :meth:`.Engine.contextual_connect`.
-
-             Note that other SQLAlchemy connectivity systems such as
-             :meth:`.Engine.execute` as well as the orm
-             :class:`.Session` make use of
-             :meth:`.Engine.contextual_connect` internally, so these functions
-             are compatible with the :paramref:`.Pool.use_threadlocal` setting.
-
-          .. seealso::
-
-            :ref:`threadlocal_strategy` - contains detail on the
-            "threadlocal" engine strategy, which provides a more comprehensive
-            approach to "threadlocal" connectivity for the specific
-            use case of using :class:`.Engine` and :class:`.Connection` objects
-            directly.
+          guaranteed to return the same connection object that is already
+          checked out.   This is a legacy use case and the flag has no
+          effect when using the pool with a :class:`.Engine` object.
 
         :param reset_on_return: Determine steps to take on
           connections as they are returned to the pool.
@@ -175,13 +163,6 @@ class Pool(log.Identified):
           connections are created, checked out and checked in to the
           pool.
 
-          .. deprecated:: 0.7
-
-                :class:`.PoolListener` is deprecated in favor of the
-                :class:`.PoolEvents` listener interface.  The
-                :paramref:`.Pool.listeners` parameter will be removed in a
-                future release.
-
         :param dialect: a :class:`.Dialect` that will handle the job
          of calling rollback(), close(), or commit() on DBAPI connections.
          If omitted, a built-in "stub" dialect is used.   Applications that
@@ -235,12 +216,6 @@ class Pool(log.Identified):
             for fn, target in events:
                 event.listen(self, target, fn)
         if listeners:
-            util.warn_deprecated(
-                "The 'listeners' argument to Pool and create_engine() is "
-                "deprecated and will be removed in a future release. "
-                "Please refer to the PoolEvents class in conjunction "
-                "with event.listen()"
-            )
             for l in listeners:
                 self.add_listener(l)
 
@@ -290,9 +265,10 @@ class Pool(log.Identified):
             )
 
     @util.deprecated(
-        "0.7", "The :meth:`.Pool.add_listener` method is deprecated and "
+        "0.7",
+        "The :meth:`.Pool.add_listener` method is deprecated and "
         "will be removed in a future release.  Please use the "
-        ":class:`.PoolEvents` listener interface."
+        ":class:`.PoolEvents` listener interface.",
     )
     def add_listener(self, listener):
         """Add a :class:`.PoolListener`-like object to this pool.
index 6049c8e289362fa0d21e5122ae1125d4114068b6..d78d85d1fce48533df5ab71a33f7159034b13f47 100644 (file)
@@ -16,12 +16,17 @@ today.
 """
 
 from .impl import QueuePool
+from .. import util
 from ..util import threading
 
-
 proxies = {}
 
 
+@util.deprecated(
+    "1.3",
+    "The :func:`.pool.manage` function is deprecated, and will be "
+    "removed in a future release.",
+)
 def manage(module, **params):
     r"""Return a proxy for a DB-API module that automatically
     pools connections.
index ebbbfdb3d0315e9c47322c82d01cd3b43e99d655..768921423cbc349095d393fde596273189049c85 100644 (file)
@@ -283,7 +283,6 @@ class SingletonThreadPool(Pool):
     """
 
     def __init__(self, creator, pool_size=5, **kw):
-        kw["use_threadlocal"] = True
         Pool.__init__(self, creator, **kw)
         self._conn = threading.local()
         self._all_conns = set()
index a3184f270abb35d3cd6fb6ddcc42a47b311a8014..b703c59f230baa1ec9d88f1ed5f5e247efef6bd0 100644 (file)
@@ -315,7 +315,7 @@ class Compiled(object):
         "The :meth:`.Compiled.compile` method is deprecated and will be "
         "removed in a future release.   The :class:`.Compiled` object "
         "now runs its compilation within the constructor, and this method "
-        "does nothing."
+        "does nothing.",
     )
     def compile(self):
         """Produce the internal string representation of this element.
@@ -3442,6 +3442,7 @@ class IdentifierPreparer(object):
     def quote_schema(self, schema, force=None):
         """Conditionally quote a schema name.
 
+
         The name is quoted if it is a reserved word, contains quote-necessary
         characters, or is an instance of :class:`.quoted_name` which includes
         ``quote`` set to ``True``.
@@ -3450,17 +3451,30 @@ class IdentifierPreparer(object):
         quoting behavior for schema names.
 
         :param schema: string schema name
-        :param force: this parameter is no longer used.
+        :param force: unused
 
-         .. deprecated:: 0.9
+            .. deprecated:: 0.9
 
-            The :paramref:`.IdentifierPreparer.force` parameter is deprecated
-            and will be removed in a future release.  Quoting preference
-            is now intrinsic to the string being quoted by making use of the
-            :class:`.quoted_name` class.
+                The :paramref:`.IdentifierPreparer.quote_schema.force`
+                parameter is deprecated and will be removed in a future
+                release.  This flag has no effect on the behavior of the
+                :meth:`.IdentifierPreparer.quote` method; please refer to
+                :class:`.quoted_name`.
 
         """
-        return self.quote(schema, force)
+        if force is not None:
+            # not using the util.deprecated_params() decorator in this
+            # case because of the additional function call overhead on this
+            # very performance-critical spot.
+            util.warn_deprecated(
+                "The IdentifierPreparer.quote_schema.force parameter is "
+                "deprecated and will be removed in a future release.  This "
+                "flag has no effect on the behavior of the "
+                "IdentifierPreparer.quote method; please refer to "
+                "quoted_name()."
+            )
+
+        return self.quote(schema)
 
     def quote(self, ident, force=None):
         """Conditionally quote an identfier.
@@ -3473,16 +3487,28 @@ class IdentifierPreparer(object):
         quoting behavior for identifier names.
 
         :param ident: string identifier
-        :param force: this parameter is no longer used.
+        :param force: unused
 
-         .. deprecated:: 0.9
+            .. deprecated:: 0.9
 
-             The :paramref:`.IdentifierPreparer.force` parameter is deprecated
-             and will be removed in a future release.  Quoting preference
-             is now intrinsic to the string being quoted by making use of the
-             :class:`.quoted_name` class.
+                The :paramref:`.IdentifierPreparer.quote.force`
+                parameter is deprecated and will be removed in a future
+                release.  This flag has no effect on the behavior of the
+                :meth:`.IdentifierPreparer.quote` method; please refer to
+                :class:`.quoted_name`.
 
         """
+        if force is not None:
+            # not using the util.deprecated_params() decorator in this
+            # case because of the additional function call overhead on this
+            # very performance-critical spot.
+            util.warn_deprecated(
+                "The IdentifierPreparer.quote.force parameter is "
+                "deprecated and will be removed in a future release.  This "
+                "flag has no effect on the behavior of the "
+                "IdentifierPreparer.quote method; please refer to "
+                "quoted_name()."
+            )
 
         force = getattr(ident, "quote", None)
 
@@ -3580,10 +3606,10 @@ class IdentifierPreparer(object):
             result = self.quote_schema(effective_schema) + "." + result
         return result
 
-    def format_schema(self, name, quote=None):
+    def format_schema(self, name):
         """Prepare a quoted schema name."""
 
-        return self.quote(name, quote)
+        return self.quote(name)
 
     def format_column(
         self,
index 3deb588abbdd5198201ddddc0927fd74858f19f3..954f769ef0b35e1ee8c5ee16b7b029879482dc1c 100644 (file)
@@ -106,7 +106,7 @@ class DDLElement(Executable, _DDLCompiles):
         "The :meth:`.DDLElement.execute_at` method is deprecated and will "
         "be removed in a future release.  Please use the :class:`.DDLEvents` "
         "listener interface in conjunction with the "
-        ":meth:`.DDLElement.execute_if` method."
+        ":meth:`.DDLElement.execute_if` method.",
     )
     def execute_at(self, event_name, target):
         """Link execution of this DDL to the DDL lifecycle of a SchemaItem.
@@ -317,6 +317,14 @@ class DDL(DDLElement):
 
     __visit_name__ = "ddl"
 
+    @util.deprecated_params(
+        on=(
+            "0.7",
+            "The :paramref:`.DDL.on` parameter is deprecated and will be "
+            "removed in a future release.  Please refer to "
+            ":meth:`.DDLElement.execute_if`.",
+        )
+    )
     def __init__(self, statement, on=None, context=None, bind=None):
         """Create a DDL statement.
 
@@ -331,12 +339,6 @@ class DDL(DDLElement):
 
         :param on:
 
-          .. deprecated:: 0.7
-
-            The :paramref:`.DDL.on` parameter is deprecated and will be
-            removed in a future release.  Please refer to
-            :meth:`.DDLElement.execute_if`.
-
           Optional filtering criteria.  May be a string, tuple or a callable
           predicate.  If a string, it will be compared to the name of the
           executing database dialect::
index fae25cc2c4373325642321dce1cd2a6f4b84eed0..71f346f45e53546d3cd9e2f97da9eb39d143f491 100644 (file)
@@ -461,21 +461,27 @@ class ClauseElement(Visitable):
                 "ascii", "backslashreplace"
             )  # noqa
 
+    @util.deprecated(
+        "0.9",
+        "The :meth:`.ClauseElement.__and__` method is deprecated and will "
+        "be removed in a future release.   Conjunctions should only be "
+        "used from a :class:`.ColumnElement` subclass, e.g. "
+        ":meth:`.ColumnElement.__and__`.",
+    )
     def __and__(self, other):
         """'and' at the ClauseElement level.
-
-        .. deprecated:: 0.9.5 - conjunctions are intended to be
-           at the :class:`.ColumnElement`. level
-
         """
         return and_(self, other)
 
+    @util.deprecated(
+        "0.9",
+        "The :meth:`.ClauseElement.__or__` method is deprecated and will "
+        "be removed in a future release.   Conjunctions should only be "
+        "used from a :class:`.ColumnElement` subclass, e.g. "
+        ":meth:`.ColumnElement.__or__`.",
+    )
     def __or__(self, other):
         """'or' at the ClauseElement level.
-
-        .. deprecated:: 0.9.5 - conjunctions are intended to be
-           at the :class:`.ColumnElement`. level
-
         """
         return or_(self, other)
 
@@ -1280,6 +1286,10 @@ class TextClause(Executable, ClauseElement):
     )
     _is_implicitly_boolean = False
 
+    def __and__(self, other):
+        # support use in select.where(), query.filter()
+        return and_(self, other)
+
     @property
     def _select_iterable(self):
         return (self,)
@@ -1311,6 +1321,28 @@ class TextClause(Executable, ClauseElement):
         self.text = self._bind_params_regex.sub(repl, text)
 
     @classmethod
+    @util.deprecated_params(
+        autocommit=(
+            "0.6",
+            "The :paramref:`.text.autocommit` parameter is deprecated and "
+            "will be removed in a future release.  Please use the "
+            ":paramref:`.Connection.execution_options.autocommit` parameter "
+            "in conjunction with the :meth:`.Executable.execution_options` "
+            "method.",
+        ),
+        bindparams=(
+            "0.9",
+            "The :paramref:`.text.bindparams` parameter "
+            "is deprecated and will be removed in a future release.  Please "
+            "refer to the :meth:`.TextClause.bindparams` method.",
+        ),
+        typemap=(
+            "0.9",
+            "The :paramref:`.text.typemap` parameter is "
+            "deprecated and will be removed in a future release.  Please "
+            "refer to the :meth:`.TextClause.columns` method.",
+        ),
+    )
     def _create_text(
         self, text, bind=None, bindparams=None, typemap=None, autocommit=None
     ):
@@ -1389,15 +1421,8 @@ class TextClause(Executable, ClauseElement):
           to specify bind parameters; they will be compiled to their
           engine-specific format.
 
-        :param autocommit:
-
-            .. deprecated:: 0.6
-
-                The :paramref:`.text.autocommit` flag is deprecated and
-                will be removed in a future release.  Please use the
-                :paramref:`.Connection.execution_options.autocommit` parameter
-                in conjunction with the :meth:`.Executable.execution_options`
-                method.
+        :param autocommit: whether or not to set the "autocommit" execution
+          option for this :class:`.TextClause` object.
 
         :param bind:
           an optional connection or engine to be used for this text query.
@@ -1405,27 +1430,22 @@ class TextClause(Executable, ClauseElement):
         :param bindparams:
           A list of :func:`.bindparam` instances used to
           provide information about parameters embedded in the statement.
+
           E.g.::
 
               stmt = text("SELECT * FROM table WHERE id=:id",
                         bindparams=[bindparam('id', value=5, type_=Integer)])
 
-          .. deprecated:: 0.9 the :paramref:`.TextClause.bindparams` parameter
-             is deprecated and will be removed in a future release.  Please
-             refer to the :meth:`.TextClause.bindparams` method.
-
         :param typemap:
           A dictionary mapping the names of columns represented in the columns
-          clause of a ``SELECT`` statement to type objects, e.g.::
+          clause of a ``SELECT`` statement to type objects.
+
+          E.g.::
 
               stmt = text("SELECT * FROM table",
                             typemap={'id': Integer, 'name': String},
                         )
 
-          .. deprecated:: 0.9  The :paramref:`.TextClause.typemap` argument is
-             deprecated and will be removed in a future release.  Please
-             refer to the :meth:`.TextClause.columns` method.
-
         .. seealso::
 
             :ref:`sqlexpression_text` - in the Core tutorial
@@ -1439,10 +1459,6 @@ class TextClause(Executable, ClauseElement):
         if typemap:
             stmt = stmt.columns(**typemap)
         if autocommit is not None:
-            util.warn_deprecated(
-                "autocommit on text() is deprecated.  "
-                "Use .execution_options(autocommit=True)"
-            )
             stmt = stmt.execution_options(autocommit=autocommit)
 
         return stmt
@@ -1511,12 +1527,6 @@ class TextClause(Executable, ClauseElement):
                 timestamp=datetime.datetime(2012, 10, 8, 15, 12, 5)
             )
 
-
-        .. versionadded:: 0.9.0 The :meth:`.TextClause.bindparams` method
-           supersedes the argument ``bindparams`` passed to
-           :func:`~.expression.text`.
-
-
         """
         self._bindparams = new_params = self._bindparams.copy()
 
@@ -1631,8 +1641,7 @@ class TextClause(Executable, ClauseElement):
 
         .. versionadded:: 0.9.0 :func:`.text` can now be converted into a
            fully featured "selectable" construct using the
-           :meth:`.TextClause.columns` method.  This method supersedes the
-           ``typemap`` argument to :func:`.text`.
+           :meth:`.TextClause.columns` method.
 
 
         """
@@ -3364,13 +3373,16 @@ class Over(ColumnElement):
         return lower, upper
 
     @property
+    @util.deprecated(
+        "1.1",
+        "the :attr:`.Over.func` member of the :class:`.Over` "
+        "class is deprecated and will be removed in a future release.  "
+        "Please refer to the :attr:`.Over.element` attribute.",
+    )
     def func(self):
         """the element referred to by this :class:`.Over`
         clause.
 
-        .. deprecated:: 1.1 the :attr:`.Over.func` member of the :class:`.Over`
-           class is deprecated and will be removed in a future release.  Please
-           refer to the :attr:`.Over.element` attribute.
 
         """
         return self.element
index 43a2e5d0f02264254979a653878ae5a3b5f6b20d..d9555b196b45858f1186c49d9805496a84193cf3 100644 (file)
@@ -118,7 +118,8 @@ class SchemaItem(SchemaEventTarget, visitors.Visitable):
         "The :attr:`.SchemaItem.quote` attribute is deprecated and will be "
         "removed in a future release.  Use the :attr:`.quoted_name.quote` "
         "attribute on the ``name`` field of the target schema item to retrieve"
-        "quoted status.")
+        "quoted status.",
+    )
     def quote(self):
         """Return the value of the ``quote`` flag passed
         to this schema object, for those schema items which
@@ -394,7 +395,7 @@ class Table(DialectKWArgs, SchemaItem, TableClause):
         name, specify the flag ``quote_schema=True`` to the constructor, or use
         the :class:`.quoted_name` construct to specify the name.
 
-    :param useexisting: Deprecated.  Use :paramref:`.Table.extend_existing`.
+    :param useexisting: the same as :paramref:`.Table.extend_existing`.
 
     :param comment: Optional string that will render an SQL comment on table
          creation.
@@ -411,6 +412,14 @@ class Table(DialectKWArgs, SchemaItem, TableClause):
 
     __visit_name__ = "table"
 
+    @util.deprecated_params(
+        useexisting=(
+            "0.7",
+            "The :paramref:`.Table.useexisting` parameter is deprecated and "
+            "will be removed in a future release.  Please use "
+            ":paramref:`.Table.extend_existing`.",
+        )
+    )
     def __new__(cls, *args, **kw):
         if not args:
             # python3k pickle seems to call this
@@ -429,8 +438,6 @@ class Table(DialectKWArgs, SchemaItem, TableClause):
         keep_existing = kw.pop("keep_existing", False)
         extend_existing = kw.pop("extend_existing", False)
         if "useexisting" in kw:
-            msg = "useexisting is deprecated.  Use extend_existing."
-            util.warn_deprecated(msg)
             if extend_existing:
                 msg = "useexisting is synonymous with extend_existing."
                 raise exc.ArgumentError(msg)
@@ -475,7 +482,8 @@ class Table(DialectKWArgs, SchemaItem, TableClause):
         "The :meth:`.SchemaItem.quote` method is deprecated and will be "
         "removed in a future release.  Use the :attr:`.quoted_name.quote` "
         "attribute on the ``schema`` field of the target schema item to "
-        "retrieve quoted status.")
+        "retrieve quoted status.",
+    )
     def quote_schema(self):
         """Return the value of the ``quote_schema`` flag passed
         to this :class:`.Table`.
@@ -763,12 +771,15 @@ class Table(DialectKWArgs, SchemaItem, TableClause):
 
         constraint._set_parent_with_dispatch(self)
 
+    @util.deprecated(
+        "0.7",
+        "the :meth:`.Table.append_ddl_listener` method is deprecated and "
+        "will be removed in a future release.  Please refer to "
+        ":class:`.DDLEvents`.",
+    )
     def append_ddl_listener(self, event_name, listener):
         """Append a DDL event listener to this ``Table``.
 
-        .. deprecated:: 0.7
-            See :class:`.DDLEvents`.
-
         """
 
         def adapt_listener(target, connection, **kw):
@@ -2579,22 +2590,15 @@ class DefaultClause(FetchedValue):
         return "DefaultClause(%r, for_update=%r)" % (self.arg, self.for_update)
 
 
+@util.deprecated_cls(
+    "0.6",
+    ":class:`.PassiveDefault` is deprecated and will be removed in a "
+    "future release.  Please refer to :class:`.DefaultClause`.",
+)
 class PassiveDefault(DefaultClause):
     """A DDL-specified DEFAULT column value.
-
-    .. deprecated:: 0.6
-
-        The :class:`.PassiveDefault` class is deprecated and will be removed
-        in a future release.  Please use :class:`.DefaultClause`.
-
     """
 
-    @util.deprecated(
-        "0.6",
-        ":class:`.PassiveDefault` is deprecated and will be removed in a "
-        "future release. Use :class:`.DefaultClause`.",
-        False,
-    )
     def __init__(self, *arg, **kw):
         DefaultClause.__init__(self, *arg, **kw)
 
@@ -3711,13 +3715,21 @@ class MetaData(SchemaItem):
 
     __visit_name__ = "metadata"
 
+    @util.deprecated_params(
+        reflect=(
+            "0.8",
+            "The :paramref:`.MetaData.reflect` flag is deprecated and will "
+            "be removed in a future release.   Please use the "
+            ":meth:`.MetaData.reflect` method.",
+        )
+    )
     def __init__(
         self,
         bind=None,
         reflect=False,
         schema=None,
         quote_schema=None,
-        naming_convention=DEFAULT_NAMING_CONVENTION,
+        naming_convention=None,
         info=None,
     ):
         """Create a new MetaData object.
@@ -3731,12 +3743,6 @@ class MetaData(SchemaItem):
           Optional, automatically load all tables from the bound database.
           Defaults to False. ``bind`` is required when this option is set.
 
-          .. deprecated:: 0.8
-
-                The :paramref:`.MetaData.reflect` flag is deprecated and will
-                be removed in a future release.   Please use the
-                :meth:`.MetaData.reflect` method.
-
         :param schema:
            The default schema to use for the :class:`.Table`,
            :class:`.Sequence`, and potentially other objects associated with
@@ -3877,7 +3883,11 @@ class MetaData(SchemaItem):
         """
         self.tables = util.immutabledict()
         self.schema = quoted_name(schema, quote_schema)
-        self.naming_convention = naming_convention
+        self.naming_convention = (
+            naming_convention
+            if naming_convention
+            else DEFAULT_NAMING_CONVENTION
+        )
         if info:
             self.info = info
         self._schemas = set()
@@ -3886,10 +3896,6 @@ class MetaData(SchemaItem):
 
         self.bind = bind
         if reflect:
-            util.warn_deprecated(
-                "reflect=True is deprecate; please "
-                "use the reflect() method."
-            )
             if not bind:
                 raise exc.ArgumentError(
                     "A bind must be supplied in conjunction "
@@ -4180,11 +4186,15 @@ class MetaData(SchemaItem):
                 except exc.UnreflectableTableError as uerr:
                     util.warn("Skipping table %s: %s" % (name, uerr))
 
+    @util.deprecated(
+        "0.7",
+        "the :meth:`.MetaData.append_ddl_listener` method is deprecated and "
+        "will be removed in a future release.  Please refer to "
+        ":class:`.DDLEvents`.",
+    )
     def append_ddl_listener(self, event_name, listener):
         """Append a DDL event listener to this ``MetaData``.
 
-        .. deprecated:: 0.7
-            See :class:`.DDLEvents`.
 
         """
 
index 0b2155a68342c1134bf3aaf3d85d261e06e45c72..f48fa6f57570fedd7ba0d36149c83c63ee12be62 100644 (file)
@@ -391,7 +391,7 @@ class FromClause(Selectable):
         message="The :meth:`.FromClause.count` method is deprecated, "
         "and will be removed in a future release.   Please use the "
         ":class:`.functions.count` function available from the "
-        ":attr:`.func` namespace."
+        ":attr:`.func` namespace.",
     )
     @util.dependencies("sqlalchemy.sql.functions")
     def count(self, functions, whereclause=None, **params):
@@ -973,6 +973,15 @@ class Join(FromClause):
         return self._join_condition(left, right, a_subset=left_right)
 
     @classmethod
+    @util.deprecated_params(
+        ignore_nonexistent_tables=(
+            "0.9",
+            "The :paramref:`.join_condition.ignore_nonexistent_tables` "
+            "parameter is deprecated and will be removed in a future "
+            "release.  Tables outside of the two tables being handled "
+            "are no longer considered.",
+        )
+    )
     def _join_condition(
         cls,
         a,
@@ -995,15 +1004,8 @@ class Join(FromClause):
         between the two selectables.   If there are multiple ways
         to join, or no way to join, an error is raised.
 
-        :param ignore_nonexistent_tables:
-
-            .. deprecated::  0.9
-
-                The :paramref:`_join_condition.ignore_nonexistent_tables`
-                parameter is deprecated and will be removed in a future
-                release.  Tables outside of the two tables being handled
-                are no longer considered.
-
+        :param ignore_nonexistent_tables: unused - tables outside of the
+         two tables being handled are not considered.
 
         :param a_subset: An optional expression that is a sub-component
          of ``a``.  An attempt will be made to join to just this sub-component
@@ -2026,7 +2028,7 @@ class SelectBase(HasCTE, Executable, FromClause):
         "and will be removed in a future release.   Please use the "
         "the :paramref:`.Connection.execution_options.autocommit` "
         "parameter in conjunction with the "
-        ":meth:`.Executable.execution_options` method."
+        ":meth:`.Executable.execution_options` method.",
     )
     def autocommit(self):
         """return a new selectable with the 'autocommit' flag set to
@@ -2642,6 +2644,24 @@ class Select(HasPrefixes, HasSuffixes, GenerativeSelect):
     _memoized_property = SelectBase._memoized_property
     _is_select = True
 
+    @util.deprecated_params(
+        autocommit=(
+            "0.6",
+            "The :paramref:`.select.autocommit` parameter is deprecated "
+            "and will be removed in a future release.  Please refer to "
+            "the :paramref:`.Connection.execution_options.autocommit` "
+            "parameter in conjunction with the "
+            ":meth:`.Executable.execution_options` method in order to "
+            "affect the autocommit behavior for a statement.",
+        ),
+        for_update=(
+            "0.9",
+            "The :paramref:`.select.for_update` parameter is deprecated and "
+            "will be removed in a future release.  Please refer to the "
+            ":meth:`.Select.with_for_update` to specify the "
+            "structure of the ``FOR UPDATE`` clause.",
+        ),
+    )
     def __init__(
         self,
         columns=None,
@@ -2712,16 +2732,7 @@ class Select(HasPrefixes, HasSuffixes, GenerativeSelect):
             :meth:`.Select.select_from` - full description of explicit
             FROM clause specification.
 
-        :param autocommit:
-
-            .. deprecated:: 0.6
-
-                The :paramref:`.select.autocommit` parameter is deprecated
-                and will be removed in a future release.  Please refer to
-                the :paramref:`.Connection.execution_options.autocommit`
-                parameter in conjunction with the the
-                :meth:`.Executable.execution_options` method in order to
-                affect the autocommit behavior for a statement.
+        :param autocommit: legacy autocommit parameter.
 
         :param bind=None:
           an :class:`~.Engine` or :class:`~.Connection` instance
@@ -2762,13 +2773,6 @@ class Select(HasPrefixes, HasSuffixes, GenerativeSelect):
           when ``True``, applies ``FOR UPDATE`` to the end of the
           resulting statement.
 
-          .. deprecated:: 0.9
-
-            The :paramref:`.select.for_update` parameter is deprecated and
-            will be removed in a future release.  Please refer to the
-            :meth:`.Select.with_for_update` to specify the
-            structure of the ``FOR UPDATE`` clause.
-
           ``for_update`` accepts various string values interpreted by
           specific backends, including:
 
index 1d97bf35c54044aeeeab70e804057b07e585c156..8131be443a020a31ca67727240232b128f81c44a 100644 (file)
@@ -137,6 +137,22 @@ class String(Concatenable, TypeEngine):
 
     __visit_name__ = "string"
 
+    @util.deprecated_params(
+        convert_unicode=(
+            "1.3",
+            "The :paramref:`.String.convert_unicode` parameter is deprecated "
+            "and will be removed in a future release.  All modern DBAPIs "
+            "now support Python Unicode directly and this parameter is "
+            "unnecessary.",
+        ),
+        unicode_error=(
+            "1.3",
+            "The :paramref:`.String.unicode_error` parameter is deprecated "
+            "and will be removed in a future release.  This parameter is "
+            "unnecessary for modern Python DBAPIs and degrades performance "
+            "significantly.",
+        ),
+    )
     def __init__(
         self,
         length=None,
@@ -144,6 +160,7 @@ class String(Concatenable, TypeEngine):
         convert_unicode=False,
         unicode_error=None,
         _warn_on_bytestring=False,
+        _expect_unicode=False,
     ):
         """
         Create a string-holding type.
@@ -207,15 +224,9 @@ class String(Concatenable, TypeEngine):
 
         :param unicode_error: Optional, a method to use to handle Unicode
           conversion errors. Behaves like the ``errors`` keyword argument to
-          the standard library's ``string.decode()`` functions.   This flag
-          requires that :paramref:`.String.convert_unicode` is set to
-          ``"force"`` - otherwise,
-          SQLAlchemy is not guaranteed to handle the task of unicode
-          conversion.   Note that this flag adds significant performance
-          overhead to row-fetching operations for backends that already
-          return unicode objects natively (which most DBAPIs do).  This
-          flag should only be used as a last resort for reading
-          strings from a column with varied or corrupted encodings.
+          the standard library's ``string.decode()`` functions, requires
+          that :paramref:`.String.convert_unicode` is set to
+          ``"force"``
 
         """
         if unicode_error is not None and convert_unicode != "force":
@@ -225,8 +236,9 @@ class String(Concatenable, TypeEngine):
 
         self.length = length
         self.collation = collation
-        self.convert_unicode = convert_unicode
-        self.unicode_error = unicode_error
+        self._expect_unicode = convert_unicode or _expect_unicode
+        self._expect_unicode_error = unicode_error
+
         self._warn_on_bytestring = _warn_on_bytestring
 
     def literal_processor(self, dialect):
@@ -241,10 +253,10 @@ class String(Concatenable, TypeEngine):
         return process
 
     def bind_processor(self, dialect):
-        if self.convert_unicode or dialect.convert_unicode:
+        if self._expect_unicode or dialect.convert_unicode:
             if (
                 dialect.supports_unicode_binds
-                and self.convert_unicode != "force"
+                and self._expect_unicode != "force"
             ):
                 if self._warn_on_bytestring:
 
@@ -266,7 +278,7 @@ class String(Concatenable, TypeEngine):
 
                 def process(value):
                     if isinstance(value, util.text_type):
-                        return encoder(value, self.unicode_error)[0]
+                        return encoder(value, self._expect_unicode_error)[0]
                     elif warn_on_bytestring and value is not None:
                         util.warn_limited(
                             "Unicode type received non-unicode bind "
@@ -280,31 +292,31 @@ class String(Concatenable, TypeEngine):
             return None
 
     def result_processor(self, dialect, coltype):
-        wants_unicode = self.convert_unicode or dialect.convert_unicode
+        wants_unicode = self._expect_unicode or dialect.convert_unicode
         needs_convert = wants_unicode and (
             dialect.returns_unicode_strings is not True
-            or self.convert_unicode in ("force", "force_nocheck")
+            or self._expect_unicode in ("force", "force_nocheck")
         )
         needs_isinstance = (
             needs_convert
             and dialect.returns_unicode_strings
-            and self.convert_unicode != "force_nocheck"
+            and self._expect_unicode != "force_nocheck"
         )
         if needs_convert:
             if needs_isinstance:
                 return processors.to_conditional_unicode_processor_factory(
-                    dialect.encoding, self.unicode_error
+                    dialect.encoding, self._expect_unicode_error
                 )
             else:
                 return processors.to_unicode_processor_factory(
-                    dialect.encoding, self.unicode_error
+                    dialect.encoding, self._expect_unicode_error
                 )
         else:
             return None
 
     @property
     def python_type(self):
-        if self.convert_unicode:
+        if self._expect_unicode:
             return util.text_type
         else:
             return str
@@ -312,6 +324,16 @@ class String(Concatenable, TypeEngine):
     def get_dbapi_type(self, dbapi):
         return dbapi.STRING
 
+    @classmethod
+    def _warn_deprecated_unicode(cls):
+        util.warn_deprecated(
+            "The convert_unicode on Engine and String as well as the "
+            "unicode_error flag on String are deprecated.  All modern "
+            "DBAPIs now support Python Unicode natively under Python 2, and "
+            "under Python 3 all strings are inherently Unicode.  These flags "
+            "will be removed in a future release."
+        )
+
 
 class Text(String):
 
@@ -395,7 +417,7 @@ class Unicode(String):
         defaults to ``True``.
 
         """
-        kwargs.setdefault("convert_unicode", True)
+        kwargs.setdefault("_expect_unicode", True)
         kwargs.setdefault("_warn_on_bytestring", True)
         super(Unicode, self).__init__(length=length, **kwargs)
 
@@ -424,10 +446,13 @@ class UnicodeText(Text):
         defaults to ``True``.
 
         """
-        kwargs.setdefault("convert_unicode", True)
+        kwargs.setdefault("_expect_unicode", True)
         kwargs.setdefault("_warn_on_bytestring", True)
         super(UnicodeText, self).__init__(length=length, **kwargs)
 
+    def _warn_deprecated_unicode(self):
+        pass
+
 
 class Integer(_LookupExpressionAdapter, TypeEngine):
 
@@ -697,11 +722,7 @@ class Float(Numeric):
     scale = None
 
     def __init__(
-        self,
-        precision=None,
-        asdecimal=False,
-        decimal_return_scale=None,
-        **kwargs
+        self, precision=None, asdecimal=False, decimal_return_scale=None
     ):
         r"""
         Construct a Float.
@@ -724,25 +745,10 @@ class Float(Numeric):
 
          .. versionadded:: 0.9.0
 
-        :param \**kwargs:
-
-            .. deprecated:: 0.9
-
-                Additional keyword arguments are ignored by the base
-                :class:`.Float` type, and keyword arguments will no longer
-                be accepted in a future release.  For database specific floats
-                that support additional arguments, see that dialect's
-                documentation for details, such as
-                :class:`sqlalchemy.dialects.mysql.FLOAT`.
-
         """
         self.precision = precision
         self.asdecimal = asdecimal
         self.decimal_return_scale = decimal_return_scale
-        if kwargs:
-            util.warn_deprecated(
-                "Additional keyword arguments " "passed to Float ignored."
-            )
 
     def result_processor(self, dialect, coltype):
         if self.asdecimal:
@@ -975,19 +981,13 @@ class LargeBinary(_Binary):
         _Binary.__init__(self, length=length)
 
 
+@util.deprecated_cls(
+    "0.6",
+    "The :class:`.Binary` class is deprecated and will be removed "
+    "in a future release.  Please use :class:`.LargeBinary`.",
+)
 class Binary(LargeBinary):
-
-    """.. deprecated:: 0.6
-
-            The :class:`.Binary` class is deprecated and will be removed
-            in a future relase.  Please use :class:`.LargeBinary`.
-
-    """
-
     def __init__(self, *arg, **kw):
-        util.warn_deprecated(
-            "The Binary type has been renamed to " "LargeBinary."
-        )
         LargeBinary.__init__(self, *arg, **kw)
 
 
@@ -1264,6 +1264,15 @@ class Enum(Emulated, String, SchemaType):
 
     __visit_name__ = "enum"
 
+    @util.deprecated_params(
+        convert_unicode=(
+            "1.3",
+            "The :paramref:`.Enum.convert_unicode` parameter is deprecated "
+            "and will be removed in a future release.  All modern DBAPIs "
+            "now support Python Unicode directly and this parameter is "
+            "unnecessary.",
+        )
+    )
     def __init__(self, *enums, **kw):
         r"""Construct an enum.
 
@@ -1376,11 +1385,15 @@ class Enum(Emulated, String, SchemaType):
 
         if convert_unicode is None:
             for e in self.enums:
+                # this is all py2k logic that can go away for py3k only,
+                # "expect unicode" will always be implicitly true
                 if isinstance(e, util.text_type):
-                    convert_unicode = True
+                    _expect_unicode = True
                     break
             else:
-                convert_unicode = False
+                _expect_unicode = False
+        else:
+            _expect_unicode = convert_unicode
 
         if self.enums:
             length = max(len(x) for x in self.enums)
@@ -1389,7 +1402,7 @@ class Enum(Emulated, String, SchemaType):
         self._valid_lookup[None] = self._object_lookup[None] = None
 
         super(Enum, self).__init__(
-            length=length, convert_unicode=convert_unicode
+            length=length, _expect_unicode=_expect_unicode
         )
 
         if self.enum_class:
@@ -1469,7 +1482,7 @@ class Enum(Emulated, String, SchemaType):
             )
             if op is operators.concat_op:
                 typ = String(
-                    self.type.length, convert_unicode=self.type.convert_unicode
+                    self.type.length, _expect_unicode=self.type._expect_unicode
                 )
             return op, typ
 
@@ -1491,7 +1504,7 @@ class Enum(Emulated, String, SchemaType):
         )
 
     def adapt_to_emulated(self, impltype, **kw):
-        kw.setdefault("convert_unicode", self.convert_unicode)
+        kw.setdefault("_expect_unicode", self._expect_unicode)
         kw.setdefault("validate_strings", self.validate_strings)
         kw.setdefault("name", self.name)
         kw.setdefault("schema", self.schema)
@@ -2205,7 +2218,7 @@ class JSON(Indexable, TypeEngine):
 
     @util.memoized_property
     def _str_impl(self):
-        return String(convert_unicode=True)
+        return String(_expect_unicode=True)
 
     def bind_processor(self, dialect):
         string_process = self._str_impl.bind_processor(dialect)
index 22faa23941c70c2d8cd428e753bc961fe1059e0b..232eebeb342d59d93c402450979c09277928656a 100644 (file)
@@ -12,6 +12,7 @@ import warnings
 import weakref
 
 from . import config
+from . import uses_deprecated
 from .util import decorator
 from .. import event
 from .. import pool
@@ -74,6 +75,7 @@ class ConnectionKiller(object):
         else:
             self._stop_test_ctx_aggressive()
 
+    @uses_deprecated()
     def _stop_test_ctx_minimal(self):
         self.close_all()
 
@@ -83,6 +85,7 @@ class ConnectionKiller(object):
             if rec is not config.db:
                 rec.dispose()
 
+    @uses_deprecated()
     def _stop_test_ctx_aggressive(self):
         self.close_all()
         for conn, rec in list(self.conns):
index aa04b6073a0e8c560532167d29a5a6aca8f6fc91..e8d75bafa23b2cc9eb568ebfe500f1bc235c2848 100644 (file)
@@ -313,11 +313,15 @@ class ComponentReflectionTest(fixtures.TablesTest):
             answer = ["email_addresses_v", "users_v"]
             eq_(sorted(table_names), answer)
         else:
-            table_names = [
-                t
-                for t in insp.get_table_names(schema, order_by=order_by)
-                if t not in _ignore_tables
-            ]
+            if order_by:
+                tables = [
+                    rec[0]
+                    for rec in insp.get_sorted_table_and_fkc_names(schema)
+                    if rec[0]
+                ]
+            else:
+                tables = insp.get_table_names(schema)
+            table_names = [t for t in tables if t not in _ignore_tables]
 
             if order_by == "foreign_key":
                 answer = ["users", "email_addresses", "dingalings"]
index aa98a508821b0512882bd80d43064cd3b09dac71..a07e45df2d37bcd0178197094436801e6ec374c9 100644 (file)
@@ -320,7 +320,7 @@ class ServerSideCursorsTest(
 
     def test_for_update_expr(self):
         engine = self._fixture(True)
-        s1 = select([1], for_update=True)
+        s1 = select([1]).with_for_update()
         result = engine.execute(s1)
         assert self._is_server_side(result.cursor)
 
index 4791671f31d9a2d01b28f9f1b25f0514640ebd67..1e02c0e74246ce56358a649418f27ae91bd86f39 100644 (file)
@@ -933,10 +933,7 @@ class JSONTest(_LiteralRoundTripFixture, fixtures.TablesTest):
         )
         eq_(
             s.query(
-                cast(
-                    self.tables.data_table.c.data,
-                    String(convert_unicode="force"),
-                ),
+                cast(self.tables.data_table.c.data, String()),
                 cast(self.tables.data_table.c.nulldata, String),
             )
             .filter(self.tables.data_table.c.name == "d1")
@@ -945,10 +942,7 @@ class JSONTest(_LiteralRoundTripFixture, fixtures.TablesTest):
         )
         eq_(
             s.query(
-                cast(
-                    self.tables.data_table.c.data,
-                    String(convert_unicode="force"),
-                ),
+                cast(self.tables.data_table.c.data, String()),
                 cast(self.tables.data_table.c.nulldata, String),
             )
             .filter(self.tables.data_table.c.name == "d2")
index f812671d34c809e45a68e9aeb3ff040811d3afd4..383c143c4ce554ef5beadaca6d2e681a5f940908 100644 (file)
@@ -87,6 +87,8 @@ from .compat import win32  # noqa
 from .compat import with_metaclass  # noqa
 from .compat import zip_longest  # noqa
 from .deprecations import deprecated  # noqa
+from .deprecations import deprecated_cls  # noqa
+from .deprecations import deprecated_params  # noqa
 from .deprecations import inject_docstring_text  # noqa
 from .deprecations import pending_deprecation  # noqa
 from .deprecations import warn_deprecated  # noqa
index abd36d9b07ab6a1bf8c2dd5b78f0e5532e7bd85c..ff644cd36f18995324e708b287f4ce10b954964d 100644 (file)
@@ -44,6 +44,19 @@ FullArgSpec = collections.namedtuple(
     ],
 )
 
+FullArgSpec = collections.namedtuple(
+    "FullArgSpec",
+    [
+        "args",
+        "varargs",
+        "varkw",
+        "defaults",
+        "kwonlyargs",
+        "kwonlydefaults",
+        "annotations",
+    ],
+)
+
 try:
     import threading
 except ImportError:
index e7b972deb25f3e74dd6a9e2f81e3f88f7b86860c..a43acc72ebcb36afce733c0b55a0e8ddbc79c085 100644 (file)
@@ -12,6 +12,7 @@ import re
 import textwrap
 import warnings
 
+from . import compat
 from .langhelpers import decorator
 from .. import exc
 
@@ -24,6 +25,21 @@ def warn_pending_deprecation(msg, stacklevel=3):
     warnings.warn(msg, exc.SAPendingDeprecationWarning, stacklevel=stacklevel)
 
 
+def deprecated_cls(version, message, constructor="__init__"):
+    header = ".. deprecated:: %s %s" % (version, (message or ""))
+
+    def decorate(cls):
+        return _decorate_cls_with_warning(
+            cls,
+            constructor,
+            exc.SADeprecationWarning,
+            message % dict(func=constructor),
+            header,
+        )
+
+    return decorate
+
+
 def deprecated(version, message=None, add_deprecation_to_docstring=True):
     """Decorates a function and issues a deprecation warning on use.
 
@@ -60,6 +76,74 @@ def deprecated(version, message=None, add_deprecation_to_docstring=True):
     return decorate
 
 
+def deprecated_params(**specs):
+    """Decorates a function to warn on use of certain parameters.
+
+    e.g. ::
+
+        @deprecated_params(
+            weak_identity_map=(
+                "0.7",
+                "the :paramref:`.Session.weak_identity_map` parameter "
+                "is deprecated."
+            )
+
+        )
+
+    """
+
+    messages = {}
+    for param, (version, message) in specs.items():
+        messages[param] = _sanitize_restructured_text(message)
+
+    def decorate(fn):
+        spec = compat.inspect_getfullargspec(fn)
+        if spec.defaults is not None:
+            defaults = dict(
+                zip(
+                    spec.args[(len(spec.args) - len(spec.defaults)) :],
+                    spec.defaults,
+                )
+            )
+            check_defaults = set(defaults).intersection(messages)
+            check_kw = set(messages).difference(defaults)
+        else:
+            check_defaults = ()
+            check_kw = set(messages)
+
+        has_kw = spec.varkw is not None
+
+        @decorator
+        def warned(fn, *args, **kwargs):
+            for m in check_defaults:
+                if kwargs[m] != defaults[m]:
+                    warnings.warn(
+                        messages[m], exc.SADeprecationWarning, stacklevel=3
+                    )
+            for m in check_kw:
+                if m in kwargs:
+                    warnings.warn(
+                        messages[m], exc.SADeprecationWarning, stacklevel=3
+                    )
+
+            return fn(*args, **kwargs)
+
+        doc = fn.__doc__ is not None and fn.__doc__ or ""
+        if doc:
+            doc = inject_param_text(
+                doc,
+                {
+                    param: ".. deprecated:: %s %s" % (version, (message or ""))
+                    for param, (version, message) in specs.items()
+                },
+            )
+        decorated = warned(fn)
+        decorated.__doc__ = doc
+        return decorated
+
+    return decorate
+
+
 def pending_deprecation(
     version, message=None, add_deprecation_to_docstring=True
 ):
@@ -98,6 +182,14 @@ def pending_deprecation(
     return decorate
 
 
+def deprecated_option_value(parameter_value, default_value, warning_text):
+    if parameter_value is None:
+        return default_value
+    else:
+        warn_deprecated(warning_text)
+        return parameter_value
+
+
 def _sanitize_restructured_text(text):
     def repl(m):
         type_, name = m.group(1, 2)
@@ -108,6 +200,33 @@ def _sanitize_restructured_text(text):
     return re.sub(r"\:(\w+)\:`~?\.?(.+?)`", repl, text)
 
 
+def _decorate_cls_with_warning(
+    cls, constructor, wtype, message, docstring_header=None
+):
+    doc = cls.__doc__ is not None and cls.__doc__ or ""
+    if docstring_header is not None:
+        docstring_header %= dict(func=constructor)
+
+        doc = inject_docstring_text(doc, docstring_header, 1)
+
+        if type(cls) is type:
+            clsdict = dict(cls.__dict__)
+            clsdict["__doc__"] = doc
+            cls = type(cls.__name__, cls.__bases__, clsdict)
+            constructor_fn = clsdict[constructor]
+        else:
+            cls.__doc__ = doc
+            constructor_fn = getattr(cls, constructor)
+
+    setattr(
+        cls,
+        constructor,
+        _decorate_with_warning(constructor_fn, wtype, message, None),
+    )
+
+    return cls
+
+
 def _decorate_with_warning(func, wtype, message, docstring_header=None):
     """Wrap a function with a warnings.warn and augmented docstring."""
 
@@ -126,6 +245,7 @@ def _decorate_with_warning(func, wtype, message, docstring_header=None):
 
     decorated = warned(func)
     decorated.__doc__ = doc
+    decorated._sa_warn = lambda: warnings.warn(message, wtype, stacklevel=3)
     return decorated
 
 
@@ -155,3 +275,36 @@ def inject_docstring_text(doctext, injecttext, pos):
 
     lines = lines[0:inject_pos] + injectlines + lines[inject_pos:]
     return "\n".join(lines)
+
+
+def inject_param_text(doctext, inject_params):
+    doclines = doctext.splitlines()
+    lines = []
+
+    to_inject = None
+    while doclines:
+        line = doclines.pop(0)
+        if to_inject is None:
+            m = re.match(r"(\s+):param (.+?):", line)
+            if m:
+                param = m.group(2)
+                if param in inject_params:
+                    # default indent to that of :param: plus one
+                    indent = " " * len(m.group(1)) + " "
+
+                    # but if the next line has text, use that line's
+                        # indentation
+                    if doclines:
+                        m2 = re.match(r"(\s+)\S", doclines[0])
+                        if m2:
+                            indent = " " * len(m2.group(1))
+
+                    to_inject = indent + inject_params[param]
+        elif not line.rstrip():
+            lines.append(line)
+            lines.append(to_inject)
+            lines.append("\n")
+            to_inject = None
+        lines.append(line)
+
+    return "\n".join(lines)
index ece619fb5f208eed34ce19c8f693e5eebe695a65..07438f880ff5869967d3505e648528c0eb1a1792 100644 (file)
@@ -30,21 +30,11 @@ class QueuePoolTest(fixtures.TestBase, AssertsExecutionResults):
         # has the effect of initializing
         # class-level event listeners on Pool,
         # if not present already.
-        p1 = QueuePool(
-            creator=self.Connection,
-            pool_size=3,
-            max_overflow=-1,
-            use_threadlocal=True,
-        )
+        p1 = QueuePool(creator=self.Connection, pool_size=3, max_overflow=-1)
         p1.connect()
 
         global pool
-        pool = QueuePool(
-            creator=self.Connection,
-            pool_size=3,
-            max_overflow=-1,
-            use_threadlocal=True,
-        )
+        pool = QueuePool(creator=self.Connection, pool_size=3, max_overflow=-1)
 
     @profiling.function_call_count()
     def test_first_connect(self):
@@ -60,13 +50,3 @@ class QueuePoolTest(fixtures.TestBase, AssertsExecutionResults):
             return conn2
 
         go()
-
-    def test_second_samethread_connect(self):
-        conn = pool.connect()
-        conn  # strong ref
-
-        @profiling.function_call_count()
-        def go():
-            return pool.connect()
-
-        go()
index ef8c63d6bcb88620f09ae0271ffb547c6469df2d..46af4658d3433edac8a5ed935e882282f0828a65 100644 (file)
@@ -211,34 +211,28 @@ class QueryUnicodeTest(fixtures.TestBase):
 
     @testing.requires.mssql_freetds
     @testing.requires.python2
+    @testing.provide_metadata
     def test_convert_unicode(self):
-        meta = MetaData(testing.db)
+        meta = self.metadata
         t1 = Table(
             "unitest_table",
             meta,
             Column("id", Integer, primary_key=True),
-            Column("descr", mssql.MSText(convert_unicode=True)),
+            Column("descr", mssql.MSText()),
         )
         meta.create_all()
-        con = testing.db.connect()
-
-        # encode in UTF-8 (sting object) because this is the default
-        # dialect encoding
-
-        con.execute(
-            ue(
-                "insert into unitest_table values ('bien u\
-                    umang\xc3\xa9')"
-            ).encode("UTF-8")
-        )
-        try:
-            r = t1.select().execute().first()
+        with testing.db.connect() as con:
+            con.execute(
+                ue(
+                    "insert into unitest_table values ('abc \xc3\xa9 def')"
+                ).encode("UTF-8")
+            )
+            r = con.execute(t1.select()).first()
             assert isinstance(r[1], util.text_type), (
                 "%s is %s instead of unicode, working on %s"
                 % (r[1], type(r[1]), meta.bind)
             )
-        finally:
-            meta.drop_all()
+            eq_(r[1], util.ue("abc \xc3\xa9 def"))
 
 
 class QueryTest(testing.AssertsExecutionResults, fixtures.TestBase):
index ed5bbcb2bebf85f962246ec30318a92e9fb6cc3d..12f73fe2463c4d1406ae7ceddd648136d4042048 100644 (file)
@@ -1117,12 +1117,7 @@ class EnumSetTest(
             "t",
             self.metadata,
             Column("id", Integer, primary_key=True),
-            Column(
-                "data",
-                mysql.SET(
-                    u("réveillé"), u("drôle"), u("S’il"), convert_unicode=True
-                ),
-            ),
+            Column("data", mysql.SET(u("réveillé"), u("drôle"), u("S’il"))),
         )
 
         set_table.create()
index a9c84ba49e79e9428dca35cdccaaf65588a9a1a9..596161ef2700d97b791f4d0f4f2437d9a1c98c0b 100644 (file)
@@ -211,7 +211,7 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
         eq_(len(c._result_columns), 2)
         assert t.c.col1 in set(c._create_result_map()["col1"][1])
 
-        s = select([t], for_update=True).limit(10).order_by(t.c.col2)
+        s = select([t]).with_for_update().limit(10).order_by(t.c.col2)
         self.assert_compile(
             s,
             "SELECT col1, col2 FROM (SELECT "
@@ -222,7 +222,8 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
         )
 
         s = (
-            select([t], for_update=True)
+            select([t])
+            .with_for_update()
             .limit(10)
             .offset(20)
             .order_by(t.c.col2)
index 8d0e37188127baf82693b4205c0e4f4d74ca80b3..1401d40d033c401e46cfb9b147bb239ff6fb4c9b 100644 (file)
@@ -85,13 +85,12 @@ class OutParamTest(fixtures.TestBase, AssertsExecutionResults):
     def test_out_params(self):
         result = testing.db.execute(
             text(
-                "begin foo(:x_in, :x_out, :y_out, " ":z_out); end;",
-                bindparams=[
-                    bindparam("x_in", Float),
-                    outparam("x_out", Integer),
-                    outparam("y_out", Float),
-                    outparam("z_out", String),
-                ],
+                "begin foo(:x_in, :x_out, :y_out, " ":z_out); end;"
+            ).bindparams(
+                bindparam("x_in", Float),
+                outparam("x_out", Integer),
+                outparam("y_out", Float),
+                outparam("z_out", String),
             ),
             x_in=5,
         )
@@ -268,7 +267,7 @@ class ExecuteTest(fixtures.TestBase):
 
         # here, we can't use ORDER BY.
         eq_(
-            t.select(for_update=True).limit(2).execute().fetchall(),
+            t.select().with_for_update().limit(2).execute().fetchall(),
             [(1, 1), (2, 7)],
         )
 
@@ -277,7 +276,7 @@ class ExecuteTest(fixtures.TestBase):
         assert_raises_message(
             exc.DatabaseError,
             "ORA-02014",
-            t.select(for_update=True).limit(2).offset(3).execute,
+            t.select().with_for_update().limit(2).offset(3).execute,
         )
 
 
index dbe91ec03058e37ce83f55c1d2d7faf7f580eff1..5af5459ff83430105bfea44dbc382ebe836ea4c0 100644 (file)
@@ -556,15 +556,12 @@ class TypesTest(fixtures.TestBase):
         )
 
         row = testing.db.execute(
-            text(
-                stmt,
-                typemap={
-                    "idata": Integer(),
-                    "ndata": Numeric(20, 2),
-                    "ndata2": Numeric(20, 2),
-                    "nidata": Numeric(5, 0),
-                    "fdata": Float(),
-                },
+            text(stmt).columns(
+                idata=Integer(),
+                ndata=Numeric(20, 2),
+                ndata2=Numeric(20, 2),
+                nidata=Numeric(5, 0),
+                fdata=Float(),
             )
         ).fetchall()[0]
         eq_(
@@ -616,15 +613,12 @@ class TypesTest(fixtures.TestBase):
         )
 
         row = testing.db.execute(
-            text(
-                stmt,
-                typemap={
-                    "anon_1_idata": Integer(),
-                    "anon_1_ndata": Numeric(20, 2),
-                    "anon_1_ndata2": Numeric(20, 2),
-                    "anon_1_nidata": Numeric(5, 0),
-                    "anon_1_fdata": Float(),
-                },
+            text(stmt).columns(
+                anon_1_idata=Integer(),
+                anon_1_ndata=Numeric(20, 2),
+                anon_1_ndata2=Numeric(20, 2),
+                anon_1_nidata=Numeric(5, 0),
+                anon_1_fdata=Float(),
             )
         ).fetchall()[0]
         eq_(
@@ -643,15 +637,12 @@ class TypesTest(fixtures.TestBase):
         )
 
         row = testing.db.execute(
-            text(
-                stmt,
-                typemap={
-                    "anon_1_idata": Integer(),
-                    "anon_1_ndata": Numeric(20, 2, asdecimal=False),
-                    "anon_1_ndata2": Numeric(20, 2, asdecimal=False),
-                    "anon_1_nidata": Numeric(5, 0, asdecimal=False),
-                    "anon_1_fdata": Float(asdecimal=True),
-                },
+            text(stmt).columns(
+                anon_1_idata=Integer(),
+                anon_1_ndata=Numeric(20, 2, asdecimal=False),
+                anon_1_ndata2=Numeric(20, 2, asdecimal=False),
+                anon_1_nidata=Numeric(5, 0, asdecimal=False),
+                anon_1_fdata=Float(asdecimal=True),
             )
         ).fetchall()[0]
         eq_(
index d4e1cddc40434b1c8988a368ba515b804cb711c4..cadcbdc1c04997a711f38483f5e25b0765033fbf 100644 (file)
@@ -407,7 +407,7 @@ class MiscBackendTest(
     @testing.fails_on("+zxjdbc", "psycopg2/pg8000 specific assertion")
     @testing.requires.psycopg2_or_pg8000_compatibility
     def test_numeric_raise(self):
-        stmt = text("select cast('hi' as char) as hi", typemap={"hi": Numeric})
+        stmt = text("select cast('hi' as char) as hi").columns(hi=Numeric)
         assert_raises(exc.InvalidRequestError, testing.db.execute, stmt)
 
     @testing.only_if(
index f20f92251ac7a8824ab5dec1b5a02add2c3070f8..e55754f1b526190d02c585e6119ad2e55fc9e986 100644 (file)
@@ -165,8 +165,9 @@ class EnumTest(fixtures.TestBase, AssertsExecutionResults):
         'zxjdbc fails on ENUM: column "XXX" is of type '
         "XXX but expression is of type character varying",
     )
+    @testing.provide_metadata
     def test_create_table(self):
-        metadata = MetaData(testing.db)
+        metadata = self.metadata
         t1 = Table(
             "table",
             metadata,
@@ -175,19 +176,16 @@ class EnumTest(fixtures.TestBase, AssertsExecutionResults):
                 "value", Enum("one", "two", "three", name="onetwothreetype")
             ),
         )
-        t1.create()
-        t1.create(checkfirst=True)  # check the create
-        try:
-            t1.insert().execute(value="two")
-            t1.insert().execute(value="three")
-            t1.insert().execute(value="three")
+        with testing.db.connect() as conn:
+            t1.create(conn)
+            t1.create(conn, checkfirst=True)  # check the create
+            conn.execute(t1.insert(), value="two")
+            conn.execute(t1.insert(), value="three")
+            conn.execute(t1.insert(), value="three")
             eq_(
-                t1.select().order_by(t1.c.id).execute().fetchall(),
+                conn.execute(t1.select().order_by(t1.c.id)).fetchall(),
                 [(1, "two"), (2, "three"), (3, "three")],
             )
-        finally:
-            metadata.drop_all()
-            metadata.drop_all()
 
     def test_name_required(self):
         metadata = MetaData(testing.db)
index c480653b32a89d8e59538a065758bc0dad77fe34..adfca5b5346d578169c1984f68f37db578b35e6c 100644 (file)
@@ -116,7 +116,7 @@ class TestTypes(fixtures.TestBase, AssertsExecutionResults):
                 ValueError,
                 "Couldn't parse %s string." % disp,
                 lambda: testing.db.execute(
-                    text("select 'ASDF' as value", typemap={"value": typ})
+                    text("select 'ASDF' as value").columns(value=typ)
                 ).scalar(),
             )
 
@@ -254,12 +254,12 @@ class TestTypes(fixtures.TestBase, AssertsExecutionResults):
 
         dialect = sqlite.dialect()
         for t in (
-            String(convert_unicode=True),
-            sqltypes.CHAR(convert_unicode=True),
+            String(),
+            sqltypes.CHAR(),
             sqltypes.Unicode(),
             sqltypes.UnicodeText(),
-            String(convert_unicode=True),
-            sqltypes.CHAR(convert_unicode=True),
+            String(),
+            sqltypes.CHAR(),
             sqltypes.Unicode(),
             sqltypes.UnicodeText(),
         ):
index 98d53ee6def562bc5f4c0ecb0828e8d11fc32eab..ac209c69b40639c88a446f9e9c998b720146bfbd 100644 (file)
@@ -23,10 +23,6 @@ class BindTest(fixtures.TestBase):
             assert not conn.closed
         assert conn.closed
 
-        with e.contextual_connect() as conn:
-            assert not conn.closed
-        assert conn.closed
-
     def test_bind_close_conn(self):
         e = testing.db
         conn = e.connect()
@@ -35,11 +31,6 @@ class BindTest(fixtures.TestBase):
         assert not conn.closed
         assert c2.closed
 
-        with conn.contextual_connect() as c2:
-            assert not c2.closed
-        assert not conn.closed
-        assert c2.closed
-
     def test_create_drop_explicit(self):
         metadata = MetaData()
         table = Table("test_table", metadata, Column("foo", Integer))
index 2762eaa7ec14cf25bf0e483a03be81cdf381a03b..c9177e6ad346ca26d0993feea04f151d4fa0a289 100644 (file)
@@ -1,7 +1,6 @@
 import sqlalchemy as tsa
 from sqlalchemy import create_engine
 from sqlalchemy import event
-from sqlalchemy import exc
 from sqlalchemy import Integer
 from sqlalchemy import MetaData
 from sqlalchemy import String
@@ -11,7 +10,6 @@ from sqlalchemy.schema import AddConstraint
 from sqlalchemy.schema import CheckConstraint
 from sqlalchemy.schema import DDL
 from sqlalchemy.schema import DropConstraint
-from sqlalchemy.testing import assert_raises
 from sqlalchemy.testing import AssertsCompiledSQL
 from sqlalchemy.testing import engines
 from sqlalchemy.testing import eq_
@@ -373,22 +371,6 @@ class DDLEventTest(fixtures.TestBase):
         )
         eq_(metadata_canary.mock_calls, [])
 
-    def test_append_listener(self):
-        metadata, table, bind = self.metadata, self.table, self.bind
-
-        def fn(*a):
-            return None
-
-        table.append_ddl_listener("before-create", fn)
-        assert_raises(
-            exc.InvalidRequestError, table.append_ddl_listener, "blah", fn
-        )
-
-        metadata.append_ddl_listener("before-create", fn)
-        assert_raises(
-            exc.InvalidRequestError, metadata.append_ddl_listener, "blah", fn
-        )
-
 
 class DDLExecutionTest(fixtures.TestBase):
     def setup(self):
@@ -466,66 +448,6 @@ class DDLExecutionTest(fixtures.TestBase):
         assert "xyzzy" in strings
         assert "fnord" in strings
 
-    def test_deprecated_append_ddl_listener_table(self):
-        metadata, users, engine = self.metadata, self.users, self.engine
-        canary = []
-        users.append_ddl_listener(
-            "before-create", lambda e, t, b: canary.append("mxyzptlk")
-        )
-        users.append_ddl_listener(
-            "after-create", lambda e, t, b: canary.append("klptzyxm")
-        )
-        users.append_ddl_listener(
-            "before-drop", lambda e, t, b: canary.append("xyzzy")
-        )
-        users.append_ddl_listener(
-            "after-drop", lambda e, t, b: canary.append("fnord")
-        )
-
-        metadata.create_all()
-        assert "mxyzptlk" in canary
-        assert "klptzyxm" in canary
-        assert "xyzzy" not in canary
-        assert "fnord" not in canary
-        del engine.mock[:]
-        canary[:] = []
-        metadata.drop_all()
-        assert "mxyzptlk" not in canary
-        assert "klptzyxm" not in canary
-        assert "xyzzy" in canary
-        assert "fnord" in canary
-
-    def test_deprecated_append_ddl_listener_metadata(self):
-        metadata, users, engine = self.metadata, self.users, self.engine
-        canary = []
-        metadata.append_ddl_listener(
-            "before-create",
-            lambda e, t, b, tables=None: canary.append("mxyzptlk"),
-        )
-        metadata.append_ddl_listener(
-            "after-create",
-            lambda e, t, b, tables=None: canary.append("klptzyxm"),
-        )
-        metadata.append_ddl_listener(
-            "before-drop", lambda e, t, b, tables=None: canary.append("xyzzy")
-        )
-        metadata.append_ddl_listener(
-            "after-drop", lambda e, t, b, tables=None: canary.append("fnord")
-        )
-
-        metadata.create_all()
-        assert "mxyzptlk" in canary
-        assert "klptzyxm" in canary
-        assert "xyzzy" not in canary
-        assert "fnord" not in canary
-        del engine.mock[:]
-        canary[:] = []
-        metadata.drop_all()
-        assert "mxyzptlk" not in canary
-        assert "klptzyxm" not in canary
-        assert "xyzzy" in canary
-        assert "fnord" in canary
-
     def test_metadata(self):
         metadata, engine = self.metadata, self.engine
 
@@ -779,27 +701,3 @@ class DDLTest(fixtures.TestBase, AssertsCompiledSQL):
             )
             ._should_execute(tbl, cx)
         )
-
-    @testing.uses_deprecated(r"See DDLEvents")
-    def test_filter_deprecated(self):
-        cx = self.mock_engine()
-
-        tbl = Table("t", MetaData(), Column("id", Integer))
-        target = cx.name
-
-        assert DDL("")._should_execute_deprecated("x", tbl, cx)
-        assert DDL("", on=target)._should_execute_deprecated("x", tbl, cx)
-        assert not DDL("", on="bogus")._should_execute_deprecated("x", tbl, cx)
-        assert DDL("", on=lambda d, x, y, z: True)._should_execute_deprecated(
-            "x", tbl, cx
-        )
-        assert DDL(
-            "", on=lambda d, x, y, z: z.engine.name != "bogus"
-        )._should_execute_deprecated("x", tbl, cx)
-
-    def test_repr(self):
-        assert repr(DDL("s"))
-        assert repr(DDL("s", on="engine"))
-        assert repr(DDL("s", on=lambda x: 1))
-        assert repr(DDL("s", context={"a": 1}))
-        assert repr(DDL("s", on="engine", context={"a": 1}))
diff --git a/test/engine/test_deprecations.py b/test/engine/test_deprecations.py
new file mode 100644 (file)
index 0000000..35226a0
--- /dev/null
@@ -0,0 +1,1793 @@
+import re
+import time
+
+import sqlalchemy as tsa
+from sqlalchemy import column
+from sqlalchemy import create_engine
+from sqlalchemy import engine_from_config
+from sqlalchemy import event
+from sqlalchemy import ForeignKey
+from sqlalchemy import func
+from sqlalchemy import inspect
+from sqlalchemy import INT
+from sqlalchemy import Integer
+from sqlalchemy import literal
+from sqlalchemy import MetaData
+from sqlalchemy import pool
+from sqlalchemy import select
+from sqlalchemy import Sequence
+from sqlalchemy import String
+from sqlalchemy import testing
+from sqlalchemy import text
+from sqlalchemy import TypeDecorator
+from sqlalchemy import VARCHAR
+from sqlalchemy.engine.base import Engine
+from sqlalchemy.interfaces import ConnectionProxy
+from sqlalchemy.testing import assert_raises_message
+from sqlalchemy.testing import engines
+from sqlalchemy.testing import eq_
+from sqlalchemy.testing import fixtures
+from sqlalchemy.testing.engines import testing_engine
+from sqlalchemy.testing.mock import call
+from sqlalchemy.testing.mock import Mock
+from sqlalchemy.testing.schema import Column
+from sqlalchemy.testing.schema import Table
+from sqlalchemy.testing.util import gc_collect
+from sqlalchemy.testing.util import lazy_gc
+from .test_parseconnect import mock_dbapi
+
+tlengine = None
+
+
+class SomeException(Exception):
+    pass
+
+
+def _tlengine_deprecated():
+    return testing.expect_deprecated(
+        "The 'threadlocal' engine strategy is deprecated"
+    )
+
+
+class TableNamesOrderByTest(fixtures.TestBase):
+    @testing.provide_metadata
+    def test_order_by_foreign_key(self):
+        Table(
+            "t1",
+            self.metadata,
+            Column("id", Integer, primary_key=True),
+            test_needs_acid=True,
+        )
+        Table(
+            "t2",
+            self.metadata,
+            Column("id", Integer, primary_key=True),
+            Column("t1id", Integer, ForeignKey("t1.id")),
+            test_needs_acid=True,
+        )
+        Table(
+            "t3",
+            self.metadata,
+            Column("id", Integer, primary_key=True),
+            Column("t2id", Integer, ForeignKey("t2.id")),
+            test_needs_acid=True,
+        )
+        self.metadata.create_all()
+        insp = inspect(testing.db)
+        with testing.expect_deprecated(
+            "The get_table_names.order_by parameter is deprecated "
+        ):
+            tnames = insp.get_table_names(order_by="foreign_key")
+        eq_(tnames, ["t1", "t2", "t3"])
+
+
+class CreateEngineTest(fixtures.TestBase):
+    def test_pool_threadlocal_from_config(self):
+        dbapi = mock_dbapi
+
+        config = {
+            "sqlalchemy.url": "postgresql://scott:tiger@somehost/test",
+            "sqlalchemy.pool_threadlocal": "false",
+        }
+
+        e = engine_from_config(config, module=dbapi, _initialize=False)
+        eq_(e.pool._use_threadlocal, False)
+
+        config = {
+            "sqlalchemy.url": "postgresql://scott:tiger@somehost/test",
+            "sqlalchemy.pool_threadlocal": "true",
+        }
+
+        with testing.expect_deprecated(
+            "The Pool.use_threadlocal parameter is deprecated"
+        ):
+            e = engine_from_config(config, module=dbapi, _initialize=False)
+        eq_(e.pool._use_threadlocal, True)
+
+
+class RecycleTest(fixtures.TestBase):
+    __backend__ = True
+
+    def test_basic(self):
+        with testing.expect_deprecated(
+            "The Pool.use_threadlocal parameter is deprecated"
+        ):
+            engine = engines.reconnecting_engine(
+                options={"pool_threadlocal": True}
+            )
+
+        with testing.expect_deprecated(
+            r"The Engine.contextual_connect\(\) method is deprecated"
+        ):
+            conn = engine.contextual_connect()
+        eq_(conn.execute(select([1])).scalar(), 1)
+        conn.close()
+
+        # set the pool recycle down to 1.
+        # we aren't doing this inline with the
+        # engine create since cx_oracle takes way
+        # too long to create the 1st connection and don't
+        # want to build a huge delay into this test.
+
+        engine.pool._recycle = 1
+
+        # kill the DB connection
+        engine.test_shutdown()
+
+        # wait until past the recycle period
+        time.sleep(2)
+
+        # can connect, no exception
+        with testing.expect_deprecated(
+            r"The Engine.contextual_connect\(\) method is deprecated"
+        ):
+            conn = engine.contextual_connect()
+        eq_(conn.execute(select([1])).scalar(), 1)
+        conn.close()
+
+
+class TLTransactionTest(fixtures.TestBase):
+    __requires__ = ("ad_hoc_engines",)
+    __backend__ = True
+
+    @classmethod
+    def setup_class(cls):
+        global users, metadata, tlengine
+
+        with _tlengine_deprecated():
+            tlengine = testing_engine(options=dict(strategy="threadlocal"))
+        metadata = MetaData()
+        users = Table(
+            "query_users",
+            metadata,
+            Column(
+                "user_id",
+                INT,
+                Sequence("query_users_id_seq", optional=True),
+                primary_key=True,
+            ),
+            Column("user_name", VARCHAR(20)),
+            test_needs_acid=True,
+        )
+        metadata.create_all(tlengine)
+
+    def teardown(self):
+        tlengine.execute(users.delete()).close()
+
+    @classmethod
+    def teardown_class(cls):
+        tlengine.close()
+        metadata.drop_all(tlengine)
+        tlengine.dispose()
+
+    def setup(self):
+
+        # ensure tests start with engine closed
+
+        tlengine.close()
+
+    @testing.crashes(
+        "oracle", "TNS error of unknown origin occurs on the buildbot."
+    )
+    def test_rollback_no_trans(self):
+        with _tlengine_deprecated():
+            tlengine = testing_engine(options=dict(strategy="threadlocal"))
+
+        # shouldn't fail
+        tlengine.rollback()
+
+        tlengine.begin()
+        tlengine.rollback()
+
+        # shouldn't fail
+        tlengine.rollback()
+
+    def test_commit_no_trans(self):
+        with _tlengine_deprecated():
+            tlengine = testing_engine(options=dict(strategy="threadlocal"))
+
+        # shouldn't fail
+        tlengine.commit()
+
+        tlengine.begin()
+        tlengine.rollback()
+
+        # shouldn't fail
+        tlengine.commit()
+
+    def test_prepare_no_trans(self):
+        with _tlengine_deprecated():
+            tlengine = testing_engine(options=dict(strategy="threadlocal"))
+
+        # shouldn't fail
+        tlengine.prepare()
+
+        tlengine.begin()
+        tlengine.rollback()
+
+        # shouldn't fail
+        tlengine.prepare()
+
+    def test_connection_close(self):
+        """test that when connections are closed for real, transactions
+        are rolled back and disposed."""
+
+        c = tlengine.contextual_connect()
+        c.begin()
+        assert c.in_transaction()
+        c.close()
+        assert not c.in_transaction()
+
+    def test_transaction_close(self):
+        c = tlengine.contextual_connect()
+        t = c.begin()
+        tlengine.execute(users.insert(), user_id=1, user_name="user1")
+        tlengine.execute(users.insert(), user_id=2, user_name="user2")
+        t2 = c.begin()
+        tlengine.execute(users.insert(), user_id=3, user_name="user3")
+        tlengine.execute(users.insert(), user_id=4, user_name="user4")
+        t2.close()
+        result = c.execute("select * from query_users")
+        assert len(result.fetchall()) == 4
+        t.close()
+        external_connection = tlengine.connect()
+        result = external_connection.execute("select * from query_users")
+        try:
+            assert len(result.fetchall()) == 0
+        finally:
+            c.close()
+            external_connection.close()
+
+    def test_rollback(self):
+        """test a basic rollback"""
+
+        tlengine.begin()
+        tlengine.execute(users.insert(), user_id=1, user_name="user1")
+        tlengine.execute(users.insert(), user_id=2, user_name="user2")
+        tlengine.execute(users.insert(), user_id=3, user_name="user3")
+        tlengine.rollback()
+        external_connection = tlengine.connect()
+        result = external_connection.execute("select * from query_users")
+        try:
+            assert len(result.fetchall()) == 0
+        finally:
+            external_connection.close()
+
+    def test_commit(self):
+        """test a basic commit"""
+
+        tlengine.begin()
+        tlengine.execute(users.insert(), user_id=1, user_name="user1")
+        tlengine.execute(users.insert(), user_id=2, user_name="user2")
+        tlengine.execute(users.insert(), user_id=3, user_name="user3")
+        tlengine.commit()
+        external_connection = tlengine.connect()
+        result = external_connection.execute("select * from query_users")
+        try:
+            assert len(result.fetchall()) == 3
+        finally:
+            external_connection.close()
+
+    def test_with_interface(self):
+        trans = tlengine.begin()
+        tlengine.execute(users.insert(), user_id=1, user_name="user1")
+        tlengine.execute(users.insert(), user_id=2, user_name="user2")
+        trans.commit()
+
+        trans = tlengine.begin()
+        tlengine.execute(users.insert(), user_id=3, user_name="user3")
+        trans.__exit__(Exception, "fake", None)
+        trans = tlengine.begin()
+        tlengine.execute(users.insert(), user_id=4, user_name="user4")
+        trans.__exit__(None, None, None)
+        eq_(
+            tlengine.execute(
+                users.select().order_by(users.c.user_id)
+            ).fetchall(),
+            [(1, "user1"), (2, "user2"), (4, "user4")],
+        )
+
+    def test_commits(self):
+        connection = tlengine.connect()
+        assert (
+            connection.execute("select count(*) from query_users").scalar()
+            == 0
+        )
+        connection.close()
+        connection = tlengine.contextual_connect()
+        transaction = connection.begin()
+        connection.execute(users.insert(), user_id=1, user_name="user1")
+        transaction.commit()
+        transaction = connection.begin()
+        connection.execute(users.insert(), user_id=2, user_name="user2")
+        connection.execute(users.insert(), user_id=3, user_name="user3")
+        transaction.commit()
+        transaction = connection.begin()
+        result = connection.execute("select * from query_users")
+        rows = result.fetchall()
+        assert len(rows) == 3, "expected 3 got %d" % len(rows)
+        transaction.commit()
+        connection.close()
+
+    def test_rollback_off_conn(self):
+
+        # test that a TLTransaction opened off a TLConnection allows
+        # that TLConnection to be aware of the transactional context
+
+        conn = tlengine.contextual_connect()
+        trans = conn.begin()
+        conn.execute(users.insert(), user_id=1, user_name="user1")
+        conn.execute(users.insert(), user_id=2, user_name="user2")
+        conn.execute(users.insert(), user_id=3, user_name="user3")
+        trans.rollback()
+        external_connection = tlengine.connect()
+        result = external_connection.execute("select * from query_users")
+        try:
+            assert len(result.fetchall()) == 0
+        finally:
+            conn.close()
+            external_connection.close()
+
+    def test_morerollback_off_conn(self):
+
+        # test that an existing TLConnection automatically takes place
+        # in a TLTransaction opened on a second TLConnection
+
+        conn = tlengine.contextual_connect()
+        conn2 = tlengine.contextual_connect()
+        trans = conn2.begin()
+        conn.execute(users.insert(), user_id=1, user_name="user1")
+        conn.execute(users.insert(), user_id=2, user_name="user2")
+        conn.execute(users.insert(), user_id=3, user_name="user3")
+        trans.rollback()
+        external_connection = tlengine.connect()
+        result = external_connection.execute("select * from query_users")
+        try:
+            assert len(result.fetchall()) == 0
+        finally:
+            conn.close()
+            conn2.close()
+            external_connection.close()
+
+    def test_commit_off_connection(self):
+        conn = tlengine.contextual_connect()
+        trans = conn.begin()
+        conn.execute(users.insert(), user_id=1, user_name="user1")
+        conn.execute(users.insert(), user_id=2, user_name="user2")
+        conn.execute(users.insert(), user_id=3, user_name="user3")
+        trans.commit()
+        external_connection = tlengine.connect()
+        result = external_connection.execute("select * from query_users")
+        try:
+            assert len(result.fetchall()) == 3
+        finally:
+            conn.close()
+            external_connection.close()
+
+    def test_nesting_rollback(self):
+        """tests nesting of transactions, rollback at the end"""
+
+        external_connection = tlengine.connect()
+        self.assert_(
+            external_connection.connection
+            is not tlengine.contextual_connect().connection
+        )
+        tlengine.begin()
+        tlengine.execute(users.insert(), user_id=1, user_name="user1")
+        tlengine.execute(users.insert(), user_id=2, user_name="user2")
+        tlengine.execute(users.insert(), user_id=3, user_name="user3")
+        tlengine.begin()
+        tlengine.execute(users.insert(), user_id=4, user_name="user4")
+        tlengine.execute(users.insert(), user_id=5, user_name="user5")
+        tlengine.commit()
+        tlengine.rollback()
+        try:
+            self.assert_(
+                external_connection.scalar("select count(*) from query_users")
+                == 0
+            )
+        finally:
+            external_connection.close()
+
+    def test_nesting_commit(self):
+        """tests nesting of transactions, commit at the end."""
+
+        external_connection = tlengine.connect()
+        self.assert_(
+            external_connection.connection
+            is not tlengine.contextual_connect().connection
+        )
+        tlengine.begin()
+        tlengine.execute(users.insert(), user_id=1, user_name="user1")
+        tlengine.execute(users.insert(), user_id=2, user_name="user2")
+        tlengine.execute(users.insert(), user_id=3, user_name="user3")
+        tlengine.begin()
+        tlengine.execute(users.insert(), user_id=4, user_name="user4")
+        tlengine.execute(users.insert(), user_id=5, user_name="user5")
+        tlengine.commit()
+        tlengine.commit()
+        try:
+            self.assert_(
+                external_connection.scalar("select count(*) from query_users")
+                == 5
+            )
+        finally:
+            external_connection.close()
+
+    def test_mixed_nesting(self):
+        """tests nesting of transactions off the TLEngine directly
+        inside of transactions off the connection from the TLEngine"""
+
+        external_connection = tlengine.connect()
+        self.assert_(
+            external_connection.connection
+            is not tlengine.contextual_connect().connection
+        )
+        conn = tlengine.contextual_connect()
+        trans = conn.begin()
+        trans2 = conn.begin()
+        tlengine.execute(users.insert(), user_id=1, user_name="user1")
+        tlengine.execute(users.insert(), user_id=2, user_name="user2")
+        tlengine.execute(users.insert(), user_id=3, user_name="user3")
+        tlengine.begin()
+        tlengine.execute(users.insert(), user_id=4, user_name="user4")
+        tlengine.begin()
+        tlengine.execute(users.insert(), user_id=5, user_name="user5")
+        tlengine.execute(users.insert(), user_id=6, user_name="user6")
+        tlengine.execute(users.insert(), user_id=7, user_name="user7")
+        tlengine.commit()
+        tlengine.execute(users.insert(), user_id=8, user_name="user8")
+        tlengine.commit()
+        trans2.commit()
+        trans.rollback()
+        conn.close()
+        try:
+            self.assert_(
+                external_connection.scalar("select count(*) from query_users")
+                == 0
+            )
+        finally:
+            external_connection.close()
+
+    def test_more_mixed_nesting(self):
+        """tests nesting of transactions off the connection from the
+        TLEngine inside of transactions off the TLEngine directly."""
+
+        external_connection = tlengine.connect()
+        self.assert_(
+            external_connection.connection
+            is not tlengine.contextual_connect().connection
+        )
+        tlengine.begin()
+        connection = tlengine.contextual_connect()
+        connection.execute(users.insert(), user_id=1, user_name="user1")
+        tlengine.begin()
+        connection.execute(users.insert(), user_id=2, user_name="user2")
+        connection.execute(users.insert(), user_id=3, user_name="user3")
+        trans = connection.begin()
+        connection.execute(users.insert(), user_id=4, user_name="user4")
+        connection.execute(users.insert(), user_id=5, user_name="user5")
+        trans.commit()
+        tlengine.commit()
+        tlengine.rollback()
+        connection.close()
+        try:
+            self.assert_(
+                external_connection.scalar("select count(*) from query_users")
+                == 0
+            )
+        finally:
+            external_connection.close()
+
+    @testing.requires.savepoints
+    def test_nested_subtransaction_rollback(self):
+        tlengine.begin()
+        tlengine.execute(users.insert(), user_id=1, user_name="user1")
+        tlengine.begin_nested()
+        tlengine.execute(users.insert(), user_id=2, user_name="user2")
+        tlengine.rollback()
+        tlengine.execute(users.insert(), user_id=3, user_name="user3")
+        tlengine.commit()
+        tlengine.close()
+        eq_(
+            tlengine.execute(
+                select([users.c.user_id]).order_by(users.c.user_id)
+            ).fetchall(),
+            [(1,), (3,)],
+        )
+        tlengine.close()
+
+    @testing.requires.savepoints
+    @testing.crashes(
+        "oracle+zxjdbc",
+        "Errors out and causes subsequent tests to " "deadlock",
+    )
+    def test_nested_subtransaction_commit(self):
+        tlengine.begin()
+        tlengine.execute(users.insert(), user_id=1, user_name="user1")
+        tlengine.begin_nested()
+        tlengine.execute(users.insert(), user_id=2, user_name="user2")
+        tlengine.commit()
+        tlengine.execute(users.insert(), user_id=3, user_name="user3")
+        tlengine.commit()
+        tlengine.close()
+        eq_(
+            tlengine.execute(
+                select([users.c.user_id]).order_by(users.c.user_id)
+            ).fetchall(),
+            [(1,), (2,), (3,)],
+        )
+        tlengine.close()
+
+    @testing.requires.savepoints
+    def test_rollback_to_subtransaction(self):
+        tlengine.begin()
+        tlengine.execute(users.insert(), user_id=1, user_name="user1")
+        tlengine.begin_nested()
+        tlengine.execute(users.insert(), user_id=2, user_name="user2")
+        tlengine.begin()
+        tlengine.execute(users.insert(), user_id=3, user_name="user3")
+        tlengine.rollback()
+        tlengine.rollback()
+        tlengine.execute(users.insert(), user_id=4, user_name="user4")
+        tlengine.commit()
+        tlengine.close()
+        eq_(
+            tlengine.execute(
+                select([users.c.user_id]).order_by(users.c.user_id)
+            ).fetchall(),
+            [(1,), (4,)],
+        )
+        tlengine.close()
+
+    def test_connections(self):
+        """tests that contextual_connect is threadlocal"""
+
+        c1 = tlengine.contextual_connect()
+        c2 = tlengine.contextual_connect()
+        assert c1.connection is c2.connection
+        c2.close()
+        assert not c1.closed
+        assert not tlengine.closed
+
+    @testing.requires.independent_cursors
+    def test_result_closing(self):
+        """tests that result sets from a shared threadlocal connection close independently"""
+
+        r1 = tlengine.execute(select([1]))
+        r2 = tlengine.execute(select([1]))
+        row1 = r1.fetchone()
+        row2 = r2.fetchone()
+        r1.close()
+        assert r2.connection is r1.connection
+        assert not r2.connection.closed
+        assert not tlengine.closed
+
+        # close again, nothing happens since resultproxy calls close()
+        # only once
+
+        r1.close()
+        assert r2.connection is r1.connection
+        assert not r2.connection.closed
+        assert not tlengine.closed
+        r2.close()
+        assert r2.connection.closed
+        assert tlengine.closed
+
+    @testing.crashes(
+        "oracle+cx_oracle", "intermittent failures on the buildbot"
+    )
+    def test_dispose(self):
+        with _tlengine_deprecated():
+            eng = testing_engine(options=dict(strategy="threadlocal"))
+        result = eng.execute(select([1]))
+        eng.dispose()
+        eng.execute(select([1]))
+
+    @testing.requires.two_phase_transactions
+    def test_two_phase_transaction(self):
+        tlengine.begin_twophase()
+        tlengine.execute(users.insert(), user_id=1, user_name="user1")
+        tlengine.prepare()
+        tlengine.commit()
+        tlengine.begin_twophase()
+        tlengine.execute(users.insert(), user_id=2, user_name="user2")
+        tlengine.commit()
+        tlengine.begin_twophase()
+        tlengine.execute(users.insert(), user_id=3, user_name="user3")
+        tlengine.rollback()
+        tlengine.begin_twophase()
+        tlengine.execute(users.insert(), user_id=4, user_name="user4")
+        tlengine.prepare()
+        tlengine.rollback()
+        eq_(
+            tlengine.execute(
+                select([users.c.user_id]).order_by(users.c.user_id)
+            ).fetchall(),
+            [(1,), (2,)],
+        )
+
+
class ConvenienceExecuteTest(fixtures.TablesTest):
    """Exercise Engine.begin() used as a context manager against a
    deprecated threadlocal-strategy engine, in both commit and
    rollback flavors."""

    __backend__ = True

    @classmethod
    def define_tables(cls, metadata):
        cls.table = Table(
            "exec_test",
            metadata,
            Column("a", Integer),
            Column("b", Integer),
            test_needs_acid=True,
        )

    def _trans_fn(self, is_transaction=False):
        """Return a callable inserting one row; ``conn`` may be a
        Connection or, when is_transaction, a Transaction."""

        def run(conn, x, value=None):
            if is_transaction:
                conn = conn.connection
            conn.execute(self.table.insert().values(a=x, b=value))

        return run

    def _trans_rollback_fn(self, is_transaction=False):
        """Like _trans_fn, but raises after inserting so the enclosing
        transaction rolls back."""

        def run(conn, x, value=None):
            if is_transaction:
                conn = conn.connection
            conn.execute(self.table.insert().values(a=x, b=value))
            raise SomeException("breakage")

        return run

    def _assert_no_data(self):
        # the table should contain zero rows
        count = testing.db.scalar(
            select([func.count("*")]).select_from(self.table)
        )
        eq_(count, 0)

    def _assert_fn(self, x, value=None):
        # exactly one row, matching the given values
        rows = testing.db.execute(self.table.select()).fetchall()
        eq_(rows, [(x, value)])

    def _tlocal_engine(self):
        # threadlocal strategy emits a deprecation warning at creation
        with _tlengine_deprecated():
            return engines.testing_engine(
                options=dict(strategy="threadlocal", pool=testing.db.pool)
            )

    def test_transaction_tlocal_engine_ctx_commit(self):
        fn = self._trans_fn()
        engine = self._tlocal_engine()
        ctx = engine.begin()
        testing.run_as_contextmanager(ctx, fn, 5, value=8)
        self._assert_fn(5, value=8)

    def test_transaction_tlocal_engine_ctx_rollback(self):
        fn = self._trans_rollback_fn()
        engine = self._tlocal_engine()
        ctx = engine.begin()
        assert_raises_message(
            Exception,
            "breakage",
            testing.run_as_contextmanager,
            ctx,
            fn,
            5,
            value=8,
        )
        self._assert_no_data()
+
+
def _proxy_execute_deprecated():
    """Return expect_deprecated guards for the two core
    ConnectionProxy hooks, in (execute, cursor_execute) order."""
    messages = (
        "ConnectionProxy.execute is deprecated.",
        "ConnectionProxy.cursor_execute is deprecated.",
    )
    return tuple(testing.expect_deprecated(msg) for msg in messages)
+
+
class ProxyConnectionTest(fixtures.TestBase):

    """These are the same tests as EngineEventsTest, except using
    the deprecated ConnectionProxy interface.

    """

    __requires__ = ("ad_hoc_engines",)
    __prefer_requires__ = ("two_phase_transactions",)

    # every ConnectionProxy hook; creating an engine configured with a
    # proxy emits one deprecation warning per hook
    _proxy_hook_names = [
        "execute",
        "cursor_execute",
        "begin",
        "rollback",
        "commit",
        "savepoint",
        "rollback_savepoint",
        "release_savepoint",
        "begin_twophase",
        "prepare_twophase",
        "rollback_twophase",
        "commit_twophase",
    ]

    def _tracking_engine(self, canary):
        """Return a testing engine whose proxy appends the name of each
        invoked ConnectionProxy hook to ``canary``.

        Factored out of test_options / test_transactional /
        test_transactional_advanced, which previously repeated both the
        TrackProxy class and the twelve-message deprecation list
        verbatim.
        """

        class TrackProxy(ConnectionProxy):
            def __getattribute__(self, key):
                fn = object.__getattribute__(self, key)

                def go(*arg, **kw):
                    canary.append(fn.__name__)
                    return fn(*arg, **kw)

                return go

        with testing.expect_deprecated(
            *[
                "ConnectionProxy.%s is deprecated" % name
                for name in self._proxy_hook_names
            ]
        ):
            return engines.testing_engine(options={"proxy": TrackProxy()})

    @testing.uses_deprecated(r".*Use event.listen")
    @testing.fails_on("firebird", "Data type unknown")
    def test_proxy(self):
        """Statement and cursor execution both pass through the proxy
        hooks, for plain and threadlocal engines alike."""

        stmts = []
        cursor_stmts = []

        class MyProxy(ConnectionProxy):
            def execute(
                self, conn, execute, clauseelement, *multiparams, **params
            ):
                stmts.append((str(clauseelement), params, multiparams))
                return execute(clauseelement, *multiparams, **params)

            def cursor_execute(
                self,
                execute,
                cursor,
                statement,
                parameters,
                context,
                executemany,
            ):
                cursor_stmts.append((str(statement), parameters, None))
                return execute(cursor, statement, parameters, context)

        def assert_stmts(expected, received):
            # for each expected statement, scan forward through the
            # received list until a match on SQL prefix + parameters
            for stmt, params, posn in expected:
                if not received:
                    assert False, "Nothing available for stmt: %s" % stmt
                while received:
                    teststmt, testparams, testmultiparams = received.pop(0)
                    teststmt = (
                        re.compile(r"[\n\t ]+", re.M)
                        .sub(" ", teststmt)
                        .strip()
                    )
                    if teststmt.startswith(stmt) and (
                        testparams == params or testparams == posn
                    ):
                        break

        with testing.expect_deprecated(
            "ConnectionProxy.execute is deprecated.",
            "ConnectionProxy.cursor_execute is deprecated.",
        ):
            plain_engine = engines.testing_engine(
                options=dict(implicit_returning=False, proxy=MyProxy())
            )

        with testing.expect_deprecated(
            "ConnectionProxy.execute is deprecated.",
            "ConnectionProxy.cursor_execute is deprecated.",
            "The 'threadlocal' engine strategy is deprecated",
        ):

            tl_engine = engines.testing_engine(
                options=dict(
                    implicit_returning=False,
                    proxy=MyProxy(),
                    strategy="threadlocal",
                )
            )

        for engine in (plain_engine, tl_engine):
            m = MetaData(engine)
            t1 = Table(
                "t1",
                m,
                Column("c1", Integer, primary_key=True),
                Column(
                    "c2",
                    String(50),
                    default=func.lower("Foo"),
                    primary_key=True,
                ),
            )
            m.create_all()
            try:
                t1.insert().execute(c1=5, c2="some data")
                t1.insert().execute(c1=6)
                eq_(
                    engine.execute("select * from t1").fetchall(),
                    [(5, "some data"), (6, "foo")],
                )
            finally:
                m.drop_all()
            engine.dispose()
            compiled = [
                ("CREATE TABLE t1", {}, None),
                (
                    "INSERT INTO t1 (c1, c2)",
                    {"c2": "some data", "c1": 5},
                    None,
                ),
                ("INSERT INTO t1 (c1, c2)", {"c1": 6}, None),
                ("select * from t1", {}, None),
                ("DROP TABLE t1", {}, None),
            ]

            cursor = [
                ("CREATE TABLE t1", {}, ()),
                (
                    "INSERT INTO t1 (c1, c2)",
                    {"c2": "some data", "c1": 5},
                    (5, "some data"),
                ),
                ("SELECT lower", {"lower_1": "Foo"}, ("Foo",)),
                (
                    "INSERT INTO t1 (c1, c2)",
                    {"c2": "foo", "c1": 6},
                    (6, "foo"),
                ),
                ("select * from t1", {}, ()),
                ("DROP TABLE t1", {}, ()),
            ]

            assert_stmts(compiled, stmts)
            assert_stmts(cursor, cursor_stmts)

    @testing.uses_deprecated(r".*Use event.listen")
    def test_options(self):
        """execution_options() values accumulate across generations and
        execute calls still route through the proxy."""
        canary = []
        engine = self._tracking_engine(canary)
        conn = engine.connect()
        c2 = conn.execution_options(foo="bar")
        eq_(c2._execution_options, {"foo": "bar"})
        c2.execute(select([1]))
        c3 = c2.execution_options(bar="bat")
        eq_(c3._execution_options, {"foo": "bar", "bar": "bat"})
        eq_(canary, ["execute", "cursor_execute"])

    @testing.uses_deprecated(r".*Use event.listen")
    def test_transactional(self):
        """begin/rollback/commit fire the corresponding proxy hooks in
        order around statement execution."""
        canary = []
        engine = self._tracking_engine(canary)
        conn = engine.connect()
        trans = conn.begin()
        conn.execute(select([1]))
        trans.rollback()
        trans = conn.begin()
        conn.execute(select([1]))
        trans.commit()

        eq_(
            canary,
            [
                "begin",
                "execute",
                "cursor_execute",
                "rollback",
                "begin",
                "execute",
                "cursor_execute",
                "commit",
            ],
        )

    @testing.uses_deprecated(r".*Use event.listen")
    @testing.requires.savepoints
    @testing.requires.two_phase_transactions
    def test_transactional_advanced(self):
        """Savepoint and two-phase operations fire their proxy hooks."""
        canary = []
        engine = self._tracking_engine(canary)
        conn = engine.connect()

        trans = conn.begin()
        trans2 = conn.begin_nested()
        conn.execute(select([1]))
        trans2.rollback()
        trans2 = conn.begin_nested()
        conn.execute(select([1]))
        trans2.commit()
        trans.rollback()

        trans = conn.begin_twophase()
        conn.execute(select([1]))
        trans.prepare()
        trans.commit()

        # ignore the per-statement hooks; check transactional ones only
        canary = [t for t in canary if t not in ("cursor_execute", "execute")]
        eq_(
            canary,
            [
                "begin",
                "savepoint",
                "rollback_savepoint",
                "savepoint",
                "release_savepoint",
                "rollback",
                "begin_twophase",
                "prepare_twophase",
                "commit_twophase",
            ],
        )
+
+
class HandleInvalidatedOnConnectTest(fixtures.TestBase):
    """Disconnect/invalidation handling during the initial connect,
    driven through the deprecated Engine.contextual_connect()."""

    __requires__ = ("sqlite",)

    def setUp(self):
        # throwaway engine, used only to reach the real sqlite3 module
        # so the mock DBAPI can reuse its exception classes
        e = create_engine("sqlite://")

        connection = Mock(get_server_version_info=Mock(return_value="5.0"))

        def connect(*args, **kwargs):
            return connection

        # mock DBAPI module that always hands back the same connection
        dbapi = Mock(
            sqlite_version_info=(99, 9, 9),
            version_info=(99, 9, 9),
            sqlite_version="99.9.9",
            paramstyle="named",
            connect=Mock(side_effect=connect),
        )

        sqlite3 = e.dialect.dbapi
        # real exception classes so dialect error handling recognizes
        # them as DBAPI errors
        dbapi.Error = (sqlite3.Error,)
        dbapi.ProgrammingError = sqlite3.ProgrammingError

        self.dbapi = dbapi
        self.ProgrammingError = sqlite3.ProgrammingError

    def test_dont_touch_non_dbapi_exception_on_contextual_connect(self):
        # a non-DBAPI exception raised during connect must propagate
        # unchanged, without consulting dialect.is_disconnect
        dbapi = self.dbapi
        dbapi.connect = Mock(side_effect=TypeError("I'm not a DBAPI error"))

        e = create_engine("sqlite://", module=dbapi)
        e.dialect.is_disconnect = is_disconnect = Mock()
        with testing.expect_deprecated(
            r"The Engine.contextual_connect\(\) method is deprecated"
        ):
            assert_raises_message(
                TypeError, "I'm not a DBAPI error", e.contextual_connect
            )
        eq_(is_disconnect.call_count, 0)

    def test_invalidate_on_contextual_connect(self):
        """test that is_disconnect() is called during connect.

        interpretation of connection failures are not supported by
        every backend.

        """

        dbapi = self.dbapi
        dbapi.connect = Mock(
            side_effect=self.ProgrammingError(
                "Cannot operate on a closed database."
            )
        )
        e = create_engine("sqlite://", module=dbapi)
        try:
            with testing.expect_deprecated(
                r"The Engine.contextual_connect\(\) method is deprecated"
            ):
                e.contextual_connect()
            assert False
        except tsa.exc.DBAPIError as de:
            # a DBAPI-level error at connect time should mark the
            # connection as invalidated
            assert de.connection_invalidated
+
+
class HandleErrorTest(fixtures.TestBase):
    """Tests for the legacy (deprecated) "dbapi_error" engine event."""

    __requires__ = ("ad_hoc_engines",)
    __backend__ = True

    def tearDown(self):
        # remove listeners installed on the Engine class itself
        Engine.dispatch._clear()
        Engine._has_events = False

    def test_legacy_dbapi_error(self):
        # the deprecated event still fires for a genuine DBAPI error
        engine = engines.testing_engine()
        canary = Mock()

        with testing.expect_deprecated(
            r"The ConnectionEvents.dbapi_error\(\) event is deprecated"
        ):
            event.listen(engine, "dbapi_error", canary)

        with engine.connect() as conn:
            try:
                conn.execute("SELECT FOO FROM I_DONT_EXIST")
                assert False
            except tsa.exc.DBAPIError as e:
                # positional args of the first call: index 5 is the
                # original exception, index 2 the statement
                eq_(canary.mock_calls[0][1][5], e.orig)
                eq_(canary.mock_calls[0][1][2], "SELECT FOO FROM I_DONT_EXIST")

    def test_legacy_dbapi_error_no_ad_hoc_context(self):
        # errors raised during bind-parameter processing become
        # StatementError, not DBAPI errors; the legacy event must not
        # fire for them
        engine = engines.testing_engine()

        listener = Mock(return_value=None)
        with testing.expect_deprecated(
            r"The ConnectionEvents.dbapi_error\(\) event is deprecated"
        ):
            event.listen(engine, "dbapi_error", listener)

        nope = SomeException("nope")

        class MyType(TypeDecorator):
            impl = Integer

            def process_bind_param(self, value, dialect):
                raise nope

        with engine.connect() as conn:
            assert_raises_message(
                tsa.exc.StatementError,
                r"\(.*SomeException\) " r"nope \[SQL\: u?'SELECT 1 ",
                conn.execute,
                select([1]).where(column("foo") == literal("bar", MyType())),
            )
        # no legacy event
        eq_(listener.mock_calls, [])

    def test_legacy_dbapi_error_non_dbapi_error(self):
        # a non-DBAPI exception from the cursor propagates as-is and
        # does not trigger the legacy event
        engine = engines.testing_engine()

        listener = Mock(return_value=None)
        with testing.expect_deprecated(
            r"The ConnectionEvents.dbapi_error\(\) event is deprecated"
        ):
            event.listen(engine, "dbapi_error", listener)

        nope = TypeError("I'm not a DBAPI error")
        with engine.connect() as c:
            c.connection.cursor = Mock(
                return_value=Mock(execute=Mock(side_effect=nope))
            )

            assert_raises_message(
                TypeError, "I'm not a DBAPI error", c.execute, "select "
            )
        # no legacy event
        eq_(listener.mock_calls, [])
+
+
def MockDBAPI():  # noqa
    """Return a Mock standing in for a DBAPI module.

    ``connect()`` produces mock connections whose ``close()`` flips
    their ``closed`` flag; ``shutdown(True)`` makes subsequent
    ``connect()`` calls raise, ``shutdown(False)`` restores them.
    """

    def make_cursor():
        return Mock()

    def make_connection(*arg, **kw):
        # mock seems like it might have an issue logging
        # call_count correctly under threading, not sure.
        # adding a side_effect for close seems to help.
        connection = Mock(
            cursor=Mock(side_effect=make_cursor),
            closed=False,
        )

        def mark_closed():
            connection.closed = True

        connection.close = Mock(side_effect=mark_closed)
        return connection

    def shutdown(value):
        # while "shut down", connect() raises; otherwise it works
        if value:
            db.connect = Mock(side_effect=Exception("connect failed"))
        else:
            db.connect = Mock(side_effect=make_connection)
        db.is_shutdown = value

    db = Mock(
        connect=Mock(side_effect=make_connection),
        shutdown=shutdown,
        is_shutdown=False,
    )
    return db
+
+
class PoolTestBase(fixtures.TestBase):
    """Shared fixture support: resets pool managers around each test
    and closes any connections registered for teardown."""

    def setup(self):
        pool.clear_managers()
        self._teardown_conns = []

    def teardown(self):
        # close out any weakly-referenced connections still alive
        for ref in self._teardown_conns:
            conn = ref()
            if conn:
                conn.close()

    @classmethod
    def teardown_class(cls):
        pool.clear_managers()

    def _queuepool_fixture(self, **kw):
        """Return only the QueuePool from _queuepool_dbapi_fixture."""
        _, queue_pool = self._queuepool_dbapi_fixture(**kw)
        return queue_pool

    def _queuepool_dbapi_fixture(self, **kw):
        """Return (mock dbapi, QueuePool drawing connections from it)."""
        dbapi = MockDBAPI()
        queue_pool = pool.QueuePool(
            creator=lambda: dbapi.connect("foo.db"), **kw
        )
        return dbapi, queue_pool
+
+
class DeprecatedPoolListenerTest(PoolTestBase):
    """Tests for the deprecated PoolListener interface and the
    deprecated ``listeners`` Pool parameter (superseded by
    PoolEvents)."""

    @testing.requires.predictable_gc
    @testing.uses_deprecated(
        r".*Use the PoolEvents", r".*'listeners' argument .* is deprecated"
    )
    def test_listeners(self):
        class InstrumentingListener(object):
            # routes whichever hooks a subclass declares through the
            # inst_* recorders below, so calls can be counted
            def __init__(self):
                if hasattr(self, "connect"):
                    self.connect = self.inst_connect
                if hasattr(self, "first_connect"):
                    self.first_connect = self.inst_first_connect
                if hasattr(self, "checkout"):
                    self.checkout = self.inst_checkout
                if hasattr(self, "checkin"):
                    self.checkin = self.inst_checkin
                self.clear()

            def clear(self):
                self.connected = []
                self.first_connected = []
                self.checked_out = []
                self.checked_in = []

            def assert_total(self, conn, fconn, cout, cin):
                # exact call counts per hook
                eq_(len(self.connected), conn)
                eq_(len(self.first_connected), fconn)
                eq_(len(self.checked_out), cout)
                eq_(len(self.checked_in), cin)

            def assert_in(self, item, in_conn, in_fconn, in_cout, in_cin):
                # membership of a specific connection per hook
                eq_((item in self.connected), in_conn)
                eq_((item in self.first_connected), in_fconn)
                eq_((item in self.checked_out), in_cout)
                eq_((item in self.checked_in), in_cin)

            def inst_connect(self, con, record):
                print("connect(%s, %s)" % (con, record))
                assert con is not None
                assert record is not None
                self.connected.append(con)

            def inst_first_connect(self, con, record):
                print("first_connect(%s, %s)" % (con, record))
                assert con is not None
                assert record is not None
                self.first_connected.append(con)

            def inst_checkout(self, con, record, proxy):
                print("checkout(%s, %s, %s)" % (con, record, proxy))
                assert con is not None
                assert record is not None
                assert proxy is not None
                self.checked_out.append(con)

            def inst_checkin(self, con, record):
                print("checkin(%s, %s)" % (con, record))
                # con can be None if invalidated
                assert record is not None
                self.checked_in.append(con)

        class ListenAll(tsa.interfaces.PoolListener, InstrumentingListener):
            pass

        class ListenConnect(InstrumentingListener):
            def connect(self, con, record):
                pass

        class ListenFirstConnect(InstrumentingListener):
            def first_connect(self, con, record):
                pass

        class ListenCheckOut(InstrumentingListener):
            def checkout(self, con, record, proxy, num):
                pass

        class ListenCheckIn(InstrumentingListener):
            def checkin(self, con, record):
                pass

        def assert_listeners(p, total, conn, fconn, cout, cin):
            # the pool and a recreate()d copy must both carry the
            # registered listeners
            for instance in (p, p.recreate()):
                self.assert_(len(instance.dispatch.connect) == conn)
                self.assert_(len(instance.dispatch.first_connect) == fconn)
                self.assert_(len(instance.dispatch.checkout) == cout)
                self.assert_(len(instance.dispatch.checkin) == cin)

        p = self._queuepool_fixture()
        assert_listeners(p, 0, 0, 0, 0, 0)

        # each add_listener() warns once per hook the listener defines
        with testing.expect_deprecated(
            *[
                "PoolListener.%s is deprecated." % name
                for name in ["connect", "first_connect", "checkout", "checkin"]
            ]
        ):
            p.add_listener(ListenAll())
        assert_listeners(p, 1, 1, 1, 1, 1)

        with testing.expect_deprecated(
            *["PoolListener.%s is deprecated." % name for name in ["connect"]]
        ):
            p.add_listener(ListenConnect())
        assert_listeners(p, 2, 2, 1, 1, 1)

        with testing.expect_deprecated(
            *[
                "PoolListener.%s is deprecated." % name
                for name in ["first_connect"]
            ]
        ):
            p.add_listener(ListenFirstConnect())
        assert_listeners(p, 3, 2, 2, 1, 1)

        with testing.expect_deprecated(
            *["PoolListener.%s is deprecated." % name for name in ["checkout"]]
        ):
            p.add_listener(ListenCheckOut())
        assert_listeners(p, 4, 2, 2, 2, 1)

        with testing.expect_deprecated(
            *["PoolListener.%s is deprecated." % name for name in ["checkin"]]
        ):
            p.add_listener(ListenCheckIn())
        assert_listeners(p, 5, 2, 2, 2, 2)
        del p

        snoop = ListenAll()

        # the listeners= constructor parameter itself is deprecated too
        with testing.expect_deprecated(
            *[
                "PoolListener.%s is deprecated." % name
                for name in ["connect", "first_connect", "checkout", "checkin"]
            ]
            + [
                "PoolListener is deprecated in favor of the PoolEvents "
                "listener interface.  The Pool.listeners parameter "
                "will be removed"
            ]
        ):
            p = self._queuepool_fixture(listeners=[snoop])
        assert_listeners(p, 1, 1, 1, 1, 1)

        c = p.connect()
        snoop.assert_total(1, 1, 1, 0)
        cc = c.connection
        snoop.assert_in(cc, True, True, True, False)
        c.close()
        snoop.assert_in(cc, True, True, True, True)
        del c, cc

        snoop.clear()

        # this one depends on immediate gc
        c = p.connect()
        cc = c.connection
        snoop.assert_in(cc, False, False, True, False)
        snoop.assert_total(0, 0, 1, 0)
        del c, cc
        lazy_gc()
        snoop.assert_total(0, 0, 1, 1)

        p.dispose()
        snoop.clear()

        c = p.connect()
        c.close()
        c = p.connect()
        snoop.assert_total(1, 0, 2, 1)
        c.close()
        snoop.assert_total(1, 0, 2, 2)

        # invalidation
        p.dispose()
        snoop.clear()

        c = p.connect()
        snoop.assert_total(1, 0, 1, 0)
        c.invalidate()
        snoop.assert_total(1, 0, 1, 1)
        c.close()
        snoop.assert_total(1, 0, 1, 1)
        del c
        lazy_gc()
        snoop.assert_total(1, 0, 1, 1)
        c = p.connect()
        snoop.assert_total(2, 0, 2, 1)
        c.close()
        del c
        lazy_gc()
        snoop.assert_total(2, 0, 2, 2)

        # detached
        p.dispose()
        snoop.clear()

        c = p.connect()
        snoop.assert_total(1, 0, 1, 0)
        c.detach()
        snoop.assert_total(1, 0, 1, 0)
        c.close()
        del c
        snoop.assert_total(1, 0, 1, 0)
        c = p.connect()
        snoop.assert_total(2, 0, 2, 0)
        c.close()
        del c
        snoop.assert_total(2, 0, 2, 1)

        # recreated
        p = p.recreate()
        snoop.clear()

        c = p.connect()
        snoop.assert_total(1, 1, 1, 0)
        c.close()
        snoop.assert_total(1, 1, 1, 1)
        c = p.connect()
        snoop.assert_total(1, 1, 2, 1)
        c.close()
        snoop.assert_total(1, 1, 2, 2)

    @testing.uses_deprecated(r".*Use the PoolEvents")
    def test_listeners_callables(self):
        # listeners may also be plain dicts of callables rather than
        # PoolListener subclasses
        def connect(dbapi_con, con_record):
            counts[0] += 1

        def checkout(dbapi_con, con_record, con_proxy):
            counts[1] += 1

        def checkin(dbapi_con, con_record):
            counts[2] += 1

        i_all = dict(connect=connect, checkout=checkout, checkin=checkin)
        i_connect = dict(connect=connect)
        i_checkout = dict(checkout=checkout)
        i_checkin = dict(checkin=checkin)

        for cls in (pool.QueuePool, pool.StaticPool):
            counts = [0, 0, 0]

            def assert_listeners(p, total, conn, cout, cin):
                for instance in (p, p.recreate()):
                    eq_(len(instance.dispatch.connect), conn)
                    eq_(len(instance.dispatch.checkout), cout)
                    eq_(len(instance.dispatch.checkin), cin)

            p = self._queuepool_fixture()
            assert_listeners(p, 0, 0, 0, 0)

            with testing.expect_deprecated(
                *[
                    "PoolListener.%s is deprecated." % name
                    for name in ["connect", "checkout", "checkin"]
                ]
            ):
                p.add_listener(i_all)
            assert_listeners(p, 1, 1, 1, 1)

            with testing.expect_deprecated(
                *[
                    "PoolListener.%s is deprecated." % name
                    for name in ["connect"]
                ]
            ):
                p.add_listener(i_connect)
            assert_listeners(p, 2, 1, 1, 1)

            with testing.expect_deprecated(
                *[
                    "PoolListener.%s is deprecated." % name
                    for name in ["checkout"]
                ]
            ):
                p.add_listener(i_checkout)
            assert_listeners(p, 3, 1, 1, 1)

            with testing.expect_deprecated(
                *[
                    "PoolListener.%s is deprecated." % name
                    for name in ["checkin"]
                ]
            ):
                p.add_listener(i_checkin)
            assert_listeners(p, 4, 1, 1, 1)
            del p

            with testing.expect_deprecated(
                *[
                    "PoolListener.%s is deprecated." % name
                    for name in ["connect", "checkout", "checkin"]
                ]
                + [".*The Pool.listeners parameter will be removed"]
            ):
                p = self._queuepool_fixture(listeners=[i_all])
            assert_listeners(p, 1, 1, 1, 1)

            c = p.connect()
            assert counts == [1, 1, 0]
            c.close()
            assert counts == [1, 1, 1]

            c = p.connect()
            assert counts == [1, 2, 1]
            with testing.expect_deprecated(
                *[
                    "PoolListener.%s is deprecated." % name
                    for name in ["checkin"]
                ]
            ):
                p.add_listener(i_checkin)
            c.close()
            assert counts == [1, 2, 2]
+
+
+class PoolTest(PoolTestBase):
+    def test_manager(self):
+        with testing.expect_deprecated(
+            r"The pool.manage\(\) function is deprecated,"
+        ):
+            manager = pool.manage(MockDBAPI(), use_threadlocal=True)
+
+        with testing.expect_deprecated(
+            r".*Pool.use_threadlocal parameter is deprecated"
+        ):
+            c1 = manager.connect("foo.db")
+            c2 = manager.connect("foo.db")
+            c3 = manager.connect("bar.db")
+            c4 = manager.connect("foo.db", bar="bat")
+            c5 = manager.connect("foo.db", bar="hoho")
+            c6 = manager.connect("foo.db", bar="bat")
+
+        assert c1.cursor() is not None
+        assert c1 is c2
+        assert c1 is not c3
+        assert c4 is c6
+        assert c4 is not c5
+
+    def test_manager_with_key(self):
+
+        dbapi = MockDBAPI()
+
+        with testing.expect_deprecated(
+            r"The pool.manage\(\) function is deprecated,"
+        ):
+            manager = pool.manage(dbapi, use_threadlocal=True)
+
+        with testing.expect_deprecated(
+            r".*Pool.use_threadlocal parameter is deprecated"
+        ):
+            c1 = manager.connect("foo.db", sa_pool_key="a")
+            c2 = manager.connect("foo.db", sa_pool_key="b")
+            c3 = manager.connect("bar.db", sa_pool_key="a")
+
+        assert c1.cursor() is not None
+        assert c1 is not c2
+        assert c1 is c3
+
+        eq_(dbapi.connect.mock_calls, [call("foo.db"), call("foo.db")])
+
    def test_bad_args(self):
        # connect(None) should be tolerated as a pool key
        with testing.expect_deprecated(
            r"The pool.manage\(\) function is deprecated,"
        ):
            manager = pool.manage(MockDBAPI())
        manager.connect(None)
+
+    def test_non_thread_local_manager(self):
+        with testing.expect_deprecated(
+            r"The pool.manage\(\) function is deprecated,"
+        ):
+            manager = pool.manage(MockDBAPI(), use_threadlocal=False)
+
+        connection = manager.connect("foo.db")
+        connection2 = manager.connect("foo.db")
+
+        self.assert_(connection.cursor() is not None)
+        self.assert_(connection is not connection2)
+
    def test_threadlocal_del(self):
        # return connections via garbage collection (no explicit close)
        self._do_testthreadlocal(useclose=False)
+
+    def test_threadlocal_close(self):
+        self._do_testthreadlocal(useclose=True)
+
+    def _do_testthreadlocal(self, useclose=False):
+        dbapi = MockDBAPI()
+
+        with testing.expect_deprecated(
+            r".*Pool.use_threadlocal parameter is deprecated"
+        ):
+            for p in (
+                pool.QueuePool(
+                    creator=dbapi.connect,
+                    pool_size=3,
+                    max_overflow=-1,
+                    use_threadlocal=True,
+                ),
+                pool.SingletonThreadPool(
+                    creator=dbapi.connect, use_threadlocal=True
+                ),
+            ):
+                c1 = p.connect()
+                c2 = p.connect()
+                self.assert_(c1 is c2)
+                c3 = p.unique_connection()
+                self.assert_(c3 is not c1)
+                if useclose:
+                    c2.close()
+                else:
+                    c2 = None
+                c2 = p.connect()
+                self.assert_(c1 is c2)
+                self.assert_(c3 is not c1)
+                if useclose:
+                    c2.close()
+                else:
+                    c2 = None
+                    lazy_gc()
+                if useclose:
+                    c1 = p.connect()
+                    c2 = p.connect()
+                    c3 = p.connect()
+                    c3.close()
+                    c2.close()
+                    self.assert_(c1.connection is not None)
+                    c1.close()
+                c1 = c2 = c3 = None
+
+                # extra tests with QueuePool to ensure connections get
+                # __del__()ed when dereferenced
+
+                if isinstance(p, pool.QueuePool):
+                    lazy_gc()
+                    self.assert_(p.checkedout() == 0)
+                    c1 = p.connect()
+                    c2 = p.connect()
+                    if useclose:
+                        c2.close()
+                        c1.close()
+                    else:
+                        c2 = None
+                        c1 = None
+                        lazy_gc()
+                    self.assert_(p.checkedout() == 0)
+
+    def test_mixed_close(self):
+        pool._refs.clear()
+        with testing.expect_deprecated(
+            r".*Pool.use_threadlocal parameter is deprecated"
+        ):
+            p = self._queuepool_fixture(
+                pool_size=3, max_overflow=-1, use_threadlocal=True
+            )
+        c1 = p.connect()
+        c2 = p.connect()
+        assert c1 is c2
+        c1.close()
+        c2 = None
+        assert p.checkedout() == 1
+        c1 = None
+        lazy_gc()
+        assert p.checkedout() == 0
+        lazy_gc()
+        assert not pool._refs
+
+
+class QueuePoolTest(PoolTestBase):
+    def test_threadfairy(self):
+        with testing.expect_deprecated(
+            r".*Pool.use_threadlocal parameter is deprecated"
+        ):
+            p = self._queuepool_fixture(
+                pool_size=3, max_overflow=-1, use_threadlocal=True
+            )
+        c1 = p.connect()
+        c1.close()
+        c2 = p.connect()
+        assert c2.connection is not None
+
+    def test_trick_the_counter(self):
+        """this is a "flaw" in the connection pool; since threadlocal
+        uses a single ConnectionFairy per thread with an open/close
+        counter, you can fool the counter into giving you a
+        ConnectionFairy with an ambiguous counter.  i.e. it's not true
+        reference counting."""
+
+        with testing.expect_deprecated(
+            r".*Pool.use_threadlocal parameter is deprecated"
+        ):
+            p = self._queuepool_fixture(
+                pool_size=3, max_overflow=-1, use_threadlocal=True
+            )
+        c1 = p.connect()
+        c2 = p.connect()
+        assert c1 is c2
+        c1.close()
+        c2 = p.connect()
+        c2.close()
+        self.assert_(p.checkedout() != 0)
+        c2.close()
+        self.assert_(p.checkedout() == 0)
+
+    @testing.requires.predictable_gc
+    def test_weakref_kaboom(self):
+        with testing.expect_deprecated(
+            r".*Pool.use_threadlocal parameter is deprecated"
+        ):
+            p = self._queuepool_fixture(
+                pool_size=3, max_overflow=-1, use_threadlocal=True
+            )
+        c1 = p.connect()
+        c2 = p.connect()
+        c1.close()
+        c2 = None
+        del c1
+        del c2
+        gc_collect()
+        assert p.checkedout() == 0
+        c3 = p.connect()
+        assert c3 is not None
+
+
+class ExplicitAutoCommitDeprecatedTest(fixtures.TestBase):
+
+    """test the 'autocommit' flag on select() and text() objects.
+
+    Requires PostgreSQL so that we may define a custom function which
+    modifies the database."""
+
+    __only_on__ = "postgresql"
+
+    @classmethod
+    def setup_class(cls):
+        global metadata, foo
+        metadata = MetaData(testing.db)
+        foo = Table(
+            "foo",
+            metadata,
+            Column("id", Integer, primary_key=True),
+            Column("data", String(100)),
+        )
+        metadata.create_all()
+        testing.db.execute(
+            "create function insert_foo(varchar) "
+            "returns integer as 'insert into foo(data) "
+            "values ($1);select 1;' language sql"
+        )
+
+    def teardown(self):
+        foo.delete().execute().close()
+
+    @classmethod
+    def teardown_class(cls):
+        testing.db.execute("drop function insert_foo(varchar)")
+        metadata.drop_all()
+
+    def test_explicit_compiled(self):
+        conn1 = testing.db.connect()
+        conn2 = testing.db.connect()
+        with testing.expect_deprecated(
+            "The select.autocommit parameter is deprecated"
+        ):
+            conn1.execute(select([func.insert_foo("data1")], autocommit=True))
+        assert conn2.execute(select([foo.c.data])).fetchall() == [("data1",)]
+        with testing.expect_deprecated(
+            r"The SelectBase.autocommit\(\) method is deprecated,"
+        ):
+            conn1.execute(select([func.insert_foo("data2")]).autocommit())
+        assert conn2.execute(select([foo.c.data])).fetchall() == [
+            ("data1",),
+            ("data2",),
+        ]
+        conn1.close()
+        conn2.close()
+
+    def test_explicit_text(self):
+        conn1 = testing.db.connect()
+        conn2 = testing.db.connect()
+        with testing.expect_deprecated(
+            "The text.autocommit parameter is deprecated"
+        ):
+            conn1.execute(
+                text("select insert_foo('moredata')", autocommit=True)
+            )
+        assert conn2.execute(select([foo.c.data])).fetchall() == [
+            ("moredata",)
+        ]
+        conn1.close()
+        conn2.close()
index 8613be5bc6ab4bd1d0d879ca8b28f3d1c264fb25..061dae005788d967e1abeff69f523e68af3f0098 100644 (file)
@@ -22,7 +22,6 @@ from sqlalchemy import util
 from sqlalchemy import VARCHAR
 from sqlalchemy.engine import default
 from sqlalchemy.engine.base import Engine
-from sqlalchemy.interfaces import ConnectionProxy
 from sqlalchemy.sql import column
 from sqlalchemy.sql import literal
 from sqlalchemy.testing import assert_raises
@@ -372,8 +371,7 @@ class ExecuteTest(fixtures.TestBase):
         def _go(conn):
             assert_raises_message(
                 tsa.exc.StatementError,
-                r"\(test.engine.test_execute.SomeException\) "
-                r"nope \[SQL\: u?'SELECT 1 ",
+                r"\(.*.SomeException\) " r"nope \[SQL\: u?'SELECT 1 ",
                 conn.execute,
                 select([1]).where(column("foo") == literal("bar", MyType())),
             )
@@ -613,7 +611,7 @@ class ExecuteTest(fixtures.TestBase):
         eng = engines.testing_engine(
             options={"execution_options": {"foo": "bar"}}
         )
-        with eng.contextual_connect() as conn:
+        with eng.connect() as conn:
             eq_(conn._execution_options["foo"], "bar")
             eq_(
                 conn.execution_options(bat="hoho")._execution_options["foo"],
@@ -628,7 +626,7 @@ class ExecuteTest(fixtures.TestBase):
                 "hoho",
             )
             eng.update_execution_options(foo="hoho")
-            conn = eng.contextual_connect()
+            conn = eng.connect()
             eq_(conn._execution_options["foo"], "hoho")
 
     @testing.requires.ad_hoc_engines
@@ -787,32 +785,6 @@ class ConvenienceExecuteTest(fixtures.TablesTest):
         )
         self._assert_no_data()
 
-    def test_transaction_tlocal_engine_ctx_commit(self):
-        fn = self._trans_fn()
-        engine = engines.testing_engine(
-            options=dict(strategy="threadlocal", pool=testing.db.pool)
-        )
-        ctx = engine.begin()
-        testing.run_as_contextmanager(ctx, fn, 5, value=8)
-        self._assert_fn(5, value=8)
-
-    def test_transaction_tlocal_engine_ctx_rollback(self):
-        fn = self._trans_rollback_fn()
-        engine = engines.testing_engine(
-            options=dict(strategy="threadlocal", pool=testing.db.pool)
-        )
-        ctx = engine.begin()
-        assert_raises_message(
-            Exception,
-            "breakage",
-            testing.run_as_contextmanager,
-            ctx,
-            fn,
-            5,
-            value=8,
-        )
-        self._assert_no_data()
-
     def test_transaction_connection_ctx_commit(self):
         fn = self._trans_fn(True)
         with testing.db.connect() as conn:
@@ -1495,11 +1467,16 @@ class EngineEventsTest(fixtures.TestBase):
         ):
             cursor_stmts.append((str(statement), parameters, None))
 
+        with testing.expect_deprecated(
+            "The 'threadlocal' engine strategy is deprecated"
+        ):
+            tl_engine = engines.testing_engine(
+                options=dict(implicit_returning=False, strategy="threadlocal")
+            )
+
         for engine in [
             engines.testing_engine(options=dict(implicit_returning=False)),
-            engines.testing_engine(
-                options=dict(implicit_returning=False, strategy="threadlocal")
-            ),
+            tl_engine,
             engines.testing_engine(
                 options=dict(implicit_returning=False)
             ).connect(),
@@ -1999,63 +1976,6 @@ class HandleErrorTest(fixtures.TestBase):
         Engine.dispatch._clear()
         Engine._has_events = False
 
-    def test_legacy_dbapi_error(self):
-        engine = engines.testing_engine()
-        canary = Mock()
-
-        event.listen(engine, "dbapi_error", canary)
-
-        with engine.connect() as conn:
-            try:
-                conn.execute("SELECT FOO FROM I_DONT_EXIST")
-                assert False
-            except tsa.exc.DBAPIError as e:
-                eq_(canary.mock_calls[0][1][5], e.orig)
-                eq_(canary.mock_calls[0][1][2], "SELECT FOO FROM I_DONT_EXIST")
-
-    def test_legacy_dbapi_error_no_ad_hoc_context(self):
-        engine = engines.testing_engine()
-
-        listener = Mock(return_value=None)
-        event.listen(engine, "dbapi_error", listener)
-
-        nope = SomeException("nope")
-
-        class MyType(TypeDecorator):
-            impl = Integer
-
-            def process_bind_param(self, value, dialect):
-                raise nope
-
-        with engine.connect() as conn:
-            assert_raises_message(
-                tsa.exc.StatementError,
-                r"\(test.engine.test_execute.SomeException\) "
-                r"nope \[SQL\: u?'SELECT 1 ",
-                conn.execute,
-                select([1]).where(column("foo") == literal("bar", MyType())),
-            )
-        # no legacy event
-        eq_(listener.mock_calls, [])
-
-    def test_legacy_dbapi_error_non_dbapi_error(self):
-        engine = engines.testing_engine()
-
-        listener = Mock(return_value=None)
-        event.listen(engine, "dbapi_error", listener)
-
-        nope = TypeError("I'm not a DBAPI error")
-        with engine.connect() as c:
-            c.connection.cursor = Mock(
-                return_value=Mock(execute=Mock(side_effect=nope))
-            )
-
-            assert_raises_message(
-                TypeError, "I'm not a DBAPI error", c.execute, "select "
-            )
-        # no legacy event
-        eq_(listener.mock_calls, [])
-
     def test_handle_error(self):
         engine = engines.testing_engine()
         canary = Mock(return_value=None)
@@ -2249,8 +2169,7 @@ class HandleErrorTest(fixtures.TestBase):
         with engine.connect() as conn:
             assert_raises_message(
                 tsa.exc.StatementError,
-                r"\(test.engine.test_execute.SomeException\) "
-                r"nope \[SQL\: u?'SELECT 1 ",
+                r"\(.*.SomeException\) " r"nope \[SQL\: u?'SELECT 1 ",
                 conn.execute,
                 select([1]).where(column("foo") == literal("bar", MyType())),
             )
@@ -2571,27 +2490,15 @@ class HandleInvalidatedOnConnectTest(fixtures.TestBase):
         except tsa.exc.DBAPIError:
             assert conn.invalidated
 
-    def _test_dont_touch_non_dbapi_exception_on_connect(self, connect_fn):
+    def test_dont_touch_non_dbapi_exception_on_connect(self):
         dbapi = self.dbapi
         dbapi.connect = Mock(side_effect=TypeError("I'm not a DBAPI error"))
 
         e = create_engine("sqlite://", module=dbapi)
         e.dialect.is_disconnect = is_disconnect = Mock()
-        assert_raises_message(
-            TypeError, "I'm not a DBAPI error", connect_fn, e
-        )
+        assert_raises_message(TypeError, "I'm not a DBAPI error", e.connect)
         eq_(is_disconnect.call_count, 0)
 
-    def test_dont_touch_non_dbapi_exception_on_connect(self):
-        self._test_dont_touch_non_dbapi_exception_on_connect(
-            lambda engine: engine.connect()
-        )
-
-    def test_dont_touch_non_dbapi_exception_on_contextual_connect(self):
-        self._test_dont_touch_non_dbapi_exception_on_connect(
-            lambda engine: engine.contextual_connect()
-        )
-
     def test_ensure_dialect_does_is_disconnect_no_conn(self):
         """test that is_disconnect() doesn't choke if no connection,
         cursor given."""
@@ -2601,275 +2508,26 @@ class HandleInvalidatedOnConnectTest(fixtures.TestBase):
             dbapi.OperationalError("test"), None, None
         )
 
-    def _test_invalidate_on_connect(self, connect_fn):
+    def test_invalidate_on_connect(self):
         """test that is_disconnect() is called during connect.
 
         interpretation of connection failures are not supported by
         every backend.
 
         """
-
         dbapi = self.dbapi
         dbapi.connect = Mock(
             side_effect=self.ProgrammingError(
                 "Cannot operate on a closed database."
             )
         )
+        e = create_engine("sqlite://", module=dbapi)
         try:
-            connect_fn(create_engine("sqlite://", module=dbapi))
+            e.connect()
             assert False
         except tsa.exc.DBAPIError as de:
             assert de.connection_invalidated
 
-    def test_invalidate_on_connect(self):
-        """test that is_disconnect() is called during connect.
-
-        interpretation of connection failures are not supported by
-        every backend.
-
-        """
-        self._test_invalidate_on_connect(lambda engine: engine.connect())
-
-    def test_invalidate_on_contextual_connect(self):
-        """test that is_disconnect() is called during connect.
-
-        interpretation of connection failures are not supported by
-        every backend.
-
-        """
-        self._test_invalidate_on_connect(
-            lambda engine: engine.contextual_connect()
-        )
-
-
-class ProxyConnectionTest(fixtures.TestBase):
-
-    """These are the same tests as EngineEventsTest, except using
-    the deprecated ConnectionProxy interface.
-
-    """
-
-    __requires__ = ("ad_hoc_engines",)
-    __prefer_requires__ = ("two_phase_transactions",)
-
-    @testing.uses_deprecated(r".*Use event.listen")
-    @testing.fails_on("firebird", "Data type unknown")
-    def test_proxy(self):
-
-        stmts = []
-        cursor_stmts = []
-
-        class MyProxy(ConnectionProxy):
-            def execute(
-                self, conn, execute, clauseelement, *multiparams, **params
-            ):
-                stmts.append((str(clauseelement), params, multiparams))
-                return execute(clauseelement, *multiparams, **params)
-
-            def cursor_execute(
-                self,
-                execute,
-                cursor,
-                statement,
-                parameters,
-                context,
-                executemany,
-            ):
-                cursor_stmts.append((str(statement), parameters, None))
-                return execute(cursor, statement, parameters, context)
-
-        def assert_stmts(expected, received):
-            for stmt, params, posn in expected:
-                if not received:
-                    assert False, "Nothing available for stmt: %s" % stmt
-                while received:
-                    teststmt, testparams, testmultiparams = received.pop(0)
-                    teststmt = (
-                        re.compile(r"[\n\t ]+", re.M)
-                        .sub(" ", teststmt)
-                        .strip()
-                    )
-                    if teststmt.startswith(stmt) and (
-                        testparams == params or testparams == posn
-                    ):
-                        break
-
-        for engine in (
-            engines.testing_engine(
-                options=dict(implicit_returning=False, proxy=MyProxy())
-            ),
-            engines.testing_engine(
-                options=dict(
-                    implicit_returning=False,
-                    proxy=MyProxy(),
-                    strategy="threadlocal",
-                )
-            ),
-        ):
-            m = MetaData(engine)
-            t1 = Table(
-                "t1",
-                m,
-                Column("c1", Integer, primary_key=True),
-                Column(
-                    "c2",
-                    String(50),
-                    default=func.lower("Foo"),
-                    primary_key=True,
-                ),
-            )
-            m.create_all()
-            try:
-                t1.insert().execute(c1=5, c2="some data")
-                t1.insert().execute(c1=6)
-                eq_(
-                    engine.execute("select * from t1").fetchall(),
-                    [(5, "some data"), (6, "foo")],
-                )
-            finally:
-                m.drop_all()
-            engine.dispose()
-            compiled = [
-                ("CREATE TABLE t1", {}, None),
-                (
-                    "INSERT INTO t1 (c1, c2)",
-                    {"c2": "some data", "c1": 5},
-                    None,
-                ),
-                ("INSERT INTO t1 (c1, c2)", {"c1": 6}, None),
-                ("select * from t1", {}, None),
-                ("DROP TABLE t1", {}, None),
-            ]
-
-            cursor = [
-                ("CREATE TABLE t1", {}, ()),
-                (
-                    "INSERT INTO t1 (c1, c2)",
-                    {"c2": "some data", "c1": 5},
-                    (5, "some data"),
-                ),
-                ("SELECT lower", {"lower_1": "Foo"}, ("Foo",)),
-                (
-                    "INSERT INTO t1 (c1, c2)",
-                    {"c2": "foo", "c1": 6},
-                    (6, "foo"),
-                ),
-                ("select * from t1", {}, ()),
-                ("DROP TABLE t1", {}, ()),
-            ]
-
-            assert_stmts(compiled, stmts)
-            assert_stmts(cursor, cursor_stmts)
-
-    @testing.uses_deprecated(r".*Use event.listen")
-    def test_options(self):
-        canary = []
-
-        class TrackProxy(ConnectionProxy):
-            def __getattribute__(self, key):
-                fn = object.__getattribute__(self, key)
-
-                def go(*arg, **kw):
-                    canary.append(fn.__name__)
-                    return fn(*arg, **kw)
-
-                return go
-
-        engine = engines.testing_engine(options={"proxy": TrackProxy()})
-        conn = engine.connect()
-        c2 = conn.execution_options(foo="bar")
-        eq_(c2._execution_options, {"foo": "bar"})
-        c2.execute(select([1]))
-        c3 = c2.execution_options(bar="bat")
-        eq_(c3._execution_options, {"foo": "bar", "bar": "bat"})
-        eq_(canary, ["execute", "cursor_execute"])
-
-    @testing.uses_deprecated(r".*Use event.listen")
-    def test_transactional(self):
-        canary = []
-
-        class TrackProxy(ConnectionProxy):
-            def __getattribute__(self, key):
-                fn = object.__getattribute__(self, key)
-
-                def go(*arg, **kw):
-                    canary.append(fn.__name__)
-                    return fn(*arg, **kw)
-
-                return go
-
-        engine = engines.testing_engine(options={"proxy": TrackProxy()})
-        conn = engine.connect()
-        trans = conn.begin()
-        conn.execute(select([1]))
-        trans.rollback()
-        trans = conn.begin()
-        conn.execute(select([1]))
-        trans.commit()
-
-        eq_(
-            canary,
-            [
-                "begin",
-                "execute",
-                "cursor_execute",
-                "rollback",
-                "begin",
-                "execute",
-                "cursor_execute",
-                "commit",
-            ],
-        )
-
-    @testing.uses_deprecated(r".*Use event.listen")
-    @testing.requires.savepoints
-    @testing.requires.two_phase_transactions
-    def test_transactional_advanced(self):
-        canary = []
-
-        class TrackProxy(ConnectionProxy):
-            def __getattribute__(self, key):
-                fn = object.__getattribute__(self, key)
-
-                def go(*arg, **kw):
-                    canary.append(fn.__name__)
-                    return fn(*arg, **kw)
-
-                return go
-
-        engine = engines.testing_engine(options={"proxy": TrackProxy()})
-        conn = engine.connect()
-
-        trans = conn.begin()
-        trans2 = conn.begin_nested()
-        conn.execute(select([1]))
-        trans2.rollback()
-        trans2 = conn.begin_nested()
-        conn.execute(select([1]))
-        trans2.commit()
-        trans.rollback()
-
-        trans = conn.begin_twophase()
-        conn.execute(select([1]))
-        trans.prepare()
-        trans.commit()
-
-        canary = [t for t in canary if t not in ("cursor_execute", "execute")]
-        eq_(
-            canary,
-            [
-                "begin",
-                "savepoint",
-                "rollback_savepoint",
-                "savepoint",
-                "release_savepoint",
-                "rollback",
-                "begin_twophase",
-                "prepare_twophase",
-                "commit_twophase",
-            ],
-        )
-
 
 class DialectEventTest(fixtures.TestBase):
     @contextmanager
index 7a89188179bdf14030072ffd41dc8c3f8391906e..be90378c93080246f36a2fc5c5712567367e2a2c 100644 (file)
@@ -209,25 +209,6 @@ class CreateEngineTest(fixtures.TestBase):
         )
         assert e.echo is True
 
-    def test_pool_threadlocal_from_config(self):
-        dbapi = mock_dbapi
-
-        config = {
-            "sqlalchemy.url": "postgresql://scott:tiger@somehost/test",
-            "sqlalchemy.pool_threadlocal": "false",
-        }
-
-        e = engine_from_config(config, module=dbapi, _initialize=False)
-        eq_(e.pool._use_threadlocal, False)
-
-        config = {
-            "sqlalchemy.url": "postgresql://scott:tiger@somehost/test",
-            "sqlalchemy.pool_threadlocal": "true",
-        }
-
-        e = engine_from_config(config, module=dbapi, _initialize=False)
-        eq_(e.pool._use_threadlocal, True)
-
     def test_pool_reset_on_return_from_config(self):
         dbapi = mock_dbapi
 
index 75caa233afd0191175a4bd8e2118de83967d861e..feff61b881201c2c259a5c173ab7375b1f28b0b9 100644 (file)
@@ -90,50 +90,6 @@ class PoolTestBase(fixtures.TestBase):
 
 
 class PoolTest(PoolTestBase):
-    def test_manager(self):
-        manager = pool.manage(MockDBAPI(), use_threadlocal=True)
-
-        c1 = manager.connect("foo.db")
-        c2 = manager.connect("foo.db")
-        c3 = manager.connect("bar.db")
-        c4 = manager.connect("foo.db", bar="bat")
-        c5 = manager.connect("foo.db", bar="hoho")
-        c6 = manager.connect("foo.db", bar="bat")
-
-        assert c1.cursor() is not None
-        assert c1 is c2
-        assert c1 is not c3
-        assert c4 is c6
-        assert c4 is not c5
-
-    def test_manager_with_key(self):
-
-        dbapi = MockDBAPI()
-        manager = pool.manage(dbapi, use_threadlocal=True)
-
-        c1 = manager.connect("foo.db", sa_pool_key="a")
-        c2 = manager.connect("foo.db", sa_pool_key="b")
-        c3 = manager.connect("bar.db", sa_pool_key="a")
-
-        assert c1.cursor() is not None
-        assert c1 is not c2
-        assert c1 is c3
-
-        eq_(dbapi.connect.mock_calls, [call("foo.db"), call("foo.db")])
-
-    def test_bad_args(self):
-        manager = pool.manage(MockDBAPI())
-        manager.connect(None)
-
-    def test_non_thread_local_manager(self):
-        manager = pool.manage(MockDBAPI(), use_threadlocal=False)
-
-        connection = manager.connect("foo.db")
-        connection2 = manager.connect("foo.db")
-
-        self.assert_(connection.cursor() is not None)
-        self.assert_(connection is not connection2)
-
     @testing.fails_on(
         "+pyodbc", "pyodbc cursor doesn't implement tuple __eq__"
     )
@@ -170,69 +126,6 @@ class PoolTest(PoolTestBase):
             p.dispose()
             p.recreate()
 
-    def test_threadlocal_del(self):
-        self._do_testthreadlocal(useclose=False)
-
-    def test_threadlocal_close(self):
-        self._do_testthreadlocal(useclose=True)
-
-    def _do_testthreadlocal(self, useclose=False):
-        dbapi = MockDBAPI()
-        for p in (
-            pool.QueuePool(
-                creator=dbapi.connect,
-                pool_size=3,
-                max_overflow=-1,
-                use_threadlocal=True,
-            ),
-            pool.SingletonThreadPool(
-                creator=dbapi.connect, use_threadlocal=True
-            ),
-        ):
-            c1 = p.connect()
-            c2 = p.connect()
-            self.assert_(c1 is c2)
-            c3 = p.unique_connection()
-            self.assert_(c3 is not c1)
-            if useclose:
-                c2.close()
-            else:
-                c2 = None
-            c2 = p.connect()
-            self.assert_(c1 is c2)
-            self.assert_(c3 is not c1)
-            if useclose:
-                c2.close()
-            else:
-                c2 = None
-                lazy_gc()
-            if useclose:
-                c1 = p.connect()
-                c2 = p.connect()
-                c3 = p.connect()
-                c3.close()
-                c2.close()
-                self.assert_(c1.connection is not None)
-                c1.close()
-            c1 = c2 = c3 = None
-
-            # extra tests with QueuePool to ensure connections get
-            # __del__()ed when dereferenced
-
-            if isinstance(p, pool.QueuePool):
-                lazy_gc()
-                self.assert_(p.checkedout() == 0)
-                c1 = p.connect()
-                c2 = p.connect()
-                if useclose:
-                    c2.close()
-                    c1.close()
-                else:
-                    c2 = None
-                    c1 = None
-                    lazy_gc()
-                self.assert_(p.checkedout() == 0)
-
     def test_info(self):
         p = self._queuepool_fixture(pool_size=1, max_overflow=0)
 
@@ -822,255 +715,6 @@ class PoolFirstConnectSyncTest(PoolTestBase):
         )
 
 
-class DeprecatedPoolListenerTest(PoolTestBase):
-    @testing.requires.predictable_gc
-    @testing.uses_deprecated(
-        r".*Use the PoolEvents",
-        r".*'listeners' argument .* is deprecated"
-    )
-    def test_listeners(self):
-        class InstrumentingListener(object):
-            def __init__(self):
-                if hasattr(self, "connect"):
-                    self.connect = self.inst_connect
-                if hasattr(self, "first_connect"):
-                    self.first_connect = self.inst_first_connect
-                if hasattr(self, "checkout"):
-                    self.checkout = self.inst_checkout
-                if hasattr(self, "checkin"):
-                    self.checkin = self.inst_checkin
-                self.clear()
-
-            def clear(self):
-                self.connected = []
-                self.first_connected = []
-                self.checked_out = []
-                self.checked_in = []
-
-            def assert_total(self, conn, fconn, cout, cin):
-                eq_(len(self.connected), conn)
-                eq_(len(self.first_connected), fconn)
-                eq_(len(self.checked_out), cout)
-                eq_(len(self.checked_in), cin)
-
-            def assert_in(self, item, in_conn, in_fconn, in_cout, in_cin):
-                eq_((item in self.connected), in_conn)
-                eq_((item in self.first_connected), in_fconn)
-                eq_((item in self.checked_out), in_cout)
-                eq_((item in self.checked_in), in_cin)
-
-            def inst_connect(self, con, record):
-                print("connect(%s, %s)" % (con, record))
-                assert con is not None
-                assert record is not None
-                self.connected.append(con)
-
-            def inst_first_connect(self, con, record):
-                print("first_connect(%s, %s)" % (con, record))
-                assert con is not None
-                assert record is not None
-                self.first_connected.append(con)
-
-            def inst_checkout(self, con, record, proxy):
-                print("checkout(%s, %s, %s)" % (con, record, proxy))
-                assert con is not None
-                assert record is not None
-                assert proxy is not None
-                self.checked_out.append(con)
-
-            def inst_checkin(self, con, record):
-                print("checkin(%s, %s)" % (con, record))
-                # con can be None if invalidated
-                assert record is not None
-                self.checked_in.append(con)
-
-        class ListenAll(tsa.interfaces.PoolListener, InstrumentingListener):
-            pass
-
-        class ListenConnect(InstrumentingListener):
-            def connect(self, con, record):
-                pass
-
-        class ListenFirstConnect(InstrumentingListener):
-            def first_connect(self, con, record):
-                pass
-
-        class ListenCheckOut(InstrumentingListener):
-            def checkout(self, con, record, proxy, num):
-                pass
-
-        class ListenCheckIn(InstrumentingListener):
-            def checkin(self, con, record):
-                pass
-
-        def assert_listeners(p, total, conn, fconn, cout, cin):
-            for instance in (p, p.recreate()):
-                self.assert_(len(instance.dispatch.connect) == conn)
-                self.assert_(len(instance.dispatch.first_connect) == fconn)
-                self.assert_(len(instance.dispatch.checkout) == cout)
-                self.assert_(len(instance.dispatch.checkin) == cin)
-
-        p = self._queuepool_fixture()
-        assert_listeners(p, 0, 0, 0, 0, 0)
-
-        p.add_listener(ListenAll())
-        assert_listeners(p, 1, 1, 1, 1, 1)
-
-        p.add_listener(ListenConnect())
-        assert_listeners(p, 2, 2, 1, 1, 1)
-
-        p.add_listener(ListenFirstConnect())
-        assert_listeners(p, 3, 2, 2, 1, 1)
-
-        p.add_listener(ListenCheckOut())
-        assert_listeners(p, 4, 2, 2, 2, 1)
-
-        p.add_listener(ListenCheckIn())
-        assert_listeners(p, 5, 2, 2, 2, 2)
-        del p
-
-        snoop = ListenAll()
-        p = self._queuepool_fixture(listeners=[snoop])
-        assert_listeners(p, 1, 1, 1, 1, 1)
-
-        c = p.connect()
-        snoop.assert_total(1, 1, 1, 0)
-        cc = c.connection
-        snoop.assert_in(cc, True, True, True, False)
-        c.close()
-        snoop.assert_in(cc, True, True, True, True)
-        del c, cc
-
-        snoop.clear()
-
-        # this one depends on immediate gc
-        c = p.connect()
-        cc = c.connection
-        snoop.assert_in(cc, False, False, True, False)
-        snoop.assert_total(0, 0, 1, 0)
-        del c, cc
-        lazy_gc()
-        snoop.assert_total(0, 0, 1, 1)
-
-        p.dispose()
-        snoop.clear()
-
-        c = p.connect()
-        c.close()
-        c = p.connect()
-        snoop.assert_total(1, 0, 2, 1)
-        c.close()
-        snoop.assert_total(1, 0, 2, 2)
-
-        # invalidation
-        p.dispose()
-        snoop.clear()
-
-        c = p.connect()
-        snoop.assert_total(1, 0, 1, 0)
-        c.invalidate()
-        snoop.assert_total(1, 0, 1, 1)
-        c.close()
-        snoop.assert_total(1, 0, 1, 1)
-        del c
-        lazy_gc()
-        snoop.assert_total(1, 0, 1, 1)
-        c = p.connect()
-        snoop.assert_total(2, 0, 2, 1)
-        c.close()
-        del c
-        lazy_gc()
-        snoop.assert_total(2, 0, 2, 2)
-
-        # detached
-        p.dispose()
-        snoop.clear()
-
-        c = p.connect()
-        snoop.assert_total(1, 0, 1, 0)
-        c.detach()
-        snoop.assert_total(1, 0, 1, 0)
-        c.close()
-        del c
-        snoop.assert_total(1, 0, 1, 0)
-        c = p.connect()
-        snoop.assert_total(2, 0, 2, 0)
-        c.close()
-        del c
-        snoop.assert_total(2, 0, 2, 1)
-
-        # recreated
-        p = p.recreate()
-        snoop.clear()
-
-        c = p.connect()
-        snoop.assert_total(1, 1, 1, 0)
-        c.close()
-        snoop.assert_total(1, 1, 1, 1)
-        c = p.connect()
-        snoop.assert_total(1, 1, 2, 1)
-        c.close()
-        snoop.assert_total(1, 1, 2, 2)
-
-    @testing.uses_deprecated(
-        r".*Use the PoolEvents",
-        r".*'listeners' argument .* is deprecated"
-    )
-    def test_listeners_callables(self):
-        def connect(dbapi_con, con_record):
-            counts[0] += 1
-
-        def checkout(dbapi_con, con_record, con_proxy):
-            counts[1] += 1
-
-        def checkin(dbapi_con, con_record):
-            counts[2] += 1
-
-        i_all = dict(connect=connect, checkout=checkout, checkin=checkin)
-        i_connect = dict(connect=connect)
-        i_checkout = dict(checkout=checkout)
-        i_checkin = dict(checkin=checkin)
-
-        for cls in (pool.QueuePool, pool.StaticPool):
-            counts = [0, 0, 0]
-
-            def assert_listeners(p, total, conn, cout, cin):
-                for instance in (p, p.recreate()):
-                    eq_(len(instance.dispatch.connect), conn)
-                    eq_(len(instance.dispatch.checkout), cout)
-                    eq_(len(instance.dispatch.checkin), cin)
-
-            p = self._queuepool_fixture()
-            assert_listeners(p, 0, 0, 0, 0)
-
-            p.add_listener(i_all)
-            assert_listeners(p, 1, 1, 1, 1)
-
-            p.add_listener(i_connect)
-            assert_listeners(p, 2, 1, 1, 1)
-
-            p.add_listener(i_checkout)
-            assert_listeners(p, 3, 1, 1, 1)
-
-            p.add_listener(i_checkin)
-            assert_listeners(p, 4, 1, 1, 1)
-            del p
-
-            p = self._queuepool_fixture(listeners=[i_all])
-            assert_listeners(p, 1, 1, 1, 1)
-
-            c = p.connect()
-            assert counts == [1, 1, 0]
-            c.close()
-            assert counts == [1, 1, 1]
-
-            c = p.connect()
-            assert counts == [1, 2, 1]
-            p.add_listener(i_checkin)
-            c.close()
-            assert counts == [1, 2, 2]
-
-
 class QueuePoolTest(PoolTestBase):
     def test_queuepool_del(self):
         self._do_testqueuepool(useclose=False)
@@ -1491,30 +1135,7 @@ class QueuePoolTest(PoolTestBase):
     def test_max_overflow(self):
         self._test_overflow(40, 5)
 
-    def test_mixed_close(self):
-        pool._refs.clear()
-        p = self._queuepool_fixture(
-            pool_size=3, max_overflow=-1, use_threadlocal=True
-        )
-        c1 = p.connect()
-        c2 = p.connect()
-        assert c1 is c2
-        c1.close()
-        c2 = None
-        assert p.checkedout() == 1
-        c1 = None
-        lazy_gc()
-        assert p.checkedout() == 0
-        lazy_gc()
-        assert not pool._refs
-
-    def test_overflow_no_gc_tlocal(self):
-        self._test_overflow_no_gc(True)
-
     def test_overflow_no_gc(self):
-        self._test_overflow_no_gc(False)
-
-    def _test_overflow_no_gc(self, threadlocal):
         p = self._queuepool_fixture(pool_size=2, max_overflow=2)
 
         # disable weakref collection of the
@@ -1543,42 +1164,6 @@ class QueuePoolTest(PoolTestBase):
             set([1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0]),
         )
 
-    @testing.requires.predictable_gc
-    def test_weakref_kaboom(self):
-        p = self._queuepool_fixture(
-            pool_size=3, max_overflow=-1, use_threadlocal=True
-        )
-        c1 = p.connect()
-        c2 = p.connect()
-        c1.close()
-        c2 = None
-        del c1
-        del c2
-        gc_collect()
-        assert p.checkedout() == 0
-        c3 = p.connect()
-        assert c3 is not None
-
-    def test_trick_the_counter(self):
-        """this is a "flaw" in the connection pool; since threadlocal
-        uses a single ConnectionFairy per thread with an open/close
-        counter, you can fool the counter into giving you a
-        ConnectionFairy with an ambiguous counter.  i.e. its not true
-        reference counting."""
-
-        p = self._queuepool_fixture(
-            pool_size=3, max_overflow=-1, use_threadlocal=True
-        )
-        c1 = p.connect()
-        c2 = p.connect()
-        assert c1 is c2
-        c1.close()
-        c2 = p.connect()
-        c2.close()
-        self.assert_(p.checkedout() != 0)
-        c2.close()
-        self.assert_(p.checkedout() == 0)
-
     def test_recycle(self):
         with patch("sqlalchemy.pool.base.time.time") as mock:
             mock.return_value = 10000
@@ -1957,15 +1542,6 @@ class QueuePoolTest(PoolTestBase):
         c2.close()
         eq_(c2_con.close.call_count, 0)
 
-    def test_threadfairy(self):
-        p = self._queuepool_fixture(
-            pool_size=3, max_overflow=-1, use_threadlocal=True
-        )
-        c1 = p.connect()
-        c1.close()
-        c2 = p.connect()
-        assert c2.connection is not None
-
     def test_no_double_checkin(self):
         p = self._queuepool_fixture(pool_size=1)
 
index f6904174b9161ad4d12a60f328cc6cb9adf89dcd..dd2ebb1c41c65866692a85667f7719ee3ff8f125 100644 (file)
@@ -950,33 +950,30 @@ class RecycleTest(fixtures.TestBase):
     __backend__ = True
 
     def test_basic(self):
-        for threadlocal in False, True:
-            engine = engines.reconnecting_engine(
-                options={"pool_threadlocal": threadlocal}
-            )
+        engine = engines.reconnecting_engine()
 
-            conn = engine.contextual_connect()
-            eq_(conn.execute(select([1])).scalar(), 1)
-            conn.close()
+        conn = engine.connect()
+        eq_(conn.execute(select([1])).scalar(), 1)
+        conn.close()
 
-            # set the pool recycle down to 1.
-            # we aren't doing this inline with the
-            # engine create since cx_oracle takes way
-            # too long to create the 1st connection and don't
-            # want to build a huge delay into this test.
+        # set the pool recycle down to 1.
+        # we aren't doing this inline with the
+        # engine create since cx_oracle takes way
+        # too long to create the 1st connection and don't
+        # want to build a huge delay into this test.
 
-            engine.pool._recycle = 1
+        engine.pool._recycle = 1
 
-            # kill the DB connection
-            engine.test_shutdown()
+        # kill the DB connection
+        engine.test_shutdown()
 
-            # wait until past the recycle period
-            time.sleep(2)
+        # wait until past the recycle period
+        time.sleep(2)
 
-            # can connect, no exception
-            conn = engine.contextual_connect()
-            eq_(conn.execute(select([1])).scalar(), 1)
-            conn.close()
+        # can connect, no exception
+        conn = engine.connect()
+        eq_(conn.execute(select([1])).scalar(), 1)
+        conn.close()
 
 
 class PrePingRealTest(fixtures.TestBase):
index d8161a29abd7badd02af27f7c8787315c655876b..81f86089be2dece8376998b112cba428f5cd7fab 100644 (file)
@@ -8,7 +8,6 @@ from sqlalchemy import INT
 from sqlalchemy import Integer
 from sqlalchemy import MetaData
 from sqlalchemy import select
-from sqlalchemy import Sequence
 from sqlalchemy import String
 from sqlalchemy import testing
 from sqlalchemy import text
@@ -822,34 +821,6 @@ class ExplicitAutoCommitTest(fixtures.TestBase):
         conn1.close()
         conn2.close()
 
-    @testing.uses_deprecated(
-        r".*select.autocommit parameter is deprecated",
-        r".*SelectBase.autocommit\(\) .* is deprecated",
-    )
-    def test_explicit_compiled_deprecated(self):
-        conn1 = testing.db.connect()
-        conn2 = testing.db.connect()
-        conn1.execute(select([func.insert_foo("data1")], autocommit=True))
-        assert conn2.execute(select([foo.c.data])).fetchall() == [("data1",)]
-        conn1.execute(select([func.insert_foo("data2")]).autocommit())
-        assert conn2.execute(select([foo.c.data])).fetchall() == [
-            ("data1",),
-            ("data2",),
-        ]
-        conn1.close()
-        conn2.close()
-
-    @testing.uses_deprecated(r"autocommit on text\(\) is deprecated")
-    def test_explicit_text_deprecated(self):
-        conn1 = testing.db.connect()
-        conn2 = testing.db.connect()
-        conn1.execute(text("select insert_foo('moredata')", autocommit=True))
-        assert conn2.execute(select([foo.c.data])).fetchall() == [
-            ("moredata",)
-        ]
-        conn1.close()
-        conn2.close()
-
     def test_implicit_text(self):
         conn1 = testing.db.connect()
         conn2 = testing.db.connect()
@@ -861,485 +832,6 @@ class ExplicitAutoCommitTest(fixtures.TestBase):
         conn2.close()
 
 
-tlengine = None
-
-
-class TLTransactionTest(fixtures.TestBase):
-    __requires__ = ("ad_hoc_engines",)
-    __backend__ = True
-
-    @classmethod
-    def setup_class(cls):
-        global users, metadata, tlengine
-        tlengine = testing_engine(options=dict(strategy="threadlocal"))
-        metadata = MetaData()
-        users = Table(
-            "query_users",
-            metadata,
-            Column(
-                "user_id",
-                INT,
-                Sequence("query_users_id_seq", optional=True),
-                primary_key=True,
-            ),
-            Column("user_name", VARCHAR(20)),
-            test_needs_acid=True,
-        )
-        metadata.create_all(tlengine)
-
-    def teardown(self):
-        tlengine.execute(users.delete()).close()
-
-    @classmethod
-    def teardown_class(cls):
-        tlengine.close()
-        metadata.drop_all(tlengine)
-        tlengine.dispose()
-
-    def setup(self):
-
-        # ensure tests start with engine closed
-
-        tlengine.close()
-
-    @testing.crashes(
-        "oracle", "TNS error of unknown origin occurs on the buildbot."
-    )
-    def test_rollback_no_trans(self):
-        tlengine = testing_engine(options=dict(strategy="threadlocal"))
-
-        # shouldn't fail
-        tlengine.rollback()
-
-        tlengine.begin()
-        tlengine.rollback()
-
-        # shouldn't fail
-        tlengine.rollback()
-
-    def test_commit_no_trans(self):
-        tlengine = testing_engine(options=dict(strategy="threadlocal"))
-
-        # shouldn't fail
-        tlengine.commit()
-
-        tlengine.begin()
-        tlengine.rollback()
-
-        # shouldn't fail
-        tlengine.commit()
-
-    def test_prepare_no_trans(self):
-        tlengine = testing_engine(options=dict(strategy="threadlocal"))
-
-        # shouldn't fail
-        tlengine.prepare()
-
-        tlengine.begin()
-        tlengine.rollback()
-
-        # shouldn't fail
-        tlengine.prepare()
-
-    def test_connection_close(self):
-        """test that when connections are closed for real, transactions
-        are rolled back and disposed."""
-
-        c = tlengine.contextual_connect()
-        c.begin()
-        assert c.in_transaction()
-        c.close()
-        assert not c.in_transaction()
-
-    def test_transaction_close(self):
-        c = tlengine.contextual_connect()
-        t = c.begin()
-        tlengine.execute(users.insert(), user_id=1, user_name="user1")
-        tlengine.execute(users.insert(), user_id=2, user_name="user2")
-        t2 = c.begin()
-        tlengine.execute(users.insert(), user_id=3, user_name="user3")
-        tlengine.execute(users.insert(), user_id=4, user_name="user4")
-        t2.close()
-        result = c.execute("select * from query_users")
-        assert len(result.fetchall()) == 4
-        t.close()
-        external_connection = tlengine.connect()
-        result = external_connection.execute("select * from query_users")
-        try:
-            assert len(result.fetchall()) == 0
-        finally:
-            c.close()
-            external_connection.close()
-
-    def test_rollback(self):
-        """test a basic rollback"""
-
-        tlengine.begin()
-        tlengine.execute(users.insert(), user_id=1, user_name="user1")
-        tlengine.execute(users.insert(), user_id=2, user_name="user2")
-        tlengine.execute(users.insert(), user_id=3, user_name="user3")
-        tlengine.rollback()
-        external_connection = tlengine.connect()
-        result = external_connection.execute("select * from query_users")
-        try:
-            assert len(result.fetchall()) == 0
-        finally:
-            external_connection.close()
-
-    def test_commit(self):
-        """test a basic commit"""
-
-        tlengine.begin()
-        tlengine.execute(users.insert(), user_id=1, user_name="user1")
-        tlengine.execute(users.insert(), user_id=2, user_name="user2")
-        tlengine.execute(users.insert(), user_id=3, user_name="user3")
-        tlengine.commit()
-        external_connection = tlengine.connect()
-        result = external_connection.execute("select * from query_users")
-        try:
-            assert len(result.fetchall()) == 3
-        finally:
-            external_connection.close()
-
-    def test_with_interface(self):
-        trans = tlengine.begin()
-        tlengine.execute(users.insert(), user_id=1, user_name="user1")
-        tlengine.execute(users.insert(), user_id=2, user_name="user2")
-        trans.commit()
-
-        trans = tlengine.begin()
-        tlengine.execute(users.insert(), user_id=3, user_name="user3")
-        trans.__exit__(Exception, "fake", None)
-        trans = tlengine.begin()
-        tlengine.execute(users.insert(), user_id=4, user_name="user4")
-        trans.__exit__(None, None, None)
-        eq_(
-            tlengine.execute(
-                users.select().order_by(users.c.user_id)
-            ).fetchall(),
-            [(1, "user1"), (2, "user2"), (4, "user4")],
-        )
-
-    def test_commits(self):
-        connection = tlengine.connect()
-        assert (
-            connection.execute("select count(*) from query_users").scalar()
-            == 0
-        )
-        connection.close()
-        connection = tlengine.contextual_connect()
-        transaction = connection.begin()
-        connection.execute(users.insert(), user_id=1, user_name="user1")
-        transaction.commit()
-        transaction = connection.begin()
-        connection.execute(users.insert(), user_id=2, user_name="user2")
-        connection.execute(users.insert(), user_id=3, user_name="user3")
-        transaction.commit()
-        transaction = connection.begin()
-        result = connection.execute("select * from query_users")
-        rows = result.fetchall()
-        assert len(rows) == 3, "expected 3 got %d" % len(rows)
-        transaction.commit()
-        connection.close()
-
-    def test_rollback_off_conn(self):
-
-        # test that a TLTransaction opened off a TLConnection allows
-        # that TLConnection to be aware of the transactional context
-
-        conn = tlengine.contextual_connect()
-        trans = conn.begin()
-        conn.execute(users.insert(), user_id=1, user_name="user1")
-        conn.execute(users.insert(), user_id=2, user_name="user2")
-        conn.execute(users.insert(), user_id=3, user_name="user3")
-        trans.rollback()
-        external_connection = tlengine.connect()
-        result = external_connection.execute("select * from query_users")
-        try:
-            assert len(result.fetchall()) == 0
-        finally:
-            conn.close()
-            external_connection.close()
-
-    def test_morerollback_off_conn(self):
-
-        # test that an existing TLConnection automatically takes place
-        # in a TLTransaction opened on a second TLConnection
-
-        conn = tlengine.contextual_connect()
-        conn2 = tlengine.contextual_connect()
-        trans = conn2.begin()
-        conn.execute(users.insert(), user_id=1, user_name="user1")
-        conn.execute(users.insert(), user_id=2, user_name="user2")
-        conn.execute(users.insert(), user_id=3, user_name="user3")
-        trans.rollback()
-        external_connection = tlengine.connect()
-        result = external_connection.execute("select * from query_users")
-        try:
-            assert len(result.fetchall()) == 0
-        finally:
-            conn.close()
-            conn2.close()
-            external_connection.close()
-
-    def test_commit_off_connection(self):
-        conn = tlengine.contextual_connect()
-        trans = conn.begin()
-        conn.execute(users.insert(), user_id=1, user_name="user1")
-        conn.execute(users.insert(), user_id=2, user_name="user2")
-        conn.execute(users.insert(), user_id=3, user_name="user3")
-        trans.commit()
-        external_connection = tlengine.connect()
-        result = external_connection.execute("select * from query_users")
-        try:
-            assert len(result.fetchall()) == 3
-        finally:
-            conn.close()
-            external_connection.close()
-
-    def test_nesting_rollback(self):
-        """tests nesting of transactions, rollback at the end"""
-
-        external_connection = tlengine.connect()
-        self.assert_(
-            external_connection.connection
-            is not tlengine.contextual_connect().connection
-        )
-        tlengine.begin()
-        tlengine.execute(users.insert(), user_id=1, user_name="user1")
-        tlengine.execute(users.insert(), user_id=2, user_name="user2")
-        tlengine.execute(users.insert(), user_id=3, user_name="user3")
-        tlengine.begin()
-        tlengine.execute(users.insert(), user_id=4, user_name="user4")
-        tlengine.execute(users.insert(), user_id=5, user_name="user5")
-        tlengine.commit()
-        tlengine.rollback()
-        try:
-            self.assert_(
-                external_connection.scalar("select count(*) from query_users")
-                == 0
-            )
-        finally:
-            external_connection.close()
-
-    def test_nesting_commit(self):
-        """tests nesting of transactions, commit at the end."""
-
-        external_connection = tlengine.connect()
-        self.assert_(
-            external_connection.connection
-            is not tlengine.contextual_connect().connection
-        )
-        tlengine.begin()
-        tlengine.execute(users.insert(), user_id=1, user_name="user1")
-        tlengine.execute(users.insert(), user_id=2, user_name="user2")
-        tlengine.execute(users.insert(), user_id=3, user_name="user3")
-        tlengine.begin()
-        tlengine.execute(users.insert(), user_id=4, user_name="user4")
-        tlengine.execute(users.insert(), user_id=5, user_name="user5")
-        tlengine.commit()
-        tlengine.commit()
-        try:
-            self.assert_(
-                external_connection.scalar("select count(*) from query_users")
-                == 5
-            )
-        finally:
-            external_connection.close()
-
-    def test_mixed_nesting(self):
-        """tests nesting of transactions off the TLEngine directly
-        inside of transactions off the connection from the TLEngine"""
-
-        external_connection = tlengine.connect()
-        self.assert_(
-            external_connection.connection
-            is not tlengine.contextual_connect().connection
-        )
-        conn = tlengine.contextual_connect()
-        trans = conn.begin()
-        trans2 = conn.begin()
-        tlengine.execute(users.insert(), user_id=1, user_name="user1")
-        tlengine.execute(users.insert(), user_id=2, user_name="user2")
-        tlengine.execute(users.insert(), user_id=3, user_name="user3")
-        tlengine.begin()
-        tlengine.execute(users.insert(), user_id=4, user_name="user4")
-        tlengine.begin()
-        tlengine.execute(users.insert(), user_id=5, user_name="user5")
-        tlengine.execute(users.insert(), user_id=6, user_name="user6")
-        tlengine.execute(users.insert(), user_id=7, user_name="user7")
-        tlengine.commit()
-        tlengine.execute(users.insert(), user_id=8, user_name="user8")
-        tlengine.commit()
-        trans2.commit()
-        trans.rollback()
-        conn.close()
-        try:
-            self.assert_(
-                external_connection.scalar("select count(*) from query_users")
-                == 0
-            )
-        finally:
-            external_connection.close()
-
-    def test_more_mixed_nesting(self):
-        """tests nesting of transactions off the connection from the
-        TLEngine inside of transactions off the TLEngine directly."""
-
-        external_connection = tlengine.connect()
-        self.assert_(
-            external_connection.connection
-            is not tlengine.contextual_connect().connection
-        )
-        tlengine.begin()
-        connection = tlengine.contextual_connect()
-        connection.execute(users.insert(), user_id=1, user_name="user1")
-        tlengine.begin()
-        connection.execute(users.insert(), user_id=2, user_name="user2")
-        connection.execute(users.insert(), user_id=3, user_name="user3")
-        trans = connection.begin()
-        connection.execute(users.insert(), user_id=4, user_name="user4")
-        connection.execute(users.insert(), user_id=5, user_name="user5")
-        trans.commit()
-        tlengine.commit()
-        tlengine.rollback()
-        connection.close()
-        try:
-            self.assert_(
-                external_connection.scalar("select count(*) from query_users")
-                == 0
-            )
-        finally:
-            external_connection.close()
-
-    @testing.requires.savepoints
-    def test_nested_subtransaction_rollback(self):
-        tlengine.begin()
-        tlengine.execute(users.insert(), user_id=1, user_name="user1")
-        tlengine.begin_nested()
-        tlengine.execute(users.insert(), user_id=2, user_name="user2")
-        tlengine.rollback()
-        tlengine.execute(users.insert(), user_id=3, user_name="user3")
-        tlengine.commit()
-        tlengine.close()
-        eq_(
-            tlengine.execute(
-                select([users.c.user_id]).order_by(users.c.user_id)
-            ).fetchall(),
-            [(1,), (3,)],
-        )
-        tlengine.close()
-
-    @testing.requires.savepoints
-    @testing.crashes(
-        "oracle+zxjdbc",
-        "Errors out and causes subsequent tests to " "deadlock",
-    )
-    def test_nested_subtransaction_commit(self):
-        tlengine.begin()
-        tlengine.execute(users.insert(), user_id=1, user_name="user1")
-        tlengine.begin_nested()
-        tlengine.execute(users.insert(), user_id=2, user_name="user2")
-        tlengine.commit()
-        tlengine.execute(users.insert(), user_id=3, user_name="user3")
-        tlengine.commit()
-        tlengine.close()
-        eq_(
-            tlengine.execute(
-                select([users.c.user_id]).order_by(users.c.user_id)
-            ).fetchall(),
-            [(1,), (2,), (3,)],
-        )
-        tlengine.close()
-
-    @testing.requires.savepoints
-    def test_rollback_to_subtransaction(self):
-        tlengine.begin()
-        tlengine.execute(users.insert(), user_id=1, user_name="user1")
-        tlengine.begin_nested()
-        tlengine.execute(users.insert(), user_id=2, user_name="user2")
-        tlengine.begin()
-        tlengine.execute(users.insert(), user_id=3, user_name="user3")
-        tlengine.rollback()
-        tlengine.rollback()
-        tlengine.execute(users.insert(), user_id=4, user_name="user4")
-        tlengine.commit()
-        tlengine.close()
-        eq_(
-            tlengine.execute(
-                select([users.c.user_id]).order_by(users.c.user_id)
-            ).fetchall(),
-            [(1,), (4,)],
-        )
-        tlengine.close()
-
-    def test_connections(self):
-        """tests that contextual_connect is threadlocal"""
-
-        c1 = tlengine.contextual_connect()
-        c2 = tlengine.contextual_connect()
-        assert c1.connection is c2.connection
-        c2.close()
-        assert not c1.closed
-        assert not tlengine.closed
-
-    @testing.requires.independent_cursors
-    def test_result_closing(self):
-        """tests that contextual_connect is threadlocal"""
-
-        r1 = tlengine.execute(select([1]))
-        r2 = tlengine.execute(select([1]))
-        row1 = r1.fetchone()
-        row2 = r2.fetchone()
-        r1.close()
-        assert r2.connection is r1.connection
-        assert not r2.connection.closed
-        assert not tlengine.closed
-
-        # close again, nothing happens since resultproxy calls close()
-        # only once
-
-        r1.close()
-        assert r2.connection is r1.connection
-        assert not r2.connection.closed
-        assert not tlengine.closed
-        r2.close()
-        assert r2.connection.closed
-        assert tlengine.closed
-
-    @testing.crashes(
-        "oracle+cx_oracle", "intermittent failures on the buildbot"
-    )
-    def test_dispose(self):
-        eng = testing_engine(options=dict(strategy="threadlocal"))
-        result = eng.execute(select([1]))
-        eng.dispose()
-        eng.execute(select([1]))
-
-    @testing.requires.two_phase_transactions
-    def test_two_phase_transaction(self):
-        tlengine.begin_twophase()
-        tlengine.execute(users.insert(), user_id=1, user_name="user1")
-        tlengine.prepare()
-        tlengine.commit()
-        tlengine.begin_twophase()
-        tlengine.execute(users.insert(), user_id=2, user_name="user2")
-        tlengine.commit()
-        tlengine.begin_twophase()
-        tlengine.execute(users.insert(), user_id=3, user_name="user3")
-        tlengine.rollback()
-        tlengine.begin_twophase()
-        tlengine.execute(users.insert(), user_id=4, user_name="user4")
-        tlengine.prepare()
-        tlengine.rollback()
-        eq_(
-            tlengine.execute(
-                select([users.c.user_id]).order_by(users.c.user_id)
-            ).fetchall(),
-            [(1,), (2,)],
-        )
-
-
 class IsolationLevelTest(fixtures.TestBase):
     __requires__ = ("isolation_level", "ad_hoc_engines")
     __backend__ = True
index 0f003501906cd828d8275511539eae5776c7997a..4406925ffa65b0a4f11136820bb98557c77dc599 100644 (file)
@@ -1973,44 +1973,6 @@ class DeclarativeTest(DeclarativeTestBase):
         rt = sess.query(User).filter(User.namesyn == "someuser").one()
         eq_(rt, u1)
 
-    def test_comparable_using(self):
-        class NameComparator(sa.orm.PropComparator):
-            @property
-            def upperself(self):
-                cls = self.prop.parent.class_
-                col = getattr(cls, "name")
-                return sa.func.upper(col)
-
-            def operate(self, op, other, **kw):
-                return op(self.upperself, other, **kw)
-
-        class User(Base, fixtures.ComparableEntity):
-
-            __tablename__ = "users"
-            id = Column(
-                "id", Integer, primary_key=True, test_needs_autoincrement=True
-            )
-            name = Column("name", String(50))
-
-            @decl.comparable_using(NameComparator)
-            @property
-            def uc_name(self):
-                return self.name is not None and self.name.upper() or None
-
-        Base.metadata.create_all()
-        sess = create_session()
-        u1 = User(name="someuser")
-        eq_(u1.name, "someuser", u1.name)
-        eq_(u1.uc_name, "SOMEUSER", u1.uc_name)
-        sess.add(u1)
-        sess.flush()
-        sess.expunge_all()
-        rt = sess.query(User).filter(User.uc_name == "SOMEUSER").one()
-        eq_(rt, u1)
-        sess.expunge_all()
-        rt = sess.query(User).filter(User.uc_name.startswith("SOMEUSE")).one()
-        eq_(rt, u1)
-
     def test_duplicate_classes_in_base(self):
         class Test(Base):
             __tablename__ = "a"
index 75b6b8901c7486ca4bf7b51fb8509625133733ec..1bf84a77c3aa669df7527f8d67157c3a133df0d1 100644 (file)
@@ -1325,7 +1325,7 @@ class ReconstitutionTest(fixtures.TestBase):
             Parent, self.parents, properties=dict(children=relationship(Child))
         )
         mapper(Child, self.children)
-        session = create_session(weak_identity_map=True)
+        session = create_session()
 
         def add_child(parent_name, child_name):
             parent = session.query(Parent).filter_by(name=parent_name).one()
index 8d590c49642e5c734d47b5ccff334af18a6ab511..99c0a3c1a3bf58bdb0ceaa7e67de0e8e3cc28530 100644 (file)
@@ -23,6 +23,7 @@ from sqlalchemy.orm import relationship
 from sqlalchemy.orm import selectinload
 from sqlalchemy.orm import Session
 from sqlalchemy.orm import sessionmaker
+from sqlalchemy.pool import SingletonThreadPool
 from sqlalchemy.sql import operators
 from sqlalchemy.testing import eq_
 from sqlalchemy.testing import fixtures
@@ -50,8 +51,8 @@ class ShardTest(object):
         def id_generator(ctx):
             # in reality, might want to use a separate transaction for this.
 
-            c = db1.contextual_connect()
-            nextid = c.execute(ids.select(for_update=True)).scalar()
+            c = db1.connect()
+            nextid = c.execute(ids.select().with_for_update()).scalar()
             c.execute(ids.update(values={ids.c.nextid: ids.c.nextid + 1}))
             return nextid
 
@@ -411,7 +412,7 @@ class DistinctEngineShardTest(ShardTest, fixtures.TestBase):
     def _init_dbs(self):
         db1 = testing_engine(
             "sqlite:///shard1_%s.db" % provision.FOLLOWER_IDENT,
-            options=dict(pool_threadlocal=True),
+            options=dict(poolclass=SingletonThreadPool),
         )
         db2 = testing_engine(
             "sqlite:///shard2_%s.db" % provision.FOLLOWER_IDENT
@@ -551,8 +552,7 @@ class RefreshDeferExpireTest(fixtures.DeclarativeMappedTest):
 class LazyLoadIdentityKeyTest(fixtures.DeclarativeMappedTest):
     def _init_dbs(self):
         self.db1 = db1 = testing_engine(
-            "sqlite:///shard1_%s.db" % provision.FOLLOWER_IDENT,
-            options=dict(pool_threadlocal=True),
+            "sqlite:///shard1_%s.db" % provision.FOLLOWER_IDENT
         )
         self.db2 = db2 = testing_engine(
             "sqlite:///shard2_%s.db" % provision.FOLLOWER_IDENT
index ee869ab22370588d1684fc2d0942f0284b9fb5cc..75d219b491c483c2585f962e4ac560ca7ae61fd8 100644 (file)
@@ -19,7 +19,6 @@ from sqlalchemy.orm import clear_mappers
 from sqlalchemy.orm import create_session
 from sqlalchemy.orm import join
 from sqlalchemy.orm import joinedload
-from sqlalchemy.orm import joinedload_all
 from sqlalchemy.orm import mapper
 from sqlalchemy.orm import polymorphic_union
 from sqlalchemy.orm import Query
@@ -2096,7 +2095,7 @@ class Ticket2419Test(fixtures.DeclarativeMappedTest):
 
         s.commit()
 
-        q = s.query(B, B.ds.any(D.id == 1)).options(joinedload_all("es"))
+        q = s.query(B, B.ds.any(D.id == 1)).options(joinedload("es"))
         q = q.join(C, C.b_id == B.id)
         q = q.limit(5)
         eq_(q.all(), [(b, True)])
index f2336980c8313d040cc89dface6b037a9ef66c0e..ab61162565130f553b63b420863e613602e19296 100644 (file)
@@ -1876,7 +1876,7 @@ class VersioningTest(fixtures.MappedTest):
 
         assert_raises(
             orm_exc.StaleDataError,
-            sess2.query(Base).with_lockmode("read").get,
+            sess2.query(Base).with_for_update(read=True).get,
             s1.id,
         )
 
index 508de986c13726cacd3b25758bcbccd4ba1da3f5..c16573b237d6d301ce59746935bcd66b38f8e957 100644 (file)
@@ -5,10 +5,9 @@ from sqlalchemy import select
 from sqlalchemy import testing
 from sqlalchemy.orm import aliased
 from sqlalchemy.orm import create_session
+from sqlalchemy.orm import defaultload
 from sqlalchemy.orm import joinedload
-from sqlalchemy.orm import joinedload_all
 from sqlalchemy.orm import subqueryload
-from sqlalchemy.orm import subqueryload_all
 from sqlalchemy.orm import with_polymorphic
 from sqlalchemy.testing import assert_raises
 from sqlalchemy.testing import eq_
@@ -652,12 +651,10 @@ class _PolymorphicTestBase(object):
         )
 
     def test_subclass_option_pathing(self):
-        from sqlalchemy.orm import defer
-
         sess = create_session()
         dilbert = (
             sess.query(Person)
-            .options(defer(Engineer.machines, Machine.name))
+            .options(defaultload(Engineer.machines).defer(Machine.name))
             .filter(Person.name == "dilbert")
             .first()
         )
@@ -805,8 +802,8 @@ class _PolymorphicTestBase(object):
             eq_(
                 sess.query(Company)
                 .options(
-                    joinedload_all(
-                        Company.employees.of_type(Engineer), Engineer.machines
+                    joinedload(Company.employees.of_type(Engineer)).joinedload(
+                        Engineer.machines
                     )
                 )
                 .all(),
@@ -832,9 +829,9 @@ class _PolymorphicTestBase(object):
             eq_(
                 sess.query(Company)
                 .options(
-                    subqueryload_all(
-                        Company.employees.of_type(Engineer), Engineer.machines
-                    )
+                    subqueryload(
+                        Company.employees.of_type(Engineer)
+                    ).subqueryload(Engineer.machines)
                 )
                 .all(),
                 expected,
index 887453b1bfdc6ad3ac87c7f80380a3c8ad21315c..9db2a5163a520eb42df85d50d192a184d11f7844 100644 (file)
@@ -9,12 +9,10 @@ from sqlalchemy.orm import backref
 from sqlalchemy.orm import contains_eager
 from sqlalchemy.orm import create_session
 from sqlalchemy.orm import joinedload
-from sqlalchemy.orm import joinedload_all
 from sqlalchemy.orm import mapper
 from sqlalchemy.orm import relationship
 from sqlalchemy.orm import Session
 from sqlalchemy.orm import subqueryload
-from sqlalchemy.orm import subqueryload_all
 from sqlalchemy.orm import with_polymorphic
 from sqlalchemy.testing import AssertsCompiledSQL
 from sqlalchemy.testing import eq_
@@ -956,7 +954,9 @@ class EagerToSubclassTest(fixtures.MappedTest):
         def go():
             eq_(
                 sess.query(Parent)
-                .options(subqueryload_all(Parent.children, Base.related))
+                .options(
+                    subqueryload(Parent.children).subqueryload(Base.related)
+                )
                 .order_by(Parent.data)
                 .all(),
                 [p1, p2],
@@ -973,7 +973,7 @@ class EagerToSubclassTest(fixtures.MappedTest):
         def go():
             eq_(
                 sess.query(pa)
-                .options(subqueryload_all(pa.children, Base.related))
+                .options(subqueryload(pa.children).subqueryload(Base.related))
                 .order_by(pa.data)
                 .all(),
                 [p1, p2],
@@ -1909,7 +1909,7 @@ class JoinedloadOverWPolyAliased(
 
         session = Session()
         q = session.query(cls).options(
-            joinedload_all(cls.links, Link.child, cls.links)
+            joinedload(cls.links).joinedload(Link.child).joinedload(cls.links)
         )
         if cls is self.classes.Sub1:
             extra = " WHERE parent.type IN (:type_1)"
@@ -1938,7 +1938,7 @@ class JoinedloadOverWPolyAliased(
 
         session = Session()
         q = session.query(Link).options(
-            joinedload_all(Link.child, parent_cls.owner)
+            joinedload(Link.child).joinedload(parent_cls.owner)
         )
 
         if Link.child.property.mapper.class_ is self.classes.Sub1:
index 0c2dcdf3daca27a64e3f76041c808de8d21e81f5..8317b58f1b4a32a76f8926a7fadffe4b3fba6a8a 100644 (file)
@@ -1309,9 +1309,10 @@ class EagerTest9(fixtures.MappedTest):
             acc = (
                 session.query(Account)
                 .options(
-                    sa.orm.joinedload_all(
-                        "entries.transaction.entries.account"
-                    )
+                    sa.orm.joinedload("entries")
+                    .joinedload("transaction")
+                    .joinedload("entries")
+                    .joinedload("account")
                 )
                 .order_by(Account.account_id)
             ).first()
index 2690c7442b2eb7e080d2b5b0ab6ed39032f14918..d99fcc77bba39aade5775cafc88dead14c9fc2aa 100644 (file)
@@ -7,7 +7,6 @@ from sqlalchemy.orm import attributes
 from sqlalchemy.orm import exc as orm_exc
 from sqlalchemy.orm import instrumentation
 from sqlalchemy.orm.collections import collection
-from sqlalchemy.orm.interfaces import AttributeExtension
 from sqlalchemy.orm.state import InstanceState
 from sqlalchemy.testing import assert_raises
 from sqlalchemy.testing import assert_raises_message
@@ -574,206 +573,6 @@ class AttributesTest(fixtures.ORMTest):
         eq_(u.addresses[0].email_address, "lala@123.com")
         eq_(u.addresses[1].email_address, "foo@bar.com")
 
-    def test_extension_commit_attr(self):
-        """test that an extension which commits attribute history
-        maintains the end-result history.
-
-        This won't work in conjunction with some unitofwork extensions.
-
-        """
-
-        class Foo(fixtures.BasicEntity):
-            pass
-
-        class Bar(fixtures.BasicEntity):
-            pass
-
-        class ReceiveEvents(AttributeExtension):
-            def __init__(self, key):
-                self.key = key
-
-            def append(self, state, child, initiator):
-                if commit:
-                    state._commit_all(state.dict)
-                return child
-
-            def remove(self, state, child, initiator):
-                if commit:
-                    state._commit_all(state.dict)
-                return child
-
-            def set(self, state, child, oldchild, initiator):
-                if commit:
-                    state._commit_all(state.dict)
-                return child
-
-        instrumentation.register_class(Foo)
-        instrumentation.register_class(Bar)
-
-        b1, b2, b3, b4 = Bar(id="b1"), Bar(id="b2"), Bar(id="b3"), Bar(id="b4")
-
-        def loadcollection(state, passive):
-            if passive is attributes.PASSIVE_NO_FETCH:
-                return attributes.PASSIVE_NO_RESULT
-            return [b1, b2]
-
-        def loadscalar(state, passive):
-            if passive is attributes.PASSIVE_NO_FETCH:
-                return attributes.PASSIVE_NO_RESULT
-            return b2
-
-        attributes.register_attribute(
-            Foo,
-            "bars",
-            uselist=True,
-            useobject=True,
-            callable_=loadcollection,
-            extension=[ReceiveEvents("bars")],
-        )
-
-        attributes.register_attribute(
-            Foo,
-            "bar",
-            uselist=False,
-            useobject=True,
-            callable_=loadscalar,
-            extension=[ReceiveEvents("bar")],
-        )
-
-        attributes.register_attribute(
-            Foo,
-            "scalar",
-            uselist=False,
-            useobject=False,
-            extension=[ReceiveEvents("scalar")],
-        )
-
-        def create_hist():
-            def hist(key, fn, *arg):
-                attributes.instance_state(f1)._commit_all(
-                    attributes.instance_dict(f1)
-                )
-                fn(*arg)
-                histories.append(attributes.get_history(f1, key))
-
-            f1 = Foo()
-            hist("bars", f1.bars.append, b3)
-            hist("bars", f1.bars.append, b4)
-            hist("bars", f1.bars.remove, b2)
-            hist("bar", setattr, f1, "bar", b3)
-            hist("bar", setattr, f1, "bar", None)
-            hist("bar", setattr, f1, "bar", b4)
-            hist("scalar", setattr, f1, "scalar", 5)
-            hist("scalar", setattr, f1, "scalar", None)
-            hist("scalar", setattr, f1, "scalar", 4)
-
-        histories = []
-        commit = False
-        create_hist()
-        without_commit = list(histories)
-        histories[:] = []
-        commit = True
-        create_hist()
-        with_commit = histories
-        for without, with_ in zip(without_commit, with_commit):
-            woc = without
-            wic = with_
-            eq_(woc, wic)
-
-    def test_extension_lazyload_assertion(self):
-        class Foo(fixtures.BasicEntity):
-            pass
-
-        class Bar(fixtures.BasicEntity):
-            pass
-
-        class ReceiveEvents(AttributeExtension):
-            def append(self, state, child, initiator):
-                state.obj().bars
-                return child
-
-            def remove(self, state, child, initiator):
-                state.obj().bars
-                return child
-
-            def set(self, state, child, oldchild, initiator):
-                return child
-
-        instrumentation.register_class(Foo)
-        instrumentation.register_class(Bar)
-
-        bar1, bar2, bar3 = [Bar(id=1), Bar(id=2), Bar(id=3)]
-
-        def func1(state, passive):
-            if passive is attributes.PASSIVE_NO_FETCH:
-                return attributes.PASSIVE_NO_RESULT
-
-            return [bar1, bar2, bar3]
-
-        attributes.register_attribute(
-            Foo,
-            "bars",
-            uselist=True,
-            callable_=func1,
-            useobject=True,
-            extension=[ReceiveEvents()],
-        )
-        attributes.register_attribute(
-            Bar, "foos", uselist=True, useobject=True, backref="bars"
-        )
-
-        x = Foo()
-        assert_raises(AssertionError, Bar(id=4).foos.append, x)
-
-        x.bars
-        b = Bar(id=4)
-        b.foos.append(x)
-        attributes.instance_state(x)._expire_attributes(
-            attributes.instance_dict(x), ["bars"]
-        )
-        assert_raises(AssertionError, b.foos.remove, x)
-
-    def test_scalar_listener(self):
-
-        # listeners on ScalarAttributeImpl aren't used normally. test that
-        # they work for the benefit of user extensions
-
-        class Foo(object):
-
-            pass
-
-        results = []
-
-        class ReceiveEvents(AttributeExtension):
-            def append(self, state, child, initiator):
-                assert False
-
-            def remove(self, state, child, initiator):
-                results.append(("remove", state.obj(), child))
-
-            def set(self, state, child, oldchild, initiator):
-                results.append(("set", state.obj(), child, oldchild))
-                return child
-
-        instrumentation.register_class(Foo)
-        attributes.register_attribute(
-            Foo, "x", uselist=False, useobject=False, extension=ReceiveEvents()
-        )
-
-        f = Foo()
-        f.x = 5
-        f.x = 17
-        del f.x
-
-        eq_(
-            results,
-            [
-                ("set", f, 5, attributes.NEVER_SET),
-                ("set", f, 17, 5),
-                ("remove", f, 17),
-            ],
-        )
-
     def test_lazytrackparent(self):
         """test that the "hasparent" flag works properly
            when lazy loaders and backrefs are used
index 4e40ac346d1544574525cb8c8301078a55bfde07..83f4f44511fdbde710855f845b9c630303cfb9fd 100644 (file)
@@ -1,11 +1,10 @@
 from operator import and_
 
-import sqlalchemy as sa
+from sqlalchemy import event
 from sqlalchemy import exc as sa_exc
 from sqlalchemy import ForeignKey
 from sqlalchemy import Integer
 from sqlalchemy import String
-from sqlalchemy import testing
 from sqlalchemy import text
 from sqlalchemy import util
 from sqlalchemy.orm import attributes
@@ -24,12 +23,17 @@ from sqlalchemy.testing.schema import Column
 from sqlalchemy.testing.schema import Table
 
 
-class Canary(sa.orm.interfaces.AttributeExtension):
+class Canary(object):
     def __init__(self):
         self.data = set()
         self.added = set()
         self.removed = set()
 
+    def listen(self, attr):
+        event.listen(attr, "append", self.append)
+        event.listen(attr, "remove", self.remove)
+        event.listen(attr, "set", self.set)
+
     def append(self, obj, value, initiator):
         assert value not in self.added
         self.data.add(value)
@@ -91,14 +95,14 @@ class CollectionsTest(fixtures.ORMTest):
 
         canary = Canary()
         instrumentation.register_class(Foo)
-        attributes.register_attribute(
+        d = attributes.register_attribute(
             Foo,
             "attr",
             uselist=True,
-            extension=canary,
             typecallable=typecallable,
             useobject=True,
         )
+        canary.listen(d)
 
         obj = Foo()
         adapter = collections.collection_adapter(obj.attr)
@@ -142,14 +146,14 @@ class CollectionsTest(fixtures.ORMTest):
 
         canary = Canary()
         instrumentation.register_class(Foo)
-        attributes.register_attribute(
+        d = attributes.register_attribute(
             Foo,
             "attr",
             uselist=True,
-            extension=canary,
             typecallable=typecallable,
             useobject=True,
         )
+        canary.listen(d)
 
         obj = Foo()
         adapter = collections.collection_adapter(obj.attr)
@@ -371,14 +375,14 @@ class CollectionsTest(fixtures.ORMTest):
 
         canary = Canary()
         instrumentation.register_class(Foo)
-        attributes.register_attribute(
+        d = attributes.register_attribute(
             Foo,
             "attr",
             uselist=True,
-            extension=canary,
             typecallable=typecallable,
             useobject=True,
         )
+        canary.listen(d)
 
         obj = Foo()
         direct = obj.attr
@@ -578,14 +582,14 @@ class CollectionsTest(fixtures.ORMTest):
 
         canary = Canary()
         instrumentation.register_class(Foo)
-        attributes.register_attribute(
+        d = attributes.register_attribute(
             Foo,
             "attr",
             uselist=True,
-            extension=canary,
             typecallable=typecallable,
             useobject=True,
         )
+        canary.listen(d)
 
         obj = Foo()
         adapter = collections.collection_adapter(obj.attr)
@@ -846,14 +850,14 @@ class CollectionsTest(fixtures.ORMTest):
 
         canary = Canary()
         instrumentation.register_class(Foo)
-        attributes.register_attribute(
+        d = attributes.register_attribute(
             Foo,
             "attr",
             uselist=True,
-            extension=canary,
             typecallable=typecallable,
             useobject=True,
         )
+        canary.listen(d)
 
         obj = Foo()
         direct = obj.attr
@@ -986,14 +990,14 @@ class CollectionsTest(fixtures.ORMTest):
 
         canary = Canary()
         instrumentation.register_class(Foo)
-        attributes.register_attribute(
+        d = attributes.register_attribute(
             Foo,
             "attr",
             uselist=True,
-            extension=canary,
             typecallable=typecallable,
             useobject=True,
         )
+        canary.listen(d)
 
         obj = Foo()
         adapter = collections.collection_adapter(obj.attr)
@@ -1114,14 +1118,14 @@ class CollectionsTest(fixtures.ORMTest):
 
         canary = Canary()
         instrumentation.register_class(Foo)
-        attributes.register_attribute(
+        d = attributes.register_attribute(
             Foo,
             "attr",
             uselist=True,
-            extension=canary,
             typecallable=typecallable,
             useobject=True,
         )
+        canary.listen(d)
 
         obj = Foo()
         direct = obj.attr
@@ -1228,38 +1232,6 @@ class CollectionsTest(fixtures.ORMTest):
         self._test_dict_bulk(MyOrdered)
         self.assert_(getattr(MyOrdered, "_sa_instrumented") == id(MyOrdered))
 
-    @testing.uses_deprecated(r".*Please refer to the .*bulk_replace listener")
-    def test_dict_subclass4(self):
-        # tests #2654
-        class MyDict(collections.MappedCollection):
-            def __init__(self):
-                super(MyDict, self).__init__(lambda value: "k%d" % value)
-
-            @collection.converter
-            def _convert(self, dictlike):
-                for key, value in dictlike.items():
-                    yield value + 5
-
-        class Foo(object):
-            pass
-
-        canary = Canary()
-
-        instrumentation.register_class(Foo)
-        attributes.register_attribute(
-            Foo,
-            "attr",
-            uselist=True,
-            extension=canary,
-            typecallable=MyDict,
-            useobject=True,
-        )
-
-        f = Foo()
-        f.attr = {"k1": 1, "k2": 2}
-
-        eq_(f.attr, {"k7": 7, "k6": 6})
-
     def test_dict_duck(self):
         class DictLike(object):
             def __init__(self):
@@ -1371,14 +1343,14 @@ class CollectionsTest(fixtures.ORMTest):
 
         canary = Canary()
         instrumentation.register_class(Foo)
-        attributes.register_attribute(
+        d = attributes.register_attribute(
             Foo,
             "attr",
             uselist=True,
-            extension=canary,
             typecallable=typecallable,
             useobject=True,
         )
+        canary.listen(d)
 
         obj = Foo()
         adapter = collections.collection_adapter(obj.attr)
@@ -1532,14 +1504,10 @@ class CollectionsTest(fixtures.ORMTest):
 
         canary = Canary()
         instrumentation.register_class(Foo)
-        attributes.register_attribute(
-            Foo,
-            "attr",
-            uselist=True,
-            extension=canary,
-            typecallable=Custom,
-            useobject=True,
+        d = attributes.register_attribute(
+            Foo, "attr", uselist=True, typecallable=Custom, useobject=True
         )
+        canary.listen(d)
 
         obj = Foo()
         adapter = collections.collection_adapter(obj.attr)
@@ -1610,9 +1578,10 @@ class CollectionsTest(fixtures.ORMTest):
         canary = Canary()
         creator = self.entity_maker
         instrumentation.register_class(Foo)
-        attributes.register_attribute(
-            Foo, "attr", uselist=True, extension=canary, useobject=True
+        d = attributes.register_attribute(
+            Foo, "attr", uselist=True, useobject=True
         )
+        canary.listen(d)
 
         obj = Foo()
         col1 = obj.attr
@@ -2420,77 +2389,6 @@ class InstrumentationTest(fixtures.ORMTest):
 
         collections._instrument_class(Touchy)
 
-    @testing.uses_deprecated(r".*Please refer to the .*bulk_replace listener")
-    def test_name_setup(self):
-        class Base(object):
-            @collection.iterator
-            def base_iterate(self, x):
-                return "base_iterate"
-
-            @collection.appender
-            def base_append(self, x):
-                return "base_append"
-
-            @collection.converter
-            def base_convert(self, x):
-                return "base_convert"
-
-            @collection.remover
-            def base_remove(self, x):
-                return "base_remove"
-
-        from sqlalchemy.orm.collections import _instrument_class
-
-        _instrument_class(Base)
-
-        eq_(Base._sa_remover(Base(), 5), "base_remove")
-        eq_(Base._sa_appender(Base(), 5), "base_append")
-        eq_(Base._sa_iterator(Base(), 5), "base_iterate")
-        eq_(Base._sa_converter(Base(), 5), "base_convert")
-
-        class Sub(Base):
-            @collection.converter
-            def base_convert(self, x):
-                return "sub_convert"
-
-            @collection.remover
-            def sub_remove(self, x):
-                return "sub_remove"
-
-        _instrument_class(Sub)
-
-        eq_(Sub._sa_appender(Sub(), 5), "base_append")
-        eq_(Sub._sa_remover(Sub(), 5), "sub_remove")
-        eq_(Sub._sa_iterator(Sub(), 5), "base_iterate")
-        eq_(Sub._sa_converter(Sub(), 5), "sub_convert")
-
-    @testing.uses_deprecated(r".*Please refer to the .*init_collection")
-    def test_link_event(self):
-        canary = []
-
-        class Collection(list):
-            @collection.linker
-            def _on_link(self, obj):
-                canary.append(obj)
-
-        class Foo(object):
-            pass
-
-        instrumentation.register_class(Foo)
-        attributes.register_attribute(
-            Foo, "attr", uselist=True, typecallable=Collection, useobject=True
-        )
-
-        f1 = Foo()
-        f1.attr.append(3)
-
-        eq_(canary, [f1.attr._sa_adapter])
-        adapter_1 = f1.attr._sa_adapter
-
-        l2 = Collection()
-        f1.attr = l2
-        eq_(canary, [adapter_1, f1.attr._sa_adapter, None])
-
     def test_referenced_by_owner(self):
         class Foo(object):
             pass
index 40e72ad74057f056bc39d65933582b7be1712468..e870b305738016d8e8e95b53c30d2e85140fca85 100644 (file)
@@ -40,29 +40,27 @@ class TriggerDefaultsTest(fixtures.MappedTest):
                 "CREATE TRIGGER dt_ins AFTER INSERT ON dt "
                 "FOR EACH ROW BEGIN "
                 "UPDATE dt SET col2='ins', col4='ins' "
-                "WHERE dt.id = NEW.id; END",
-                on="sqlite",
-            ),
+                "WHERE dt.id = NEW.id; END"
+            ).execute_if(dialect="sqlite"),
             sa.DDL(
                 "CREATE TRIGGER dt_ins ON dt AFTER INSERT AS "
                 "UPDATE dt SET col2='ins', col4='ins' "
-                "WHERE dt.id IN (SELECT id FROM inserted);",
-                on="mssql",
-            ),
+                "WHERE dt.id IN (SELECT id FROM inserted);"
+            ).execute_if(dialect="mssql"),
             sa.DDL(
                 "CREATE TRIGGER dt_ins BEFORE INSERT "
                 "ON dt "
                 "FOR EACH ROW "
                 "BEGIN "
-                ":NEW.col2 := 'ins'; :NEW.col4 := 'ins'; END;",
-                on="oracle",
-            ),
+                ":NEW.col2 := 'ins'; :NEW.col4 := 'ins'; END;"
+            ).execute_if(dialect="oracle"),
             sa.DDL(
                 "CREATE TRIGGER dt_ins BEFORE INSERT ON dt "
                 "FOR EACH ROW BEGIN "
-                "SET NEW.col2='ins'; SET NEW.col4='ins'; END",
-                on=lambda ddl, event, target, bind, **kw: bind.engine.name
-                not in ("oracle", "mssql", "sqlite"),
+                "SET NEW.col2='ins'; SET NEW.col4='ins'; END"
+            ).execute_if(
+                callable_=lambda ddl, target, bind, **kw: bind.engine.name
+                not in ("oracle", "mssql", "sqlite")
             ),
         ):
             event.listen(dt, "after_create", ins)
@@ -74,27 +72,25 @@ class TriggerDefaultsTest(fixtures.MappedTest):
                 "CREATE TRIGGER dt_up AFTER UPDATE ON dt "
                 "FOR EACH ROW BEGIN "
                 "UPDATE dt SET col3='up', col4='up' "
-                "WHERE dt.id = OLD.id; END",
-                on="sqlite",
-            ),
+                "WHERE dt.id = OLD.id; END"
+            ).execute_if(dialect="sqlite"),
             sa.DDL(
                 "CREATE TRIGGER dt_up ON dt AFTER UPDATE AS "
                 "UPDATE dt SET col3='up', col4='up' "
-                "WHERE dt.id IN (SELECT id FROM deleted);",
-                on="mssql",
-            ),
+                "WHERE dt.id IN (SELECT id FROM deleted);"
+            ).execute_if(dialect="mssql"),
             sa.DDL(
                 "CREATE TRIGGER dt_up BEFORE UPDATE ON dt "
                 "FOR EACH ROW BEGIN "
-                ":NEW.col3 := 'up'; :NEW.col4 := 'up'; END;",
-                on="oracle",
-            ),
+                ":NEW.col3 := 'up'; :NEW.col4 := 'up'; END;"
+            ).execute_if(dialect="oracle"),
             sa.DDL(
                 "CREATE TRIGGER dt_up BEFORE UPDATE ON dt "
                 "FOR EACH ROW BEGIN "
-                "SET NEW.col3='up'; SET NEW.col4='up'; END",
-                on=lambda ddl, event, target, bind, **kw: bind.engine.name
-                not in ("oracle", "mssql", "sqlite"),
+                "SET NEW.col3='up'; SET NEW.col4='up'; END"
+            ).execute_if(
+                callable_=lambda ddl, target, bind, **kw: bind.engine.name
+                not in ("oracle", "mssql", "sqlite")
             ),
         ):
             event.listen(dt, "after_create", up)
index 3c817285029410025e7b8e38cdc4b68bc7b917ff..551952cfe1c3a47a661163b75b85d379527494c6 100644 (file)
@@ -966,7 +966,9 @@ class DeferredOptionsTest(AssertsCompiledSQL, _fixtures.FixtureTest):
         )
 
         q = sess.query(User).options(
-            defer(User.orders, Order.items, Item.description)
+            defaultload(User.orders)
+            .defaultload(Order.items)
+            .defer(Item.description)
         )
         self.assert_compile(q, exp)
 
index 195012b9941f044a8aa5f699d07b8f7cb60f7419..04dff0252b47a25570e1b3c3f41c00cfd8d12008 100644 (file)
-"""The collection of modern alternatives to deprecated & removed functionality.
-
-Collects specimens of old ORM code and explicitly covers the recommended
-modern (i.e. not deprecated) alternative to them.  The tests snippets here can
-be migrated directly to the wiki, docs, etc.
-
-.. deprecated::
-
-    This test suite is interested in extremely old (pre 0.5) patterns
-    and in modern use illustrates trivial use cases that don't need
-    an additional test suite.
-
-"""
-from sqlalchemy import ForeignKey
+import sqlalchemy as sa
+from sqlalchemy import event
+from sqlalchemy import exc
 from sqlalchemy import func
 from sqlalchemy import Integer
+from sqlalchemy import MetaData
+from sqlalchemy import select
 from sqlalchemy import String
-from sqlalchemy import text
+from sqlalchemy import testing
+from sqlalchemy.ext.declarative import comparable_using
+from sqlalchemy.ext.declarative import declarative_base
+from sqlalchemy.orm import AttributeExtension
+from sqlalchemy.orm import attributes
+from sqlalchemy.orm import collections
+from sqlalchemy.orm import column_property
+from sqlalchemy.orm import comparable_property
+from sqlalchemy.orm import composite
+from sqlalchemy.orm import configure_mappers
 from sqlalchemy.orm import create_session
+from sqlalchemy.orm import defer
+from sqlalchemy.orm import deferred
+from sqlalchemy.orm import EXT_CONTINUE
+from sqlalchemy.orm import identity
+from sqlalchemy.orm import instrumentation
+from sqlalchemy.orm import joinedload
+from sqlalchemy.orm import joinedload_all
 from sqlalchemy.orm import mapper
+from sqlalchemy.orm import MapperExtension
+from sqlalchemy.orm import PropComparator
 from sqlalchemy.orm import relationship
+from sqlalchemy.orm import Session
+from sqlalchemy.orm import SessionExtension
 from sqlalchemy.orm import sessionmaker
+from sqlalchemy.orm import synonym
+from sqlalchemy.orm import undefer
+from sqlalchemy.orm.collections import collection
+from sqlalchemy.testing import assert_raises
+from sqlalchemy.testing import assert_raises_message
+from sqlalchemy.testing import assertions
+from sqlalchemy.testing import AssertsCompiledSQL
+from sqlalchemy.testing import engines
+from sqlalchemy.testing import eq_
 from sqlalchemy.testing import fixtures
+from sqlalchemy.testing import is_
 from sqlalchemy.testing.schema import Column
 from sqlalchemy.testing.schema import Table
+from sqlalchemy.testing.util import gc_collect
+from sqlalchemy.util.compat import pypy
+from . import _fixtures
+from .test_options import PathTest as OptionsPathTest
+from .test_transaction import _LocalFixture
+
+
+class DeprecationWarningsTest(fixtures.DeclarativeMappedTest):
+    run_setup_classes = "each"
+    run_setup_mappers = "each"
+    run_define_tables = "each"
+    run_create_tables = None
+
+    def test_attribute_extension(self):
+        class SomeExtension(AttributeExtension):
+            def append(self, obj, value, initiator):
+                pass
+
+            def remove(self, obj, value, initiator):
+                pass
+
+            def set(self, obj, value, oldvalue, initiator):
+                pass
+
+        with assertions.expect_deprecated(
+            ".*The column_property.extension parameter will be removed in a "
+            "future release."
+        ):
+
+            class Foo(self.DeclarativeBasic):
+                __tablename__ = "foo"
+
+                id = Column(Integer, primary_key=True)
+                foo = column_property(
+                    Column("q", Integer), extension=SomeExtension()
+                )
+
+        with assertions.expect_deprecated(
+            "AttributeExtension.append is deprecated.  The "
+            "AttributeExtension class will be removed in a future release.",
+            "AttributeExtension.remove is deprecated.  The "
+            "AttributeExtension class will be removed in a future release.",
+            "AttributeExtension.set is deprecated.  The "
+            "AttributeExtension class will be removed in a future release.",
+        ):
+            configure_mappers()
+
+    def test_attribute_extension_parameter(self):
+        class SomeExtension(AttributeExtension):
+            def append(self, obj, value, initiator):
+                pass
+
+        with assertions.expect_deprecated(
+            ".*The relationship.extension parameter will be removed in a "
+            "future release."
+        ):
+            relationship("Bar", extension=SomeExtension)
+
+        with assertions.expect_deprecated(
+            ".*The column_property.extension parameter will be removed in a "
+            "future release."
+        ):
+            column_property(Column("q", Integer), extension=SomeExtension)
+
+        with assertions.expect_deprecated(
+            ".*The composite.extension parameter will be removed in a "
+            "future release."
+        ):
+            composite("foo", extension=SomeExtension)
+
+    def test_session_extension(self):
+        class SomeExtension(SessionExtension):
+            def after_commit(self, session):
+                pass
+
+            def after_rollback(self, session):
+                pass
+
+            def before_flush(self, session, flush_context, instances):
+                pass
+
+        with assertions.expect_deprecated(
+            ".*The Session.extension parameter will be removed",
+            "SessionExtension.after_commit is deprecated.  "
+            "The SessionExtension class",
+            "SessionExtension.before_flush is deprecated.  "
+            "The SessionExtension class",
+            "SessionExtension.after_rollback is deprecated.  "
+            "The SessionExtension class",
+        ):
+            Session(extension=SomeExtension())
+
+    def test_mapper_extension(self):
+        class SomeExtension(MapperExtension):
+            def init_instance(
+                self, mapper, class_, oldinit, instance, args, kwargs
+            ):
+                pass
+
+            def init_failed(
+                self, mapper, class_, oldinit, instance, args, kwargs
+            ):
+                pass
+
+        with assertions.expect_deprecated(
+            "MapperExtension.init_instance is deprecated.  "
+            "The MapperExtension class",
+            "MapperExtension.init_failed is deprecated.  "
+            "The MapperExtension class",
+            ".*The mapper.extension parameter will be removed",
+        ):
+
+            class Foo(self.DeclarativeBasic):
+                __tablename__ = "foo"
+
+                id = Column(Integer, primary_key=True)
+
+                __mapper_args__ = {"extension": SomeExtension()}
+
+    def test_session_weak_identity_map(self):
+        with testing.expect_deprecated(
+            ".*Session.weak_identity_map parameter as well as the"
+        ):
+            s = Session(weak_identity_map=True)
+
+        is_(s._identity_cls, identity.WeakInstanceDict)
+
+        with assertions.expect_deprecated(
+            "The Session.weak_identity_map parameter as well as"
+        ):
+            s = Session(weak_identity_map=False)
+
+            is_(s._identity_cls, identity.StrongInstanceDict)
+
+        s = Session()
+        is_(s._identity_cls, identity.WeakInstanceDict)
+
+    def test_session_prune(self):
+        s = Session()
+
+        with assertions.expect_deprecated(
+            r"The Session.prune\(\) method is deprecated along with "
+            "Session.weak_identity_map"
+        ):
+            s.prune()
+
+    def test_session_enable_transaction_accounting(self):
+        with assertions.expect_deprecated(
+            "the Session._enable_transaction_accounting parameter is "
+            "deprecated"
+        ):
+            s = Session(_enable_transaction_accounting=False)
+
+    def test_session_is_modified(self):
+        class Foo(self.DeclarativeBasic):
+            __tablename__ = "foo"
+
+            id = Column(Integer, primary_key=True)
+
+        f1 = Foo()
+        s = Session()
+        with assertions.expect_deprecated(
+            "The Session.is_modified.passive flag is deprecated"
+        ):
+            # this flag was for a long time documented as requiring
+            # that it be set to True, so we've changed the default here
+            # so that the warning emits
+            s.is_modified(f1, passive=True)
+
+
+class DeprecatedAccountingFlagsTest(_LocalFixture):
+    def test_rollback_no_accounting(self):
+        User, users = self.classes.User, self.tables.users
+
+        with testing.expect_deprecated(
+            "The Session._enable_transaction_accounting parameter"
+        ):
+            sess = sessionmaker(_enable_transaction_accounting=False)()
+        u1 = User(name="ed")
+        sess.add(u1)
+        sess.commit()
+
+        u1.name = "edwardo"
+        sess.rollback()
+
+        testing.db.execute(
+            users.update(users.c.name == "ed").values(name="edward")
+        )
+
+        assert u1.name == "edwardo"
+        sess.expire_all()
+        assert u1.name == "edward"
+
+    def test_commit_no_accounting(self):
+        User, users = self.classes.User, self.tables.users
 
+        with testing.expect_deprecated(
+            "The Session._enable_transaction_accounting parameter"
+        ):
+            sess = sessionmaker(_enable_transaction_accounting=False)()
+        u1 = User(name="ed")
+        sess.add(u1)
+        sess.commit()
 
-class QueryAlternativesTest(fixtures.MappedTest):
-    r'''Collects modern idioms for Queries
+        u1.name = "edwardo"
+        sess.rollback()
 
-    The docstring for each test case serves as miniature documentation about
-    the deprecated use case, and the test body illustrates (and covers) the
-    intended replacement code to accomplish the same task.
+        testing.db.execute(
+            users.update(users.c.name == "ed").values(name="edward")
+        )
 
-    Documenting the "old way" including the argument signature helps these
-    cases remain useful to readers even after the deprecated method has been
-    removed from the modern codebase.
+        assert u1.name == "edwardo"
+        sess.commit()
 
-    Format::
+        assert testing.db.execute(select([users.c.name])).fetchall() == [
+            ("edwardo",)
+        ]
+        assert u1.name == "edwardo"
 
-        def test_deprecated_thing(self):
-            """Query.methodname(old, arg, **signature)
+        sess.delete(u1)
+        sess.commit()
 
-            output = session.query(User).deprecatedmethod(inputs)
+    def test_preflush_no_accounting(self):
+        User, users = self.classes.User, self.tables.users
 
-            """
+        with testing.expect_deprecated(
+            "The Session._enable_transaction_accounting parameter"
+        ):
+            sess = Session(
+                _enable_transaction_accounting=False,
+                autocommit=True,
+                autoflush=False,
+            )
+        u1 = User(name="ed")
+        sess.add(u1)
+        sess.flush()
 
-            # 0.4+
-            output = session.query(User).newway(inputs)
-            assert output is correct
+        sess.begin()
+        u1.name = "edwardo"
+        u2 = User(name="some other user")
+        sess.add(u2)
 
-            # 0.5+
-            output = session.query(User).evennewerway(inputs)
-            assert output is correct
+        sess.rollback()
 
-    '''
+        sess.begin()
+        assert testing.db.execute(select([users.c.name])).fetchall() == [
+            ("ed",)
+        ]
 
-    run_inserts = "once"
-    run_deletes = None
+
+class TLTransactionTest(fixtures.MappedTest):
+    run_dispose_bind = "once"
+    __backend__ = True
 
     @classmethod
-    def define_tables(cls, metadata):
-        Table(
-            "users_table",
-            metadata,
-            Column("id", Integer, primary_key=True),
-            Column("name", String(64)),
-        )
+    def setup_bind(cls):
+        with testing.expect_deprecated(
+            ".*'threadlocal' engine strategy is deprecated"
+        ):
+            return engines.testing_engine(options=dict(strategy="threadlocal"))
 
+    @classmethod
+    def define_tables(cls, metadata):
         Table(
-            "addresses_table",
+            "users",
             metadata,
-            Column("id", Integer, primary_key=True),
-            Column("user_id", Integer, ForeignKey("users_table.id")),
-            Column("email_address", String(128)),
-            Column("purpose", String(16)),
-            Column("bounces", Integer, default=0),
+            Column(
+                "id", Integer, primary_key=True, test_needs_autoincrement=True
+            ),
+            Column("name", String(20)),
+            test_needs_acid=True,
         )
 
     @classmethod
@@ -82,511 +322,1843 @@ class QueryAlternativesTest(fixtures.MappedTest):
         class User(cls.Basic):
             pass
 
-        class Address(cls.Basic):
-            pass
-
     @classmethod
     def setup_mappers(cls):
-        addresses_table, User, users_table, Address = (
-            cls.tables.addresses_table,
-            cls.classes.User,
-            cls.tables.users_table,
-            cls.classes.Address,
-        )
-
-        mapper(
-            User,
-            users_table,
-            properties=dict(addresses=relationship(Address, backref="user")),
-        )
-        mapper(Address, addresses_table)
+        users, User = cls.tables.users, cls.classes.User
 
-    @classmethod
-    def fixtures(cls):
-        return dict(
-            users_table=(
-                ("id", "name"),
-                (1, "jack"),
-                (2, "ed"),
-                (3, "fred"),
-                (4, "chuck"),
-            ),
-            addresses_table=(
-                ("id", "user_id", "email_address", "purpose", "bounces"),
-                (1, 1, "jack@jack.home", "Personal", 0),
-                (2, 1, "jack@jack.bizz", "Work", 1),
-                (3, 2, "ed@foo.bar", "Personal", 0),
-                (4, 3, "fred@the.fred", "Personal", 10),
-            ),
-        )
+        mapper(User, users)
 
-    ######################################################################
+    @testing.exclude("mysql", "<", (5, 0, 3), "FIXME: unknown")
+    def test_session_nesting(self):
+        User = self.classes.User
 
-    def test_override_get(self):
-        """MapperExtension.get()
+        sess = create_session(bind=self.bind)
+        self.bind.begin()
+        u = User(name="ed")
+        sess.add(u)
+        sess.flush()
+        self.bind.commit()
 
-        x = session.query.get(5)
 
-        """
+class DeprecatedSessionFeatureTest(_fixtures.FixtureTest):
+    run_inserts = None
 
-        Address = self.classes.Address
+    def test_fast_discard_race(self):
+        # test issue #4068
+        users, User = self.tables.users, self.classes.User
 
-        from sqlalchemy.orm.query import Query
+        mapper(User, users)
 
-        cache = {}
+        with testing.expect_deprecated(".*identity map are deprecated"):
+            sess = Session(weak_identity_map=False)
 
-        class MyQuery(Query):
-            def get(self, ident, **kwargs):
-                if ident in cache:
-                    return cache[ident]
-                else:
-                    x = super(MyQuery, self).get(ident)
-                    cache[ident] = x
-                    return x
+        u1 = User(name="u1")
+        sess.add(u1)
+        sess.commit()
 
-        session = sessionmaker(query_cls=MyQuery)()
+        u1_state = u1._sa_instance_state
+        sess.identity_map._dict.pop(u1_state.key)
+        ref = u1_state.obj
+        u1_state.obj = lambda: None
 
-        ad1 = session.query(Address).get(1)
-        assert ad1 in list(cache.values())
+        u2 = sess.query(User).first()
+        u1_state._cleanup(ref)
 
-    def test_load(self):
-        """x = session.query(Address).load(1)
+        u3 = sess.query(User).first()
 
-            x = session.load(Address, 1)
+        is_(u2, u3)
 
-        """
+        u2_state = u2._sa_instance_state
+        assert sess.identity_map.contains_state(u2._sa_instance_state)
+        ref = u2_state.obj
+        u2_state.obj = lambda: None
+        u2_state._cleanup(ref)
+        assert not sess.identity_map.contains_state(u2._sa_instance_state)
 
-        Address = self.classes.Address
+    def test_is_modified_passive_on(self):
+        User, Address = self.classes.User, self.classes.Address
+        users, addresses = self.tables.users, self.tables.addresses
+        mapper(User, users, properties={"addresses": relationship(Address)})
+        mapper(Address, addresses)
 
-        session = create_session()
-        ad1 = session.query(Address).populate_existing().get(1)
-        assert bool(ad1)
+        s = Session()
+        u = User(name="fred", addresses=[Address(email_address="foo")])
+        s.add(u)
+        s.commit()
 
-    def test_apply_max(self):
-        """Query.apply_max(col)
+        u.id
 
-        max = session.query(Address).apply_max(Address.bounces)
+        def go():
+            assert not s.is_modified(u, passive=True)
 
-        """
+        with testing.expect_deprecated(
+            ".*Session.is_modified.passive flag is deprecated "
+        ):
+            self.assert_sql_count(testing.db, go, 0)
 
-        Address = self.classes.Address
+        u.name = "newname"
 
-        session = create_session()
+        def go():
+            assert s.is_modified(u, passive=True)
 
-        # 0.5.0
-        maxes = list(session.query(Address).values(func.max(Address.bounces)))
-        max_ = maxes[0][0]
-        assert max_ == 10
+        with testing.expect_deprecated(
+            ".*Session.is_modified.passive flag is deprecated "
+        ):
+            self.assert_sql_count(testing.db, go, 0)
 
-        max_ = session.query(func.max(Address.bounces)).one()[0]
-        assert max_ == 10
 
-    def test_apply_min(self):
-        """Query.apply_min(col)
+class StrongIdentityMapTest(_fixtures.FixtureTest):
+    run_inserts = None
 
-        min = session.query(Address).apply_min(Address.bounces)
+    def _strong_ident_fixture(self):
+        with testing.expect_deprecated(
+            ".*Session.weak_identity_map parameter as well as the"
+        ):
+            sess = create_session(weak_identity_map=False)
 
-        """
+        def prune():
+            with testing.expect_deprecated(".*Session.prune"):
+                return sess.prune()
 
-        Address = self.classes.Address
+        return sess, prune
 
+    def _event_fixture(self):
         session = create_session()
 
-        # 0.5.0
-        mins = list(session.query(Address).values(func.min(Address.bounces)))
-        min_ = mins[0][0]
-        assert min_ == 0
+        @event.listens_for(session, "pending_to_persistent")
+        @event.listens_for(session, "deleted_to_persistent")
+        @event.listens_for(session, "detached_to_persistent")
+        @event.listens_for(session, "loaded_as_persistent")
+        def strong_ref_object(sess, instance):
+            if "refs" not in sess.info:
+                sess.info["refs"] = refs = set()
+            else:
+                refs = sess.info["refs"]
+
+            refs.add(instance)
+
+        @event.listens_for(session, "persistent_to_detached")
+        @event.listens_for(session, "persistent_to_deleted")
+        @event.listens_for(session, "persistent_to_transient")
+        def deref_object(sess, instance):
+            sess.info["refs"].discard(instance)
+
+        def prune():
+            if "refs" not in session.info:
+                return 0
+
+            sess_size = len(session.identity_map)
+            session.info["refs"].clear()
+            gc_collect()
+            session.info["refs"] = set(
+                s.obj() for s in session.identity_map.all_states()
+            )
+            return sess_size - len(session.identity_map)
+
+        return session, prune
+
+    def test_strong_ref_imap(self):
+        self._test_strong_ref(self._strong_ident_fixture)
+
+    def test_strong_ref_events(self):
+        self._test_strong_ref(self._event_fixture)
+
+    def _test_strong_ref(self, fixture):
+        s, prune = fixture()
+
+        users, User = self.tables.users, self.classes.User
+
+        mapper(User, users)
+
+        # save user
+        s.add(User(name="u1"))
+        s.flush()
+        user = s.query(User).one()
+        user = None
+        print(s.identity_map)
+        gc_collect()
+        assert len(s.identity_map) == 1
+
+        user = s.query(User).one()
+        assert not s.identity_map._modified
+        user.name = "u2"
+        assert s.identity_map._modified
+        s.flush()
+        eq_(users.select().execute().fetchall(), [(user.id, "u2")])
+
+    def test_prune_imap(self):
+        self._test_prune(self._strong_ident_fixture)
+
+    def test_prune_events(self):
+        self._test_prune(self._event_fixture)
+
+    @testing.fails_if(lambda: pypy, "pypy has a real GC")
+    @testing.fails_on("+zxjdbc", "http://www.sqlalchemy.org/trac/ticket/1473")
+    def _test_prune(self, fixture):
+        s, prune = fixture()
+
+        users, User = self.tables.users, self.classes.User
+
+        mapper(User, users)
+
+        for o in [User(name="u%s" % x) for x in range(10)]:
+            s.add(o)
+        # o is still live after this loop...
+
+        self.assert_(len(s.identity_map) == 0)
+        eq_(prune(), 0)
+        s.flush()
+        gc_collect()
+        eq_(prune(), 9)
+        # o is still in local scope here, so still present
+        self.assert_(len(s.identity_map) == 1)
+
+        id_ = o.id
+        del o
+        eq_(prune(), 1)
+        self.assert_(len(s.identity_map) == 0)
+
+        u = s.query(User).get(id_)
+        eq_(prune(), 0)
+        self.assert_(len(s.identity_map) == 1)
+        u.name = "squiznart"
+        del u
+        eq_(prune(), 0)
+        self.assert_(len(s.identity_map) == 1)
+        s.flush()
+        eq_(prune(), 1)
+        self.assert_(len(s.identity_map) == 0)
+
+        s.add(User(name="x"))
+        eq_(prune(), 0)
+        self.assert_(len(s.identity_map) == 0)
+        s.flush()
+        self.assert_(len(s.identity_map) == 1)
+        eq_(prune(), 1)
+        self.assert_(len(s.identity_map) == 0)
+
+        u = s.query(User).get(id_)
+        s.delete(u)
+        del u
+        eq_(prune(), 0)
+        self.assert_(len(s.identity_map) == 1)
+        s.flush()
+        eq_(prune(), 0)
+        self.assert_(len(s.identity_map) == 0)
+
+
+class DeprecatedMapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
+    __dialect__ = "default"
+
+    def test_cancel_order_by(self):
+        users, User = self.tables.users, self.classes.User
+
+        with testing.expect_deprecated(
+            "The Mapper.order_by parameter is deprecated, and will be "
+            "removed in a future release."
+        ):
+            mapper(User, users, order_by=users.c.name.desc())
+
+        assert (
+            "order by users.name desc"
+            in str(create_session().query(User).statement).lower()
+        )
+        assert (
+            "order by"
+            not in str(
+                create_session().query(User).order_by(None).statement
+            ).lower()
+        )
+        assert (
+            "order by users.name asc"
+            in str(
+                create_session()
+                .query(User)
+                .order_by(User.name.asc())
+                .statement
+            ).lower()
+        )
 
-        min_ = session.query(func.min(Address.bounces)).one()[0]
-        assert min_ == 0
+        eq_(
+            create_session().query(User).all(),
+            [
+                User(id=7, name="jack"),
+                User(id=9, name="fred"),
+                User(id=8, name="ed"),
+                User(id=10, name="chuck"),
+            ],
+        )
 
-    def test_apply_avg(self):
-        """Query.apply_avg(col)
+        eq_(
+            create_session().query(User).order_by(User.name).all(),
+            [
+                User(id=10, name="chuck"),
+                User(id=8, name="ed"),
+                User(id=9, name="fred"),
+                User(id=7, name="jack"),
+            ],
+        )
 
-        avg = session.query(Address).apply_avg(Address.bounces)
+    def test_comparable(self):
+        users = self.tables.users
 
-        """
+        class extendedproperty(property):
+            attribute = 123
 
-        Address = self.classes.Address
+            def method1(self):
+                return "method1"
 
-        session = create_session()
+        from sqlalchemy.orm.properties import ColumnProperty
 
-        avgs = list(session.query(Address).values(func.avg(Address.bounces)))
-        avg = avgs[0][0]
-        assert avg > 0 and avg < 10
+        class UCComparator(ColumnProperty.Comparator):
+            __hash__ = None
 
-        avg = session.query(func.avg(Address.bounces)).one()[0]
-        assert avg > 0 and avg < 10
+            def method1(self):
+                return "uccmethod1"
 
-    def test_apply_sum(self):
-        """Query.apply_sum(col)
+            def method2(self, other):
+                return "method2"
 
-        avg = session.query(Address).apply_avg(Address.bounces)
+            def __eq__(self, other):
+                cls = self.prop.parent.class_
+                col = getattr(cls, "name")
+                if other is None:
+                    return col is None
+                else:
+                    return sa.func.upper(col) == sa.func.upper(other)
+
+        def map_(with_explicit_property):
+            class User(object):
+                @extendedproperty
+                def uc_name(self):
+                    if self.name is None:
+                        return None
+                    return self.name.upper()
+
+            if with_explicit_property:
+                args = (UCComparator, User.uc_name)
+            else:
+                args = (UCComparator,)
+
+            with assertions.expect_deprecated(
+                r"comparable_property\(\) is deprecated and will be "
+                "removed in a future release."
+            ):
+                mapper(
+                    User,
+                    users,
+                    properties=dict(uc_name=sa.orm.comparable_property(*args)),
+                )
+                return User
+
+        for User in (map_(True), map_(False)):
+            sess = create_session()
+            sess.begin()
+            q = sess.query(User)
+
+            assert hasattr(User, "name")
+            assert hasattr(User, "uc_name")
+
+            eq_(User.uc_name.method1(), "method1")
+            eq_(User.uc_name.method2("x"), "method2")
+
+            assert_raises_message(
+                AttributeError,
+                "Neither 'extendedproperty' object nor 'UCComparator' "
+                "object associated with User.uc_name has an attribute "
+                "'nonexistent'",
+                getattr,
+                User.uc_name,
+                "nonexistent",
+            )
 
-        """
+            # test compile
+            assert not isinstance(User.uc_name == "jack", bool)
+            u = q.filter(User.uc_name == "JACK").one()
 
-        Address = self.classes.Address
+            assert u.uc_name == "JACK"
+            assert u not in sess.dirty
 
-        session = create_session()
+            u.name = "some user name"
+            eq_(u.name, "some user name")
+            assert u in sess.dirty
+            eq_(u.uc_name, "SOME USER NAME")
 
-        avgs = list(session.query(Address).values(func.sum(Address.bounces)))
-        avg = avgs[0][0]
-        assert avg == 11
+            sess.flush()
+            sess.expunge_all()
 
-        avg = session.query(func.sum(Address.bounces)).one()[0]
-        assert avg == 11
+            q = sess.query(User)
+            u2 = q.filter(User.name == "some user name").one()
+            u3 = q.filter(User.uc_name == "SOME USER NAME").one()
 
-    def test_count_by(self):
-        r"""Query.count_by(\*args, \**params)
+            assert u2 is u3
 
-        num = session.query(Address).count_by(purpose='Personal')
+            eq_(User.uc_name.attribute, 123)
+            sess.rollback()
 
-        # old-style implicit *_by join
-        num = session.query(User).count_by(purpose='Personal')
+    def test_comparable_column(self):
+        users, User = self.tables.users, self.classes.User
 
-        """
+        class MyComparator(sa.orm.properties.ColumnProperty.Comparator):
+            __hash__ = None
 
-        User, Address = self.classes.User, self.classes.Address
+            def __eq__(self, other):
+                # lower case comparison
+                return func.lower(self.__clause_element__()) == func.lower(
+                    other
+                )
 
-        session = create_session()
+            def intersects(self, other):
+                # non-standard comparator
+                return self.__clause_element__().op("&=")(other)
+
+        mapper(
+            User,
+            users,
+            properties={
+                "name": sa.orm.column_property(
+                    users.c.name, comparator_factory=MyComparator
+                )
+            },
+        )
 
-        num = session.query(Address).filter_by(purpose="Personal").count()
-        assert num == 3, num
+        assert_raises_message(
+            AttributeError,
+            "Neither 'InstrumentedAttribute' object nor "
+            "'MyComparator' object associated with User.name has "
+            "an attribute 'nonexistent'",
+            getattr,
+            User.name,
+            "nonexistent",
+        )
 
-        num = (
-            session.query(User)
-            .join("addresses")
-            .filter(Address.purpose == "Personal")
-        ).count()
-        assert num == 3, num
+        eq_(
+            str(
+                (User.name == "ed").compile(
+                    dialect=sa.engine.default.DefaultDialect()
+                )
+            ),
+            "lower(users.name) = lower(:lower_1)",
+        )
+        eq_(
+            str(
+                (User.name.intersects("ed")).compile(
+                    dialect=sa.engine.default.DefaultDialect()
+                )
+            ),
+            "users.name &= :name_1",
+        )
 
-    def test_count_whereclause(self):
-        r"""Query.count(whereclause=None, params=None, \**kwargs)
+    def test_info(self):
+        users = self.tables.users
+        Address = self.classes.Address
 
-        num = session.query(Address).count(address_table.c.bounces > 1)
+        class MyComposite(object):
+            pass
 
-        """
+        with assertions.expect_deprecated(
+            r"comparable_property\(\) is deprecated and will be "
+            "removed in a future release."
+        ):
+            for constructor, args in [(comparable_property, "foo")]:
+                obj = constructor(info={"x": "y"}, *args)
+                eq_(obj.info, {"x": "y"})
+                obj.info["q"] = "p"
+                eq_(obj.info, {"x": "y", "q": "p"})
 
-        Address = self.classes.Address
+                obj = constructor(*args)
+                eq_(obj.info, {})
+                obj.info["q"] = "p"
+                eq_(obj.info, {"q": "p"})
 
-        session = create_session()
+    def test_add_property(self):
+        users = self.tables.users
 
-        num = session.query(Address).filter(Address.bounces > 1).count()
-        assert num == 1, num
+        assert_col = []
 
-    def test_execute(self):
-        r"""Query.execute(clauseelement, params=None, \*args, \**kwargs)
+        class User(fixtures.ComparableEntity):
+            def _get_name(self):
+                assert_col.append(("get", self._name))
+                return self._name
 
-        users = session.query(User).execute(users_table.select())
+            def _set_name(self, name):
+                assert_col.append(("set", name))
+                self._name = name
 
-        """
+            name = property(_get_name, _set_name)
 
-        User, users_table = self.classes.User, self.tables.users_table
+            def _uc_name(self):
+                if self._name is None:
+                    return None
+                return self._name.upper()
 
-        session = create_session()
+            uc_name = property(_uc_name)
+            uc_name2 = property(_uc_name)
 
-        users = session.query(User).from_statement(users_table.select()).all()
-        assert len(users) == 4
+        m = mapper(User, users)
 
-    def test_get_by(self):
-        r"""Query.get_by(\*args, \**params)
+        class UCComparator(PropComparator):
+            __hash__ = None
 
-        user = session.query(User).get_by(name='ed')
+            def __eq__(self, other):
+                cls = self.prop.parent.class_
+                col = getattr(cls, "name")
+                if other is None:
+                    return col is None
+                else:
+                    return func.upper(col) == func.upper(other)
+
+        m.add_property("_name", deferred(users.c.name))
+        m.add_property("name", synonym("_name"))
+        with assertions.expect_deprecated(
+            r"comparable_property\(\) is deprecated and will be "
+            "removed in a future release."
+        ):
+            m.add_property("uc_name", comparable_property(UCComparator))
+            m.add_property(
+                "uc_name2", comparable_property(UCComparator, User.uc_name2)
+            )
 
-        # 0.3-style implicit *_by join
-        user = session.query(User).get_by(email_addresss='fred@the.fred')
+        sess = create_session(autocommit=False)
+        assert sess.query(User).get(7)
 
-        """
+        u = sess.query(User).filter_by(name="jack").one()
 
-        User, Address = self.classes.User, self.classes.Address
+        def go():
+            eq_(u.name, "jack")
+            eq_(u.uc_name, "JACK")
+            eq_(u.uc_name2, "JACK")
+            eq_(assert_col, [("get", "jack")], str(assert_col))
 
-        session = create_session()
+        self.sql_count_(1, go)
 
-        user = session.query(User).filter_by(name="ed").first()
-        assert user.name == "ed"
+    def test_kwarg_accepted(self):
+        users, Address = self.tables.users, self.classes.Address
 
-        user = (
-            session.query(User)
-            .join("addresses")
-            .filter(Address.email_address == "fred@the.fred")
-        ).first()
-        assert user.name == "fred"
+        class DummyComposite(object):
+            def __init__(self, x, y):
+                pass
 
-        user = (
-            session.query(User)
-            .filter(
-                User.addresses.any(Address.email_address == "fred@the.fred")
+        class MyFactory(PropComparator):
+            pass
+
+        with assertions.expect_deprecated(
+            r"comparable_property\(\) is deprecated and will be "
+            "removed in a future release."
+        ):
+            for args in ((comparable_property,),):
+                fn = args[0]
+                args = args[1:]
+                fn(comparator_factory=MyFactory, *args)
+
+    def test_merge_synonym_comparable(self):
+        users = self.tables.users
+
+        class User(object):
+            class Comparator(PropComparator):
+                pass
+
+            def _getValue(self):
+                return self._value
+
+            def _setValue(self, value):
+                setattr(self, "_value", value)
+
+            value = property(_getValue, _setValue)
+
+        with assertions.expect_deprecated(
+            r"comparable_property\(\) is deprecated and will be "
+            "removed in a future release."
+        ):
+            mapper(
+                User,
+                users,
+                properties={
+                    "uid": synonym("id"),
+                    "foobar": comparable_property(User.Comparator, User.value),
+                },
             )
-            .first()
+
+        sess = create_session()
+        u = User()
+        u.name = "ed"
+        sess.add(u)
+        sess.flush()
+        sess.expunge(u)
+        sess.merge(u)
+
+
+class DeprecatedDeclTest(fixtures.TestBase):
+    @testing.provide_metadata
+    def test_comparable_using(self):
+        class NameComparator(sa.orm.PropComparator):
+            @property
+            def upperself(self):
+                cls = self.prop.parent.class_
+                col = getattr(cls, "name")
+                return sa.func.upper(col)
+
+            def operate(self, op, other, **kw):
+                return op(self.upperself, other, **kw)
+
+        Base = declarative_base(metadata=self.metadata)
+
+        with testing.expect_deprecated(
+            r"comparable_property\(\) is deprecated and will be "
+            "removed in a future release."
+        ):
+
+            class User(Base, fixtures.ComparableEntity):
+
+                __tablename__ = "users"
+                id = Column(
+                    "id",
+                    Integer,
+                    primary_key=True,
+                    test_needs_autoincrement=True,
+                )
+                name = Column("name", String(50))
+
+                @comparable_using(NameComparator)
+                @property
+                def uc_name(self):
+                    return self.name is not None and self.name.upper() or None
+
+        Base.metadata.create_all()
+        sess = create_session()
+        u1 = User(name="someuser")
+        eq_(u1.name, "someuser", u1.name)
+        eq_(u1.uc_name, "SOMEUSER", u1.uc_name)
+        sess.add(u1)
+        sess.flush()
+        sess.expunge_all()
+        rt = sess.query(User).filter(User.uc_name == "SOMEUSER").one()
+        eq_(rt, u1)
+        sess.expunge_all()
+        rt = sess.query(User).filter(User.uc_name.startswith("SOMEUSE")).one()
+        eq_(rt, u1)
+
+
+class DeprecatedMapperExtensionTest(_fixtures.FixtureTest):
+
+    """Superseded by MapperEventsTest - test backwards
+    compatibility of MapperExtension."""
+
+    run_inserts = None
+
+    def extension(self):
+        methods = []
+
+        class Ext(MapperExtension):
+            def instrument_class(self, mapper, cls):
+                methods.append("instrument_class")
+                return EXT_CONTINUE
+
+            def init_instance(
+                self, mapper, class_, oldinit, instance, args, kwargs
+            ):
+                methods.append("init_instance")
+                return EXT_CONTINUE
+
+            def init_failed(
+                self, mapper, class_, oldinit, instance, args, kwargs
+            ):
+                methods.append("init_failed")
+                return EXT_CONTINUE
+
+            def reconstruct_instance(self, mapper, instance):
+                methods.append("reconstruct_instance")
+                return EXT_CONTINUE
+
+            def before_insert(self, mapper, connection, instance):
+                methods.append("before_insert")
+                return EXT_CONTINUE
+
+            def after_insert(self, mapper, connection, instance):
+                methods.append("after_insert")
+                return EXT_CONTINUE
+
+            def before_update(self, mapper, connection, instance):
+                methods.append("before_update")
+                return EXT_CONTINUE
+
+            def after_update(self, mapper, connection, instance):
+                methods.append("after_update")
+                return EXT_CONTINUE
+
+            def before_delete(self, mapper, connection, instance):
+                methods.append("before_delete")
+                return EXT_CONTINUE
+
+            def after_delete(self, mapper, connection, instance):
+                methods.append("after_delete")
+                return EXT_CONTINUE
+
+        return Ext, methods
+
+    def test_basic(self):
+        """test that common user-defined methods get called."""
+
+        User, users = self.classes.User, self.tables.users
+
+        Ext, methods = self.extension()
+
+        with testing.expect_deprecated(
+            "MapperExtension is deprecated in favor of the MapperEvents",
+            "MapperExtension.before_insert is deprecated",
+            "MapperExtension.instrument_class is deprecated",
+            "MapperExtension.init_instance is deprecated",
+            "MapperExtension.after_insert is deprecated",
+            "MapperExtension.reconstruct_instance is deprecated",
+            "MapperExtension.before_delete is deprecated",
+            "MapperExtension.after_delete is deprecated",
+            "MapperExtension.before_update is deprecated",
+            "MapperExtension.after_update is deprecated",
+            "MapperExtension.init_failed is deprecated",
+        ):
+            mapper(User, users, extension=Ext())
+        sess = create_session()
+        u = User(name="u1")
+        sess.add(u)
+        sess.flush()
+        u = sess.query(User).populate_existing().get(u.id)
+        sess.expunge_all()
+        u = sess.query(User).get(u.id)
+        u.name = "u1 changed"
+        sess.flush()
+        sess.delete(u)
+        sess.flush()
+        eq_(
+            methods,
+            [
+                "instrument_class",
+                "init_instance",
+                "before_insert",
+                "after_insert",
+                "reconstruct_instance",
+                "before_update",
+                "after_update",
+                "before_delete",
+                "after_delete",
+            ],
+        )
+
+    def test_inheritance(self):
+        users, addresses, User = (
+            self.tables.users,
+            self.tables.addresses,
+            self.classes.User,
+        )
+
+        Ext, methods = self.extension()
+
+        class AdminUser(User):
+            pass
+
+        with testing.expect_deprecated(
+            "MapperExtension is deprecated in favor of the MapperEvents",
+            "MapperExtension.before_insert is deprecated",
+            "MapperExtension.instrument_class is deprecated",
+            "MapperExtension.init_instance is deprecated",
+            "MapperExtension.after_insert is deprecated",
+            "MapperExtension.reconstruct_instance is deprecated",
+            "MapperExtension.before_delete is deprecated",
+            "MapperExtension.after_delete is deprecated",
+            "MapperExtension.before_update is deprecated",
+            "MapperExtension.after_update is deprecated",
+            "MapperExtension.init_failed is deprecated",
+        ):
+            mapper(User, users, extension=Ext())
+        mapper(
+            AdminUser,
+            addresses,
+            inherits=User,
+            properties={"address_id": addresses.c.id},
         )
-        assert user.name == "fred"
 
-    def test_instances_entities(self):
-        r"""Query.instances(cursor, \*mappers_or_columns, \**kwargs)
+        sess = create_session()
+        am = AdminUser(name="au1", email_address="au1@e1")
+        sess.add(am)
+        sess.flush()
+        am = sess.query(AdminUser).populate_existing().get(am.id)
+        sess.expunge_all()
+        am = sess.query(AdminUser).get(am.id)
+        am.name = "au1 changed"
+        sess.flush()
+        sess.delete(am)
+        sess.flush()
+        eq_(
+            methods,
+            [
+                "instrument_class",
+                "instrument_class",
+                "init_instance",
+                "before_insert",
+                "after_insert",
+                "reconstruct_instance",
+                "before_update",
+                "after_update",
+                "before_delete",
+                "after_delete",
+            ],
+        )
 
-        sel = users_table.join(addresses_table).select(use_labels=True)
-        res = session.query(User).instances(sel.execute(), Address)
+    def test_before_after_only_collection(self):
+        """before_update is called on parent for collection modifications,
+        after_update is called even if no columns were updated.
 
         """
 
-        addresses_table, User, users_table, Address = (
-            self.tables.addresses_table,
-            self.classes.User,
-            self.tables.users_table,
-            self.classes.Address,
+        keywords, items, item_keywords, Keyword, Item = (
+            self.tables.keywords,
+            self.tables.items,
+            self.tables.item_keywords,
+            self.classes.Keyword,
+            self.classes.Item,
         )
 
-        session = create_session()
+        Ext1, methods1 = self.extension()
+        Ext2, methods2 = self.extension()
+
+        with testing.expect_deprecated(
+            "MapperExtension is deprecated in favor of the MapperEvents",
+            "MapperExtension.before_insert is deprecated",
+            "MapperExtension.instrument_class is deprecated",
+            "MapperExtension.init_instance is deprecated",
+            "MapperExtension.after_insert is deprecated",
+            "MapperExtension.reconstruct_instance is deprecated",
+            "MapperExtension.before_delete is deprecated",
+            "MapperExtension.after_delete is deprecated",
+            "MapperExtension.before_update is deprecated",
+            "MapperExtension.after_update is deprecated",
+            "MapperExtension.init_failed is deprecated",
+        ):
+            mapper(
+                Item,
+                items,
+                extension=Ext1(),
+                properties={
+                    "keywords": relationship(Keyword, secondary=item_keywords)
+                },
+            )
+        with testing.expect_deprecated(
+            "MapperExtension is deprecated in favor of the MapperEvents",
+            "MapperExtension.before_insert is deprecated",
+            "MapperExtension.instrument_class is deprecated",
+            "MapperExtension.init_instance is deprecated",
+            "MapperExtension.after_insert is deprecated",
+            "MapperExtension.reconstruct_instance is deprecated",
+            "MapperExtension.before_delete is deprecated",
+            "MapperExtension.after_delete is deprecated",
+            "MapperExtension.before_update is deprecated",
+            "MapperExtension.after_update is deprecated",
+            "MapperExtension.init_failed is deprecated",
+        ):
+            mapper(Keyword, keywords, extension=Ext2())
+
+        sess = create_session()
+        i1 = Item(description="i1")
+        k1 = Keyword(name="k1")
+        sess.add(i1)
+        sess.add(k1)
+        sess.flush()
+        eq_(
+            methods1,
+            [
+                "instrument_class",
+                "init_instance",
+                "before_insert",
+                "after_insert",
+            ],
+        )
+        eq_(
+            methods2,
+            [
+                "instrument_class",
+                "init_instance",
+                "before_insert",
+                "after_insert",
+            ],
+        )
 
-        sel = users_table.join(addresses_table).select(use_labels=True)
-        res = list(session.query(User, Address).instances(sel.execute()))
+        del methods1[:]
+        del methods2[:]
+        i1.keywords.append(k1)
+        sess.flush()
+        eq_(methods1, ["before_update", "after_update"])
+        eq_(methods2, [])
 
-        assert len(res) == 4
-        cola, colb = res[0]
-        assert isinstance(cola, User) and isinstance(colb, Address)
+    def test_inheritance_with_dupes(self):
+        """Inheritance with the same extension instance on both mappers."""
 
-    def test_join_by(self):
-        r"""Query.join_by(\*args, \**params)
+        users, addresses, User = (
+            self.tables.users,
+            self.tables.addresses,
+            self.classes.User,
+        )
 
-        TODO
-        """
+        Ext, methods = self.extension()
 
-        session = create_session()
+        class AdminUser(User):
+            pass
 
-    def test_join_to(self):
-        """Query.join_to(key)
+        ext = Ext()
+        with testing.expect_deprecated(
+            "MapperExtension is deprecated in favor of the MapperEvents",
+            "MapperExtension.before_insert is deprecated",
+            "MapperExtension.instrument_class is deprecated",
+            "MapperExtension.init_instance is deprecated",
+            "MapperExtension.after_insert is deprecated",
+            "MapperExtension.reconstruct_instance is deprecated",
+            "MapperExtension.before_delete is deprecated",
+            "MapperExtension.after_delete is deprecated",
+            "MapperExtension.before_update is deprecated",
+            "MapperExtension.after_update is deprecated",
+            "MapperExtension.init_failed is deprecated",
+        ):
+            mapper(User, users, extension=ext)
+
+        with testing.expect_deprecated(
+            "MapperExtension is deprecated in favor of the MapperEvents"
+        ):
+            mapper(
+                AdminUser,
+                addresses,
+                inherits=User,
+                extension=ext,
+                properties={"address_id": addresses.c.id},
+            )
 
-        TODO
-        """
+        sess = create_session()
+        am = AdminUser(name="au1", email_address="au1@e1")
+        sess.add(am)
+        sess.flush()
+        am = sess.query(AdminUser).populate_existing().get(am.id)
+        sess.expunge_all()
+        am = sess.query(AdminUser).get(am.id)
+        am.name = "au1 changed"
+        sess.flush()
+        sess.delete(am)
+        sess.flush()
+        eq_(
+            methods,
+            [
+                "instrument_class",
+                "instrument_class",
+                "init_instance",
+                "before_insert",
+                "after_insert",
+                "reconstruct_instance",
+                "before_update",
+                "after_update",
+                "before_delete",
+                "after_delete",
+            ],
+        )
 
-        session = create_session()
+    def test_unnecessary_methods_not_evented(self):
+        users = self.tables.users
 
-    def test_join_via(self):
-        """Query.join_via(keys)
+        class MyExtension(MapperExtension):
+            def before_insert(self, mapper, connection, instance):
+                pass
+
+        class Foo(object):
+            pass
+
+        with testing.expect_deprecated(
+            "MapperExtension is deprecated in favor of the MapperEvents",
+            "MapperExtension.before_insert is deprecated",
+        ):
+            m = mapper(Foo, users, extension=MyExtension())
+        assert not m.class_manager.dispatch.load
+        assert not m.dispatch.before_update
+        assert len(m.dispatch.before_insert) == 1
+
+
+class DeprecatedSessionExtensionTest(_fixtures.FixtureTest):
+    run_inserts = None
+
+    def test_extension(self):
+        User, users = self.classes.User, self.tables.users
+
+        mapper(User, users)
+        log = []
+
+        class MyExt(SessionExtension):
+            def before_commit(self, session):
+                log.append("before_commit")
+
+            def after_commit(self, session):
+                log.append("after_commit")
+
+            def after_rollback(self, session):
+                log.append("after_rollback")
+
+            def before_flush(self, session, flush_context, objects):
+                log.append("before_flush")
+
+            def after_flush(self, session, flush_context):
+                log.append("after_flush")
+
+            def after_flush_postexec(self, session, flush_context):
+                log.append("after_flush_postexec")
+
+            def after_begin(self, session, transaction, connection):
+                log.append("after_begin")
+
+            def after_attach(self, session, instance):
+                log.append("after_attach")
+
+            def after_bulk_update(self, session, query, query_context, result):
+                log.append("after_bulk_update")
+
+            def after_bulk_delete(self, session, query, query_context, result):
+                log.append("after_bulk_delete")
+
+        with testing.expect_deprecated(
+            "SessionExtension is deprecated in favor of " "the SessionEvents",
+            "SessionExtension.before_commit is deprecated",
+            "SessionExtension.after_commit is deprecated",
+            "SessionExtension.after_begin is deprecated",
+            "SessionExtension.after_attach is deprecated",
+            "SessionExtension.before_flush is deprecated",
+            "SessionExtension.after_flush is deprecated",
+            "SessionExtension.after_flush_postexec is deprecated",
+            "SessionExtension.after_rollback is deprecated",
+            "SessionExtension.after_bulk_update is deprecated",
+            "SessionExtension.after_bulk_delete is deprecated",
+        ):
+            sess = create_session(extension=MyExt())
+        u = User(name="u1")
+        sess.add(u)
+        sess.flush()
+        assert log == [
+            "after_attach",
+            "before_flush",
+            "after_begin",
+            "after_flush",
+            "after_flush_postexec",
+            "before_commit",
+            "after_commit",
+        ]
+        log = []
+        with testing.expect_deprecated(
+            "SessionExtension is deprecated in favor of " "the SessionEvents",
+            "SessionExtension.before_commit is deprecated",
+            "SessionExtension.after_commit is deprecated",
+            "SessionExtension.after_begin is deprecated",
+            "SessionExtension.after_attach is deprecated",
+            "SessionExtension.before_flush is deprecated",
+            "SessionExtension.after_flush is deprecated",
+            "SessionExtension.after_flush_postexec is deprecated",
+            "SessionExtension.after_rollback is deprecated",
+            "SessionExtension.after_bulk_update is deprecated",
+            "SessionExtension.after_bulk_delete is deprecated",
+        ):
+            sess = create_session(autocommit=False, extension=MyExt())
+        u = User(name="u1")
+        sess.add(u)
+        sess.flush()
+        assert log == [
+            "after_attach",
+            "before_flush",
+            "after_begin",
+            "after_flush",
+            "after_flush_postexec",
+        ]
+        log = []
+        u.name = "ed"
+        sess.commit()
+        assert log == [
+            "before_commit",
+            "before_flush",
+            "after_flush",
+            "after_flush_postexec",
+            "after_commit",
+        ]
+        log = []
+        sess.commit()
+        assert log == ["before_commit", "after_commit"]
+        log = []
+        sess.query(User).delete()
+        assert log == ["after_begin", "after_bulk_delete"]
+        log = []
+        sess.query(User).update({"name": "foo"})
+        assert log == ["after_bulk_update"]
+        log = []
+        with testing.expect_deprecated(
+            "SessionExtension is deprecated in favor of " "the SessionEvents",
+            "SessionExtension.before_commit is deprecated",
+            "SessionExtension.after_commit is deprecated",
+            "SessionExtension.after_begin is deprecated",
+            "SessionExtension.after_attach is deprecated",
+            "SessionExtension.before_flush is deprecated",
+            "SessionExtension.after_flush is deprecated",
+            "SessionExtension.after_flush_postexec is deprecated",
+            "SessionExtension.after_rollback is deprecated",
+            "SessionExtension.after_bulk_update is deprecated",
+            "SessionExtension.after_bulk_delete is deprecated",
+        ):
+            sess = create_session(
+                autocommit=False, extension=MyExt(), bind=testing.db
+            )
+        sess.connection()
+        assert log == ["after_begin"]
+        sess.close()
+
+    def test_multiple_extensions(self):
+        User, users = self.classes.User, self.tables.users
+
+        log = []
+
+        class MyExt1(SessionExtension):
+            def before_commit(self, session):
+                log.append("before_commit_one")
+
+        class MyExt2(SessionExtension):
+            def before_commit(self, session):
+                log.append("before_commit_two")
+
+        mapper(User, users)
+        with testing.expect_deprecated(
+            "SessionExtension is deprecated in favor of " "the SessionEvents",
+            "SessionExtension.before_commit is deprecated",
+        ):
+            sess = create_session(extension=[MyExt1(), MyExt2()])
+        u = User(name="u1")
+        sess.add(u)
+        sess.flush()
+        assert log == ["before_commit_one", "before_commit_two"]
+
+    def test_unnecessary_methods_not_evented(self):
+        class MyExtension(SessionExtension):
+            def before_commit(self, session):
+                pass
+
+        with testing.expect_deprecated(
+            "SessionExtension is deprecated in favor of " "the SessionEvents",
+            "SessionExtension.before_commit is deprecated.",
+        ):
+            s = Session(extension=MyExtension())
+        assert not s.dispatch.after_commit
+        assert len(s.dispatch.before_commit) == 1
+
+
+class DeprecatedAttributeExtensionTest1(fixtures.ORMTest):
+    def test_extension_commit_attr(self):
+        """test that an extension which commits attribute history
+        maintains the end-result history.
+
+        This won't work in conjunction with some unitofwork extensions.
 
-        TODO
         """
 
-        session = create_session()
+        class Foo(fixtures.BasicEntity):
+            pass
 
-    def test_list(self):
-        """Query.list()
+        class Bar(fixtures.BasicEntity):
+            pass
 
-        users = session.query(User).list()
+        class ReceiveEvents(AttributeExtension):
+            def __init__(self, key):
+                self.key = key
+
+            def append(self, state, child, initiator):
+                if commit:
+                    state._commit_all(state.dict)
+                return child
+
+            def remove(self, state, child, initiator):
+                if commit:
+                    state._commit_all(state.dict)
+                return child
+
+            def set(self, state, child, oldchild, initiator):
+                if commit:
+                    state._commit_all(state.dict)
+                return child
+
+        instrumentation.register_class(Foo)
+        instrumentation.register_class(Bar)
+
+        b1, b2, b3, b4 = Bar(id="b1"), Bar(id="b2"), Bar(id="b3"), Bar(id="b4")
+
+        def loadcollection(state, passive):
+            if passive is attributes.PASSIVE_NO_FETCH:
+                return attributes.PASSIVE_NO_RESULT
+            return [b1, b2]
+
+        def loadscalar(state, passive):
+            if passive is attributes.PASSIVE_NO_FETCH:
+                return attributes.PASSIVE_NO_RESULT
+            return b2
+
+        with testing.expect_deprecated(
+            "AttributeExtension.append is deprecated.",
+            "AttributeExtension.remove is deprecated.",
+            "AttributeExtension.set is deprecated.",
+        ):
+            attributes.register_attribute(
+                Foo,
+                "bars",
+                uselist=True,
+                useobject=True,
+                callable_=loadcollection,
+                extension=[ReceiveEvents("bars")],
+            )
 
-        """
+        with testing.expect_deprecated(
+            "AttributeExtension.append is deprecated.",
+            "AttributeExtension.remove is deprecated.",
+            "AttributeExtension.set is deprecated.",
+        ):
+            attributes.register_attribute(
+                Foo,
+                "bar",
+                uselist=False,
+                useobject=True,
+                callable_=loadscalar,
+                extension=[ReceiveEvents("bar")],
+            )
 
-        User = self.classes.User
+        with testing.expect_deprecated(
+            "AttributeExtension.append is deprecated.",
+            "AttributeExtension.remove is deprecated.",
+            "AttributeExtension.set is deprecated.",
+        ):
+            attributes.register_attribute(
+                Foo,
+                "scalar",
+                uselist=False,
+                useobject=False,
+                extension=[ReceiveEvents("scalar")],
+            )
 
-        session = create_session()
+        def create_hist():
+            def hist(key, fn, *arg):
+                attributes.instance_state(f1)._commit_all(
+                    attributes.instance_dict(f1)
+                )
+                fn(*arg)
+                histories.append(attributes.get_history(f1, key))
+
+            f1 = Foo()
+            hist("bars", f1.bars.append, b3)
+            hist("bars", f1.bars.append, b4)
+            hist("bars", f1.bars.remove, b2)
+            hist("bar", setattr, f1, "bar", b3)
+            hist("bar", setattr, f1, "bar", None)
+            hist("bar", setattr, f1, "bar", b4)
+            hist("scalar", setattr, f1, "scalar", 5)
+            hist("scalar", setattr, f1, "scalar", None)
+            hist("scalar", setattr, f1, "scalar", 4)
+
+        histories = []
+        commit = False
+        create_hist()
+        without_commit = list(histories)
+        histories[:] = []
+        commit = True
+        create_hist()
+        with_commit = histories
+        for without, with_ in zip(without_commit, with_commit):
+            woc = without
+            wic = with_
+            eq_(woc, wic)
+
+    def test_extension_lazyload_assertion(self):
+        class Foo(fixtures.BasicEntity):
+            pass
 
-        users = session.query(User).all()
-        assert len(users) == 4
+        class Bar(fixtures.BasicEntity):
+            pass
 
-    def test_scalar(self):
-        """Query.scalar()
+        class ReceiveEvents(AttributeExtension):
+            def append(self, state, child, initiator):
+                state.obj().bars
+                return child
+
+            def remove(self, state, child, initiator):
+                state.obj().bars
+                return child
+
+            def set(self, state, child, oldchild, initiator):
+                return child
+
+        instrumentation.register_class(Foo)
+        instrumentation.register_class(Bar)
+
+        bar1, bar2, bar3 = [Bar(id=1), Bar(id=2), Bar(id=3)]
+
+        def func1(state, passive):
+            if passive is attributes.PASSIVE_NO_FETCH:
+                return attributes.PASSIVE_NO_RESULT
+
+            return [bar1, bar2, bar3]
+
+        with testing.expect_deprecated(
+            "AttributeExtension.append is deprecated.",
+            "AttributeExtension.remove is deprecated.",
+            "AttributeExtension.set is deprecated.",
+        ):
+            attributes.register_attribute(
+                Foo,
+                "bars",
+                uselist=True,
+                callable_=func1,
+                useobject=True,
+                extension=[ReceiveEvents()],
+            )
+        attributes.register_attribute(
+            Bar, "foos", uselist=True, useobject=True, backref="bars"
+        )
 
-        user = session.query(User).filter(User.id==1).scalar()
+        x = Foo()
+        assert_raises(AssertionError, Bar(id=4).foos.append, x)
 
-        """
+        x.bars
+        b = Bar(id=4)
+        b.foos.append(x)
+        attributes.instance_state(x)._expire_attributes(
+            attributes.instance_dict(x), ["bars"]
+        )
+        assert_raises(AssertionError, b.foos.remove, x)
 
-        User = self.classes.User
+    def test_scalar_listener(self):
 
-        session = create_session()
+        # listeners on ScalarAttributeImpl aren't used normally. test that
+        # they work for the benefit of user extensions
 
-        user = session.query(User).filter(User.id == 1).first()
-        assert user.id == 1
+        class Foo(object):
 
-    def test_select(self):
-        r"""Query.select(arg=None, \**kwargs)
+            pass
 
-        users = session.query(User).select(users_table.c.name != None)
+        results = []
+
+        class ReceiveEvents(AttributeExtension):
+            def append(self, state, child, initiator):
+                assert False
+
+            def remove(self, state, child, initiator):
+                results.append(("remove", state.obj(), child))
+
+            def set(self, state, child, oldchild, initiator):
+                results.append(("set", state.obj(), child, oldchild))
+                return child
+
+        instrumentation.register_class(Foo)
+        with testing.expect_deprecated(
+            "AttributeExtension.append is deprecated.",
+            "AttributeExtension.remove is deprecated.",
+            "AttributeExtension.set is deprecated.",
+        ):
+            attributes.register_attribute(
+                Foo,
+                "x",
+                uselist=False,
+                useobject=False,
+                extension=ReceiveEvents(),
+            )
 
-        """
+        f = Foo()
+        f.x = 5
+        f.x = 17
+        del f.x
+
+        eq_(
+            results,
+            [
+                ("set", f, 5, attributes.NEVER_SET),
+                ("set", f, 17, 5),
+                ("remove", f, 17),
+            ],
+        )
 
-        User = self.classes.User
+    def test_cascading_extensions(self):
+        t1 = Table(
+            "t1",
+            MetaData(),
+            Column("id", Integer, primary_key=True),
+            Column("type", String(40)),
+            Column("data", String(50)),
+        )
 
-        session = create_session()
+        ext_msg = []
 
-        users = session.query(User).filter(User.name != None).all()  # noqa
-        assert len(users) == 4
+        class Ex1(AttributeExtension):
+            def set(self, state, value, oldvalue, initiator):
+                ext_msg.append("Ex1 %r" % value)
+                return "ex1" + value
 
-    def test_select_by(self):
-        r"""Query.select_by(\*args, \**params)
+        class Ex2(AttributeExtension):
+            def set(self, state, value, oldvalue, initiator):
+                ext_msg.append("Ex2 %r" % value)
+                return "ex2" + value
 
-        users = session.query(User).select_by(name='fred')
+        class A(fixtures.BasicEntity):
+            pass
 
-        # 0.3 magic join on \*_by methods
-        users = session.query(User).select_by(email_address='fred@the.fred')
+        class B(A):
+            pass
 
-        """
+        class C(B):
+            pass
 
-        User, Address = self.classes.User, self.classes.Address
+        with testing.expect_deprecated(
+            "AttributeExtension is deprecated in favor of the "
+            "AttributeEvents listener interface.  "
+            "The column_property.extension parameter"
+        ):
+            mapper(
+                A,
+                t1,
+                polymorphic_on=t1.c.type,
+                polymorphic_identity="a",
+                properties={
+                    "data": column_property(t1.c.data, extension=Ex1())
+                },
+            )
+        mapper(B, polymorphic_identity="b", inherits=A)
+        with testing.expect_deprecated(
+            "AttributeExtension is deprecated in favor of the "
+            "AttributeEvents listener interface.  "
+            "The column_property.extension parameter"
+        ):
+            mapper(
+                C,
+                polymorphic_identity="c",
+                inherits=B,
+                properties={
+                    "data": column_property(t1.c.data, extension=Ex2())
+                },
+            )
 
-        session = create_session()
+        with testing.expect_deprecated(
+            "AttributeExtension.set is deprecated. "
+        ):
+            configure_mappers()
+
+        a1 = A(data="a1")
+        b1 = B(data="b1")
+        c1 = C(data="c1")
+
+        eq_(a1.data, "ex1a1")
+        eq_(b1.data, "ex1b1")
+        eq_(c1.data, "ex2c1")
+
+        a1.data = "a2"
+        b1.data = "b2"
+        c1.data = "c2"
+        eq_(a1.data, "ex1a2")
+        eq_(b1.data, "ex1b2")
+        eq_(c1.data, "ex2c2")
+
+        eq_(
+            ext_msg,
+            [
+                "Ex1 'a1'",
+                "Ex1 'b1'",
+                "Ex2 'c1'",
+                "Ex1 'a2'",
+                "Ex1 'b2'",
+                "Ex2 'c2'",
+            ],
+        )
 
-        users = session.query(User).filter_by(name="fred").all()
-        assert len(users) == 1
 
-        users = session.query(User).filter(User.name == "fred").all()
-        assert len(users) == 1
+class DeprecatedOptionAllTest(OptionsPathTest, _fixtures.FixtureTest):
+    run_inserts = "once"
+    run_deletes = None
+
+    def _mapper_fixture_one(self):
+        users, User, addresses, Address, orders, Order = (
+            self.tables.users,
+            self.classes.User,
+            self.tables.addresses,
+            self.classes.Address,
+            self.tables.orders,
+            self.classes.Order,
+        )
+        keywords, items, item_keywords, Keyword, Item = (
+            self.tables.keywords,
+            self.tables.items,
+            self.tables.item_keywords,
+            self.classes.Keyword,
+            self.classes.Item,
+        )
+        mapper(
+            User,
+            users,
+            properties={
+                "addresses": relationship(Address),
+                "orders": relationship(Order),
+            },
+        )
+        mapper(Address, addresses)
+        mapper(
+            Order,
+            orders,
+            properties={
+                "items": relationship(Item, secondary=self.tables.order_items)
+            },
+        )
+        mapper(
+            Keyword,
+            keywords,
+            properties={
+                "keywords": column_property(keywords.c.name + "some keyword")
+            },
+        )
+        mapper(
+            Item,
+            items,
+            properties=dict(
+                keywords=relationship(Keyword, secondary=item_keywords)
+            ),
+        )
 
-        users = (
-            session.query(User)
-            .join("addresses")
-            .filter_by(email_address="fred@the.fred")
-        ).all()
-        assert len(users) == 1
+    def _assert_eager_with_entity_exception(
+        self, entity_list, options, message
+    ):
+        assert_raises_message(
+            sa.exc.ArgumentError,
+            message,
+            create_session().query(*entity_list).options,
+            *options
+        )
 
-        users = (
-            session.query(User)
-            .filter(
-                User.addresses.any(Address.email_address == "fred@the.fred")
+    def test_option_against_nonexistent_twolevel_all(self):
+        self._mapper_fixture_one()
+        Item = self.classes.Item
+        with testing.expect_deprecated(
+            r"The joinedload_all\(\) function is deprecated, and "
+            "will be removed in a future release.  "
+            r"Please use method chaining with joinedload\(\)"
+        ):
+            self._assert_eager_with_entity_exception(
+                [Item],
+                (joinedload_all("keywords.foo"),),
+                r"Can't find property named 'foo' on the mapped entity "
+                r"Mapper\|Keyword\|keywords in this Query.",
             )
-            .all()
+
+    def test_all_path_vs_chained(self):
+        self._mapper_fixture_one()
+        User = self.classes.User
+        Order = self.classes.Order
+        Item = self.classes.Item
+
+        with testing.expect_deprecated(
+            r"The joinedload_all\(\) function is deprecated, and "
+            "will be removed in a future release.  "
+            r"Please use method chaining with joinedload\(\)"
+        ):
+            l1 = joinedload_all("orders.items.keywords")
+
+        sess = Session()
+        q = sess.query(User)
+        self._assert_path_result(
+            l1,
+            q,
+            [
+                (User, "orders"),
+                (User, "orders", Order, "items"),
+                (User, "orders", Order, "items", Item, "keywords"),
+            ],
         )
-        assert len(users) == 1
 
-    def test_selectfirst(self):
-        r"""Query.selectfirst(arg=None, \**kwargs)
+        l2 = joinedload("orders").joinedload("items").joinedload("keywords")
+        self._assert_path_result(
+            l2,
+            q,
+            [
+                (User, "orders"),
+                (User, "orders", Order, "items"),
+                (User, "orders", Order, "items", Item, "keywords"),
+            ],
+        )
 
-        bounced = session.query(Address).selectfirst(
-          addresses_table.c.bounces > 0)
+    def test_subqueryload_mapper_order_by(self):
+        users, User, Address, addresses = (
+            self.tables.users,
+            self.classes.User,
+            self.classes.Address,
+            self.tables.addresses,
+        )
 
-        """
+        mapper(Address, addresses)
+
+        with testing.expect_deprecated(
+            ".*Mapper.order_by parameter is deprecated"
+        ):
+            mapper(
+                User,
+                users,
+                properties={
+                    "addresses": relationship(
+                        Address, lazy="subquery", order_by=addresses.c.id
+                    )
+                },
+                order_by=users.c.id.desc(),
+            )
 
-        Address = self.classes.Address
+        sess = create_session()
+        q = sess.query(User)
 
-        session = create_session()
+        result = q.limit(2).all()
+        eq_(result, list(reversed(self.static.user_address_result[2:4])))
 
-        bounced = session.query(Address).filter(Address.bounces > 0).first()
-        assert bounced.bounces > 0
+    def test_selectinload_mapper_order_by(self):
+        users, User, Address, addresses = (
+            self.tables.users,
+            self.classes.User,
+            self.classes.Address,
+            self.tables.addresses,
+        )
 
-    def test_selectfirst_by(self):
-        r"""Query.selectfirst_by(\*args, \**params)
+        mapper(Address, addresses)
+        with testing.expect_deprecated(
+            ".*Mapper.order_by parameter is deprecated"
+        ):
+            mapper(
+                User,
+                users,
+                properties={
+                    "addresses": relationship(
+                        Address, lazy="selectin", order_by=addresses.c.id
+                    )
+                },
+                order_by=users.c.id.desc(),
+            )
 
-        onebounce = session.query(Address).selectfirst_by(bounces=1)
+        sess = create_session()
+        q = sess.query(User)
 
-        # 0.3 magic join on *_by methods
-        onebounce_user = session.query(User).selectfirst_by(bounces=1)
+        result = q.limit(2).all()
+        eq_(result, list(reversed(self.static.user_address_result[2:4])))
 
-        """
+    def test_join_mapper_order_by(self):
+        """test that mapper-level order_by is adapted to a selectable."""
 
-        User, Address = self.classes.User, self.classes.Address
+        User, users = self.classes.User, self.tables.users
 
-        session = create_session()
+        with testing.expect_deprecated(
+            ".*Mapper.order_by parameter is deprecated"
+        ):
+            mapper(User, users, order_by=users.c.id)
 
-        onebounce = session.query(Address).filter_by(bounces=1).first()
-        assert onebounce.bounces == 1
+        sel = users.select(users.c.id.in_([7, 8]))
+        sess = create_session()
 
-        onebounce_user = (
-            session.query(User).join("addresses").filter_by(bounces=1)
-        ).first()
-        assert onebounce_user.name == "jack"
+        eq_(
+            sess.query(User).select_entity_from(sel).all(),
+            [User(name="jack", id=7), User(name="ed", id=8)],
+        )
 
-        onebounce_user = (
-            session.query(User).join("addresses").filter(Address.bounces == 1)
-        ).first()
-        assert onebounce_user.name == "jack"
+    def test_defer_addtl_attrs(self):
+        users, User, Address, addresses = (
+            self.tables.users,
+            self.classes.User,
+            self.classes.Address,
+            self.tables.addresses,
+        )
 
-        onebounce_user = (
-            session.query(User)
-            .filter(User.addresses.any(Address.bounces == 1))
-            .first()
+        mapper(Address, addresses)
+        mapper(
+            User,
+            users,
+            properties={
+                "addresses": relationship(
+                    Address, lazy="selectin", order_by=addresses.c.id
+                )
+            },
         )
-        assert onebounce_user.name == "jack"
 
-    def test_selectone(self):
-        r"""Query.selectone(arg=None, \**kwargs)
+        sess = create_session()
 
-        ed = session.query(User).selectone(users_table.c.name == 'ed')
+        with testing.expect_deprecated(
+            r"The \*addl_attrs on orm.defer is deprecated.  "
+            "Please use method chaining"
+        ):
+            sess.query(User).options(defer("addresses", "email_address"))
 
-        """
+        with testing.expect_deprecated(
+            r"The \*addl_attrs on orm.undefer is deprecated.  "
+            "Please use method chaining"
+        ):
+            sess.query(User).options(undefer("addresses", "email_address"))
 
+
+class LegacyLockModeTest(_fixtures.FixtureTest):
+    run_inserts = None
+
+    @classmethod
+    def setup_mappers(cls):
+        User, users = cls.classes.User, cls.tables.users
+        mapper(User, users)
+
+    def _assert_legacy(self, arg, read=False, nowait=False):
         User = self.classes.User
+        s = Session()
 
-        session = create_session()
+        with testing.expect_deprecated(
+            r"The Query.with_lockmode\(\) method is deprecated"
+        ):
+            q = s.query(User).with_lockmode(arg)
+        sel = q._compile_context().statement
 
-        ed = session.query(User).filter(User.name == "jack").one()
+        if arg is None:
+            assert q._for_update_arg is None
+            assert sel._for_update_arg is None
+            return
 
-    def test_selectone_by(self):
-        """Query.selectone_by
+        assert q._for_update_arg.read is read
+        assert q._for_update_arg.nowait is nowait
 
-        ed = session.query(User).selectone_by(name='ed')
+        assert sel._for_update_arg.read is read
+        assert sel._for_update_arg.nowait is nowait
 
-        # 0.3 magic join on *_by methods
-        ed = session.query(User).selectone_by(email_address='ed@foo.bar')
+    def test_false_legacy(self):
+        self._assert_legacy(None)
 
-        """
+    def test_plain_legacy(self):
+        self._assert_legacy("update")
 
-        User, Address = self.classes.User, self.classes.Address
+    def test_nowait_legacy(self):
+        self._assert_legacy("update_nowait", nowait=True)
 
-        session = create_session()
+    def test_read_legacy(self):
+        self._assert_legacy("read", read=True)
+
+    def test_unknown_legacy_lock_mode(self):
+        User = self.classes.User
+        sess = Session()
+        with testing.expect_deprecated(
+            r"The Query.with_lockmode\(\) method is deprecated"
+        ):
+            assert_raises_message(
+                exc.ArgumentError,
+                "Unknown with_lockmode argument: 'unknown_mode'",
+                sess.query(User.id).with_lockmode,
+                "unknown_mode",
+            )
 
-        ed = session.query(User).filter_by(name="jack").one()
 
-        ed = session.query(User).filter(User.name == "jack").one()
+class InstrumentationTest(fixtures.ORMTest):
+    def test_dict_subclass4(self):
+        # tests #2654
+        with testing.expect_deprecated(
+            r"The collection.converter\(\) handler is deprecated and will "
+            "be removed in a future release.  Please refer to the "
+            "AttributeEvents"
+        ):
 
-        ed = (
-            session.query(User)
-            .join("addresses")
-            .filter(Address.email_address == "ed@foo.bar")
-            .one()
-        )
+            class MyDict(collections.MappedCollection):
+                def __init__(self):
+                    super(MyDict, self).__init__(lambda value: "k%d" % value)
+
+                @collection.converter
+                def _convert(self, dictlike):
+                    for key, value in dictlike.items():
+                        yield value + 5
 
-        ed = (
-            session.query(User)
-            .filter(User.addresses.any(Address.email_address == "ed@foo.bar"))
-            .one()
+        class Foo(object):
+            pass
+
+        instrumentation.register_class(Foo)
+        d = attributes.register_attribute(
+            Foo, "attr", uselist=True, typecallable=MyDict, useobject=True
         )
 
-    def test_select_statement(self):
-        r"""Query.select_statement(statement, \**params)
+        f = Foo()
+        f.attr = {"k1": 1, "k2": 2}
 
-        users = session.query(User).select_statement(users_table.select())
+        eq_(f.attr, {"k7": 7, "k6": 6})
 
-        """
+    def test_name_setup(self):
+        with testing.expect_deprecated(
+            r"The collection.converter\(\) handler is deprecated and will "
+            "be removed in a future release.  Please refer to the "
+            "AttributeEvents"
+        ):
 
-        User, users_table = self.classes.User, self.tables.users_table
+            class Base(object):
+                @collection.iterator
+                def base_iterate(self, x):
+                    return "base_iterate"
 
-        session = create_session()
+                @collection.appender
+                def base_append(self, x):
+                    return "base_append"
 
-        users = session.query(User).from_statement(users_table.select()).all()
-        assert len(users) == 4
+                @collection.converter
+                def base_convert(self, x):
+                    return "base_convert"
 
-    def test_select_text(self):
-        r"""Query.select_text(text, \**params)
+                @collection.remover
+                def base_remove(self, x):
+                    return "base_remove"
 
-        users = session.query(User).select_text('SELECT * FROM users_table')
+        from sqlalchemy.orm.collections import _instrument_class
 
-        """
+        _instrument_class(Base)
 
-        User = self.classes.User
+        eq_(Base._sa_remover(Base(), 5), "base_remove")
+        eq_(Base._sa_appender(Base(), 5), "base_append")
+        eq_(Base._sa_iterator(Base(), 5), "base_iterate")
+        eq_(Base._sa_converter(Base(), 5), "base_convert")
 
-        session = create_session()
+        with testing.expect_deprecated(
+            r"The collection.converter\(\) handler is deprecated and will "
+            "be removed in a future release.  Please refer to the "
+            "AttributeEvents"
+        ):
 
-        users = (
-            session.query(User).from_statement(
-                text("SELECT * FROM users_table")
-            )
-        ).all()
-        assert len(users) == 4
+            class Sub(Base):
+                @collection.converter
+                def base_convert(self, x):
+                    return "sub_convert"
 
-    def test_select_whereclause(self):
-        r"""Query.select_whereclause(whereclause=None, params=None, \**kwargs)
+                @collection.remover
+                def sub_remove(self, x):
+                    return "sub_remove"
 
+        _instrument_class(Sub)
 
-        users = session,query(User).select_whereclause(users.c.name=='ed')
-        users = session.query(User).select_whereclause("name='ed'")
+        eq_(Sub._sa_appender(Sub(), 5), "base_append")
+        eq_(Sub._sa_remover(Sub(), 5), "sub_remove")
+        eq_(Sub._sa_iterator(Sub(), 5), "base_iterate")
+        eq_(Sub._sa_converter(Sub(), 5), "sub_convert")
 
-        """
+    def test_link_event(self):
+        canary = []
 
-        User = self.classes.User
+        with testing.expect_deprecated(
+            r"The collection.linker\(\) handler is deprecated and will "
+            "be removed in a future release.  Please refer to the "
+            "AttributeEvents"
+        ):
 
-        session = create_session()
+            class Collection(list):
+                @collection.linker
+                def _on_link(self, obj):
+                    canary.append(obj)
+
+        class Foo(object):
+            pass
+
+        instrumentation.register_class(Foo)
+        attributes.register_attribute(
+            Foo, "attr", uselist=True, typecallable=Collection, useobject=True
+        )
+
+        f1 = Foo()
+        f1.attr.append(3)
 
-        users = session.query(User).filter(User.name == "ed").all()
-        assert len(users) == 1 and users[0].name == "ed"
+        eq_(canary, [f1.attr._sa_adapter])
+        adapter_1 = f1.attr._sa_adapter
 
-        users = session.query(User).filter(text("name='ed'")).all()
-        assert len(users) == 1 and users[0].name == "ed"
+        l2 = Collection()
+        f1.attr = l2
+        eq_(canary, [adapter_1, f1.attr._sa_adapter, None])
index ea8ae764d9586efea10665a298cdca2762b6bcd1..fd272b1816aecf8f5084b426fed1ca0f88c38074 100644 (file)
@@ -21,7 +21,6 @@ from sqlalchemy.orm import create_session
 from sqlalchemy.orm import defaultload
 from sqlalchemy.orm import deferred
 from sqlalchemy.orm import joinedload
-from sqlalchemy.orm import joinedload_all
 from sqlalchemy.orm import lazyload
 from sqlalchemy.orm import Load
 from sqlalchemy.orm import load_only
@@ -1437,7 +1436,7 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
 
         self.assert_compile(
             sess.query(User)
-            .options(joinedload_all("orders.address"))
+            .options(joinedload("orders").joinedload("address"))
             .limit(10),
             "SELECT anon_1.users_id AS anon_1_users_id, "
             "anon_1.users_name AS anon_1_users_name, "
@@ -1459,7 +1458,8 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
 
         self.assert_compile(
             sess.query(User).options(
-                joinedload_all("orders.items"), joinedload("orders.address")
+                joinedload("orders").joinedload("items"),
+                joinedload("orders").joinedload("address"),
             ),
             "SELECT users.id AS users_id, users.name AS users_name, "
             "items_1.id AS items_1_id, "
@@ -2391,7 +2391,9 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
             sess.query(User)
             .join(User.orders)
             .join(Order.items)
-            .options(joinedload_all("orders.items.keywords"))
+            .options(
+                joinedload("orders").joinedload("items").joinedload("keywords")
+            )
         )
 
         # here, the eager join for keywords can catch onto
@@ -2583,7 +2585,9 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
 
         self.assert_compile(
             sess.query(User).options(
-                joinedload_all(User.orders, Order.items, innerjoin=True)
+                joinedload(User.orders, innerjoin=True).joinedload(
+                    Order.items, innerjoin=True
+                )
             ),
             "SELECT users.id AS users_id, users.name AS users_name, "
             "items_1.id AS items_1_id, "
@@ -3230,7 +3234,7 @@ class SubqueryAliasingTest(fixtures.MappedTest, testing.AssertsCompiledSQL):
         self.assert_compile(
             create_session()
             .query(A)
-            .options(joinedload_all("bs"))
+            .options(joinedload("bs"))
             .order_by(A.summation)
             .limit(50),
             "SELECT anon_1.anon_2 AS anon_1_anon_2, anon_1.a_id "
@@ -3253,7 +3257,7 @@ class SubqueryAliasingTest(fixtures.MappedTest, testing.AssertsCompiledSQL):
         self.assert_compile(
             create_session()
             .query(A)
-            .options(joinedload_all("bs"))
+            .options(joinedload("bs"))
             .order_by(A.summation.desc())
             .limit(50),
             "SELECT anon_1.anon_2 AS anon_1_anon_2, anon_1.a_id "
@@ -3278,7 +3282,7 @@ class SubqueryAliasingTest(fixtures.MappedTest, testing.AssertsCompiledSQL):
         self.assert_compile(
             create_session()
             .query(A)
-            .options(joinedload_all("bs"))
+            .options(joinedload("bs"))
             .order_by(A.summation)
             .limit(50),
             "SELECT anon_1.anon_2 AS anon_1_anon_2, anon_1.a_id "
@@ -3307,7 +3311,7 @@ class SubqueryAliasingTest(fixtures.MappedTest, testing.AssertsCompiledSQL):
         self.assert_compile(
             create_session()
             .query(A)
-            .options(joinedload_all("bs"))
+            .options(joinedload("bs"))
             .order_by(cp)
             .limit(50),
             "SELECT anon_1.a_id AS anon_1_a_id, anon_1.anon_2 "
@@ -3334,7 +3338,7 @@ class SubqueryAliasingTest(fixtures.MappedTest, testing.AssertsCompiledSQL):
         self.assert_compile(
             create_session()
             .query(A)
-            .options(joinedload_all("bs"))
+            .options(joinedload("bs"))
             .order_by(cp)
             .limit(50),
             "SELECT anon_1.a_id AS anon_1_a_id, anon_1.foo "
@@ -3361,7 +3365,7 @@ class SubqueryAliasingTest(fixtures.MappedTest, testing.AssertsCompiledSQL):
         self.assert_compile(
             create_session()
             .query(A)
-            .options(joinedload_all("bs"))
+            .options(joinedload("bs"))
             .order_by(~cp)
             .limit(50),
             "SELECT anon_1.a_id AS anon_1_a_id, anon_1.anon_2 "
@@ -3457,7 +3461,7 @@ class LoadOnExistingTest(_fixtures.FixtureTest):
         a2 = u1.addresses[0]
         a2.email_address = "foo"
         sess.query(User).options(
-            joinedload_all("addresses.dingaling")
+            joinedload("addresses").joinedload("dingaling")
         ).filter_by(id=8).all()
         assert u1.addresses[-1] is a1
         for a in u1.addresses:
@@ -3475,9 +3479,9 @@ class LoadOnExistingTest(_fixtures.FixtureTest):
         u1.orders
         o1 = Order()
         u1.orders.append(o1)
-        sess.query(User).options(joinedload_all("orders.items")).filter_by(
-            id=7
-        ).all()
+        sess.query(User).options(
+            joinedload("orders").joinedload("items")
+        ).filter_by(id=7).all()
         for o in u1.orders:
             if o is not o1:
                 assert "items" in o.__dict__
@@ -3494,7 +3498,7 @@ class LoadOnExistingTest(_fixtures.FixtureTest):
             .one()
         )
         sess.query(User).filter_by(id=8).options(
-            joinedload_all("addresses.dingaling")
+            joinedload("addresses").joinedload("dingaling")
         ).first()
         assert "dingaling" in u1.addresses[0].__dict__
 
@@ -3508,7 +3512,7 @@ class LoadOnExistingTest(_fixtures.FixtureTest):
             .one()
         )
         sess.query(User).filter_by(id=7).options(
-            joinedload_all("orders.items")
+            joinedload("orders").joinedload("items")
         ).first()
         assert "items" in u1.orders[0].__dict__
 
index bb1a935de3740679d69f56d328aa37f0ef08f60a..af5191569cd75fc01d5e4c24c8b02c4c979a8de0 100644 (file)
@@ -6,7 +6,6 @@ from sqlalchemy import testing
 from sqlalchemy.ext.declarative import declarative_base
 from sqlalchemy.orm import attributes
 from sqlalchemy.orm import class_mapper
-from sqlalchemy.orm import column_property
 from sqlalchemy.orm import configure_mappers
 from sqlalchemy.orm import create_session
 from sqlalchemy.orm import events
@@ -2387,470 +2386,6 @@ class SessionLifecycleEventsTest(_RemoveListeners, _fixtures.FixtureTest):
         )
 
 
-class MapperExtensionTest(_fixtures.FixtureTest):
-
-    """Superseded by MapperEventsTest - test backwards
-    compatibility of MapperExtension."""
-
-    run_inserts = None
-
-    def extension(self):
-        methods = []
-
-        class Ext(sa.orm.MapperExtension):
-            def instrument_class(self, mapper, cls):
-                methods.append("instrument_class")
-                return sa.orm.EXT_CONTINUE
-
-            def init_instance(
-                self, mapper, class_, oldinit, instance, args, kwargs
-            ):
-                methods.append("init_instance")
-                return sa.orm.EXT_CONTINUE
-
-            def init_failed(
-                self, mapper, class_, oldinit, instance, args, kwargs
-            ):
-                methods.append("init_failed")
-                return sa.orm.EXT_CONTINUE
-
-            def reconstruct_instance(self, mapper, instance):
-                methods.append("reconstruct_instance")
-                return sa.orm.EXT_CONTINUE
-
-            def before_insert(self, mapper, connection, instance):
-                methods.append("before_insert")
-                return sa.orm.EXT_CONTINUE
-
-            def after_insert(self, mapper, connection, instance):
-                methods.append("after_insert")
-                return sa.orm.EXT_CONTINUE
-
-            def before_update(self, mapper, connection, instance):
-                methods.append("before_update")
-                return sa.orm.EXT_CONTINUE
-
-            def after_update(self, mapper, connection, instance):
-                methods.append("after_update")
-                return sa.orm.EXT_CONTINUE
-
-            def before_delete(self, mapper, connection, instance):
-                methods.append("before_delete")
-                return sa.orm.EXT_CONTINUE
-
-            def after_delete(self, mapper, connection, instance):
-                methods.append("after_delete")
-                return sa.orm.EXT_CONTINUE
-
-        return Ext, methods
-
-    def test_basic(self):
-        """test that common user-defined methods get called."""
-
-        User, users = self.classes.User, self.tables.users
-
-        Ext, methods = self.extension()
-
-        mapper(User, users, extension=Ext())
-        sess = create_session()
-        u = User(name="u1")
-        sess.add(u)
-        sess.flush()
-        u = sess.query(User).populate_existing().get(u.id)
-        sess.expunge_all()
-        u = sess.query(User).get(u.id)
-        u.name = "u1 changed"
-        sess.flush()
-        sess.delete(u)
-        sess.flush()
-        eq_(
-            methods,
-            [
-                "instrument_class",
-                "init_instance",
-                "before_insert",
-                "after_insert",
-                "reconstruct_instance",
-                "before_update",
-                "after_update",
-                "before_delete",
-                "after_delete",
-            ],
-        )
-
-    def test_inheritance(self):
-        users, addresses, User = (
-            self.tables.users,
-            self.tables.addresses,
-            self.classes.User,
-        )
-
-        Ext, methods = self.extension()
-
-        class AdminUser(User):
-            pass
-
-        mapper(User, users, extension=Ext())
-        mapper(
-            AdminUser,
-            addresses,
-            inherits=User,
-            properties={"address_id": addresses.c.id},
-        )
-
-        sess = create_session()
-        am = AdminUser(name="au1", email_address="au1@e1")
-        sess.add(am)
-        sess.flush()
-        am = sess.query(AdminUser).populate_existing().get(am.id)
-        sess.expunge_all()
-        am = sess.query(AdminUser).get(am.id)
-        am.name = "au1 changed"
-        sess.flush()
-        sess.delete(am)
-        sess.flush()
-        eq_(
-            methods,
-            [
-                "instrument_class",
-                "instrument_class",
-                "init_instance",
-                "before_insert",
-                "after_insert",
-                "reconstruct_instance",
-                "before_update",
-                "after_update",
-                "before_delete",
-                "after_delete",
-            ],
-        )
-
-    def test_before_after_only_collection(self):
-        """before_update is called on parent for collection modifications,
-        after_update is called even if no columns were updated.
-
-        """
-
-        keywords, items, item_keywords, Keyword, Item = (
-            self.tables.keywords,
-            self.tables.items,
-            self.tables.item_keywords,
-            self.classes.Keyword,
-            self.classes.Item,
-        )
-
-        Ext1, methods1 = self.extension()
-        Ext2, methods2 = self.extension()
-
-        mapper(
-            Item,
-            items,
-            extension=Ext1(),
-            properties={
-                "keywords": relationship(Keyword, secondary=item_keywords)
-            },
-        )
-        mapper(Keyword, keywords, extension=Ext2())
-
-        sess = create_session()
-        i1 = Item(description="i1")
-        k1 = Keyword(name="k1")
-        sess.add(i1)
-        sess.add(k1)
-        sess.flush()
-        eq_(
-            methods1,
-            [
-                "instrument_class",
-                "init_instance",
-                "before_insert",
-                "after_insert",
-            ],
-        )
-        eq_(
-            methods2,
-            [
-                "instrument_class",
-                "init_instance",
-                "before_insert",
-                "after_insert",
-            ],
-        )
-
-        del methods1[:]
-        del methods2[:]
-        i1.keywords.append(k1)
-        sess.flush()
-        eq_(methods1, ["before_update", "after_update"])
-        eq_(methods2, [])
-
-    def test_inheritance_with_dupes(self):
-        """Inheritance with the same extension instance on both mappers."""
-
-        users, addresses, User = (
-            self.tables.users,
-            self.tables.addresses,
-            self.classes.User,
-        )
-
-        Ext, methods = self.extension()
-
-        class AdminUser(User):
-            pass
-
-        ext = Ext()
-        mapper(User, users, extension=ext)
-        mapper(
-            AdminUser,
-            addresses,
-            inherits=User,
-            extension=ext,
-            properties={"address_id": addresses.c.id},
-        )
-
-        sess = create_session()
-        am = AdminUser(name="au1", email_address="au1@e1")
-        sess.add(am)
-        sess.flush()
-        am = sess.query(AdminUser).populate_existing().get(am.id)
-        sess.expunge_all()
-        am = sess.query(AdminUser).get(am.id)
-        am.name = "au1 changed"
-        sess.flush()
-        sess.delete(am)
-        sess.flush()
-        eq_(
-            methods,
-            [
-                "instrument_class",
-                "instrument_class",
-                "init_instance",
-                "before_insert",
-                "after_insert",
-                "reconstruct_instance",
-                "before_update",
-                "after_update",
-                "before_delete",
-                "after_delete",
-            ],
-        )
-
-    def test_unnecessary_methods_not_evented(self):
-        users = self.tables.users
-
-        class MyExtension(sa.orm.MapperExtension):
-            def before_insert(self, mapper, connection, instance):
-                pass
-
-        class Foo(object):
-            pass
-
-        m = mapper(Foo, users, extension=MyExtension())
-        assert not m.class_manager.dispatch.load
-        assert not m.dispatch.before_update
-        assert len(m.dispatch.before_insert) == 1
-
-
-class AttributeExtensionTest(fixtures.MappedTest):
-    @classmethod
-    def define_tables(cls, metadata):
-        Table(
-            "t1",
-            metadata,
-            Column("id", Integer, primary_key=True),
-            Column("type", String(40)),
-            Column("data", String(50)),
-        )
-
-    def test_cascading_extensions(self):
-        t1 = self.tables.t1
-
-        ext_msg = []
-
-        class Ex1(sa.orm.AttributeExtension):
-            def set(self, state, value, oldvalue, initiator):
-                ext_msg.append("Ex1 %r" % value)
-                return "ex1" + value
-
-        class Ex2(sa.orm.AttributeExtension):
-            def set(self, state, value, oldvalue, initiator):
-                ext_msg.append("Ex2 %r" % value)
-                return "ex2" + value
-
-        class A(fixtures.BasicEntity):
-            pass
-
-        class B(A):
-            pass
-
-        class C(B):
-            pass
-
-        mapper(
-            A,
-            t1,
-            polymorphic_on=t1.c.type,
-            polymorphic_identity="a",
-            properties={"data": column_property(t1.c.data, extension=Ex1())},
-        )
-        mapper(B, polymorphic_identity="b", inherits=A)
-        mapper(
-            C,
-            polymorphic_identity="c",
-            inherits=B,
-            properties={"data": column_property(t1.c.data, extension=Ex2())},
-        )
-
-        a1 = A(data="a1")
-        b1 = B(data="b1")
-        c1 = C(data="c1")
-
-        eq_(a1.data, "ex1a1")
-        eq_(b1.data, "ex1b1")
-        eq_(c1.data, "ex2c1")
-
-        a1.data = "a2"
-        b1.data = "b2"
-        c1.data = "c2"
-        eq_(a1.data, "ex1a2")
-        eq_(b1.data, "ex1b2")
-        eq_(c1.data, "ex2c2")
-
-        eq_(
-            ext_msg,
-            [
-                "Ex1 'a1'",
-                "Ex1 'b1'",
-                "Ex2 'c1'",
-                "Ex1 'a2'",
-                "Ex1 'b2'",
-                "Ex2 'c2'",
-            ],
-        )
-
-
-class SessionExtensionTest(_fixtures.FixtureTest):
-    run_inserts = None
-
-    def test_extension(self):
-        User, users = self.classes.User, self.tables.users
-
-        mapper(User, users)
-        log = []
-
-        class MyExt(sa.orm.session.SessionExtension):
-            def before_commit(self, session):
-                log.append("before_commit")
-
-            def after_commit(self, session):
-                log.append("after_commit")
-
-            def after_rollback(self, session):
-                log.append("after_rollback")
-
-            def before_flush(self, session, flush_context, objects):
-                log.append("before_flush")
-
-            def after_flush(self, session, flush_context):
-                log.append("after_flush")
-
-            def after_flush_postexec(self, session, flush_context):
-                log.append("after_flush_postexec")
-
-            def after_begin(self, session, transaction, connection):
-                log.append("after_begin")
-
-            def after_attach(self, session, instance):
-                log.append("after_attach")
-
-            def after_bulk_update(self, session, query, query_context, result):
-                log.append("after_bulk_update")
-
-            def after_bulk_delete(self, session, query, query_context, result):
-                log.append("after_bulk_delete")
-
-        sess = create_session(extension=MyExt())
-        u = User(name="u1")
-        sess.add(u)
-        sess.flush()
-        assert log == [
-            "after_attach",
-            "before_flush",
-            "after_begin",
-            "after_flush",
-            "after_flush_postexec",
-            "before_commit",
-            "after_commit",
-        ]
-        log = []
-        sess = create_session(autocommit=False, extension=MyExt())
-        u = User(name="u1")
-        sess.add(u)
-        sess.flush()
-        assert log == [
-            "after_attach",
-            "before_flush",
-            "after_begin",
-            "after_flush",
-            "after_flush_postexec",
-        ]
-        log = []
-        u.name = "ed"
-        sess.commit()
-        assert log == [
-            "before_commit",
-            "before_flush",
-            "after_flush",
-            "after_flush_postexec",
-            "after_commit",
-        ]
-        log = []
-        sess.commit()
-        assert log == ["before_commit", "after_commit"]
-        log = []
-        sess.query(User).delete()
-        assert log == ["after_begin", "after_bulk_delete"]
-        log = []
-        sess.query(User).update({"name": "foo"})
-        assert log == ["after_bulk_update"]
-        log = []
-        sess = create_session(
-            autocommit=False, extension=MyExt(), bind=testing.db
-        )
-        sess.connection()
-        assert log == ["after_begin"]
-        sess.close()
-
-    def test_multiple_extensions(self):
-        User, users = self.classes.User, self.tables.users
-
-        log = []
-
-        class MyExt1(sa.orm.session.SessionExtension):
-            def before_commit(self, session):
-                log.append("before_commit_one")
-
-        class MyExt2(sa.orm.session.SessionExtension):
-            def before_commit(self, session):
-                log.append("before_commit_two")
-
-        mapper(User, users)
-        sess = create_session(extension=[MyExt1(), MyExt2()])
-        u = User(name="u1")
-        sess.add(u)
-        sess.flush()
-        assert log == ["before_commit_one", "before_commit_two"]
-
-    def test_unnecessary_methods_not_evented(self):
-        class MyExtension(sa.orm.session.SessionExtension):
-            def before_commit(self, session):
-                pass
-
-        s = Session(extension=MyExtension())
-        assert not s.dispatch.after_commit
-        assert len(s.dispatch.before_commit) == 1
-
-
 class QueryEventsTest(
     _RemoveListeners, _fixtures.FixtureTest, AssertsCompiledSQL
 ):
index fc2fb670ccb7bd4cb70a977a337777613d4b9c2c..c5e1d1485c318703239f0dc5165b5384c1a0e167 100644 (file)
@@ -26,7 +26,6 @@ from sqlalchemy.orm import contains_alias
 from sqlalchemy.orm import contains_eager
 from sqlalchemy.orm import create_session
 from sqlalchemy.orm import joinedload
-from sqlalchemy.orm import joinedload_all
 from sqlalchemy.orm import mapper
 from sqlalchemy.orm import relation
 from sqlalchemy.orm import relationship
@@ -2551,22 +2550,6 @@ class SelectFromTest(QueryTest, AssertsCompiledSQL):
         )
         eq_(q.all(), [("chuck",), ("ed",), ("fred",), ("jack",)])
 
-    @testing.uses_deprecated("Mapper.order_by")
-    def test_join_mapper_order_by(self):
-        """test that mapper-level order_by is adapted to a selectable."""
-
-        User, users = self.classes.User, self.tables.users
-
-        mapper(User, users, order_by=users.c.id)
-
-        sel = users.select(users.c.id.in_([7, 8]))
-        sess = create_session()
-
-        eq_(
-            sess.query(User).select_entity_from(sel).all(),
-            [User(name="jack", id=7), User(name="ed", id=8)],
-        )
-
     def test_differentiate_self_external(self):
         """test some different combinations of joining a table to a subquery of
         itself."""
@@ -2964,7 +2947,11 @@ class SelectFromTest(QueryTest, AssertsCompiledSQL):
             eq_(
                 sess.query(User)
                 .select_entity_from(sel)
-                .options(joinedload_all("orders.items.keywords"))
+                .options(
+                    joinedload("orders")
+                    .joinedload("items")
+                    .joinedload("keywords")
+                )
                 .join("orders", "items", "keywords", aliased=True)
                 .filter(Keyword.name.in_(["red", "big", "round"]))
                 .all(),
@@ -3425,7 +3412,7 @@ class ExternalColumnsTest(QueryTest):
         def go():
             o1 = (
                 sess.query(Order)
-                .options(joinedload_all("address.user"))
+                .options(joinedload("address").joinedload("user"))
                 .get(1)
             )
             eq_(o1.address.user.count, 1)
@@ -3437,7 +3424,7 @@ class ExternalColumnsTest(QueryTest):
         def go():
             o1 = (
                 sess.query(Order)
-                .options(joinedload_all("address.user"))
+                .options(joinedload("address").joinedload("user"))
                 .first()
             )
             eq_(o1.address.user.count, 1)
index abc666af195e193f56d65022d448a03d505f9370..97106cafca9ab5a173dbaaf4a8ba872e69889252 100644 (file)
@@ -73,7 +73,6 @@ class GenerativeQueryTest(fixtures.MappedTest):
 
         assert query[10:20][5] == orig[10:20][5]
 
-    @testing.uses_deprecated("Call to deprecated function apply_max")
     def test_aggregate(self):
         foo, Foo = self.tables.foo, self.classes.Foo
 
index dd928f0dbd3f2b2e6cc1ea533e77b54c1658b804..e3653c6acf1297041adc527bb5ff845bdf656272 100644 (file)
@@ -11,54 +11,6 @@ from sqlalchemy.testing import eq_
 from test.orm import _fixtures
 
 
-class LegacyLockModeTest(_fixtures.FixtureTest):
-    run_inserts = None
-
-    @classmethod
-    def setup_mappers(cls):
-        User, users = cls.classes.User, cls.tables.users
-        mapper(User, users)
-
-    def _assert_legacy(self, arg, read=False, nowait=False):
-        User = self.classes.User
-        s = Session()
-        q = s.query(User).with_lockmode(arg)
-        sel = q._compile_context().statement
-
-        if arg is None:
-            assert q._for_update_arg is None
-            assert sel._for_update_arg is None
-            return
-
-        assert q._for_update_arg.read is read
-        assert q._for_update_arg.nowait is nowait
-
-        assert sel._for_update_arg.read is read
-        assert sel._for_update_arg.nowait is nowait
-
-    def test_false_legacy(self):
-        self._assert_legacy(None)
-
-    def test_plain_legacy(self):
-        self._assert_legacy("update")
-
-    def test_nowait_legacy(self):
-        self._assert_legacy("update_nowait", nowait=True)
-
-    def test_read_legacy(self):
-        self._assert_legacy("read", read=True)
-
-    def test_unknown_legacy_lock_mode(self):
-        User = self.classes.User
-        sess = Session()
-        assert_raises_message(
-            exc.ArgumentError,
-            "Unknown with_lockmode argument: 'unknown_mode'",
-            sess.query(User.id).with_lockmode,
-            "unknown_mode",
-        )
-
-
 class ForUpdateTest(_fixtures.FixtureTest):
     @classmethod
     def setup_mappers(cls):
index 47710792e72404d1580a8e8f0ecd43fe39e01169..4e42e4f9aa9f56a75fe4917bea835f5f43313650 100644 (file)
@@ -18,7 +18,6 @@ from sqlalchemy.orm import attributes
 from sqlalchemy.orm import backref
 from sqlalchemy.orm import class_mapper
 from sqlalchemy.orm import column_property
-from sqlalchemy.orm import comparable_property
 from sqlalchemy.orm import composite
 from sqlalchemy.orm import configure_mappers
 from sqlalchemy.orm import create_session
@@ -555,7 +554,6 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
             (relationship, (Address,)),
             (composite, (MyComposite, "id", "name")),
             (synonym, "foo"),
-            (comparable_property, "foo"),
         ]:
             obj = constructor(info={"x": "y"}, *args)
             eq_(obj.info, {"x": "y"})
@@ -630,35 +628,12 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
 
             name = property(_get_name, _set_name)
 
-            def _uc_name(self):
-                if self._name is None:
-                    return None
-                return self._name.upper()
-
-            uc_name = property(_uc_name)
-            uc_name2 = property(_uc_name)
-
         m = mapper(User, users)
         mapper(Address, addresses)
 
-        class UCComparator(sa.orm.PropComparator):
-            __hash__ = None
-
-            def __eq__(self, other):
-                cls = self.prop.parent.class_
-                col = getattr(cls, "name")
-                if other is None:
-                    return col is None
-                else:
-                    return sa.func.upper(col) == sa.func.upper(other)
-
         m.add_property("_name", deferred(users.c.name))
         m.add_property("name", synonym("_name"))
         m.add_property("addresses", relationship(Address))
-        m.add_property("uc_name", sa.orm.comparable_property(UCComparator))
-        m.add_property(
-            "uc_name2", sa.orm.comparable_property(UCComparator, User.uc_name2)
-        )
 
         sess = create_session(autocommit=False)
         assert sess.query(User).get(7)
@@ -671,8 +646,6 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
                 len(self.static.user_address_result[0].addresses),
             )
             eq_(u.name, "jack")
-            eq_(u.uc_name, "JACK")
-            eq_(u.uc_name2, "JACK")
             eq_(assert_col, [("get", "jack")], str(assert_col))
 
         self.sql_count_(2, go)
@@ -1410,52 +1383,6 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
 
         eq_(result, [self.static.user_result[0]])
 
-    @testing.uses_deprecated("Mapper.order_by")
-    def test_cancel_order_by(self):
-        users, User = self.tables.users, self.classes.User
-
-        mapper(User, users, order_by=users.c.name.desc())
-
-        assert (
-            "order by users.name desc"
-            in str(create_session().query(User).statement).lower()
-        )
-        assert (
-            "order by"
-            not in str(
-                create_session().query(User).order_by(None).statement
-            ).lower()
-        )
-        assert (
-            "order by users.name asc"
-            in str(
-                create_session()
-                .query(User)
-                .order_by(User.name.asc())
-                .statement
-            ).lower()
-        )
-
-        eq_(
-            create_session().query(User).all(),
-            [
-                User(id=7, name="jack"),
-                User(id=9, name="fred"),
-                User(id=8, name="ed"),
-                User(id=10, name="chuck"),
-            ],
-        )
-
-        eq_(
-            create_session().query(User).order_by(User.name).all(),
-            [
-                User(id=10, name="chuck"),
-                User(id=8, name="ed"),
-                User(id=9, name="fred"),
-                User(id=7, name="jack"),
-            ],
-        )
-
     # 'Raises a "expression evaluation not supported" error at prepare time
     @testing.fails_on("firebird", "FIXME: unknown")
     def test_function(self):
@@ -1809,151 +1736,6 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
             ),
         )
 
-    def test_comparable(self):
-        users = self.tables.users
-
-        class extendedproperty(property):
-            attribute = 123
-
-            def method1(self):
-                return "method1"
-
-        from sqlalchemy.orm.properties import ColumnProperty
-
-        class UCComparator(ColumnProperty.Comparator):
-            __hash__ = None
-
-            def method1(self):
-                return "uccmethod1"
-
-            def method2(self, other):
-                return "method2"
-
-            def __eq__(self, other):
-                cls = self.prop.parent.class_
-                col = getattr(cls, "name")
-                if other is None:
-                    return col is None
-                else:
-                    return sa.func.upper(col) == sa.func.upper(other)
-
-        def map_(with_explicit_property):
-            class User(object):
-                @extendedproperty
-                def uc_name(self):
-                    if self.name is None:
-                        return None
-                    return self.name.upper()
-
-            if with_explicit_property:
-                args = (UCComparator, User.uc_name)
-            else:
-                args = (UCComparator,)
-            mapper(
-                User,
-                users,
-                properties=dict(uc_name=sa.orm.comparable_property(*args)),
-            )
-            return User
-
-        for User in (map_(True), map_(False)):
-            sess = create_session()
-            sess.begin()
-            q = sess.query(User)
-
-            assert hasattr(User, "name")
-            assert hasattr(User, "uc_name")
-
-            eq_(User.uc_name.method1(), "method1")
-            eq_(User.uc_name.method2("x"), "method2")
-
-            assert_raises_message(
-                AttributeError,
-                "Neither 'extendedproperty' object nor 'UCComparator' "
-                "object associated with User.uc_name has an attribute "
-                "'nonexistent'",
-                getattr,
-                User.uc_name,
-                "nonexistent",
-            )
-
-            # test compile
-            assert not isinstance(User.uc_name == "jack", bool)
-            u = q.filter(User.uc_name == "JACK").one()
-
-            assert u.uc_name == "JACK"
-            assert u not in sess.dirty
-
-            u.name = "some user name"
-            eq_(u.name, "some user name")
-            assert u in sess.dirty
-            eq_(u.uc_name, "SOME USER NAME")
-
-            sess.flush()
-            sess.expunge_all()
-
-            q = sess.query(User)
-            u2 = q.filter(User.name == "some user name").one()
-            u3 = q.filter(User.uc_name == "SOME USER NAME").one()
-
-            assert u2 is u3
-
-            eq_(User.uc_name.attribute, 123)
-            sess.rollback()
-
-    def test_comparable_column(self):
-        users, User = self.tables.users, self.classes.User
-
-        class MyComparator(sa.orm.properties.ColumnProperty.Comparator):
-            __hash__ = None
-
-            def __eq__(self, other):
-                # lower case comparison
-                return func.lower(self.__clause_element__()) == func.lower(
-                    other
-                )
-
-            def intersects(self, other):
-                # non-standard comparator
-                return self.__clause_element__().op("&=")(other)
-
-        mapper(
-            User,
-            users,
-            properties={
-                "name": sa.orm.column_property(
-                    users.c.name, comparator_factory=MyComparator
-                )
-            },
-        )
-
-        assert_raises_message(
-            AttributeError,
-            "Neither 'InstrumentedAttribute' object nor "
-            "'MyComparator' object associated with User.name has "
-            "an attribute 'nonexistent'",
-            getattr,
-            User.name,
-            "nonexistent",
-        )
-
-        eq_(
-            str(
-                (User.name == "ed").compile(
-                    dialect=sa.engine.default.DefaultDialect()
-                )
-            ),
-            "lower(users.name) = lower(:lower_1)",
-        )
-        eq_(
-            str(
-                (User.name.intersects("ed")).compile(
-                    dialect=sa.engine.default.DefaultDialect()
-                )
-            ),
-            "users.name &= :name_1",
-        )
-
     def test_reentrant_compile(self):
         users, Address, addresses, User = (
             self.tables.users,
@@ -2776,7 +2558,11 @@ class DeepOptionsTest(_fixtures.FixtureTest):
         result = (
             sess.query(User)
             .order_by(User.id)
-            .options(sa.orm.joinedload_all("orders.items.keywords"))
+            .options(
+                sa.orm.joinedload("orders")
+                .joinedload("items")
+                .joinedload("keywords")
+            )
         ).all()
 
         def go():
@@ -2788,7 +2574,9 @@ class DeepOptionsTest(_fixtures.FixtureTest):
 
         result = (
             sess.query(User).options(
-                sa.orm.subqueryload_all("orders.items.keywords")
+                sa.orm.subqueryload("orders")
+                .subqueryload("items")
+                .subqueryload("keywords")
             )
         ).all()
 
@@ -2885,7 +2673,6 @@ class ComparatorFactoryTest(_fixtures.FixtureTest, AssertsCompiledSQL):
             (composite, DummyComposite, users.c.id, users.c.name),
             (relationship, Address),
             (backref, "address"),
-            (comparable_property,),
             (dynamic_loader, Address),
         ):
             fn = args[0]
@@ -3845,9 +3632,9 @@ class RequirementsTest(fixtures.MappedTest):
         h1s = (
             s.query(H1)
             .options(
-                sa.orm.joinedload_all("t6a.h1b"),
+                sa.orm.joinedload("t6a").joinedload("h1b"),
                 sa.orm.joinedload("h2s"),
-                sa.orm.joinedload_all("h3s.h1s"),
+                sa.orm.joinedload("h3s").joinedload("h1s"),
             )
             .all()
         )
index c3e38d1b0de6dcb0660e72bfa42baee7326ceaaa..995989cd9253cca514466dccdad660143562e7cb 100644 (file)
@@ -12,14 +12,12 @@ from sqlalchemy import testing
 from sqlalchemy import Text
 from sqlalchemy.orm import attributes
 from sqlalchemy.orm import backref
-from sqlalchemy.orm import comparable_property
 from sqlalchemy.orm import configure_mappers
 from sqlalchemy.orm import create_session
 from sqlalchemy.orm import defer
 from sqlalchemy.orm import deferred
 from sqlalchemy.orm import foreign
 from sqlalchemy.orm import mapper
-from sqlalchemy.orm import PropComparator
 from sqlalchemy.orm import relationship
 from sqlalchemy.orm import Session
 from sqlalchemy.orm import sessionmaker
@@ -1283,13 +1281,10 @@ class MergeTest(_fixtures.FixtureTest):
         except sa.exc.InvalidRequestError as e:
             assert "load=False option does not support" in str(e)
 
-    def test_synonym_comparable(self):
+    def test_synonym(self):
         users = self.tables.users
 
         class User(object):
-            class Comparator(PropComparator):
-                pass
-
             def _getValue(self):
                 return self._value
 
@@ -1298,14 +1293,7 @@ class MergeTest(_fixtures.FixtureTest):
 
             value = property(_getValue, _setValue)
 
-        mapper(
-            User,
-            users,
-            properties={
-                "uid": synonym("id"),
-                "foobar": comparable_property(User.Comparator, User.value),
-            },
-        )
+        mapper(User, users, properties={"uid": synonym("id")})
 
         sess = create_session()
         u = User()
index 6e980e641fb9cc07377e707194c18ec682c954b9..61fc80cb08a299e79d4d57a28258fcef2879c64d 100644 (file)
@@ -8,11 +8,9 @@ from sqlalchemy.engine import default
 from sqlalchemy.orm import aliased
 from sqlalchemy.orm import contains_eager
 from sqlalchemy.orm import joinedload
-from sqlalchemy.orm import joinedload_all
 from sqlalchemy.orm import relationship
 from sqlalchemy.orm import Session
 from sqlalchemy.orm import subqueryload
-from sqlalchemy.orm import subqueryload_all
 from sqlalchemy.orm import with_polymorphic
 from sqlalchemy.testing import assert_raises_message
 from sqlalchemy.testing import eq_
@@ -562,8 +560,8 @@ class SubclassRelationshipTest(
         )
         s = Session(testing.db)
         q = s.query(ParentThing).options(
-            subqueryload_all(
-                ParentThing.container, DataContainer.jobs.of_type(SubJob)
+            subqueryload(ParentThing.container).subqueryload(
+                DataContainer.jobs.of_type(SubJob)
             )
         )
 
@@ -577,8 +575,8 @@ class SubclassRelationshipTest(
         s = Session(testing.db)
         sj_alias = aliased(SubJob)
         q = s.query(DataContainer).options(
-            subqueryload_all(
-                DataContainer.jobs.of_type(sj_alias), sj_alias.widget
+            subqueryload(DataContainer.jobs.of_type(sj_alias)).subqueryload(
+                sj_alias.widget
             )
         )
 
@@ -596,8 +594,8 @@ class SubclassRelationshipTest(
         )
         s = Session(testing.db)
         q = s.query(ParentThing).options(
-            joinedload_all(
-                ParentThing.container, DataContainer.jobs.of_type(SubJob)
+            joinedload(ParentThing.container).joinedload(
+                DataContainer.jobs.of_type(SubJob)
             )
         )
 
index 98b6bccbfd9d7b7603322ea6bd7803615b357349..4d205e593f757525879d0a9f36280d81d84ca4a6 100644 (file)
@@ -11,7 +11,6 @@ from sqlalchemy.orm import column_property
 from sqlalchemy.orm import create_session
 from sqlalchemy.orm import defaultload
 from sqlalchemy.orm import joinedload
-from sqlalchemy.orm import joinedload_all
 from sqlalchemy.orm import lazyload
 from sqlalchemy.orm import Load
 from sqlalchemy.orm import mapper
@@ -978,11 +977,11 @@ class OptionsNoPropTest(_fixtures.FixtureTest):
             r"Mapper\|Keyword\|keywords in this Query.",
         )
 
-    def test_option_against_nonexistent_twolevel_all(self):
+    def test_option_against_nonexistent_twolevel_chained(self):
         Item = self.classes.Item
         self._assert_eager_with_entity_exception(
             [Item],
-            (joinedload_all("keywords.foo"),),
+            (joinedload("keywords").joinedload("foo"),),
             r"Can't find property named 'foo' on the mapped entity "
             r"Mapper\|Keyword\|keywords in this Query.",
         )
@@ -996,7 +995,7 @@ class OptionsNoPropTest(_fixtures.FixtureTest):
         Keyword = self.classes.Keyword
         self._assert_eager_with_entity_exception(
             [Keyword, Item],
-            (joinedload_all("keywords"),),
+            (joinedload("keywords"),),
             r"Attribute 'keywords' of entity 'Mapper\|Keyword\|keywords' "
             "does not refer to a mapped entity",
         )
@@ -1010,7 +1009,7 @@ class OptionsNoPropTest(_fixtures.FixtureTest):
         Keyword = self.classes.Keyword
         self._assert_eager_with_entity_exception(
             [Keyword, Item],
-            (joinedload_all("keywords"),),
+            (joinedload("keywords"),),
             r"Attribute 'keywords' of entity 'Mapper\|Keyword\|keywords' "
             "does not refer to a mapped entity",
         )
@@ -1019,7 +1018,7 @@ class OptionsNoPropTest(_fixtures.FixtureTest):
         Item = self.classes.Item
         self._assert_eager_with_entity_exception(
             [Item],
-            (joinedload_all("id", "keywords"),),
+            (joinedload("id").joinedload("keywords"),),
             r"Attribute 'id' of entity 'Mapper\|Item\|items' does not "
             r"refer to a mapped entity",
         )
@@ -1029,7 +1028,7 @@ class OptionsNoPropTest(_fixtures.FixtureTest):
         Keyword = self.classes.Keyword
         self._assert_eager_with_entity_exception(
             [Keyword, Item],
-            (joinedload_all("id", "keywords"),),
+            (joinedload("id").joinedload("keywords"),),
             r"Attribute 'id' of entity 'Mapper\|Keyword\|keywords' "
             "does not refer to a mapped entity",
         )
@@ -1039,7 +1038,7 @@ class OptionsNoPropTest(_fixtures.FixtureTest):
         Keyword = self.classes.Keyword
         self._assert_eager_with_entity_exception(
             [Keyword, Item],
-            (joinedload_all("description"),),
+            (joinedload("description"),),
             r"Can't find property named 'description' on the mapped "
             r"entity Mapper\|Keyword\|keywords in this Query.",
         )
@@ -1049,7 +1048,7 @@ class OptionsNoPropTest(_fixtures.FixtureTest):
         Keyword = self.classes.Keyword
         self._assert_eager_with_entity_exception(
             [Keyword.id, Item.id],
-            (joinedload_all("keywords"),),
+            (joinedload("keywords"),),
             r"Query has only expression-based entities - can't find property "
             "named 'keywords'.",
         )
@@ -1059,7 +1058,7 @@ class OptionsNoPropTest(_fixtures.FixtureTest):
         Keyword = self.classes.Keyword
         self._assert_eager_with_entity_exception(
             [Keyword, Item],
-            (joinedload_all(Keyword.id, Item.keywords),),
+            (joinedload(Keyword.id).joinedload(Item.keywords),),
             r"Attribute 'id' of entity 'Mapper\|Keyword\|keywords' "
             "does not refer to a mapped entity",
         )
@@ -1069,7 +1068,7 @@ class OptionsNoPropTest(_fixtures.FixtureTest):
         Keyword = self.classes.Keyword
         self._assert_eager_with_entity_exception(
             [Keyword, Item],
-            (joinedload_all(Keyword.keywords, Item.keywords),),
+            (joinedload(Keyword.keywords).joinedload(Item.keywords),),
             r"Attribute 'keywords' of entity 'Mapper\|Keyword\|keywords' "
             "does not refer to a mapped entity",
         )
@@ -1079,7 +1078,7 @@ class OptionsNoPropTest(_fixtures.FixtureTest):
         Keyword = self.classes.Keyword
         self._assert_eager_with_entity_exception(
             [Keyword.id, Item.id],
-            (joinedload_all(Keyword.keywords, Item.keywords),),
+            (joinedload(Keyword.keywords).joinedload(Item.keywords),),
             r"Query has only expression-based entities - "
             "can't find property named 'keywords'.",
         )
@@ -1089,7 +1088,7 @@ class OptionsNoPropTest(_fixtures.FixtureTest):
         Keyword = self.classes.Keyword
         self._assert_eager_with_entity_exception(
             [Item],
-            (joinedload_all(Keyword),),
+            (joinedload(Keyword),),
             r"mapper option expects string key or list of attributes",
         )
 
@@ -1097,7 +1096,7 @@ class OptionsNoPropTest(_fixtures.FixtureTest):
         User = self.classes.User
         self._assert_eager_with_entity_exception(
             [User],
-            (joinedload_all(User.addresses, User.orders),),
+            (joinedload(User.addresses).joinedload(User.orders),),
             r"Attribute 'User.orders' does not link "
             "from element 'Mapper|Address|addresses'",
         )
@@ -1107,7 +1106,11 @@ class OptionsNoPropTest(_fixtures.FixtureTest):
         Order = self.classes.Order
         self._assert_eager_with_entity_exception(
             [User],
-            (joinedload_all(User.addresses, User.orders.of_type(Order)),),
+            (
+                joinedload(User.addresses).joinedload(
+                    User.orders.of_type(Order)
+                ),
+            ),
             r"Attribute 'User.orders' does not link "
             "from element 'Mapper|Address|addresses'",
         )
index 12f894ef3333989efee48cde988d64476149775e..0c8c27bb2c7529298790b8270cd0aaf2c3dec900 100644 (file)
@@ -39,7 +39,6 @@ from sqlalchemy.orm import column_property
 from sqlalchemy.orm import create_session
 from sqlalchemy.orm import defer
 from sqlalchemy.orm import joinedload
-from sqlalchemy.orm import joinedload_all
 from sqlalchemy.orm import lazyload
 from sqlalchemy.orm import mapper
 from sqlalchemy.orm import Query
@@ -842,7 +841,7 @@ class GetTest(QueryTest):
 
         # eager load does
         s.query(User).options(
-            joinedload("addresses"), joinedload_all("orders.items")
+            joinedload("addresses"), joinedload("orders").joinedload("items")
         ).populate_existing().all()
         assert u.addresses[0].email_address == "jack@bean.com"
         assert u.orders[1].items[2].description == "item 5"
@@ -4000,31 +3999,30 @@ class TextTest(QueryTest, AssertsCompiledSQL):
             None,
         )
 
-    def test_fragment(self):
+    def test_whereclause(self):
         User = self.classes.User
 
-        with expect_warnings("Textual SQL expression"):
-            eq_(
-                create_session().query(User).filter("id in (8, 9)").all(),
-                [User(id=8), User(id=9)],
-            )
+        eq_(
+            create_session().query(User).filter(text("id in (8, 9)")).all(),
+            [User(id=8), User(id=9)],
+        )
 
-            eq_(
-                create_session()
-                .query(User)
-                .filter("name='fred'")
-                .filter("id=9")
-                .all(),
-                [User(id=9)],
-            )
-            eq_(
-                create_session()
-                .query(User)
-                .filter("name='fred'")
-                .filter(User.id == 9)
-                .all(),
-                [User(id=9)],
-            )
+        eq_(
+            create_session()
+            .query(User)
+            .filter(text("name='fred'"))
+            .filter(text("id=9"))
+            .all(),
+            [User(id=9)],
+        )
+        eq_(
+            create_session()
+            .query(User)
+            .filter(text("name='fred'"))
+            .filter(User.id == 9)
+            .all(),
+            [User(id=9)],
+        )
 
     def test_binds_coerce(self):
         User = self.classes.User
index 0ace607250fe82ce8fd235838f08595c98361587..b891835d1f0ba163ae93b5920a301ab4dc64abf7 100644 (file)
@@ -13,7 +13,6 @@ from sqlalchemy.orm import joinedload
 from sqlalchemy.orm import mapper
 from sqlalchemy.orm import relationship
 from sqlalchemy.orm import selectinload
-from sqlalchemy.orm import selectinload_all
 from sqlalchemy.orm import Session
 from sqlalchemy.orm import subqueryload
 from sqlalchemy.orm import undefer
@@ -136,7 +135,7 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
         self.assert_sql_count(testing.db, go, 2)
 
         q = sess.query(u).options(
-            selectinload_all(u.addresses, Address.dingalings)
+            selectinload(u.addresses).selectinload(Address.dingalings)
         )
 
         def go():
@@ -1040,33 +1039,6 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
         result = q.order_by(sa.desc(User.id)).limit(2).offset(2).all()
         eq_(list(reversed(self.static.user_all_result[0:2])), result)
 
-    @testing.uses_deprecated("Mapper.order_by")
-    def test_mapper_order_by(self):
-        users, User, Address, addresses = (
-            self.tables.users,
-            self.classes.User,
-            self.classes.Address,
-            self.tables.addresses,
-        )
-
-        mapper(Address, addresses)
-        mapper(
-            User,
-            users,
-            properties={
-                "addresses": relationship(
-                    Address, lazy="selectin", order_by=addresses.c.id
-                )
-            },
-            order_by=users.c.id.desc(),
-        )
-
-        sess = create_session()
-        q = sess.query(User)
-
-        result = q.limit(2).all()
-        eq_(result, list(reversed(self.static.user_address_result[2:4])))
-
     def test_one_to_many_scalar(self):
         Address, addresses, users, User = (
             self.classes.Address,
@@ -1320,7 +1292,7 @@ class LoadOnExistingTest(_fixtures.FixtureTest):
         a2 = u1.addresses[0]
         a2.email_address = "foo"
         sess.query(User).options(
-            selectinload_all("addresses.dingaling")
+            selectinload("addresses").selectinload("dingaling")
         ).filter_by(id=8).all()
         assert u1.addresses[-1] is a1
         for a in u1.addresses:
@@ -1338,9 +1310,9 @@ class LoadOnExistingTest(_fixtures.FixtureTest):
         u1.orders
         o1 = Order()
         u1.orders.append(o1)
-        sess.query(User).options(selectinload_all("orders.items")).filter_by(
-            id=7
-        ).all()
+        sess.query(User).options(
+            selectinload("orders").selectinload("items")
+        ).filter_by(id=7).all()
         for o in u1.orders:
             if o is not o1:
                 assert "items" in o.__dict__
@@ -1357,7 +1329,7 @@ class LoadOnExistingTest(_fixtures.FixtureTest):
             .one()
         )
         sess.query(User).filter_by(id=8).options(
-            selectinload_all("addresses.dingaling")
+            selectinload("addresses").selectinload("dingaling")
         ).first()
         assert "dingaling" in u1.addresses[0].__dict__
 
@@ -1371,7 +1343,7 @@ class LoadOnExistingTest(_fixtures.FixtureTest):
             .one()
         )
         sess.query(User).filter_by(id=7).options(
-            selectinload_all("orders.items")
+            selectinload("orders").selectinload("items")
         ).first()
         assert "items" in u1.orders[0].__dict__
 
@@ -2429,7 +2401,7 @@ class SelfReferentialTest(fixtures.MappedTest):
                 sess.query(Node)
                 .filter_by(data="n1")
                 .order_by(Node.id)
-                .options(selectinload_all("children.children"))
+                .options(selectinload("children").selectinload("children"))
                 .first()
             )
             eq_(
index 7221f1d1268e04dfe31300b82d2fea58955fea71..2bc11398dc6600fcbab0ae3802343f9020ae7ceb 100644 (file)
@@ -34,7 +34,6 @@ from sqlalchemy.testing.schema import Column
 from sqlalchemy.testing.schema import Table
 from sqlalchemy.testing.util import gc_collect
 from sqlalchemy.util import pickle
-from sqlalchemy.util import pypy
 from sqlalchemy.util.compat import inspect_getfullargspec
 from test.orm import _fixtures
 
@@ -187,8 +186,9 @@ class SessionUtilTest(_fixtures.FixtureTest):
         assert u2 in s2
 
         with assertions.expect_deprecated(
-                r"The Session.close_all\(\) method is deprecated and will "
-                "be removed in a future release. "):
+            r"The Session.close_all\(\) method is deprecated and will "
+            "be removed in a future release. "
+        ):
             Session.close_all()
 
         assert u1 not in s1
@@ -628,12 +628,11 @@ class SessionStateTest(_fixtures.FixtureTest):
         assert u1 not in s2
         assert not s2.identity_map.keys()
 
-    @testing.uses_deprecated()
     def test_identity_conflict(self):
         users, User = self.tables.users, self.classes.User
 
         mapper(User, users)
-        for s in (create_session(), create_session(weak_identity_map=False)):
+        for s in (create_session(), create_session()):
             users.delete().execute()
             u1 = User(name="ed")
             s.add(u1)
@@ -1466,172 +1465,6 @@ class WeakIdentityMapTest(_fixtures.FixtureTest):
         assert not sess.identity_map.contains_state(u2._sa_instance_state)
 
 
-class StrongIdentityMapTest(_fixtures.FixtureTest):
-    run_inserts = None
-
-    def _strong_ident_fixture(self):
-        sess = create_session(weak_identity_map=False)
-        return sess, sess.prune
-
-    def _event_fixture(self):
-        session = create_session()
-
-        @event.listens_for(session, "pending_to_persistent")
-        @event.listens_for(session, "deleted_to_persistent")
-        @event.listens_for(session, "detached_to_persistent")
-        @event.listens_for(session, "loaded_as_persistent")
-        def strong_ref_object(sess, instance):
-            if "refs" not in sess.info:
-                sess.info["refs"] = refs = set()
-            else:
-                refs = sess.info["refs"]
-
-            refs.add(instance)
-
-        @event.listens_for(session, "persistent_to_detached")
-        @event.listens_for(session, "persistent_to_deleted")
-        @event.listens_for(session, "persistent_to_transient")
-        def deref_object(sess, instance):
-            sess.info["refs"].discard(instance)
-
-        def prune():
-            if "refs" not in session.info:
-                return 0
-
-            sess_size = len(session.identity_map)
-            session.info["refs"].clear()
-            gc_collect()
-            session.info["refs"] = set(
-                s.obj() for s in session.identity_map.all_states()
-            )
-            return sess_size - len(session.identity_map)
-
-        return session, prune
-
-    @testing.uses_deprecated()
-    def test_strong_ref_imap(self):
-        self._test_strong_ref(self._strong_ident_fixture)
-
-    def test_strong_ref_events(self):
-        self._test_strong_ref(self._event_fixture)
-
-    def _test_strong_ref(self, fixture):
-        s, prune = fixture()
-
-        users, User = self.tables.users, self.classes.User
-
-        mapper(User, users)
-
-        # save user
-        s.add(User(name="u1"))
-        s.flush()
-        user = s.query(User).one()
-        user = None
-        print(s.identity_map)
-        gc_collect()
-        assert len(s.identity_map) == 1
-
-        user = s.query(User).one()
-        assert not s.identity_map._modified
-        user.name = "u2"
-        assert s.identity_map._modified
-        s.flush()
-        eq_(users.select().execute().fetchall(), [(user.id, "u2")])
-
-    @testing.uses_deprecated()
-    def test_prune_imap(self):
-        self._test_prune(self._strong_ident_fixture)
-
-    def test_prune_events(self):
-        self._test_prune(self._event_fixture)
-
-    @testing.fails_if(lambda: pypy, "pypy has a real GC")
-    @testing.fails_on("+zxjdbc", "http://www.sqlalchemy.org/trac/ticket/1473")
-    def _test_prune(self, fixture):
-        s, prune = fixture()
-
-        users, User = self.tables.users, self.classes.User
-
-        mapper(User, users)
-
-        for o in [User(name="u%s" % x) for x in range(10)]:
-            s.add(o)
-        # o is still live after this loop...
-
-        self.assert_(len(s.identity_map) == 0)
-        eq_(prune(), 0)
-        s.flush()
-        gc_collect()
-        eq_(prune(), 9)
-        # o is still in local scope here, so still present
-        self.assert_(len(s.identity_map) == 1)
-
-        id_ = o.id
-        del o
-        eq_(prune(), 1)
-        self.assert_(len(s.identity_map) == 0)
-
-        u = s.query(User).get(id_)
-        eq_(prune(), 0)
-        self.assert_(len(s.identity_map) == 1)
-        u.name = "squiznart"
-        del u
-        eq_(prune(), 0)
-        self.assert_(len(s.identity_map) == 1)
-        s.flush()
-        eq_(prune(), 1)
-        self.assert_(len(s.identity_map) == 0)
-
-        s.add(User(name="x"))
-        eq_(prune(), 0)
-        self.assert_(len(s.identity_map) == 0)
-        s.flush()
-        self.assert_(len(s.identity_map) == 1)
-        eq_(prune(), 1)
-        self.assert_(len(s.identity_map) == 0)
-
-        u = s.query(User).get(id_)
-        s.delete(u)
-        del u
-        eq_(prune(), 0)
-        self.assert_(len(s.identity_map) == 1)
-        s.flush()
-        eq_(prune(), 0)
-        self.assert_(len(s.identity_map) == 0)
-
-    @testing.uses_deprecated()
-    def test_fast_discard_race(self):
-        # test issue #4068
-        users, User = self.tables.users, self.classes.User
-
-        mapper(User, users)
-
-        sess = Session(weak_identity_map=False)
-
-        u1 = User(name="u1")
-        sess.add(u1)
-        sess.commit()
-
-        u1_state = u1._sa_instance_state
-        sess.identity_map._dict.pop(u1_state.key)
-        ref = u1_state.obj
-        u1_state.obj = lambda: None
-
-        u2 = sess.query(User).first()
-        u1_state._cleanup(ref)
-
-        u3 = sess.query(User).first()
-
-        is_(u2, u3)
-
-        u2_state = u2._sa_instance_state
-        assert sess.identity_map.contains_state(u2._sa_instance_state)
-        ref = u2_state.obj
-        u2_state.obj = lambda: None
-        u2_state._cleanup(ref)
-        assert not sess.identity_map.contains_state(u2._sa_instance_state)
-
-
 class IsModifiedTest(_fixtures.FixtureTest):
     run_inserts = None
 
@@ -1700,28 +1533,6 @@ class IsModifiedTest(_fixtures.FixtureTest):
         addresses_loaded = "addresses" in u.__dict__
         assert mod is not addresses_loaded
 
-    def test_is_modified_passive_on(self):
-        User, Address = self._default_mapping_fixture()
-
-        s = Session()
-        u = User(name="fred", addresses=[Address(email_address="foo")])
-        s.add(u)
-        s.commit()
-
-        u.id
-
-        def go():
-            assert not s.is_modified(u, passive=True)
-
-        self.assert_sql_count(testing.db, go, 0)
-
-        u.name = "newname"
-
-        def go():
-            assert s.is_modified(u, passive=True)
-
-        self.assert_sql_count(testing.db, go, 0)
-
     def test_is_modified_syn(self):
         User, users = self.classes.User, self.tables.users
 
@@ -2037,49 +1848,6 @@ class SessionInterface(fixtures.TestBase):
             )
 
 
-class TLTransactionTest(fixtures.MappedTest):
-    run_dispose_bind = "once"
-    __backend__ = True
-
-    @classmethod
-    def setup_bind(cls):
-        return engines.testing_engine(options=dict(strategy="threadlocal"))
-
-    @classmethod
-    def define_tables(cls, metadata):
-        Table(
-            "users",
-            metadata,
-            Column(
-                "id", Integer, primary_key=True, test_needs_autoincrement=True
-            ),
-            Column("name", String(20)),
-            test_needs_acid=True,
-        )
-
-    @classmethod
-    def setup_classes(cls):
-        class User(cls.Basic):
-            pass
-
-    @classmethod
-    def setup_mappers(cls):
-        users, User = cls.tables.users, cls.classes.User
-
-        mapper(User, users)
-
-    @testing.exclude("mysql", "<", (5, 0, 3), "FIXME: unknown")
-    def test_session_nesting(self):
-        User = self.classes.User
-
-        sess = create_session(bind=self.bind)
-        self.bind.begin()
-        u = User(name="ed")
-        sess.add(u)
-        sess.flush()
-        self.bind.commit()
-
-
 class FlushWarningsTest(fixtures.MappedTest):
     run_setup_mappers = "each"
 
index a4ee2d804b0c32b0a9e82195c86ea4d9aee20b16..b4be6debe4c0d3d6e3786fb02c584d39f455a445 100644 (file)
@@ -15,7 +15,6 @@ from sqlalchemy.orm import mapper
 from sqlalchemy.orm import relationship
 from sqlalchemy.orm import Session
 from sqlalchemy.orm import subqueryload
-from sqlalchemy.orm import subqueryload_all
 from sqlalchemy.orm import undefer
 from sqlalchemy.orm import with_polymorphic
 from sqlalchemy.testing import assert_raises
@@ -137,7 +136,7 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
         self.assert_sql_count(testing.db, go, 2)
 
         q = sess.query(u).options(
-            subqueryload_all(u.addresses, Address.dingalings)
+            subqueryload(u.addresses).subqueryload(Address.dingalings)
         )
 
         def go():
@@ -1060,33 +1059,6 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
         result = q.order_by(sa.desc(User.id)).limit(2).offset(2).all()
         eq_(list(reversed(self.static.user_all_result[0:2])), result)
 
-    @testing.uses_deprecated("Mapper.order_by")
-    def test_mapper_order_by(self):
-        users, User, Address, addresses = (
-            self.tables.users,
-            self.classes.User,
-            self.classes.Address,
-            self.tables.addresses,
-        )
-
-        mapper(Address, addresses)
-        mapper(
-            User,
-            users,
-            properties={
-                "addresses": relationship(
-                    Address, lazy="subquery", order_by=addresses.c.id
-                )
-            },
-            order_by=users.c.id.desc(),
-        )
-
-        sess = create_session()
-        q = sess.query(User)
-
-        result = q.limit(2).all()
-        eq_(result, list(reversed(self.static.user_address_result[2:4])))
-
     def test_one_to_many_scalar(self):
         Address, addresses, users, User = (
             self.classes.Address,
@@ -1340,7 +1312,7 @@ class LoadOnExistingTest(_fixtures.FixtureTest):
         a2 = u1.addresses[0]
         a2.email_address = "foo"
         sess.query(User).options(
-            subqueryload_all("addresses.dingaling")
+            subqueryload("addresses").subqueryload("dingaling")
         ).filter_by(id=8).all()
         assert u1.addresses[-1] is a1
         for a in u1.addresses:
@@ -1358,9 +1330,9 @@ class LoadOnExistingTest(_fixtures.FixtureTest):
         u1.orders
         o1 = Order()
         u1.orders.append(o1)
-        sess.query(User).options(subqueryload_all("orders.items")).filter_by(
-            id=7
-        ).all()
+        sess.query(User).options(
+            subqueryload("orders").subqueryload("items")
+        ).filter_by(id=7).all()
         for o in u1.orders:
             if o is not o1:
                 assert "items" in o.__dict__
@@ -1377,7 +1349,7 @@ class LoadOnExistingTest(_fixtures.FixtureTest):
             .one()
         )
         sess.query(User).filter_by(id=8).options(
-            subqueryload_all("addresses.dingaling")
+            subqueryload("addresses").subqueryload("dingaling")
         ).first()
         assert "dingaling" in u1.addresses[0].__dict__
 
@@ -1391,7 +1363,7 @@ class LoadOnExistingTest(_fixtures.FixtureTest):
             .one()
         )
         sess.query(User).filter_by(id=7).options(
-            subqueryload_all("orders.items")
+            subqueryload("orders").subqueryload("items")
         ).first()
         assert "items" in u1.orders[0].__dict__
 
@@ -2328,7 +2300,7 @@ class SelfReferentialTest(fixtures.MappedTest):
                 sess.query(Node)
                 .filter_by(data="n1")
                 .order_by(Node.id)
-                .options(subqueryload_all("children.children"))
+                .options(subqueryload("children").subqueryload("children"))
                 .first()
             )
             eq_(
index e5708808e753ac9e2f96c68c6fc8da9715326998..5c8ed4732a54275838e124d00b55b0ccd58465eb 100644 (file)
@@ -711,19 +711,19 @@ class SessionTransactionTest(fixtures.RemovesEvents, FixtureTest):
         eq_(
             bind.mock_calls,
             [
-                mock.call.contextual_connect(),
-                mock.call.contextual_connect().execution_options(
+                mock.call._contextual_connect(),
+                mock.call._contextual_connect().execution_options(
                     isolation_level="FOO"
                 ),
-                mock.call.contextual_connect().execution_options().begin(),
+                mock.call._contextual_connect().execution_options().begin(),
             ],
         )
-        eq_(c1, bind.contextual_connect().execution_options())
+        eq_(c1, bind._contextual_connect().execution_options())
 
     def test_execution_options_ignored_mid_transaction(self):
         bind = mock.Mock()
         conn = mock.Mock(engine=bind)
-        bind.contextual_connect = mock.Mock(return_value=conn)
+        bind._contextual_connect = mock.Mock(return_value=conn)
         sess = Session(bind=bind)
         sess.execute("select 1")
         with expect_warnings(
@@ -1553,75 +1553,6 @@ class AccountingFlagsTest(_LocalFixture):
         sess.expire_all()
         assert u1.name == "edward"
 
-    def test_rollback_no_accounting(self):
-        User, users = self.classes.User, self.tables.users
-
-        sess = sessionmaker(_enable_transaction_accounting=False)()
-        u1 = User(name="ed")
-        sess.add(u1)
-        sess.commit()
-
-        u1.name = "edwardo"
-        sess.rollback()
-
-        testing.db.execute(
-            users.update(users.c.name == "ed").values(name="edward")
-        )
-
-        assert u1.name == "edwardo"
-        sess.expire_all()
-        assert u1.name == "edward"
-
-    def test_commit_no_accounting(self):
-        User, users = self.classes.User, self.tables.users
-
-        sess = sessionmaker(_enable_transaction_accounting=False)()
-        u1 = User(name="ed")
-        sess.add(u1)
-        sess.commit()
-
-        u1.name = "edwardo"
-        sess.rollback()
-
-        testing.db.execute(
-            users.update(users.c.name == "ed").values(name="edward")
-        )
-
-        assert u1.name == "edwardo"
-        sess.commit()
-
-        assert testing.db.execute(select([users.c.name])).fetchall() == [
-            ("edwardo",)
-        ]
-        assert u1.name == "edwardo"
-
-        sess.delete(u1)
-        sess.commit()
-
-    def test_preflush_no_accounting(self):
-        User, users = self.classes.User, self.tables.users
-
-        sess = Session(
-            _enable_transaction_accounting=False,
-            autocommit=True,
-            autoflush=False,
-        )
-        u1 = User(name="ed")
-        sess.add(u1)
-        sess.flush()
-
-        sess.begin()
-        u1.name = "edwardo"
-        u2 = User(name="some other user")
-        sess.add(u2)
-
-        sess.rollback()
-
-        sess.begin()
-        assert testing.db.execute(select([users.c.name])).fetchall() == [
-            ("ed",)
-        ]
-
 
 class AutoCommitTest(_LocalFixture):
     __backend__ = True
index e54925f0bd48c5dd447d14b05d212056d4bdd8d8..23263d136231448703146ed16f106948a495791f 100644 (file)
@@ -363,19 +363,19 @@ class VersioningTest(fixtures.MappedTest):
             sa.orm.exc.StaleDataError,
             r"Instance .* has version id '\d+' which does not "
             r"match database-loaded version id '\d+'",
-            s1.query(Foo).with_lockmode("read").get,
+            s1.query(Foo).with_for_update(read=True).get,
             f1s1.id,
         )
 
         # reload it - this expires the old version first
-        s1.refresh(f1s1, lockmode="read")
+        s1.refresh(f1s1, with_for_update=dict(read=True))
 
         # now assert version OK
-        s1.query(Foo).with_lockmode("read").get(f1s1.id)
+        s1.query(Foo).with_for_update(read=True).get(f1s1.id)
 
         # assert brand new load is OK too
         s1.close()
-        s1.query(Foo).with_lockmode("read").get(f1s1.id)
+        s1.query(Foo).with_for_update(read=True).get(f1s1.id)
 
     def test_versioncheck_not_versioned(self):
         """ensure the versioncheck logic skips if there isn't a
@@ -389,7 +389,7 @@ class VersioningTest(fixtures.MappedTest):
         f1s1 = Foo(value="f1 value", version_id=1)
         s1.add(f1s1)
         s1.commit()
-        s1.query(Foo).with_lockmode("read").get(f1s1.id)
+        s1.query(Foo).with_for_update(read=True).get(f1s1.id)
 
     @testing.emits_warning(r".*versioning cannot be verified")
     @engines.close_open_connections
@@ -489,7 +489,7 @@ class VersioningTest(fixtures.MappedTest):
         s1.commit()
 
         s2 = create_session(autocommit=False)
-        f1s2 = s2.query(Foo).with_lockmode("read").get(f1s1.id)
+        f1s2 = s2.query(Foo).with_for_update(read=True).get(f1s1.id)
         assert f1s2.id == f1s1.id
         assert f1s2.value == f1s1.value
 
index f152d0ed9dc0c473763f02f1ac52e6ea71b175e4..3418eac73d7a57a114ad4784c01a6b32a9a876c0 100644 (file)
@@ -1529,14 +1529,6 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL):
             "FROM mytable WHERE mytable.myid = :myid_1 FOR UPDATE",
         )
 
-        assert_raises_message(
-            exc.ArgumentError,
-            "Unknown for_update argument: 'unknown_mode'",
-            table1.select,
-            table1.c.myid == 7,
-            for_update="unknown_mode",
-        )
-
     def test_alias(self):
         # test the alias for a table1.  column names stay the same,
         # table name "changes" to "foo".
index 5ede46ede683c11ce5a8e102fbe398b562b49ee0..4ce8cc32f6813fa93d314c7b765f231b837c0c39 100644 (file)
@@ -363,7 +363,7 @@ class DefaultTest(fixtures.TestBase):
 
     @testing.fails_on("firebird", "Data type unknown")
     def test_standalone(self):
-        c = testing.db.engine.contextual_connect()
+        c = testing.db.engine.connect()
         x = c.execute(t.c.col1.default)
         y = t.c.col2.default.execute()
         z = c.execute(t.c.col3.default)
diff --git a/test/sql/test_deprecations.py b/test/sql/test_deprecations.py
new file mode 100644 (file)
index 0000000..2e4042a
--- /dev/null
@@ -0,0 +1,425 @@
+#! coding: utf-8
+
+from sqlalchemy import bindparam
+from sqlalchemy import Column
+from sqlalchemy import column
+from sqlalchemy import create_engine
+from sqlalchemy import exc
+from sqlalchemy import ForeignKey
+from sqlalchemy import Integer
+from sqlalchemy import MetaData
+from sqlalchemy import select
+from sqlalchemy import String
+from sqlalchemy import Table
+from sqlalchemy import table
+from sqlalchemy import testing
+from sqlalchemy import text
+from sqlalchemy import util
+from sqlalchemy.engine import default
+from sqlalchemy.schema import DDL
+from sqlalchemy.sql import util as sql_util
+from sqlalchemy.testing import assert_raises
+from sqlalchemy.testing import assert_raises_message
+from sqlalchemy.testing import AssertsCompiledSQL
+from sqlalchemy.testing import engines
+from sqlalchemy.testing import eq_
+from sqlalchemy.testing import fixtures
+from sqlalchemy.testing import mock
+
+
+class DeprecationWarningsTest(fixtures.TestBase):
+    def test_ident_preparer_force(self):
+        preparer = testing.db.dialect.identifier_preparer
+        preparer.quote("hi")
+        with testing.expect_deprecated(
+            "The IdentifierPreparer.quote.force parameter is deprecated"
+        ):
+            preparer.quote("hi", True)
+
+        with testing.expect_deprecated(
+            "The IdentifierPreparer.quote.force parameter is deprecated"
+        ):
+            preparer.quote("hi", False)
+
+        preparer.quote_schema("hi")
+        with testing.expect_deprecated(
+            "The IdentifierPreparer.quote_schema.force parameter is deprecated"
+        ):
+            preparer.quote_schema("hi", True)
+
+        with testing.expect_deprecated(
+            "The IdentifierPreparer.quote_schema.force parameter is deprecated"
+        ):
+            preparer.quote_schema("hi", False)
+
+    def test_string_convert_unicode(self):
+        with testing.expect_deprecated(
+            "The String.convert_unicode parameter is deprecated and "
+            "will be removed in a future release."
+        ):
+            String(convert_unicode=True)
+
+    def test_string_convert_unicode_force(self):
+        with testing.expect_deprecated(
+            "The String.convert_unicode parameter is deprecated and "
+            "will be removed in a future release."
+        ):
+            String(convert_unicode="force")
+
+    def test_engine_convert_unicode(self):
+        with testing.expect_deprecated(
+            "The create_engine.convert_unicode parameter and "
+            "corresponding dialect-level"
+        ):
+            create_engine("mysql://", convert_unicode=True, module=mock.Mock())
+
+    def test_join_condition_ignore_nonexistent_tables(self):
+        m = MetaData()
+        t1 = Table("t1", m, Column("id", Integer))
+        t2 = Table(
+            "t2", m, Column("id", Integer), Column("t1id", ForeignKey("t1.id"))
+        )
+        with testing.expect_deprecated(
+            "The join_condition.ignore_nonexistent_tables "
+            "parameter is deprecated"
+        ):
+            join_cond = sql_util.join_condition(
+                t1, t2, ignore_nonexistent_tables=True
+            )
+
+        t1t2 = t1.join(t2)
+
+        assert t1t2.onclause.compare(join_cond)
+
+    def test_select_autocommit(self):
+        with testing.expect_deprecated(
+            "The select.autocommit parameter is deprecated and "
+            "will be removed in a future release."
+        ):
+            stmt = select([column("x")], autocommit=True)
+
+    def test_select_for_update(self):
+        with testing.expect_deprecated(
+            "The select.for_update parameter is deprecated and "
+            "will be removed in a future release."
+        ):
+            stmt = select([column("x")], for_update=True)
+
+    @testing.provide_metadata
+    def test_table_useexisting(self):
+        meta = self.metadata
+
+        Table("t", meta, Column("x", Integer))
+        meta.create_all()
+
+        with testing.expect_deprecated(
+            "The Table.useexisting parameter is deprecated and "
+            "will be removed in a future release."
+        ):
+            Table("t", meta, useexisting=True, autoload_with=testing.db)
+
+        with testing.expect_deprecated(
+            "The Table.useexisting parameter is deprecated and "
+            "will be removed in a future release."
+        ):
+            assert_raises_message(
+                exc.ArgumentError,
+                "useexisting is synonymous with extend_existing.",
+                Table,
+                "t",
+                meta,
+                useexisting=True,
+                extend_existing=True,
+                autoload_with=testing.db,
+            )
+
+
+class DDLListenerDeprecationsTest(fixtures.TestBase):
+    def setup(self):
+        self.bind = self.engine = engines.mock_engine()
+        self.metadata = MetaData(self.bind)
+        self.table = Table("t", self.metadata, Column("id", Integer))
+        self.users = Table(
+            "users",
+            self.metadata,
+            Column("user_id", Integer, primary_key=True),
+            Column("user_name", String(40)),
+        )
+
+    def test_append_listener(self):
+        metadata, table, bind = self.metadata, self.table, self.bind
+
+        def fn(*a):
+            return None
+
+        with testing.expect_deprecated(".* is deprecated .*"):
+            table.append_ddl_listener("before-create", fn)
+        with testing.expect_deprecated(".* is deprecated .*"):
+            assert_raises(
+                exc.InvalidRequestError, table.append_ddl_listener, "blah", fn
+            )
+
+        with testing.expect_deprecated(".* is deprecated .*"):
+            metadata.append_ddl_listener("before-create", fn)
+        with testing.expect_deprecated(".* is deprecated .*"):
+            assert_raises(
+                exc.InvalidRequestError,
+                metadata.append_ddl_listener,
+                "blah",
+                fn,
+            )
+
+    def test_deprecated_append_ddl_listener_table(self):
+        metadata, users, engine = self.metadata, self.users, self.engine
+        canary = []
+        with testing.expect_deprecated(".* is deprecated .*"):
+            users.append_ddl_listener(
+                "before-create", lambda e, t, b: canary.append("mxyzptlk")
+            )
+        with testing.expect_deprecated(".* is deprecated .*"):
+            users.append_ddl_listener(
+                "after-create", lambda e, t, b: canary.append("klptzyxm")
+            )
+        with testing.expect_deprecated(".* is deprecated .*"):
+            users.append_ddl_listener(
+                "before-drop", lambda e, t, b: canary.append("xyzzy")
+            )
+        with testing.expect_deprecated(".* is deprecated .*"):
+            users.append_ddl_listener(
+                "after-drop", lambda e, t, b: canary.append("fnord")
+            )
+
+        metadata.create_all()
+        assert "mxyzptlk" in canary
+        assert "klptzyxm" in canary
+        assert "xyzzy" not in canary
+        assert "fnord" not in canary
+        del engine.mock[:]
+        canary[:] = []
+        metadata.drop_all()
+        assert "mxyzptlk" not in canary
+        assert "klptzyxm" not in canary
+        assert "xyzzy" in canary
+        assert "fnord" in canary
+
+    def test_deprecated_append_ddl_listener_metadata(self):
+        metadata, users, engine = self.metadata, self.users, self.engine
+        canary = []
+        with testing.expect_deprecated(".* is deprecated .*"):
+            metadata.append_ddl_listener(
+                "before-create",
+                lambda e, t, b, tables=None: canary.append("mxyzptlk"),
+            )
+        with testing.expect_deprecated(".* is deprecated .*"):
+            metadata.append_ddl_listener(
+                "after-create",
+                lambda e, t, b, tables=None: canary.append("klptzyxm"),
+            )
+        with testing.expect_deprecated(".* is deprecated .*"):
+            metadata.append_ddl_listener(
+                "before-drop",
+                lambda e, t, b, tables=None: canary.append("xyzzy"),
+            )
+        with testing.expect_deprecated(".* is deprecated .*"):
+            metadata.append_ddl_listener(
+                "after-drop",
+                lambda e, t, b, tables=None: canary.append("fnord"),
+            )
+
+        metadata.create_all()
+        assert "mxyzptlk" in canary
+        assert "klptzyxm" in canary
+        assert "xyzzy" not in canary
+        assert "fnord" not in canary
+        del engine.mock[:]
+        canary[:] = []
+        metadata.drop_all()
+        assert "mxyzptlk" not in canary
+        assert "klptzyxm" not in canary
+        assert "xyzzy" in canary
+        assert "fnord" in canary
+
+    def test_filter_deprecated(self):
+        cx = self.engine
+
+        tbl = Table("t", MetaData(), Column("id", Integer))
+        target = cx.name
+
+        assert DDL("")._should_execute_deprecated("x", tbl, cx)
+        with testing.expect_deprecated(".* is deprecated .*"):
+            assert DDL("", on=target)._should_execute_deprecated("x", tbl, cx)
+        with testing.expect_deprecated(".* is deprecated .*"):
+            assert not DDL("", on="bogus")._should_execute_deprecated(
+                "x", tbl, cx
+            )
+        with testing.expect_deprecated(".* is deprecated .*"):
+            assert DDL(
+                "", on=lambda d, x, y, z: True
+            )._should_execute_deprecated("x", tbl, cx)
+        with testing.expect_deprecated(".* is deprecated .*"):
+            assert DDL(
+                "", on=lambda d, x, y, z: z.engine.name != "bogus"
+            )._should_execute_deprecated("x", tbl, cx)
+
+
+class ConvertUnicodeDeprecationTest(fixtures.TestBase):
+
+    __backend__ = True
+
+    data = util.u(
+        "Alors vous imaginez ma surprise, au lever du jour, quand "
+        "une drôle de petite voix m’a réveillé. "
+        "Elle disait: « S’il vous plaît… dessine-moi un mouton! »"
+    )
+
+    def test_unicode_warnings_dialectlevel(self):
+
+        unicodedata = self.data
+
+        with testing.expect_deprecated(
+            "The create_engine.convert_unicode parameter and "
+            "corresponding dialect-level"
+        ):
+            dialect = default.DefaultDialect(convert_unicode=True)
+        dialect.supports_unicode_binds = False
+
+        s = String()
+        uni = s.dialect_impl(dialect).bind_processor(dialect)
+
+        uni(util.b("x"))
+        assert isinstance(uni(unicodedata), util.binary_type)
+
+        eq_(uni(unicodedata), unicodedata.encode("utf-8"))
+
+    def test_ignoring_unicode_error(self):
+        """checks String(unicode_error='ignore') is passed to
+        underlying codec."""
+
+        unicodedata = self.data
+
+        with testing.expect_deprecated(
+            "The String.convert_unicode parameter is deprecated and "
+            "will be removed in a future release.",
+            "The String.unicode_errors parameter is deprecated and "
+            "will be removed in a future release.",
+        ):
+            type_ = String(
+                248, convert_unicode="force", unicode_error="ignore"
+            )
+        dialect = default.DefaultDialect(encoding="ascii")
+        proc = type_.result_processor(dialect, 10)
+
+        utfdata = unicodedata.encode("utf8")
+        eq_(proc(utfdata), unicodedata.encode("ascii", "ignore").decode())
+
+
+class ForUpdateTest(fixtures.TestBase, AssertsCompiledSQL):
+    __dialect__ = "default"
+
+    def _assert_legacy(self, leg, read=False, nowait=False):
+        t = table("t", column("c"))
+
+        with testing.expect_deprecated(
+            "The select.for_update parameter is deprecated and "
+            "will be removed in a future release."
+        ):
+            s1 = select([t], for_update=leg)
+
+        if leg is False:
+            assert s1._for_update_arg is None
+            assert s1.for_update is None
+        else:
+            eq_(s1._for_update_arg.read, read)
+            eq_(s1._for_update_arg.nowait, nowait)
+            eq_(s1.for_update, leg)
+
+    def test_false_legacy(self):
+        self._assert_legacy(False)
+
+    def test_plain_true_legacy(self):
+        self._assert_legacy(True)
+
+    def test_read_legacy(self):
+        self._assert_legacy("read", read=True)
+
+    def test_nowait_legacy(self):
+        self._assert_legacy("nowait", nowait=True)
+
+    def test_read_nowait_legacy(self):
+        self._assert_legacy("read_nowait", read=True, nowait=True)
+
+    def test_unknown_mode(self):
+        t = table("t", column("c"))
+
+        with testing.expect_deprecated(
+            "The select.for_update parameter is deprecated and "
+            "will be removed in a future release."
+        ):
+            assert_raises_message(
+                exc.ArgumentError,
+                "Unknown for_update argument: 'unknown_mode'",
+                t.select,
+                t.c.c == 7,
+                for_update="unknown_mode",
+            )
+
+    def test_legacy_setter(self):
+        t = table("t", column("c"))
+        s = select([t])
+        s.for_update = "nowait"
+        eq_(s._for_update_arg.nowait, True)
+
+
+class TextTest(fixtures.TestBase, AssertsCompiledSQL):
+    __dialect__ = "default"
+
+    def test_legacy_bindparam(self):
+        with testing.expect_deprecated(
+            "The text.bindparams parameter is deprecated"
+        ):
+            t = text(
+                "select * from foo where lala=:bar and hoho=:whee",
+                bindparams=[bindparam("bar", 4), bindparam("whee", 7)],
+            )
+
+        self.assert_compile(
+            t,
+            "select * from foo where lala=:bar and hoho=:whee",
+            checkparams={"bar": 4, "whee": 7},
+        )
+
+    def test_legacy_typemap(self):
+        table1 = table(
+            "mytable",
+            column("myid", Integer),
+            column("name", String),
+            column("description", String),
+        )
+        with testing.expect_deprecated(
+            "The text.typemap parameter is deprecated"
+        ):
+            t = text(
+                "select id, name from user",
+                typemap=dict(id=Integer, name=String),
+            )
+
+        stmt = select([table1.c.myid]).select_from(
+            table1.join(t, table1.c.myid == t.c.id)
+        )
+        compiled = stmt.compile()
+        eq_(
+            compiled._create_result_map(),
+            {
+                "myid": (
+                    "myid",
+                    (table1.c.myid, "myid", "myid"),
+                    table1.c.myid.type,
+                )
+            },
+        )
+
+    def test_autocommit(self):
+        with testing.expect_deprecated(
+            "The text.autocommit parameter is deprecated"
+        ):
+            t = text("select id, name from user", autocommit=True)
index 1e6221cdcafd3d5be04689b94decba8d5c6fa781..f030f1d9ca42968bf1362c7f2c30c3cd4c24f9d2 100644 (file)
@@ -500,8 +500,8 @@ class ClauseTest(fixtures.TestBase, AssertsCompiledSQL):
         )
 
     def test_text(self):
-        clause = text(
-            "select * from table where foo=:bar", bindparams=[bindparam("bar")]
+        clause = text("select * from table where foo=:bar").bindparams(
+            bindparam("bar")
         )
         c1 = str(clause)
 
index a6d4b2d1a3f4f82890176d4dceb57ee614c06dae..3d60fb60ed2b76805cb3ac229d473e65f69ea596 100644 (file)
@@ -2262,18 +2262,6 @@ class UseExistingTest(fixtures.TablesTest):
             extend_existing=True,
         )
 
-    @testing.uses_deprecated()
-    def test_existing_plus_useexisting_raises(self):
-        meta2 = self._useexisting_fixture()
-        assert_raises(
-            exc.ArgumentError,
-            Table,
-            "users",
-            meta2,
-            useexisting=True,
-            extend_existing=True,
-        )
-
     def test_keep_existing_no_dupe_constraints(self):
         meta2 = self._notexisting_fixture()
         users = Table(
index 3bd61b1f83daa92ecc8e260b987293b7709e345c..5987c77467f33630928e1e9443db9321d640e16c 100644 (file)
@@ -1647,7 +1647,7 @@ class AlternateResultProxyTest(fixtures.TablesTest):
             "test",
             metadata,
             Column("x", Integer, primary_key=True),
-            Column("y", String(50, convert_unicode="force")),
+            Column("y", String(50)),
         )
 
     @classmethod
index 5456dfb4f2633631475d302febd990c5fc737f08..04c0e610234ef13343da74cdc695cc5ad2bf1089 100644 (file)
@@ -2544,39 +2544,6 @@ class ResultMapTest(fixtures.TestBase):
 class ForUpdateTest(fixtures.TestBase, AssertsCompiledSQL):
     __dialect__ = "default"
 
-    def _assert_legacy(self, leg, read=False, nowait=False):
-        t = table("t", column("c"))
-        s1 = select([t], for_update=leg)
-
-        if leg is False:
-            assert s1._for_update_arg is None
-            assert s1.for_update is None
-        else:
-            eq_(s1._for_update_arg.read, read)
-            eq_(s1._for_update_arg.nowait, nowait)
-            eq_(s1.for_update, leg)
-
-    def test_false_legacy(self):
-        self._assert_legacy(False)
-
-    def test_plain_true_legacy(self):
-        self._assert_legacy(True)
-
-    def test_read_legacy(self):
-        self._assert_legacy("read", read=True)
-
-    def test_nowait_legacy(self):
-        self._assert_legacy("nowait", nowait=True)
-
-    def test_read_nowait_legacy(self):
-        self._assert_legacy("read_nowait", read=True, nowait=True)
-
-    def test_legacy_setter(self):
-        t = table("t", column("c"))
-        s = select([t])
-        s.for_update = "nowait"
-        eq_(s._for_update_arg.nowait, True)
-
     def test_basic_clone(self):
         t = table("t", column("c"))
         s = select([t]).with_for_update(read=True, of=t.c.c)
index 6b419f59950a09544f2c0512f14910dc249526c0..48302058d3e914105dea7842c6b39e6560fd3535 100644 (file)
@@ -198,18 +198,6 @@ class SelectCompositionTest(fixtures.TestBase, AssertsCompiledSQL):
 class BindParamTest(fixtures.TestBase, AssertsCompiledSQL):
     __dialect__ = "default"
 
-    def test_legacy(self):
-        t = text(
-            "select * from foo where lala=:bar and hoho=:whee",
-            bindparams=[bindparam("bar", 4), bindparam("whee", 7)],
-        )
-
-        self.assert_compile(
-            t,
-            "select * from foo where lala=:bar and hoho=:whee",
-            checkparams={"bar": 4, "whee": 7},
-        )
-
     def test_positional(self):
         t = text("select * from foo where lala=:bar and hoho=:whee")
         t = t.bindparams(bindparam("bar", 4), bindparam("whee", 7))
index c54fe1e549fd3f9e1b3d35c24ab8f0268b6546cd..4bd182c3eb031d58254d6001c959f1e049d3290a 100644 (file)
@@ -175,7 +175,7 @@ class AdaptTest(fixtures.TestBase):
                     % (type_, expected)
                 )
 
-    @testing.uses_deprecated()
+    @testing.uses_deprecated(".*Binary.*")
     def test_adapt_method(self):
         """ensure all types have a working adapt() method,
         which creates a distinct copy.
@@ -191,6 +191,8 @@ class AdaptTest(fixtures.TestBase):
 
         def adaptions():
             for typ in self._all_types():
+                # up adapt from LowerCase to UPPERCASE,
+                # as well as to all non-sqltypes
                 up_adaptions = [typ] + typ.__subclasses__()
                 yield False, typ, up_adaptions
                 for subcl in typ.__subclasses__():
@@ -258,7 +260,6 @@ class AdaptTest(fixtures.TestBase):
         eq_(types.DateTime().python_type, datetime.datetime)
         eq_(types.String().python_type, str)
         eq_(types.Unicode().python_type, util.text_type)
-        eq_(types.String(convert_unicode=True).python_type, util.text_type)
         eq_(types.Enum("one", "two", "three").python_type, str)
 
         assert_raises(
@@ -283,10 +284,15 @@ class AdaptTest(fixtures.TestBase):
         This essentially is testing the behavior of util.constructor_copy().
 
         """
-        t1 = String(length=50, convert_unicode=False)
-        t2 = t1.adapt(Text, convert_unicode=True)
+        t1 = String(length=50)
+        t2 = t1.adapt(Text)
         eq_(t2.length, 50)
-        eq_(t2.convert_unicode, True)
+
+    def test_convert_unicode_text_type(self):
+        with testing.expect_deprecated(
+            "The String.convert_unicode parameter is deprecated"
+        ):
+            eq_(types.String(convert_unicode=True).python_type, util.text_type)
 
 
 class TypeAffinityTest(fixtures.TestBase):
@@ -1245,34 +1251,6 @@ class UnicodeTest(fixtures.TestBase):
         ):
             eq_(uni(5), 5)
 
-    def test_unicode_warnings_dialectlevel(self):
-
-        unicodedata = self.data
-
-        dialect = default.DefaultDialect(convert_unicode=True)
-        dialect.supports_unicode_binds = False
-
-        s = String()
-        uni = s.dialect_impl(dialect).bind_processor(dialect)
-
-        uni(util.b("x"))
-        assert isinstance(uni(unicodedata), util.binary_type)
-
-        eq_(uni(unicodedata), unicodedata.encode("utf-8"))
-
-    def test_ignoring_unicode_error(self):
-        """checks String(unicode_error='ignore') is passed to
-        underlying codec."""
-
-        unicodedata = self.data
-
-        type_ = String(248, convert_unicode="force", unicode_error="ignore")
-        dialect = default.DefaultDialect(encoding="ascii")
-        proc = type_.result_processor(dialect, 10)
-
-        utfdata = unicodedata.encode("utf8")
-        eq_(proc(utfdata), unicodedata.encode("ascii", "ignore").decode())
-
 
 class EnumTest(AssertsCompiledSQL, fixtures.TablesTest):
     __backend__ = True
@@ -1857,6 +1835,7 @@ class EnumTest(AssertsCompiledSQL, fixtures.TablesTest):
         # depending on backend.
         assert "('x'," in e.print_sql()
 
+    @testing.uses_deprecated(".*convert_unicode")
     def test_repr(self):
         e = Enum(
             "x",
@@ -1958,13 +1937,14 @@ class BinaryTest(fixtures.TestBase, AssertsExecutionResults):
             binary_table.select(order_by=binary_table.c.primary_id),
             text(
                 "select * from binary_table order by binary_table.primary_id",
-                typemap={
+                bind=testing.db,
+            ).columns(
+                **{
                     "pickled": PickleType,
                     "mypickle": MyPickleType,
                     "data": LargeBinary,
                     "data_slice": LargeBinary,
-                },
-                bind=testing.db,
+                }
             ),
         ):
             result = stmt.execute().fetchall()