From f9d6702b80b45ed5f096e3f1c85b0ca78c757cf0 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Tue, 14 Apr 2020 16:45:15 -0400 Subject: [PATCH] Set up absolute references for create_engine and related includes more replacements for create_engine(), Connection Change-Id: I680c02e90b9ece53e52ab1d66fd7d20b47c7df16 --- doc/build/changelog/changelog_08.rst | 8 +- doc/build/changelog/changelog_09.rst | 22 ++--- doc/build/changelog/changelog_10.rst | 22 ++--- doc/build/changelog/changelog_11.rst | 14 ++-- doc/build/changelog/changelog_12.rst | 16 ++-- doc/build/changelog/changelog_13.rst | 26 +++--- doc/build/changelog/migration_09.rst | 2 +- doc/build/changelog/migration_10.rst | 26 +++--- doc/build/changelog/migration_11.rst | 10 +-- doc/build/changelog/migration_12.rst | 12 +-- doc/build/changelog/migration_13.rst | 16 ++-- doc/build/core/connections.rst | 14 ++-- doc/build/core/defaults.rst | 8 +- doc/build/core/engines.rst | 12 +-- doc/build/core/pooling.rst | 24 +++--- doc/build/core/tutorial.rst | 18 ++--- doc/build/errors.rst | 8 +- doc/build/faq/connections.rst | 4 +- doc/build/faq/performance.rst | 8 +- doc/build/glossary.rst | 2 +- doc/build/orm/session_transaction.rst | 6 +- doc/build/orm/tutorial.rst | 6 +- lib/sqlalchemy/dialects/mssql/base.py | 15 ++-- lib/sqlalchemy/dialects/mssql/pyodbc.py | 3 +- lib/sqlalchemy/dialects/mysql/base.py | 13 +-- lib/sqlalchemy/dialects/oracle/base.py | 17 ++-- lib/sqlalchemy/dialects/oracle/cx_oracle.py | 20 ++--- lib/sqlalchemy/dialects/postgresql/base.py | 7 +- lib/sqlalchemy/dialects/postgresql/json.py | 4 +- lib/sqlalchemy/dialects/postgresql/pg8000.py | 2 +- .../dialects/postgresql/psycopg2.py | 21 ++--- lib/sqlalchemy/dialects/sqlite/base.py | 8 +- lib/sqlalchemy/dialects/sqlite/pysqlcipher.py | 3 +- lib/sqlalchemy/dialects/sqlite/pysqlite.py | 13 +-- lib/sqlalchemy/engine/__init__.py | 19 ++--- lib/sqlalchemy/engine/base.py | 31 +++---- lib/sqlalchemy/engine/default.py | 2 +- lib/sqlalchemy/engine/interfaces.py | 38 +++++---- lib/sqlalchemy/engine/result.py | 80 +++++++++++-------- lib/sqlalchemy/events.py | 53 ++++++------ lib/sqlalchemy/ext/horizontal_shard.py | 8 +- lib/sqlalchemy/interfaces.py | 2 +- lib/sqlalchemy/orm/events.py | 6 +- lib/sqlalchemy/orm/session.py | 8 +- lib/sqlalchemy/orm/util.py | 3 +- lib/sqlalchemy/pool/base.py | 8 +- lib/sqlalchemy/sql/dml.py | 18 +++-- lib/sqlalchemy/sql/operators.py | 5 +- lib/sqlalchemy/sql/sqltypes.py | 14 ++-- 49 files changed, 381 insertions(+), 324 deletions(-) diff --git a/doc/build/changelog/changelog_08.rst b/doc/build/changelog/changelog_08.rst index baaa0d4b9c..fbd7b837fb 100644 --- a/doc/build/changelog/changelog_08.rst +++ b/doc/build/changelog/changelog_08.rst @@ -1341,7 +1341,7 @@ :tickets: 2732 Fixed a bug where the routine to detect the correct kwargs - being sent to :func:`.create_engine` would fail in some cases, + being sent to :func:`_sa.create_engine` would fail in some cases, such as with the Sybase dialect. .. change:: @@ -1645,7 +1645,7 @@ Fixed bug whereby a DBAPI that can return "0" for cursor.lastrowid would not function correctly - in conjunction with :attr:`.ResultProxy.inserted_primary_key`. + in conjunction with :attr:`_engine.ResultProxy.inserted_primary_key`. .. change:: :tags: bug, mssql @@ -2366,7 +2366,7 @@ Added new :meth:`_events.PoolEvents.reset` hook to capture the event before a connection is auto-rolled back, upon return to the pool. 
Together with - :meth:`.ConnectionEvents.rollback` this allows all rollback + :meth:`_events.ConnectionEvents.rollback` this allows all rollback events to be intercepted. .. changelog:: @@ -3733,4 +3733,4 @@ :tags: sql, removed The long-deprecated and non-functional ``assert_unicode`` flag on - :func:`.create_engine` as well as :class:`.String` is removed. + :func:`_sa.create_engine` as well as :class:`.String` is removed. diff --git a/doc/build/changelog/changelog_09.rst b/doc/build/changelog/changelog_09.rst index e4a6ad77fd..afb0b14be6 100644 --- a/doc/build/changelog/changelog_09.rst +++ b/doc/build/changelog/changelog_09.rst @@ -590,7 +590,7 @@ :tickets: 3200 The execution options passed to an :class:`_engine.Engine` either via - :paramref:`.create_engine.execution_options` or + :paramref:`_sa.create_engine.execution_options` or :meth:`_engine.Engine.update_execution_options` are not passed to the special :class:`_engine.Connection` used to initialize the dialect within the "first connect" event; dialects will usually @@ -995,9 +995,9 @@ :tickets: 3076 :versions: 1.0.0b1 - Added new event :meth:`.ConnectionEvents.handle_error`, a more + Added new event :meth:`_events.ConnectionEvents.handle_error`, a more fully featured and comprehensive replacement for - :meth:`.ConnectionEvents.dbapi_error`. + :meth:`_events.ConnectionEvents.dbapi_error`. .. change:: :tags: bug, orm @@ -1497,12 +1497,12 @@ .. change:: :tags: bug, engine - The :meth:`.ConnectionEvents.after_cursor_execute` event is now + The :meth:`_events.ConnectionEvents.after_cursor_execute` event is now emitted for the "_cursor_execute()" method of :class:`_engine.Connection`; this is the "quick" executor that is used for things like when a sequence is executed ahead of an INSERT statement, as well as for dialect startup checks like unicode returns, charset, etc. - the :meth:`.ConnectionEvents.before_cursor_execute` event was already + the :meth:`_events.ConnectionEvents.before_cursor_execute` event was already invoked here. The "executemany" flag is now always set to False here, as this event always corresponds to a single execution. Previously the flag could be True if we were acting on behalf of @@ -2647,7 +2647,7 @@ :tags: bug, engine :tickets: 2873 - The :func:`.create_engine` routine and the related + The :func:`_sa.create_engine` routine and the related :func:`.make_url` function no longer considers the ``+`` sign to be a space within the password field. The parsing has been adjusted to match RFC 1738 exactly, in that both ``username`` @@ -3055,7 +3055,7 @@ with the compilers usual "implicit returning" feature, which is used to efficiently fetch newly generated primary key values. For supporting backends, a dictionary of all fetched values is present at - :attr:`.ResultProxy.returned_defaults`. + :attr:`_engine.ResultProxy.returned_defaults`. .. change:: :tags: bug, mysql @@ -3148,11 +3148,11 @@ :tags: feature, engine :tickets: 2770 - New events added to :class:`.ConnectionEvents`: + New events added to :class:`_events.ConnectionEvents`: - * :meth:`.ConnectionEvents.engine_connect` - * :meth:`.ConnectionEvents.set_connection_execution_options` - * :meth:`.ConnectionEvents.set_engine_execution_options` + * :meth:`_events.ConnectionEvents.engine_connect` + * :meth:`_events.ConnectionEvents.set_connection_execution_options` + * :meth:`_events.ConnectionEvents.set_engine_execution_options` .. 
change:: :tags: bug, sql diff --git a/doc/build/changelog/changelog_10.rst b/doc/build/changelog/changelog_10.rst index fe31200925..f4e012c230 100644 --- a/doc/build/changelog/changelog_10.rst +++ b/doc/build/changelog/changelog_10.rst @@ -1013,7 +1013,7 @@ :tags: bug, engine :tickets: 3481 - Fixed regression where new methods on :class:`.ResultProxy` used + Fixed regression where new methods on :class:`_engine.ResultProxy` used by the ORM :class:`_query.Query` object (part of the performance enhancements of :ticket:`3175`) would not raise the "this result does not return rows" exception in the case where the driver @@ -1024,7 +1024,7 @@ :tags: bug, engine :tickets: 3483 - Fixed regression where :meth:`.ResultProxy.keys` would return + Fixed regression where :meth:`_engine.ResultProxy.keys` would return un-adjusted internal symbol names for "anonymous" labels, which are the "foo_1" types of labels we see generated for SQL functions without labels and similar. This was a side effect of the @@ -1243,7 +1243,7 @@ .. change:: :tags: feature, engine - Added new engine event :meth:`.ConnectionEvents.engine_disposed`. + Added new engine event :meth:`_events.ConnectionEvents.engine_disposed`. Called after the :meth:`_engine.Engine.dispose` method is called. .. change:: @@ -1477,7 +1477,7 @@ Also added new flag :attr:`.ExceptionContext.invalidate_pool_on_disconnect`. - Allows an error handler within :meth:`.ConnectionEvents.handle_error` + Allows an error handler within :meth:`_events.ConnectionEvents.handle_error` to maintain a "disconnect" condition, but to handle calling invalidate on individual connections in a specific manner within the event. @@ -2034,12 +2034,12 @@ :tags: bug, engine :tickets: 3330, 3329 - The "auto close" for :class:`.ResultProxy` is now a "soft" close. + The "auto close" for :class:`_engine.ResultProxy` is now a "soft" close. That is, after exhausting all rows using the fetch methods, the DBAPI cursor is released as before and the object may be safely discarded, but the fetch methods may continue to be called for which they will return an end-of-result object (None for fetchone, empty list - for fetchmany and fetchall). Only if :meth:`.ResultProxy.close` + for fetchmany and fetchall). Only if :meth:`_engine.ResultProxy.close` is called explicitly will these methods raise the "result is closed" error. @@ -2171,7 +2171,7 @@ :tags: feature, postgresql The PG8000 dialect now supports the - :paramref:`.create_engine.encoding` parameter, by setting up + :paramref:`_sa.create_engine.encoding` parameter, by setting up the client encoding on the connection which is then intercepted by pg8000. Pull request courtesy Tony Locke. @@ -2494,7 +2494,7 @@ The engine-level error handling and wrapping routines will now take effect in all engine connection use cases, including when user-custom connect routines are used via the - :paramref:`.create_engine.creator` parameter, as well as when + :paramref:`_sa.create_engine.creator` parameter, as well as when the :class:`_engine.Connection` encounters a connection error on revalidation. @@ -2643,7 +2643,7 @@ or UPDATE statement. This allows :attr:`_schema.Column.key` to be used as a "substitute" string to work around a difficult column name that doesn't translate well into a bound parameter name. Note that - the paramstyle is configurable on :func:`.create_engine` in any case, + the paramstyle is configurable on :func:`_sa.create_engine` in any case, and most DBAPIs today support a named and positional style. .. 
change:: @@ -3210,7 +3210,7 @@ with multiple parameter sets; implicit RETURNING will no longer emit for this statement either. As both of these constructs deal with variable numbers of rows, the - :attr:`.ResultProxy.inserted_primary_key` accessor does not + :attr:`_engine.ResultProxy.inserted_primary_key` accessor does not apply. Previously, there was a documentation note that one may prefer ``inline=True`` with INSERT..FROM SELECT as some databases don't support returning and therefore can't do "implicit" returning, @@ -3324,7 +3324,7 @@ .. change:: :tags: mysql, bug - The MySQL dialect will now disable :meth:`.ConnectionEvents.handle_error` + The MySQL dialect will now disable :meth:`_events.ConnectionEvents.handle_error` events from firing for those statements which it uses internally to detect if a table exists or not. This is achieved using an execution option ``skip_user_error_events`` that disables the handle diff --git a/doc/build/changelog/changelog_11.rst b/doc/build/changelog/changelog_11.rst index 9c5427a69e..d8234488a0 100644 --- a/doc/build/changelog/changelog_11.rst +++ b/doc/build/changelog/changelog_11.rst @@ -633,7 +633,7 @@ :versions: 1.2.0b1 :tickets: 3955 - Changed the mechanics of :class:`.ResultProxy` to unconditionally + Changed the mechanics of :class:`_engine.ResultProxy` to unconditionally delay the "autoclose" step until the :class:`_engine.Connection` is done with the object; in the case where PostgreSQL ON CONFLICT with RETURNING returns no rows, autoclose was occurring in this previously @@ -1399,7 +1399,7 @@ The ``BaseException`` exception class is now intercepted by the exception-handling routines of :class:`_engine.Connection`, and includes - handling by the :meth:`~.ConnectionEvents.handle_error` + handling by the :meth:`_events.ConnectionEvents.handle_error` event. The :class:`_engine.Connection` is now **invalidated** by default in the case of a system level exception that is not a subclass of ``Exception``, including ``KeyboardInterrupt`` and the greenlet @@ -1905,7 +1905,7 @@ :tags: feature, sql Negative integer indexes are now accommodated by rows - returned from a :class:`.ResultProxy`. Pull request courtesy + returned from a :class:`_engine.ResultProxy`. Pull request courtesy Emanuele Gaifas. .. seealso:: @@ -1950,8 +1950,8 @@ :tags: feature, engine Added connection pool events :meth:`ConnectionEvents.close`, - :meth:`.ConnectionEvents.detach`, - :meth:`.ConnectionEvents.close_detached`. + :meth:`_events.ConnectionEvents.detach`, + :meth:`_events.ConnectionEvents.close_detached`. .. change:: :tags: bug, orm, mysql @@ -1973,7 +1973,7 @@ :tickets: 3534 Added basic isolation level support to the SQL Server dialects - via :paramref:`.create_engine.isolation_level` and + via :paramref:`_sa.create_engine.isolation_level` and :paramref:`.Connection.execution_options.isolation_level` parameters. @@ -2226,7 +2226,7 @@ to an ORM or Core table model, no system of labeling or de-duping of common column names needs to occur, which also means there's no need to worry about how label names match to ORM columns and so-forth. In - addition, the :class:`.ResultProxy` has been further enhanced to + addition, the :class:`_engine.ResultProxy` has been further enhanced to map column and string keys to a row with greater precision in some cases. 
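As a rough, hypothetical illustration of the string-key matching described in the entry above (the ``users`` table and its values are invented for this sketch and are not part of the changelog)::

    from sqlalchemy import create_engine, text

    engine = create_engine("sqlite://")
    with engine.connect() as conn:
        conn.execute(text("CREATE TABLE users (id INTEGER, name VARCHAR)"))
        conn.execute(text("INSERT INTO users VALUES (1, 'jack')"))

        row = conn.execute(text("SELECT id, name FROM users")).fetchone()

        # rows from a textual statement can be addressed by plain string key
        print(row["id"], row["name"])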
diff --git a/doc/build/changelog/changelog_12.rst b/doc/build/changelog/changelog_12.rst index 24ef4eeaf3..8c138bde8f 100644 --- a/doc/build/changelog/changelog_12.rst +++ b/doc/build/changelog/changelog_12.rst @@ -1379,8 +1379,8 @@ times where their values will be joined into a list. This is to support the plugins feature documented at :class:`.CreateEnginePlugin` which documents that "plugin" can be passed multiple times. Additionally, the - plugin names can be passed to :func:`.create_engine` outside of the URL - using the new :paramref:`.create_engine.plugins` parameter. + plugin names can be passed to :func:`_sa.create_engine` outside of the URL + using the new :paramref:`_sa.create_engine.plugins` parameter. .. change:: :tags: feature, sql @@ -1667,7 +1667,7 @@ this method can be used with :class:`.horizontal_shard.ShardedQuery` to set the shard identifier. :class:`.horizontal_shard.ShardedQuery` has also been modified such that its :meth:`.ShardedQuery.get` method - interacts correctly with that of :class:`.baked.Result`. + interacts correctly with that of :class:`_baked.Result`. .. change:: :tags: bug, oracle @@ -1886,9 +1886,9 @@ :tags: feature, engine :tickets: 4077 - Added ``__next__()`` and ``next()`` methods to :class:`.ResultProxy`, + Added ``__next__()`` and ``next()`` methods to :class:`_engine.ResultProxy`, so that the ``next()`` builtin function works on the object directly. - :class:`.ResultProxy` has long had an ``__iter__()`` method which already + :class:`_engine.ResultProxy` has long had an ``__iter__()`` method which already allows it to respond to the ``iter()`` builtin. The implementation for ``__iter__()`` is unchanged, as performance testing has indicated that iteration using a ``__next__()`` method with ``StopIteration`` @@ -2727,7 +2727,7 @@ Added native "pessimistic disconnection" handling to the :class:`_pool.Pool` object. The new parameter :paramref:`_pool.Pool.pre_ping`, available from - the engine as :paramref:`.create_engine.pool_pre_ping`, applies an + the engine as :paramref:`_sa.create_engine.pool_pre_ping`, applies an efficient form of the "pre-ping" recipe featured in the pooling documentation, which upon each connection check out, emits a simple statement, typically "SELECT 1", to test the connection for liveness. @@ -2881,8 +2881,8 @@ expression. This causes the result for a NULL column comparison against an empty set to change from NULL to true/false. The behavior is configurable, and the old behavior can be enabled - using the :paramref:`.create_engine.empty_in_strategy` parameter - to :func:`.create_engine`. + using the :paramref:`_sa.create_engine.empty_in_strategy` parameter + to :func:`_sa.create_engine`. .. seealso:: diff --git a/doc/build/changelog/changelog_13.rst b/doc/build/changelog/changelog_13.rst index 535cd51256..3250ada293 100644 --- a/doc/build/changelog/changelog_13.rst +++ b/doc/build/changelog/changelog_13.rst @@ -907,7 +907,7 @@ :tickets: 4799 Added dialect-level flag ``encoding_errors`` to the cx_Oracle dialect, - which can be specified as part of :func:`.create_engine`. This is passed + which can be specified as part of :func:`_sa.create_engine`. 
This is passed to SQLAlchemy's unicode decoding converter under Python 2, and to cx_Oracle's ``cursor.var()`` object as the ``encodingErrors`` parameter under Python 3, for the very unusual case that broken encodings are present @@ -981,12 +981,12 @@ :tags: usecase, engine :tickets: 4857 - Added new :func:`.create_engine` parameter - :paramref:`.create_engine.max_identifier_length`. This overrides the + Added new :func:`_sa.create_engine` parameter + :paramref:`_sa.create_engine.max_identifier_length`. This overrides the dialect-coded "max identifier length" in order to accommodate for databases that have recently changed this length and the SQLAlchemy dialect has not yet been adjusted to detect for that version. This parameter interacts - with the existing :paramref:`.create_engine.label_length` parameter in that + with the existing :paramref:`_sa.create_engine.label_length` parameter in that it establishes the maximum (and default) value for anonymously generated labels. Additionally, post-connection detection of max identifier lengths has been added to the dialect system. This feature is first being used @@ -1001,13 +1001,13 @@ :tickets: 4857 The Oracle dialect now emits a warning if Oracle version 12.2 or greater is - used, and the :paramref:`.create_engine.max_identifier_length` parameter is + used, and the :paramref:`_sa.create_engine.max_identifier_length` parameter is not set. The version in this specific case defaults to that of the "compatibility" version set in the Oracle server configuration, not the actual server version. In version 1.4, the default max_identifier_length for 12.2 or greater will move to 128 characters. In order to maintain forwards compatibility, applications should set - :paramref:`.create_engine.max_identifier_length` to 30 in order to maintain + :paramref:`_sa.create_engine.max_identifier_length` to 30 in order to maintain the same length behavior, or to 128 in order to test the upcoming behavior. This length determines among other things how generated constraint names are truncated for statements like ``CREATE CONSTRAINT`` and ``DROP @@ -1210,7 +1210,7 @@ :tags: feature, engine :tickets: 4815 - Added new parameter :paramref:`.create_engine.hide_parameters` which when + Added new parameter :paramref:`_sa.create_engine.hide_parameters` which when set to True will cause SQL parameters to no longer be logged, nor rendered in the string representation of a :class:`.StatementError` object. @@ -1250,8 +1250,8 @@ however the SQLite dialect calls them ``_json_serilizer`` and ``_json_deserilalizer``. The names have been corrected, the old names are accepted with a change warning, and these parameters are now documented as - :paramref:`.create_engine.json_serializer` and - :paramref:`.create_engine.json_deserializer`. + :paramref:`_sa.create_engine.json_serializer` and + :paramref:`_sa.create_engine.json_deserializer`. .. change:: @@ -1956,7 +1956,7 @@ :tickets: 4562 Added support for parameter-less connection URLs for the psycopg2 dialect, - meaning, the URL can be passed to :func:`.create_engine` as + meaning, the URL can be passed to :func:`_sa.create_engine` as ``"postgresql+psycopg2://"`` with no additional arguments to indicate an empty DSN passed to libpq, which indicates to connect to "localhost" with no username, password, or database given. 
Pull request courtesy Julian @@ -2370,7 +2370,7 @@ :tags: change, sql :tickets: 4393 - The :paramref:`.create_engine.convert_unicode` and + The :paramref:`_sa.create_engine.convert_unicode` and :paramref:`.String.convert_unicode` parameters have been deprecated. These parameters were built back when most Python DBAPIs had little to no support for Python Unicode objects, and SQLAlchemy needed to take on the very @@ -2457,7 +2457,7 @@ The Oracle dialect will no longer use the NCHAR/NCLOB datatypes represent generic unicode strings or clob fields in conjunction with :class:`.Unicode` and :class:`.UnicodeText` unless the flag - ``use_nchar_for_unicode=True`` is passed to :func:`.create_engine` - + ``use_nchar_for_unicode=True`` is passed to :func:`_sa.create_engine` - this includes CREATE TABLE behavior as well as ``setinputsizes()`` for bound parameters. On the read side, automatic Unicode conversion under Python 2 has been added to CHAR/VARCHAR/CLOB result rows, to match the @@ -2527,7 +2527,7 @@ :tags: feature, engine Added new "lifo" mode to :class:`.QueuePool`, typically enabled by setting - the flag :paramref:`.create_engine.pool_use_lifo` to True. "lifo" mode + the flag :paramref:`_sa.create_engine.pool_use_lifo` to True. "lifo" mode means the same connection just checked in will be the first to be checked out again, allowing excess connections to be cleaned up from the server side during periods of the pool being only partially utilized. Pull request diff --git a/doc/build/changelog/migration_09.rst b/doc/build/changelog/migration_09.rst index b73e784ea7..7dec302031 100644 --- a/doc/build/changelog/migration_09.rst +++ b/doc/build/changelog/migration_09.rst @@ -1800,7 +1800,7 @@ tasks, and has been reported as *lowering* performance as a result. It's not clear how this flag is actually usable given this information, and as it appears to be only a performance enhancing feature, it now defaults to ``False``. The value can be controlled by passing the flag ``retaining=True`` -to the :func:`.create_engine` call. This is a new flag which is added as of +to the :func:`_sa.create_engine` call. This is a new flag which is added as of 0.8.2, so applications on 0.8.2 can begin setting this to ``True`` or ``False`` as desired. diff --git a/doc/build/changelog/migration_10.rst b/doc/build/changelog/migration_10.rst index 4066c567cb..e31b621fe6 100644 --- a/doc/build/changelog/migration_10.rst +++ b/doc/build/changelog/migration_10.rst @@ -648,14 +648,14 @@ be explicitly omitted from the sort. ResultProxy "auto close" is now a "soft" close ---------------------------------------------- -For many releases, the :class:`.ResultProxy` object has always been +For many releases, the :class:`_engine.ResultProxy` object has always been automatically closed out at the point at which all result rows have been fetched. This was to allow usage of the object without the need to call -upon :meth:`.ResultProxy.close` explicitly; as all DBAPI resources had been +upon :meth:`_engine.ResultProxy.close` explicitly; as all DBAPI resources had been freed, the object was safe to discard. 
However, the object maintained a strict "closed" behavior, which meant that any subsequent calls to -:meth:`.ResultProxy.fetchone`, :meth:`.ResultProxy.fetchmany` or -:meth:`.ResultProxy.fetchall` would now raise a :class:`.ResourceClosedError`:: +:meth:`_engine.ResultProxy.fetchone`, :meth:`_engine.ResultProxy.fetchmany` or +:meth:`_engine.ResultProxy.fetchall` would now raise a :class:`.ResourceClosedError`:: >>> result = connection.execute(stmt) >>> result.fetchone() @@ -671,13 +671,13 @@ are exhausted. It also interferes with behavior for some implementations of result proxy, such as the :class:`.BufferedColumnResultProxy` used by the cx_oracle dialect for certain datatypes. -To solve this, the "closed" state of the :class:`.ResultProxy` has been +To solve this, the "closed" state of the :class:`_engine.ResultProxy` has been broken into two states; a "soft close" which does the majority of what "close" does, in that it releases the DBAPI cursor and in the case of a "close with result" object will also release the connection, and a "closed" state which is everything included by "soft close" as well as -establishing the fetch methods as "closed". The :meth:`.ResultProxy.close` -method is now never called implicitly, only the :meth:`.ResultProxy._soft_close` +establishing the fetch methods as "closed". The :meth:`_engine.ResultProxy.close` +method is now never called implicitly, only the :meth:`_engine.ResultProxy._soft_close` method which is non-public:: >>> result = connection.execute(stmt) @@ -2090,7 +2090,7 @@ A similar change is also applied to an INSERT..VALUES with multiple parameter sets; implicit RETURNING will no longer emit for this statement either. As both of these constructs deal with variable numbers of rows, the -:attr:`.ResultProxy.inserted_primary_key` accessor does not +:attr:`_engine.ResultProxy.inserted_primary_key` accessor does not apply. Previously, there was a documentation note that one may prefer ``inline=True`` with INSERT..FROM SELECT as some databases don't support returning and therefore can't do "implicit" returning, @@ -2122,10 +2122,10 @@ SQLAlchemy's wrapping of DBAPI exceptions was not taking place in the case where a :class:`_engine.Connection` object was invalidated, and then tried to reconnect and encountered an error; this has been resolved. -Additionally, the recently added :meth:`.ConnectionEvents.handle_error` +Additionally, the recently added :meth:`_events.ConnectionEvents.handle_error` event is now invoked for errors that occur upon initial connect, upon -reconnect, and when :func:`.create_engine` is used given a custom connection -function via :paramref:`.create_engine.creator`. +reconnect, and when :func:`_sa.create_engine` is used given a custom connection +function via :paramref:`_sa.create_engine.creator`. The :class:`.ExceptionContext` object has a new datamember :attr:`.ExceptionContext.engine` that will always refer to the :class:`_engine.Engine` @@ -2400,7 +2400,7 @@ by PostgreSQL as of 9.4. SQLAlchemy allows this using PG8000 dialect supports client side encoding -------------------------------------------- -The :paramref:`.create_engine.encoding` parameter is now honored +The :paramref:`_sa.create_engine.encoding` parameter is now honored by the pg8000 dialect, using on connect handler which emits ``SET CLIENT_ENCODING`` matching the selected encoding. @@ -2495,7 +2495,7 @@ supports unicode fully including MySQL-python with use_unicode=0. 
MySQL internal "no such table" exceptions not passed to event handlers ---------------------------------------------------------------------- -The MySQL dialect will now disable :meth:`.ConnectionEvents.handle_error` +The MySQL dialect will now disable :meth:`_events.ConnectionEvents.handle_error` events from firing for those statements which it uses internally to detect if a table exists or not. This is achieved using an execution option ``skip_user_error_events`` that disables the handle diff --git a/doc/build/changelog/migration_11.rst b/doc/build/changelog/migration_11.rst index 67dec39bcb..ef55466b89 100644 --- a/doc/build/changelog/migration_11.rst +++ b/doc/build/changelog/migration_11.rst @@ -1079,7 +1079,7 @@ identifiable base for system-level exceptions such as ``KeyboardInterrupt``, ``SystemExit``, and notably the ``GreenletExit`` exception that's used by eventlet and gevent. This exception class is now intercepted by the exception- handling routines of :class:`_engine.Connection`, and includes handling by the -:meth:`~.ConnectionEvents.handle_error` event. The :class:`_engine.Connection` is now +:meth:`_events.ConnectionEvents.handle_error` event. The :class:`_engine.Connection` is now **invalidated** by default in the case of a system level exception that is not a subclass of ``Exception``, as it is assumed an operation was interrupted and the connection may be in an unusable state. The MySQL drivers are most @@ -1421,7 +1421,7 @@ The flag also works at the ORM level:: ResultSet column matching enhancements; positional column setup for textual SQL ------------------------------------------------------------------------------- -A series of improvements were made to the :class:`.ResultProxy` system +A series of improvements were made to the :class:`_engine.ResultProxy` system in the 1.0 series as part of :ticket:`918`, which reorganizes the internals to match cursor-bound result columns with table/ORM metadata positionally, rather than by matching names, for compiled SQL constructs that contain full @@ -2666,7 +2666,7 @@ Added support for AUTOCOMMIT "isolation level" ---------------------------------------------- The MySQL dialect now accepts the value "AUTOCOMMIT" for the -:paramref:`.create_engine.isolation_level` and +:paramref:`_sa.create_engine.isolation_level` and :paramref:`.Connection.execution_options.isolation_level` parameters:: @@ -2790,7 +2790,7 @@ use by the sqlite3_module.xBestIndex method", however SQLAlchemy's translation of these dotted column names is no longer required with this version, so is turned off when version 3.10.0 or greater is detected. -Overall, the SQLAlchemy :class:`.ResultProxy` as of the 1.0 series relies much +Overall, the SQLAlchemy :class:`_engine.ResultProxy` as of the 1.0 series relies much less on column names in result sets when delivering results for Core and ORM SQL constructs, so the importance of this issue was already lessened in any case. @@ -2846,7 +2846,7 @@ Added transaction isolation level support for SQL Server -------------------------------------------------------- All SQL Server dialects support transaction isolation level settings -via the :paramref:`.create_engine.isolation_level` and +via the :paramref:`_sa.create_engine.isolation_level` and :paramref:`.Connection.execution_options.isolation_level` parameters. 
The four standard levels are supported as well as ``SNAPSHOT``:: diff --git a/doc/build/changelog/migration_12.rst b/doc/build/changelog/migration_12.rst index 9369886230..44173437f5 100644 --- a/doc/build/changelog/migration_12.rst +++ b/doc/build/changelog/migration_12.rst @@ -690,11 +690,11 @@ Pessimistic disconnection detection added to the connection pool ---------------------------------------------------------------- The connection pool documentation has long featured a recipe for using -the :meth:`.ConnectionEvents.engine_connect` engine event to emit a simple +the :meth:`_events.ConnectionEvents.engine_connect` engine event to emit a simple statement on a checked-out connection to test it for liveness. The functionality of this recipe has now been added into the connection pool itself, when used in conjunction with an appropriate dialect. Using -the new parameter :paramref:`.create_engine.pool_pre_ping`, each connection +the new parameter :paramref:`_sa.create_engine.pool_pre_ping`, each connection checked out will be tested for freshness before being returned:: engine = create_engine("mysql+pymysql://", pool_pre_ping=True) @@ -730,8 +730,8 @@ of a query that is comparing a SQL expression or column that evaluates to NULL when compared to an empty set, producing a boolean value false or true (for NOT IN) rather than NULL. The warning that would emit under this condition is also removed. The old behavior is available using the -:paramref:`.create_engine.empty_in_strategy` parameter to -:func:`.create_engine`. +:paramref:`_sa.create_engine.empty_in_strategy` parameter to +:func:`_sa.create_engine`. In SQL, the IN and NOT IN operators do not support comparison to a collection of values that is explicitly empty; meaning, this syntax is @@ -801,7 +801,7 @@ for the negation ``where(~null_expr.in_([]))``, since this now evaluates to true and not NULL. The behavior can now be controlled using the flag -:paramref:`.create_engine.empty_in_strategy`, which defaults to the +:paramref:`_sa.create_engine.empty_in_strategy`, which defaults to the ``"static"`` setting, but may also be set to ``"dynamic"`` or ``"dynamic_warn"``, where the ``"dynamic_warn"`` setting is equivalent to the previous behavior of emitting ``expr != expr`` as well as a performance @@ -1550,7 +1550,7 @@ DML statements in batch. SQLAlchemy 1.2 now includes support for these helpers to be used transparently whenever the :class:`_engine.Engine` makes use of ``cursor.executemany()`` to invoke a statement against multiple parameter sets. The feature is off by default and can be enabled using the -``use_batch_mode`` argument on :func:`.create_engine`:: +``use_batch_mode`` argument on :func:`_sa.create_engine`:: engine = create_engine( "postgresql+psycopg2://scott:tiger@host/dbname", diff --git a/doc/build/changelog/migration_13.rst b/doc/build/changelog/migration_13.rst index 812eee414b..e4f894de5e 100644 --- a/doc/build/changelog/migration_13.rst +++ b/doc/build/changelog/migration_13.rst @@ -1273,7 +1273,7 @@ well as for casting decimal bind values for MySQL. New last-in-first-out strategy for QueuePool --------------------------------------------- -The connection pool usually used by :func:`.create_engine` is known +The connection pool usually used by :func:`_sa.create_engine` is known as :class:`.QueuePool`. This pool uses an object equivalent to Python's built-in ``Queue`` class in order to store database connections waiting to be used. 
The ``Queue`` features first-in-first-out behavior, which is @@ -1282,7 +1282,7 @@ persistently in the pool. However, a potential downside of this is that when the utilization of the pool is low, the re-use of each connection in series means that a server-side timeout strategy that attempts to reduce unused connections is prevented from shutting down these connections. To suit -this use case, a new flag :paramref:`.create_engine.pool_use_lifo` is added +this use case, a new flag :paramref:`_sa.create_engine.pool_use_lifo` is added which reverses the ``.get()`` method of the ``Queue`` to pull the connection from the beginning of the queue instead of the end, essentially turning the "queue" into a "stack" (adding a whole new pool called ``StackPool`` was @@ -1397,7 +1397,7 @@ convert_unicode parameters deprecated -------------------------------------- The parameters :paramref:`.String.convert_unicode` and -:paramref:`.create_engine.convert_unicode` are deprecated. The purpose of +:paramref:`_sa.create_engine.convert_unicode` are deprecated. The purpose of these parameters was to instruct SQLAlchemy to ensure that incoming Python Unicode objects under Python 2 were encoded to bytestrings before passing to the database, and to expect bytestrings from the database to be converted back @@ -1589,7 +1589,7 @@ functions like ``trunc()``. The one case where ``NVARCHAR2`` and related types may be needed is for a database that is not using a Unicode-compliant character set. In this case, -the flag ``use_nchar_for_unicode`` can be passed to :func:`.create_engine` to +the flag ``use_nchar_for_unicode`` can be passed to :func:`_sa.create_engine` to re-enable the old behavior. As always, using the :class:`_oracle.NVARCHAR2` and :class:`_oracle.NCLOB` @@ -1625,9 +1625,9 @@ dialect as well as the URL string: The SQLAlchemy :class:`_engine.Connection` object is not considered to be thread-safe itself so there's no need for this flag to be passed. -* It's deprecated to pass ``threaded`` to :func:`.create_engine` itself. +* It's deprecated to pass ``threaded`` to :func:`_sa.create_engine` itself. To set the value of ``threaded`` to ``True``, pass it to either the - :paramref:`.create_engine.connect_args` dictionary or use the query + :paramref:`_sa.create_engine.connect_args` dictionary or use the query string e.g. ``oracle+cx_oracle://...?threaded=true``. * All parameters passed on the URL query string that are not otherwise @@ -1636,7 +1636,7 @@ dialect as well as the URL string: or booleans including ``mode``, ``purity``, ``events``, and ``threaded``. * As was the case earlier, all cx_Oracle ``.connect()`` arguments are accepted - via the :paramref:`.create_engine.connect_args` dictionary, the documentation + via the :paramref:`_sa.create_engine.connect_args` dictionary, the documentation was inaccurate regarding this. :ticket:`4369` @@ -1651,7 +1651,7 @@ Support for pyodbc fast_executemany Pyodbc's recently added "fast_executemany" mode, available when using the Microsoft ODBC driver, is now an option for the pyodbc / mssql dialect. 
-Pass it via :func:`.create_engine`:: +Pass it via :func:`_sa.create_engine`:: engine = create_engine( "mssql+pyodbc://scott:tiger@mssql2017:1433/test?driver=ODBC+Driver+13+for+SQL+Server", diff --git a/doc/build/core/connections.rst b/doc/build/core/connections.rst index 053d470e67..229d942a10 100644 --- a/doc/build/core/connections.rst +++ b/doc/build/core/connections.rst @@ -19,11 +19,11 @@ Basic Usage =========== Recall from :doc:`/core/engines` that an :class:`_engine.Engine` is created via -the :func:`.create_engine` call:: +the :func:`_sa.create_engine` call:: engine = create_engine('mysql://scott:tiger@localhost/test') -The typical usage of :func:`.create_engine()` is once per particular database +The typical usage of :func:`_sa.create_engine()` is once per particular database URL, held globally for the lifetime of a single application process. A single :class:`_engine.Engine` manages many individual :term:`DBAPI` connections on behalf of the process and is intended to be called upon in a concurrent fashion. The @@ -56,11 +56,11 @@ end of the block. The :class:`_engine.Connection`, is a **proxy** object for an actual DBAPI connection. The DBAPI connection is retrieved from the connection pool at the point at which :class:`_engine.Connection` is created. -The object returned is known as :class:`.ResultProxy`, which +The object returned is known as :class:`_engine.ResultProxy`, which references a DBAPI cursor and provides methods for fetching rows similar to that of the DBAPI cursor. The DBAPI cursor will be closed -by the :class:`.ResultProxy` when all of its result rows (if any) are -exhausted. A :class:`.ResultProxy` that returns no rows, such as that of +by the :class:`_engine.ResultProxy` when all of its result rows (if any) are +exhausted. A :class:`_engine.ResultProxy` that returns no rows, such as that of an UPDATE statement (without any returned rows), releases cursor resources immediately upon construction. @@ -341,7 +341,7 @@ In both "connectionless" examples, the :class:`~sqlalchemy.engine.Connection` is created behind the scenes; the :class:`~sqlalchemy.engine.ResultProxy` returned by the ``execute()`` call references the :class:`~sqlalchemy.engine.Connection` used to issue -the SQL statement. When the :class:`.ResultProxy` is closed, the underlying +the SQL statement. When the :class:`_engine.ResultProxy` is closed, the underlying :class:`_engine.Connection` is closed for us, resulting in the DBAPI connection being returned to the pool with transactional resources removed. @@ -650,7 +650,7 @@ Multiple result set support is available from a raw DBAPI cursor using the Registering New Dialects ======================== -The :func:`.create_engine` function call locates the given dialect +The :func:`_sa.create_engine` function call locates the given dialect using setuptools entrypoints. These entry points can be established for third party dialects within the setup.py script. For example, to create a new dialect "foodialect://", the steps are as follows: diff --git a/doc/build/core/defaults.rst b/doc/build/core/defaults.rst index adb1691725..ad86ab2398 100644 --- a/doc/build/core/defaults.rst +++ b/doc/build/core/defaults.rst @@ -234,14 +234,14 @@ inline. 
When the statement is executed with a single set of parameters (that is, it is not an "executemany" style execution), the returned :class:`~sqlalchemy.engine.ResultProxy` will contain a collection accessible -via :meth:`.ResultProxy.postfetch_cols` which contains a list of all +via :meth:`_engine.ResultProxy.postfetch_cols` which contains a list of all :class:`~sqlalchemy.schema.Column` objects which had an inline-executed default. Similarly, all parameters which were bound to the statement, including all Python and SQL expressions which were pre-executed, are present in the -:meth:`.ResultProxy.last_inserted_params` or -:meth:`.ResultProxy.last_updated_params` collections on +:meth:`_engine.ResultProxy.last_inserted_params` or +:meth:`_engine.ResultProxy.last_updated_params` collections on :class:`~sqlalchemy.engine.ResultProxy`. The -:attr:`.ResultProxy.inserted_primary_key` collection contains a list of primary +:attr:`_engine.ResultProxy.inserted_primary_key` collection contains a list of primary key values for the row inserted (a list so that single-column and composite- column primary keys are represented in the same format). diff --git a/doc/build/core/engines.rst b/doc/build/core/engines.rst index dda1dd1e3b..67469bbee9 100644 --- a/doc/build/core/engines.rst +++ b/doc/build/core/engines.rst @@ -19,7 +19,7 @@ which together interpret the DBAPI's module functions as well as the behavior of the database. Creating an engine is just a matter of issuing a single call, -:func:`.create_engine()`:: +:func:`_sa.create_engine()`:: from sqlalchemy import create_engine engine = create_engine('postgresql://scott:tiger@localhost:5432/mydatabase') @@ -55,7 +55,7 @@ See the section :ref:`dialect_toplevel` for information on the various backends Database Urls ============= -The :func:`.create_engine` function produces an :class:`_engine.Engine` object based +The :func:`_sa.create_engine` function produces an :class:`_engine.Engine` object based on a URL. These URLs follow `RFC-1738 `_, and usually can include username, password, hostname, database name as well as optional keyword arguments for additional configuration. @@ -264,8 +264,8 @@ Python's standard `logging implement informational and debug log output with SQLAlchemy. This allows SQLAlchemy's logging to integrate in a standard way with other applications and libraries. There are also two parameters -:paramref:`.create_engine.echo` and :paramref:`.create_engine.echo_pool` -present on :func:`.create_engine` which allow immediate logging to ``sys.stdout`` +:paramref:`_sa.create_engine.echo` and :paramref:`_sa.create_engine.echo_pool` +present on :func:`_sa.create_engine` which allow immediate logging to ``sys.stdout`` for the purposes of local development; these parameters ultimately interact with the regular Python loggers described below. @@ -278,13 +278,13 @@ namespace of SA loggers that can be turned on is as follows: * ``sqlalchemy.engine`` - controls SQL echoing. set to ``logging.INFO`` for SQL query output, ``logging.DEBUG`` for query + result set output. These settings are equivalent to ``echo=True`` and ``echo="debug"`` on - :paramref:`.create_engine.echo`, respectively. + :paramref:`_sa.create_engine.echo`, respectively. * ``sqlalchemy.pool`` - controls connection pool logging. set to ``logging.INFO`` to log connection invalidation and recycle events; set to ``logging.DEBUG`` to additionally log all pool checkins and checkouts. 
These settings are equivalent to ``pool_echo=True`` and ``pool_echo="debug"`` - on :paramref:`.create_engine.echo_pool`, respectively. + on :paramref:`_sa.create_engine.echo_pool`, respectively. * ``sqlalchemy.dialects`` - controls custom logging for SQL dialects, to the extend that logging is used within specific dialects, which is generally diff --git a/doc/build/core/pooling.rst b/doc/build/core/pooling.rst index 1d0fdde146..2c33c567c4 100644 --- a/doc/build/core/pooling.rst +++ b/doc/build/core/pooling.rst @@ -50,7 +50,7 @@ that none of them "pre create" connections - all implementations wait until first use before creating a connection. At that point, if no additional concurrent checkout requests for more connections are made, no additional connections are created. This is why it's perfectly -fine for :func:`.create_engine` to default to using a :class:`.QueuePool` +fine for :func:`_sa.create_engine` to default to using a :class:`.QueuePool` of size five without regard to whether or not the application really needs five connections queued up - the pool would only grow to that size if the application actually used five connections concurrently, in which case the usage of a @@ -61,7 +61,7 @@ small pool is an entirely appropriate default behavior. Switching Pool Implementations ------------------------------ -The usual way to use a different kind of pool with :func:`.create_engine` +The usual way to use a different kind of pool with :func:`_sa.create_engine` is to use the ``poolclass`` argument. This argument accepts a class imported from the ``sqlalchemy.pool`` module, and handles the details of building the pool for you. Common options include specifying @@ -81,7 +81,7 @@ Using a Custom Connection Function ---------------------------------- All :class:`_pool.Pool` classes accept an argument ``creator`` which is -a callable that creates a new connection. :func:`.create_engine` +a callable that creates a new connection. :func:`_sa.create_engine` accepts this function to pass onto the pool via an argument of the same name:: @@ -97,7 +97,7 @@ the same name:: For most "initialize on connection" routines, it's more convenient to use the :class:`_events.PoolEvents` event hooks, so that the usual URL argument to -:func:`.create_engine` is still usable. ``creator`` is there as +:func:`_sa.create_engine` is still usable. ``creator`` is there as a last resort for when a DBAPI has some form of ``connect`` that is not at all supported by SQLAlchemy. @@ -150,7 +150,7 @@ are removed. This behavior can be disabled using the ``reset_on_return`` option of :class:`_pool.Pool`. A particular pre-created :class:`_pool.Pool` can be shared with one or more -engines by passing it to the ``pool`` argument of :func:`.create_engine`:: +engines by passing it to the ``pool`` argument of :func:`_sa.create_engine`:: e = create_engine('postgresql://', pool=mypool) @@ -201,8 +201,8 @@ up to the application to either abandon the operation, or retry the whole transaction again. Pessimistic testing of connections upon checkout is achievable by -using the :paramref:`_pool.Pool.pre_ping` argument, available from :func:`.create_engine` -via the :paramref:`.create_engine.pool_pre_ping` argument:: +using the :paramref:`_pool.Pool.pre_ping` argument, available from :func:`_sa.create_engine` +via the :paramref:`_sa.create_engine.pool_pre_ping` argument:: engine = create_engine("mysql+pymysql://user:pw@host/db", pool_pre_ping=True) @@ -231,9 +231,9 @@ to three times before giving up, propagating the database error last received. 
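As a supplementary sketch only (the URL and the recycle interval are placeholders, not part of the recipe above), pre-ping is often combined with a recycle interval shorter than the server's idle timeout::

    from sqlalchemy import create_engine

    engine = create_engine(
        "mysql+pymysql://user:pw@host/db",
        pool_pre_ping=True,   # issue a lightweight ping on each checkout
        pool_recycle=3600,    # additionally discard connections older than one hour
    )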
Custom / Legacy Pessimistic Ping ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -Before :paramref:`.create_engine.pool_pre_ping` was added, the "pre-ping" +Before :paramref:`_sa.create_engine.pool_pre_ping` was added, the "pre-ping" approach historically has been performed manually using -the :meth:`.ConnectionEvents.engine_connect` engine event. +the :meth:`_events.ConnectionEvents.engine_connect` engine event. The most common recipe for this is below, for reference purposes in case an application is already using such a recipe, or special behaviors are needed:: @@ -403,7 +403,7 @@ Using FIFO vs. LIFO The :class:`.QueuePool` class features a flag called :paramref:`.QueuePool.use_lifo`, which can also be accessed from -:func:`.create_engine` via the flag :paramref:`.create_engine.pool_use_lifo`. +:func:`_sa.create_engine` via the flag :paramref:`_sa.create_engine.pool_use_lifo`. Setting this flag to ``True`` causes the pool's "queue" behavior to instead be that of a "stack", e.g. the last connection to be returned to the pool is the first one to be used on the next request. In contrast to the pool's long- @@ -417,7 +417,7 @@ connections ready to go even during idle periods:: engine = create_engine( "postgreql://", pool_use_lifo=True, pool_pre_ping=True) -Above, we also make use of the :paramref:`.create_engine.pool_pre_ping` flag +Above, we also make use of the :paramref:`_sa.create_engine.pool_pre_ping` flag so that connections which are closed from the server side are gracefully handled by the connection pool and replaced with a new connection. @@ -436,7 +436,7 @@ Using Connection Pools with Multiprocessing ------------------------------------------- It's critical that when using a connection pool, and by extension when -using an :class:`_engine.Engine` created via :func:`.create_engine`, that +using an :class:`_engine.Engine` created via :func:`_sa.create_engine`, that the pooled connections **are not shared to a forked process**. TCP connections are represented as file descriptors, which usually work across process boundaries, meaning this will cause concurrent access to the file descriptor diff --git a/doc/build/core/tutorial.rst b/doc/build/core/tutorial.rst index df9947f783..9068bfba41 100644 --- a/doc/build/core/tutorial.rst +++ b/doc/build/core/tutorial.rst @@ -77,7 +77,7 @@ and want less output generated, set it to ``False``. This tutorial will format the SQL behind a popup window so it doesn't get in our way; just click the "SQL" links to see what's being generated. -The return value of :func:`.create_engine` is an instance of +The return value of :func:`_sa.create_engine` is an instance of :class:`_engine.Engine`, and it represents the core interface to the database, adapted through a :term:`dialect` that handles the details of the database and :term:`DBAPI` in use. In this case the SQLite @@ -86,7 +86,7 @@ module. .. sidebar:: Lazy Connecting - The :class:`_engine.Engine`, when first returned by :func:`.create_engine`, + The :class:`_engine.Engine`, when first returned by :func:`_sa.create_engine`, has not actually tried to connect to the database yet; that happens only the first time it is asked to perform a task against the database. @@ -96,7 +96,7 @@ database, which is then used to emit the SQL. .. seealso:: - :ref:`database_urls` - includes examples of :func:`.create_engine` + :ref:`database_urls` - includes examples of :func:`_sa.create_engine` connecting to several kinds of databases with links to more information. 
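A minimal sketch of the lazy-connect behavior described above, using an in-memory SQLite database purely for illustration::

    from sqlalchemy import create_engine, text

    engine = create_engine("sqlite://", echo=True)

    # no DBAPI connection has been opened yet; the first one is acquired here
    with engine.connect() as conn:
        print(conn.execute(text("select 'hello world'")).scalar())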
Define and Create Tables @@ -288,7 +288,7 @@ DBAPI connection, the result, known as a :class:`~sqlalchemy.engine.ResultProxy` object, is analogous to the DBAPI cursor object. In the case of an INSERT, we can get important information from it, such as the primary key values which were generated from our statement -using :attr:`.ResultProxy.inserted_primary_key`: +using :attr:`_engine.ResultProxy.inserted_primary_key`: .. sourcecode:: pycon+sql @@ -303,7 +303,7 @@ at a newly generated primary key value, even though the method of generating them is different across different databases; each database's :class:`~sqlalchemy.engine.interfaces.Dialect` knows the specific steps needed to determine the correct value (or values; note that -:attr:`.ResultProxy.inserted_primary_key` +:attr:`_engine.ResultProxy.inserted_primary_key` returns a list so that it supports composite primary keys). Methods here range from using ``cursor.lastrowid``, to selecting from a database-specific function, to using ``INSERT..RETURNING`` syntax; this all occurs transparently. @@ -447,11 +447,11 @@ SELECT directly as keys: {stop}name: jack ; fullname: Jack Jones name: wendy ; fullname: Wendy Williams -The :class:`.ResultProxy` object features "auto-close" behavior that closes the +The :class:`_engine.ResultProxy` object features "auto-close" behavior that closes the underlying DBAPI ``cursor`` object when all pending result rows have been -fetched. If a :class:`.ResultProxy` is to be discarded before such an +fetched. If a :class:`_engine.ResultProxy` is to be discarded before such an autoclose has occurred, it can be explicitly closed using the -:meth:`.ResultProxy.close` method: +:meth:`_engine.ResultProxy.close` method: .. sourcecode:: pycon+sql @@ -2266,7 +2266,7 @@ Both of :meth:`_expression.TableClause.update` and :meth:`_expression.TableClause.delete` are associated with *matched row counts*. This is a number indicating the number of rows that were matched by the WHERE clause. Note that by "matched", this includes rows where no UPDATE actually took place. -The value is available as :attr:`~.ResultProxy.rowcount`: +The value is available as :attr:`_engine.ResultProxy.rowcount`: .. sourcecode:: pycon+sql diff --git a/doc/build/errors.rst b/doc/build/errors.rst index 0f725c483f..9f62881797 100644 --- a/doc/build/errors.rst +++ b/doc/build/errors.rst @@ -86,13 +86,13 @@ familiar with. In order to allow for a higher number of connections be in use at once, the pool can be adjusted using the - :paramref:`.create_engine.pool_size` and :paramref:`.create_engine.max_overflow` - parameters as passed to the :func:`.create_engine` function. The timeout + :paramref:`_sa.create_engine.pool_size` and :paramref:`_sa.create_engine.max_overflow` + parameters as passed to the :func:`_sa.create_engine` function. The timeout to wait for a connection to be available is configured using the - :paramref:`.create_engine.pool_timeout` parameter. + :paramref:`_sa.create_engine.pool_timeout` parameter. * The pool can be configured to have unlimited overflow by setting - :paramref:`.create_engine.max_overflow` to the value "-1". With this setting, + :paramref:`_sa.create_engine.max_overflow` to the value "-1". With this setting, the pool will still maintain a fixed pool of connections, however it will never block upon a new connection being requested; it will instead unconditionally make a new connection if none are available. 
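A hedged sketch of the sizing parameters discussed above; the URL and the specific values are illustrative only::

    from sqlalchemy import create_engine

    engine = create_engine(
        "postgresql://scott:tiger@localhost/test",
        pool_size=5,       # connections held persistently in the pool
        max_overflow=10,   # additional connections permitted beyond pool_size
        pool_timeout=30,   # seconds to wait for a checkout before raising an error
    )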
diff --git a/doc/build/faq/connections.rst b/doc/build/faq/connections.rst index 7dbeebc00e..20ed1d8c8c 100644 --- a/doc/build/faq/connections.rst +++ b/doc/build/faq/connections.rst @@ -24,7 +24,7 @@ For more detail, see :ref:`engines_toplevel` and :ref:`pooling_toplevel`. How do I pass custom connect arguments to my database API? ---------------------------------------------------------- -The :func:`.create_engine` call accepts additional arguments either +The :func:`_sa.create_engine` call accepts additional arguments either directly via the ``connect_args`` keyword argument:: e = create_engine("mysql://scott:tiger@localhost/test", @@ -46,7 +46,7 @@ The primary cause of this error is that the MySQL connection has timed out and has been closed by the server. The MySQL server closes connections which have been idle a period of time which defaults to eight hours. To accommodate this, the immediate setting is to enable the -:paramref:`.create_engine.pool_recycle` setting, which will ensure that a +:paramref:`_sa.create_engine.pool_recycle` setting, which will ensure that a connection which is older than a set amount of seconds will be discarded and replaced with a new connection when it is next checked out. diff --git a/doc/build/faq/performance.rst b/doc/build/faq/performance.rst index b84a827eb3..eb12b160db 100644 --- a/doc/build/faq/performance.rst +++ b/doc/build/faq/performance.rst @@ -20,7 +20,7 @@ Query Profiling ^^^^^^^^^^^^^^^ Sometimes just plain SQL logging (enabled via python's logging module -or via the ``echo=True`` argument on :func:`.create_engine`) can give an +or via the ``echo=True`` argument on :func:`_sa.create_engine`) can give an idea how long things are taking. For example, if you log something right after a SQL operation, you'd see something like this in your log:: @@ -68,8 +68,8 @@ using a recipe like the following:: logger.debug("Query Complete!") logger.debug("Total Time: %f", total) -Above, we use the :meth:`.ConnectionEvents.before_cursor_execute` and -:meth:`.ConnectionEvents.after_cursor_execute` events to establish an interception +Above, we use the :meth:`_events.ConnectionEvents.before_cursor_execute` and +:meth:`_events.ConnectionEvents.after_cursor_execute` events to establish an interception point around when a statement is executed. We attach a timer onto the connection using the :class:`._ConnectionRecord.info` dictionary; we use a stack here for the occasional case where the cursor execute events may be nested. @@ -175,7 +175,7 @@ ORM query if the wrong :class:`_schema.Column` objects are used in a complex que pulling in additional FROM clauses that are unexpected. 
On the other hand, a fast call to ``fetchall()`` at the DBAPI level, but then -slowness when SQLAlchemy's :class:`.ResultProxy` is asked to do a ``fetchall()``, +slowness when SQLAlchemy's :class:`_engine.ResultProxy` is asked to do a ``fetchall()``, may indicate slowness in processing of datatypes, such as unicode conversions and similar:: diff --git a/doc/build/glossary.rst b/doc/build/glossary.rst index 46d09b8e10..f0cb23d42d 100644 --- a/doc/build/glossary.rst +++ b/doc/build/glossary.rst @@ -424,7 +424,7 @@ Glossary :term:`dialect` system is constructed around the operation of the DBAPI, providing individual dialect classes which service a specific DBAPI on top of a - specific database engine; for example, the :func:`.create_engine` + specific database engine; for example, the :func:`_sa.create_engine` URL ``postgresql+psycopg2://@localhost/test`` refers to the :mod:`psycopg2 <.postgresql.psycopg2>` DBAPI/dialect combination, whereas the URL ``mysql+mysqldb://@localhost/test`` diff --git a/doc/build/orm/session_transaction.rst b/doc/build/orm/session_transaction.rst index d8516b4965..3e6f253acd 100644 --- a/doc/build/orm/session_transaction.rst +++ b/doc/build/orm/session_transaction.rst @@ -304,7 +304,7 @@ APIs or via database-specific calls. SQLAlchemy's dialects support settable isolation modes on a per-:class:`_engine.Engine` or per-:class:`_engine.Connection` basis, using flags at both the -:func:`.create_engine` level as well as at the :meth:`_engine.Connection.execution_options` +:func:`_sa.create_engine` level as well as at the :meth:`_engine.Connection.execution_options` level. When using the ORM :class:`.Session`, it acts as a *facade* for engines and @@ -314,7 +314,7 @@ order to affect transaction isolation level, we need to act upon the .. seealso:: - :paramref:`.create_engine.isolation_level` + :paramref:`_sa.create_engine.isolation_level` :ref:`SQLite Transaction Isolation ` @@ -326,7 +326,7 @@ Setting Isolation Engine-Wide ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ To set up a :class:`.Session` or :class:`.sessionmaker` with a specific -isolation level globally, use the :paramref:`.create_engine.isolation_level` +isolation level globally, use the :paramref:`_sa.create_engine.isolation_level` parameter:: from sqlalchemy import create_engine diff --git a/doc/build/orm/tutorial.rst b/doc/build/orm/tutorial.rst index 900a3124aa..7e9b5faa3e 100644 --- a/doc/build/orm/tutorial.rst +++ b/doc/build/orm/tutorial.rst @@ -62,7 +62,7 @@ and want less output generated, set it to ``False``. This tutorial will format the SQL behind a popup window so it doesn't get in our way; just click the "SQL" links to see what's being generated. -The return value of :func:`.create_engine` is an instance of +The return value of :func:`_sa.create_engine` is an instance of :class:`_engine.Engine`, and it represents the core interface to the database, adapted through a :term:`dialect` that handles the details of the database and :term:`DBAPI` in use. In this case the SQLite @@ -71,7 +71,7 @@ module. .. sidebar:: Lazy Connecting - The :class:`_engine.Engine`, when first returned by :func:`.create_engine`, + The :class:`_engine.Engine`, when first returned by :func:`_sa.create_engine`, has not actually tried to connect to the database yet; that happens only the first time it is asked to perform a task against the database. @@ -83,7 +83,7 @@ behind the scenes by the ORM as we'll see shortly. .. 
seealso:: - :ref:`database_urls` - includes examples of :func:`.create_engine` + :ref:`database_urls` - includes examples of :func:`_sa.create_engine` connecting to several kinds of databases with links to more information. Declare a Mapping diff --git a/lib/sqlalchemy/dialects/mssql/base.py b/lib/sqlalchemy/dialects/mssql/base.py index 64be5b6f17..d746cbeee3 100644 --- a/lib/sqlalchemy/dialects/mssql/base.py +++ b/lib/sqlalchemy/dialects/mssql/base.py @@ -171,7 +171,8 @@ The process for fetching this value has several variants: * Other dialects such as pymssql will call upon ``SELECT scope_identity() AS lastrowid`` subsequent to an INSERT statement. If the flag ``use_scope_identity=False`` is passed to - :func:`.create_engine`, the statement ``SELECT @@identity AS lastrowid`` + :func:`_sa.create_engine`, + the statement ``SELECT @@identity AS lastrowid`` is used instead. A table that contains an ``IDENTITY`` column will prohibit an INSERT statement @@ -272,8 +273,8 @@ Transaction Isolation Level All SQL Server dialects support setting of transaction isolation level both via a dialect-specific parameter -:paramref:`.create_engine.isolation_level` -accepted by :func:`.create_engine`, +:paramref:`_sa.create_engine.isolation_level` +accepted by :func:`_sa.create_engine`, as well as the :paramref:`.Connection.execution_options.isolation_level` argument as passed to :meth:`_engine.Connection.execution_options`. @@ -281,7 +282,7 @@ This feature works by issuing the command ``SET TRANSACTION ISOLATION LEVEL `` for each new connection. -To set isolation level using :func:`.create_engine`:: +To set isolation level using :func:`_sa.create_engine`:: engine = create_engine( "mssql+pyodbc://scott:tiger@ms_2008", @@ -373,7 +374,7 @@ behavior of this flag is as follows: or ``False`` based on whether 2012 or greater is detected. * The flag can be set to either ``True`` or ``False`` when the dialect - is created, typically via :func:`.create_engine`:: + is created, typically via :func:`_sa.create_engine`:: eng = create_engine("mssql+pymssql://user:pass@host/db", deprecate_large_types=True) @@ -476,7 +477,7 @@ below:: This mode of behavior is now off by default, as it appears to have served no purpose; however in the case that legacy applications rely upon it, it is available using the ``legacy_schema_aliasing`` argument to -:func:`.create_engine` as illustrated above. +:func:`_sa.create_engine` as illustrated above. .. versionchanged:: 1.1 the ``legacy_schema_aliasing`` flag introduced in version 1.0.5 to allow disabling of legacy mode for schemas now @@ -617,7 +618,7 @@ Declarative form:: This option can also be specified engine-wide using the -``implicit_returning=False`` argument on :func:`.create_engine`. +``implicit_returning=False`` argument on :func:`_sa.create_engine`. .. _mssql_rowcount_versioning: diff --git a/lib/sqlalchemy/dialects/mssql/pyodbc.py b/lib/sqlalchemy/dialects/mssql/pyodbc.py index 4ba3a0dfac..62edef7611 100644 --- a/lib/sqlalchemy/dialects/mssql/pyodbc.py +++ b/lib/sqlalchemy/dialects/mssql/pyodbc.py @@ -103,7 +103,8 @@ Microsoft ODBC drivers. The feature is enabled by setting the flag ``.fast_executemany`` on the DBAPI cursor when an executemany call is to be used. 
The SQLAlchemy pyodbc SQL Server dialect supports setting this flag automatically when the ``.fast_executemany`` flag is passed to -:func:`.create_engine`; note that the ODBC driver must be the Microsoft driver +:func:`_sa.create_engine` +; note that the ODBC driver must be the Microsoft driver in order to use this flag:: engine = create_engine( diff --git a/lib/sqlalchemy/dialects/mysql/base.py b/lib/sqlalchemy/dialects/mysql/base.py index f811fbe7ec..cda85c0db2 100644 --- a/lib/sqlalchemy/dialects/mysql/base.py +++ b/lib/sqlalchemy/dialects/mysql/base.py @@ -29,7 +29,7 @@ Connection Timeouts and Disconnects MySQL features an automatic connection close behavior, for connections that have been idle for a fixed period of time, defaulting to eight hours. To circumvent having this issue, use -the :paramref:`.create_engine.pool_recycle` option which ensures that +the :paramref:`_sa.create_engine.pool_recycle` option which ensures that a connection will be discarded and replaced with a new one if it has been present in the pool for a fixed number of seconds:: @@ -120,8 +120,9 @@ Transaction Isolation Level --------------------------- All MySQL dialects support setting of transaction isolation level both via a -dialect-specific parameter :paramref:`.create_engine.isolation_level` accepted -by :func:`.create_engine`, as well as the +dialect-specific parameter :paramref:`_sa.create_engine.isolation_level` +accepted +by :func:`_sa.create_engine`, as well as the :paramref:`.Connection.execution_options.isolation_level` argument as passed to :meth:`_engine.Connection.execution_options`. This feature works by issuing the @@ -129,7 +130,7 @@ command ``SET SESSION TRANSACTION ISOLATION LEVEL `` for each new connection. For the special AUTOCOMMIT isolation level, DBAPI-specific techniques are used. -To set isolation level using :func:`.create_engine`:: +To set isolation level using :func:`_sa.create_engine`:: engine = create_engine( "mysql://scott:tiger@localhost/test", @@ -198,7 +199,7 @@ will receive results. The most typical way of invoking this feature is via the :paramref:`.Connection.execution_options.stream_results` connection execution option. Server side cursors can also be enabled for all SELECT statements unconditionally by passing ``server_side_cursors=True`` to -:func:`.create_engine`. +:func:`_sa.create_engine`. .. versionadded:: 1.1.4 - added server-side cursor support. @@ -468,7 +469,7 @@ This setting is currently hardcoded. .. seealso:: - :attr:`.ResultProxy.rowcount` + :attr:`_engine.ResultProxy.rowcount` CAST Support diff --git a/lib/sqlalchemy/dialects/oracle/base.py b/lib/sqlalchemy/dialects/oracle/base.py index 87e94a0f89..5963a2923d 100644 --- a/lib/sqlalchemy/dialects/oracle/base.py +++ b/lib/sqlalchemy/dialects/oracle/base.py @@ -125,11 +125,13 @@ For the duration of the SQLAlchemy 1.3 series, the default max identifier length will remain at 30, even if compatibility version 12.2 or greater is in use. When the newer version is detected, a warning will be emitted upon first connect, which refers the user to make use of the -:paramref:`.create_engine.max_identifier_length` parameter in order to assure +:paramref:`_sa.create_engine.max_identifier_length` +parameter in order to assure forwards compatibility with SQLAlchemy 1.4, which will be changing this value to 128 when compatibility version 12.2 or greater is detected. 
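One possible way to prepare for this change, sketched here with a hypothetical DSN, is to pin the identifier length explicitly so that generated constraint and index names stay stable across the upgrade; passing ``128`` instead opts in to the newer length ahead of time, as the next paragraph describes::

    from sqlalchemy import create_engine

    # a minimal sketch; the connection URL is hypothetical and the
    # cx_Oracle DBAPI is assumed to be installed
    engine = create_engine(
        "oracle+cx_oracle://scott:tiger@hypothetical_dsn",
        max_identifier_length=30,
    )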
-Using :paramref:`.create_engine.max_identifier_length`, the effective identifier +Using :paramref:`_sa.create_engine.max_identifier_length`, +the effective identifier length used by the SQLAlchemy dialect will be used as given, overriding the current default value of 30, so that when Oracle 12.2 or greater is used, the newer identifier length may be taken advantage of:: @@ -192,14 +194,15 @@ when the identifier length is changed without the name of the index or constraint first being adjusted. Therefore, applications are strongly advised to make use of -:paramref:`.create_engine.max_identifier_length` in order to maintain control +:paramref:`_sa.create_engine.max_identifier_length` +in order to maintain control of the generation of truncated names, and to fully review and test all database migrations in a staging environment when changing this value to ensure that the impact of this change has been mitigated. .. versionadded:: 1.3.9 Added the - :paramref:`.create_engine.max_identifier_length` parameter; the Oracle + :paramref:`_sa.create_engine.max_identifier_length` parameter; the Oracle dialect now detects compatibility version 12.2 or greater and warns about upcoming max identitifier length changes in SQLAlchemy 1.4. @@ -216,12 +219,12 @@ There are two options which affect its behavior: * the "FIRST ROWS()" optimization keyword is not used by default. To enable the usage of this optimization directive, specify ``optimize_limits=True`` - to :func:`.create_engine`. + to :func:`_sa.create_engine`. * the values passed for the limit/offset are sent as bound parameters. Some users have observed that Oracle produces a poor query plan when the values are sent as binds and not rendered literally. To render the limit/offset values literally within the SQL statement, specify - ``use_binds_for_limits=False`` to :func:`.create_engine`. + ``use_binds_for_limits=False`` to :func:`_sa.create_engine`. Some users have reported better performance when the entirely different approach of a window query is used, i.e. ROW_NUMBER() OVER (ORDER BY), to @@ -250,7 +253,7 @@ on the Oracle backend. By default, "implicit returning" typically only fetches the value of a single ``nextval(some_seq)`` expression embedded into an INSERT in order to increment a sequence within an INSERT statement and get the value back at the same time. To disable this feature across the board, -specify ``implicit_returning=False`` to :func:`.create_engine`:: +specify ``implicit_returning=False`` to :func:`_sa.create_engine`:: engine = create_engine("oracle://scott:tiger@dsn", implicit_returning=False) diff --git a/lib/sqlalchemy/dialects/oracle/cx_oracle.py b/lib/sqlalchemy/dialects/oracle/cx_oracle.py index af009aec38..2fa9a30190 100644 --- a/lib/sqlalchemy/dialects/oracle/cx_oracle.py +++ b/lib/sqlalchemy/dialects/oracle/cx_oracle.py @@ -33,7 +33,7 @@ If ``dbname`` is not present, then the value of ``hostname`` in the URL is used directly as the DSN passed to ``cx_Oracle.connect()``. Additional connection arguments may be sent to the ``cx_Oracle.connect()`` -function using the :paramref:`.create_engine.connect_args` dictionary. +function using the :paramref:`_sa.create_engine.connect_args` dictionary. Any cx_Oracle parameter value and/or constant may be passed, such as:: import cx_Oracle @@ -55,11 +55,12 @@ within the URL, which includes parameters such as ``mode``, ``purity``, .. versionchanged:: 1.3 the cx_oracle dialect now accepts all argument names within the URL string itself, to be passed to the cx_Oracle DBAPI. 
As was the case earlier but not correctly documented, the - :paramref:`.create_engine.connect_args` parameter also accepts all + :paramref:`_sa.create_engine.connect_args` parameter also accepts all cx_Oracle DBAPI connect arguments. There are also options that are consumed by the SQLAlchemy cx_oracle dialect -itself. These options are always passed directly to :func:`.create_engine`, +itself. These options are always passed directly to :func:`_sa.create_engine` +, such as:: e = create_engine( @@ -99,14 +100,15 @@ the VARCHAR2 and CLOB datatypes can accommodate the data. In the case that the Oracle database is not configured with a Unicode character set, the two options are to use the :class:`_oracle.NCHAR` and :class:`_oracle.NCLOB` datatypes explicitly, or to pass the flag -``use_nchar_for_unicode=True`` to :func:`.create_engine`, which will cause the +``use_nchar_for_unicode=True`` to :func:`_sa.create_engine`, +which will cause the SQLAlchemy dialect to use NCHAR/NCLOB for the :class:`.Unicode` / :class:`.UnicodeText` datatypes instead of VARCHAR/CLOB. .. versionchanged:: 1.3 The :class:`.Unicode` and :class:`.UnicodeText` datatypes now correspond to the ``VARCHAR2`` and ``CLOB`` Oracle datatypes unless the ``use_nchar_for_unicode=True`` is passed to the dialect - when :func:`.create_engine` is called. + when :func:`_sa.create_engine` is called. When result sets are fetched that include strings, under Python 3 the cx_Oracle DBAPI returns all strings as Python Unicode objects, since Python 3 only has a @@ -118,7 +120,7 @@ made use of converters that were supplied by cx_Oracle but were found to be non-performant; SQLAlchemy's own converters are used for the string to Unicode conversion under Python 2. To disable the Python 2 Unicode conversion for VARCHAR2, CHAR, and CLOB, the flag ``coerce_to_unicode=False`` can be passed to -:func:`.create_engine`. +:func:`_sa.create_engine`. .. versionchanged:: 1.3 Unicode conversion is applied to all string values by default under python 2. The ``coerce_to_unicode`` now defaults to True @@ -235,7 +237,7 @@ As of the 6 series, this limitation has been lifted. Nevertheless, because SQLAlchemy pre-reads these LOBs up front, this issue is avoided in any case. To disable the auto "read()" feature of the dialect, the flag -``auto_convert_lobs=False`` may be passed to :func:`.create_engine`. Under +``auto_convert_lobs=False`` may be passed to :func:`_sa.create_engine`. Under the cx_Oracle 5 series, having this flag turned off means there is the chance of reading from a stale LOB object if not read as it is fetched. With cx_Oracle 6, this issue is resolved. @@ -281,7 +283,7 @@ When no typing objects are present, as when executing plain SQL strings, a default "outputtypehandler" is present which will generally return numeric values which specify precision and scale as Python ``Decimal`` objects. To disable this coercion to decimal for performance reasons, pass the flag -``coerce_to_decimal=False`` to :func:`.create_engine`:: +``coerce_to_decimal=False`` to :func:`_sa.create_engine`:: engine = create_engine("oracle+cx_oracle://dsn", coerce_to_decimal=False) @@ -770,7 +772,7 @@ class OracleDialect_cx_oracle(OracleDialect): "in a future release. As of version 1.3, it defaults to False " "rather than True. 
The 'threaded' option can be passed to " "cx_Oracle directly in the URL query string passed to " - ":func:`.create_engine`.", + ":func:`_sa.create_engine`.", ) ) def __init__( diff --git a/lib/sqlalchemy/dialects/postgresql/base.py b/lib/sqlalchemy/dialects/postgresql/base.py index 6272cd7fc4..d388b460c2 100644 --- a/lib/sqlalchemy/dialects/postgresql/base.py +++ b/lib/sqlalchemy/dialects/postgresql/base.py @@ -41,7 +41,7 @@ apply; no RETURNING clause is emitted nor is the sequence pre-executed in this case. To force the usage of RETURNING by default off, specify the flag -``implicit_returning=False`` to :func:`.create_engine`. +``implicit_returning=False`` to :func:`_sa.create_engine`. PostgreSQL 10 IDENTITY columns ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -84,7 +84,8 @@ Transaction Isolation Level All PostgreSQL dialects support setting of transaction isolation level both via a dialect-specific parameter -:paramref:`.create_engine.isolation_level` accepted by :func:`.create_engine`, +:paramref:`_sa.create_engine.isolation_level` accepted by +:func:`_sa.create_engine`, as well as the :paramref:`.Connection.execution_options.isolation_level` argument as passed to :meth:`_engine.Connection.execution_options`. When using a non-psycopg2 dialect, this feature works by issuing the command @@ -92,7 +93,7 @@ When using a non-psycopg2 dialect, this feature works by issuing the command each new connection. For the special AUTOCOMMIT isolation level, DBAPI-specific techniques are used. -To set isolation level using :func:`.create_engine`:: +To set isolation level using :func:`_sa.create_engine`:: engine = create_engine( "postgresql+pg8000://scott:tiger@localhost/test", diff --git a/lib/sqlalchemy/dialects/postgresql/json.py b/lib/sqlalchemy/dialects/postgresql/json.py index 8111599538..953ad9993a 100644 --- a/lib/sqlalchemy/dialects/postgresql/json.py +++ b/lib/sqlalchemy/dialects/postgresql/json.py @@ -154,7 +154,7 @@ class JSON(sqltypes.JSON): may be called upon the result type. Custom serializers and deserializers are specified at the dialect level, - that is using :func:`.create_engine`. The reason for this is that when + that is using :func:`_sa.create_engine`. The reason for this is that when using psycopg2, the DBAPI only allows serializers at the per-cursor or per-connection level. E.g.:: @@ -274,7 +274,7 @@ class JSONB(JSON): are shared with the :class:`_types.JSON` class, using the ``json_serializer`` and ``json_deserializer`` keyword arguments. These must be specified - at the dialect level using :func:`.create_engine`. When using + at the dialect level using :func:`_sa.create_engine`. 
When using psycopg2, the serializers are associated with the jsonb type using ``psycopg2.extras.register_default_jsonb`` on a per-connection basis, in the same way that ``psycopg2.extras.register_default_json`` is used diff --git a/lib/sqlalchemy/dialects/postgresql/pg8000.py b/lib/sqlalchemy/dialects/postgresql/pg8000.py index b6e838738d..197d11cf4c 100644 --- a/lib/sqlalchemy/dialects/postgresql/pg8000.py +++ b/lib/sqlalchemy/dialects/postgresql/pg8000.py @@ -36,7 +36,7 @@ The ``client_encoding`` can be overridden for a session by executing the SQL: SET CLIENT_ENCODING TO 'utf8'; SQLAlchemy will execute this SQL on all new connections based on the value -passed to :func:`.create_engine` using the ``client_encoding`` parameter:: +passed to :func:`_sa.create_engine` using the ``client_encoding`` parameter:: engine = create_engine( "postgresql+pg8000://user:pass@host/dbname", client_encoding='utf8') diff --git a/lib/sqlalchemy/dialects/postgresql/psycopg2.py b/lib/sqlalchemy/dialects/postgresql/psycopg2.py index b823f5567d..e17749933e 100644 --- a/lib/sqlalchemy/dialects/postgresql/psycopg2.py +++ b/lib/sqlalchemy/dialects/postgresql/psycopg2.py @@ -15,7 +15,7 @@ psycopg2 Connect Arguments ----------------------------------- psycopg2-specific keyword arguments which are accepted by -:func:`.create_engine()` are: +:func:`_sa.create_engine()` are: * ``server_side_cursors``: Enable the usage of "server side cursors" for SQL statements which support this feature. What this essentially means from a @@ -160,7 +160,7 @@ when used with :ref:`multiple parameter sets `, which includes the use of this feature both by the Core as well as by the ORM for inserts of objects with non-autogenerated primary key values, by adding the ``executemany_mode`` flag to -:func:`.create_engine`:: +:func:`_sa.create_engine`:: engine = create_engine( "postgresql+psycopg2://scott:tiger@host/dbname", @@ -250,7 +250,7 @@ A second way to affect the client encoding is to set it within Psycopg2 locally. SQLAlchemy will call psycopg2's :meth:`psycopg2:connection.set_client_encoding` method on all new connections based on the value passed to -:func:`.create_engine` using the ``client_encoding`` parameter:: +:func:`_sa.create_engine` using the ``client_encoding`` parameter:: # set_client_encoding() setting; # works for *all* PostgreSQL versions @@ -262,11 +262,12 @@ When using the parameter in this way, the psycopg2 driver emits ``SET client_encoding TO 'utf8'`` on the connection explicitly, and works in all PostgreSQL versions. -Note that the ``client_encoding`` setting as passed to :func:`.create_engine` +Note that the ``client_encoding`` setting as passed to +:func:`_sa.create_engine` is **not the same** as the more recently added ``client_encoding`` parameter now supported by libpq directly. This is enabled when ``client_encoding`` is passed directly to ``psycopg2.connect()``, and from SQLAlchemy is passed -using the :paramref:`.create_engine.connect_args` parameter:: +using the :paramref:`_sa.create_engine.connect_args` parameter:: engine = create_engine( "postgresql://user:pass@host/dbname", @@ -287,10 +288,11 @@ SQLAlchemy can also be instructed to skip the usage of the psycopg2 ``UNICODE`` extension and to instead utilize its own unicode encode/decode services, which are normally reserved only for those DBAPIs that don't fully support unicode directly. Passing ``use_native_unicode=False`` to -:func:`.create_engine` will disable usage of ``psycopg2.extensions.UNICODE``. 
+:func:`_sa.create_engine` will disable usage of ``psycopg2.extensions. +UNICODE``. SQLAlchemy will instead encode data itself into Python bytestrings on the way in and coerce from bytes on the way back, -using the value of the :func:`.create_engine` ``encoding`` parameter, which +using the value of the :func:`_sa.create_engine` ``encoding`` parameter, which defaults to ``utf-8``. SQLAlchemy's own unicode encode/decode functionality is steadily becoming obsolete as most DBAPIs now support unicode fully. @@ -328,7 +330,7 @@ from the ``size_meters`` key as well. The other solution is to use a positional format; psycopg2 allows use of the "format" paramstyle, which can be passed to -:paramref:`.create_engine.paramstyle`:: +:paramref:`_sa.create_engine.paramstyle`:: engine = create_engine( 'postgresql://scott:tiger@localhost:5432/test', paramstyle='format') @@ -359,7 +361,8 @@ Psycopg2 Transaction Isolation Level As discussed in :ref:`postgresql_isolation_level`, all PostgreSQL dialects support setting of transaction isolation level -both via the ``isolation_level`` parameter passed to :func:`.create_engine`, +both via the ``isolation_level`` parameter passed to :func:`_sa.create_engine` +, as well as the ``isolation_level`` argument used by :meth:`_engine.Connection.execution_options`. When using the psycopg2 dialect , these diff --git a/lib/sqlalchemy/dialects/sqlite/base.py b/lib/sqlalchemy/dialects/sqlite/base.py index 5e23b1be7f..3e48e85a6f 100644 --- a/lib/sqlalchemy/dialects/sqlite/base.py +++ b/lib/sqlalchemy/dialects/sqlite/base.py @@ -179,7 +179,8 @@ default mode of ``SERIALIZABLE`` isolation, and a "dirty read" isolation mode normally referred to as ``READ UNCOMMITTED``. SQLAlchemy ties into this PRAGMA statement using the -:paramref:`.create_engine.isolation_level` parameter of :func:`.create_engine`. +:paramref:`_sa.create_engine.isolation_level` parameter of +:func:`_sa.create_engine`. Valid values for this parameter when used with SQLite are ``"SERIALIZABLE"`` and ``"READ UNCOMMITTED"`` corresponding to a value of 0 and 1, respectively. SQLite defaults to ``SERIALIZABLE``, however its behavior is impacted by @@ -557,9 +558,10 @@ names are still addressable*:: 1 Therefore, the workaround applied by SQLAlchemy only impacts -:meth:`.ResultProxy.keys` and :meth:`.RowProxy.keys()` in the public API. In +:meth:`_engine.ResultProxy.keys` and :meth:`.RowProxy.keys()` +in the public API. In the very specific case where an application is forced to use column names that -contain dots, and the functionality of :meth:`.ResultProxy.keys` and +contain dots, and the functionality of :meth:`_engine.ResultProxy.keys` and :meth:`.RowProxy.keys()` is required to return these dotted names unmodified, the ``sqlite_raw_colnames`` execution option may be provided, either on a per-:class:`_engine.Connection` basis:: diff --git a/lib/sqlalchemy/dialects/sqlite/pysqlcipher.py b/lib/sqlalchemy/dialects/sqlite/pysqlcipher.py index f8236dea93..1c475e3625 100644 --- a/lib/sqlalchemy/dialects/sqlite/pysqlcipher.py +++ b/lib/sqlalchemy/dialects/sqlite/pysqlcipher.py @@ -69,7 +69,8 @@ dialect here defaults to using the :class:`.SingletonThreadPool` implementation, instead of the :class:`.NullPool` pool used by pysqlite. 
As always, the pool implementation is entirely configurable using the -:paramref:`.create_engine.poolclass` parameter; the :class:`.StaticPool` may +:paramref:`_sa.create_engine.poolclass` parameter; the :class:`.StaticPool` +may be more feasible for single-threaded use, or :class:`.NullPool` may be used to prevent unencrypted connections from being held open for long periods of time, at the expense of slower startup time for new connections. diff --git a/lib/sqlalchemy/dialects/sqlite/pysqlite.py b/lib/sqlalchemy/dialects/sqlite/pysqlite.py index faa33fde00..26c9fc2e35 100644 --- a/lib/sqlalchemy/dialects/sqlite/pysqlite.py +++ b/lib/sqlalchemy/dialects/sqlite/pysqlite.py @@ -72,7 +72,8 @@ as the "database" portion of the SQLAlchemy url (that is, following a slash):: .. note:: The "uri=true" parameter must appear in the **query string** of the URL. It will not currently work as expected if it is only - present in the :paramref:`.create_engine.connect_args` parameter dictionary. + present in the :paramref:`_sa.create_engine.connect_args` + parameter dictionary. The logic reconciles the simultaneous presence of SQLAlchemy's query string and SQLite's query string by separating out the parameters that belong to the @@ -99,15 +100,17 @@ Regarding future parameters added to either the Python or native drivers. new parameter names added to the SQLite URI scheme should be automatically accommodated by this scheme. New parameter names added to the Python driver side can be accommodated by specifying them in the -:paramref:`.create_engine.connect_args` dictionary, until dialect support is +:paramref:`_sa.create_engine.connect_args` dictionary, +until dialect support is added by SQLAlchemy. For the less likely case that the native SQLite driver adds a new parameter name that overlaps with one of the existing, known Python driver parameters (such as "timeout" perhaps), SQLAlchemy's dialect would require adjustment for the URL scheme to continue to support this. As is always the case for all SQLAlchemy dialects, the entire "URL" process -can be bypassed in :func:`.create_engine` through the use of the -:paramref:`.create_engine.creator` parameter which allows for a custom callable +can be bypassed in :func:`_sa.create_engine` through the use of the +:paramref:`_sa.create_engine.creator` +parameter which allows for a custom callable that creates a Python sqlite3 driver level connection directly. .. versionadded:: 1.3.9 @@ -326,7 +329,7 @@ ourselves. This is achieved using two event listeners:: .. warning:: When using the above recipe, it is advised to not use the :paramref:`.Connection.execution_options.isolation_level` setting on - :class:`_engine.Connection` and :func:`.create_engine` + :class:`_engine.Connection` and :func:`_sa.create_engine` with the SQLite driver, as this function necessarily will also alter the ".isolation_level" setting. diff --git a/lib/sqlalchemy/engine/__init__.py b/lib/sqlalchemy/engine/__init__.py index 173bfeccd6..efa333a4a5 100644 --- a/lib/sqlalchemy/engine/__init__.py +++ b/lib/sqlalchemy/engine/__init__.py @@ -113,7 +113,7 @@ def create_engine(*args, **kwargs): as well as the :class:`_pool.Pool`. Specific dialects also accept keyword arguments that are unique to that dialect. Here, we describe the parameters - that are common to most :func:`.create_engine()` usage. + that are common to most :func:`_sa.create_engine()` usage. 
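As an illustration only, a call that combines a few of the keyword arguments documented below might look like the following sketch; the URL and values are hypothetical and the corresponding DBAPI is assumed to be installed::

    from sqlalchemy import create_engine

    engine = create_engine(
        "postgresql+psycopg2://scott:tiger@localhost/test",
        echo=True,            # log all SQL emitted on this engine
        pool_recycle=3600,    # replace pooled connections older than one hour
        connect_args={"connect_timeout": 10},  # passed through to the DBAPI
    )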
Once established, the newly resulting :class:`_engine.Engine` will request a connection from the underlying :class:`_pool.Pool` once @@ -121,7 +121,7 @@ def create_engine(*args, **kwargs): such as :meth:`_engine.Engine.execute` is invoked. The :class:`_pool.Pool` in turn will establish the first actual DBAPI connection when this request - is received. The :func:`.create_engine` call itself does **not** + is received. The :func:`_sa.create_engine` call itself does **not** establish any actual DBAPI connections directly. .. seealso:: @@ -151,7 +151,7 @@ def create_engine(*args, **kwargs): .. deprecated:: 1.3 - The :paramref:`.create_engine.convert_unicode` parameter + The :paramref:`_sa.create_engine.convert_unicode` parameter is deprecated and will be removed in a future release. All modern DBAPIs now support Python Unicode directly and this parameter is unnecessary. @@ -321,14 +321,15 @@ def create_engine(*args, **kwargs): characters. If less than 6, labels are generated as "_(counter)". If ``None``, the value of ``dialect.max_identifier_length``, which may be affected via the - :paramref:`.create_engine.max_identifier_length` parameter, - is used instead. The value of :paramref:`.create_engine.label_length` + :paramref:`_sa.create_engine.max_identifier_length` parameter, + is used instead. The value of + :paramref:`_sa.create_engine.label_length` may not be larger than that of - :paramref:`.create_engine.max_identfier_length`. + :paramref:`_sa.create_engine.max_identfier_length`. .. seealso:: - :paramref:`.create_engine.max_identifier_length` + :paramref:`_sa.create_engine.max_identifier_length` :param listeners: A list of one or more :class:`~sqlalchemy.interfaces.PoolListener` objects which will @@ -352,7 +353,7 @@ def create_engine(*args, **kwargs): .. seealso:: - :paramref:`.create_engine.label_length` + :paramref:`_sa.create_engine.label_length` :param max_overflow=10: the number of connections to allow in connection pool "overflow", that is connections that can be @@ -492,7 +493,7 @@ def engine_from_config(configuration, prefix="sqlalchemy.", **kwargs): ``sqlalchemy.url``, ``sqlalchemy.echo``, etc. The 'prefix' argument indicates the prefix to be searched for. Each matching key (after the prefix is stripped) is treated as though it were the corresponding keyword - argument to a :func:`.create_engine` call. + argument to a :func:`_sa.create_engine` call. The only required key is (assuming the default prefix) ``sqlalchemy.url``, which provides the :ref:`database URL `. diff --git a/lib/sqlalchemy/engine/base.py b/lib/sqlalchemy/engine/base.py index 08a84d2d34..296926abcb 100644 --- a/lib/sqlalchemy/engine/base.py +++ b/lib/sqlalchemy/engine/base.py @@ -246,8 +246,8 @@ class Connection(Connectable): Set the transaction isolation level for the lifespan of this :class:`_engine.Connection` object. Valid values include those string - values accepted by the :paramref:`.create_engine.isolation_level` - parameter passed to :func:`.create_engine`. These levels are + values accepted by the :paramref:`_sa.create_engine.isolation_level` + parameter passed to :func:`_sa.create_engine`. These levels are semi-database specific; see individual dialect documentation for valid levels. @@ -281,7 +281,7 @@ class Connection(Connectable): .. 
seealso:: - :paramref:`.create_engine.isolation_level` + :paramref:`_sa.create_engine.isolation_level` - set per :class:`_engine.Engine` isolation level :meth:`_engine.Connection.get_isolation_level` @@ -419,7 +419,7 @@ class Connection(Connectable): :attr:`_engine.Connection.default_isolation_level` - view default level - :paramref:`.create_engine.isolation_level` + :paramref:`_sa.create_engine.isolation_level` - set per :class:`_engine.Engine` isolation level :paramref:`.Connection.execution_options.isolation_level` @@ -455,7 +455,7 @@ class Connection(Connectable): :meth:`_engine.Connection.get_isolation_level` - view current level - :paramref:`.create_engine.isolation_level` + :paramref:`_sa.create_engine.isolation_level` - set per :class:`_engine.Engine` isolation level :paramref:`.Connection.execution_options.isolation_level` @@ -934,7 +934,7 @@ class Connection(Connectable): def execute(self, object_, *multiparams, **params): r"""Executes a SQL statement construct and returns a - :class:`.ResultProxy`. + :class:`_engine.ResultProxy`. :param object: The statement to be executed. May be one of: @@ -1179,7 +1179,7 @@ class Connection(Connectable): self, dialect, constructor, statement, parameters, *args ): """Create an :class:`.ExecutionContext` and execute, returning - a :class:`.ResultProxy`.""" + a :class:`_engine.ResultProxy`.""" try: try: @@ -1921,7 +1921,7 @@ class Engine(Connectable, log.Identified): default execution options that will be used for all connections. The initial contents of this dictionary can be sent via the ``execution_options`` parameter - to :func:`.create_engine`. + to :func:`_sa.create_engine`. .. seealso:: @@ -2122,10 +2122,10 @@ class Engine(Connectable, log.Identified): that the :class:`_engine.Connection` will be closed when the operation is complete. When set to ``True``, it indicates the :class:`_engine.Connection` is in "single use" mode, where the - :class:`.ResultProxy` returned by the first call to + :class:`_engine.ResultProxy` returned by the first call to :meth:`_engine.Connection.execute` will close the :class:`_engine.Connection` when - that :class:`.ResultProxy` has exhausted all result rows. + that :class:`_engine.ResultProxy` has exhausted all result rows. .. seealso:: @@ -2205,7 +2205,8 @@ class Engine(Connectable, log.Identified): return conn.run_callable(callable_, *args, **kwargs) def execute(self, statement, *multiparams, **params): - """Executes the given construct and returns a :class:`.ResultProxy`. + """Executes the given construct and returns a + :class:`_engine.ResultProxy`. The arguments are the same as those used by :meth:`_engine.Connection.execute`. @@ -2213,8 +2214,9 @@ class Engine(Connectable, log.Identified): Here, a :class:`_engine.Connection` is acquired using the :meth:`_engine.Engine.contextual_connect` method, and the statement executed - with that connection. The returned :class:`.ResultProxy` is flagged - such that when the :class:`.ResultProxy` is exhausted and its + with that connection. The returned :class:`_engine.ResultProxy` + is flagged + such that when the :class:`_engine.ResultProxy` is exhausted and its underlying cursor is closed, the :class:`_engine.Connection` created here will also be closed, which allows its associated DBAPI connection @@ -2272,7 +2274,8 @@ class Engine(Connectable, log.Identified): Subclasses of :class:`_engine.Engine` may override this method to provide contextual behavior. 
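As a rough sketch of the "close with result" pattern referred to by the parameter below, a connectionless execution can be consumed and then discarded; the table name is hypothetical and an existing :class:`_engine.Engine` named ``engine`` is assumed::

    from sqlalchemy import text

    result = engine.execute(text("SELECT id, name FROM some_table"))
    for row in result:
        print(row["name"])
    # once the rows are exhausted, the Connection acquired behind the
    # scenes is closed and its DBAPI connection is returned to the pool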
- :param close_with_result: When True, the first :class:`.ResultProxy` + :param close_with_result: When True, the first + :class:`_engine.ResultProxy` created by the :class:`_engine.Connection` will call the :meth:`_engine.Connection.close` method of that connection as soon as any diff --git a/lib/sqlalchemy/engine/default.py b/lib/sqlalchemy/engine/default.py index 57f2660225..51977f880e 100644 --- a/lib/sqlalchemy/engine/default.py +++ b/lib/sqlalchemy/engine/default.py @@ -193,7 +193,7 @@ class DefaultDialect(interfaces.Dialect): @util.deprecated_params( convert_unicode=( "1.3", - "The :paramref:`.create_engine.convert_unicode` parameter " + "The :paramref:`_sa.create_engine.convert_unicode` parameter " "and corresponding dialect-level parameters are deprecated, " "and will be removed in a future release. Modern DBAPIs support " "Python Unicode natively and this parameter is unnecessary.", diff --git a/lib/sqlalchemy/engine/interfaces.py b/lib/sqlalchemy/engine/interfaces.py index 7ba8bb60b9..31518ab871 100644 --- a/lib/sqlalchemy/engine/interfaces.py +++ b/lib/sqlalchemy/engine/interfaces.py @@ -797,7 +797,7 @@ class Dialect(object): :paramref:`.Connection.execution_options.isolation_level` - set per :class:`_engine.Connection` isolation level - :paramref:`.create_engine.isolation_level` - + :paramref:`_sa.create_engine.isolation_level` - set per :class:`_engine.Engine` isolation level """ @@ -824,7 +824,7 @@ class Dialect(object): :paramref:`.Connection.execution_options.isolation_level` - set per :class:`_engine.Connection` isolation level - :paramref:`.create_engine.isolation_level` - + :paramref:`_sa.create_engine.isolation_level` - set per :class:`_engine.Engine` isolation level """ @@ -856,7 +856,7 @@ class Dialect(object): :paramref:`.Connection.execution_options.isolation_level` - set per :class:`_engine.Connection` isolation level - :paramref:`.create_engine.isolation_level` - + :paramref:`_sa.create_engine.isolation_level` - set per :class:`_engine.Engine` isolation level @@ -962,14 +962,14 @@ class CreateEnginePlugin(object): "mysql+pymysql://scott:tiger@localhost/test?plugin=myplugin") Alternatively, the :paramref:`.create_engine.plugins" argument may be - passed as a list to :func:`.create_engine`:: + passed as a list to :func:`_sa.create_engine`:: engine = create_engine( "mysql+pymysql://scott:tiger@localhost/test", plugins=["myplugin"]) .. versionadded:: 1.2.3 plugin names can also be specified - to :func:`.create_engine` as a list + to :func:`_sa.create_engine` as a list The ``plugin`` argument supports multiple instances, so that a URL may specify multiple plugins; they are loaded in the order stated @@ -980,7 +980,7 @@ class CreateEnginePlugin(object): "test?plugin=plugin_one&plugin=plugin_twp&plugin=plugin_three") A plugin can receive additional arguments from the URL string as - well as from the keyword arguments passed to :func:`.create_engine`. + well as from the keyword arguments passed to :func:`_sa.create_engine`. The :class:`.URL` object and the keyword dictionary are passed to the constructor so that these arguments can be extracted from the url's :attr:`.URL.query` collection as well as from the dictionary:: @@ -1021,7 +1021,8 @@ class CreateEnginePlugin(object): """Construct a new :class:`.CreateEnginePlugin`. The plugin object is instantiated individually for each call - to :func:`.create_engine`. A single :class:`_engine.Engine` will be + to :func:`_sa.create_engine`. A single :class:`_engine. 
+ Engine` will be passed to the :meth:`.CreateEnginePlugin.engine_created` method corresponding to this URL. @@ -1116,7 +1117,7 @@ class ExecutionContext(object): attempted to execute a statement. This attribute is meaningful only within the - :meth:`.ConnectionEvents.dbapi_error` event. + :meth:`_events.ConnectionEvents.dbapi_error` event. .. versionadded:: 0.9.7 @@ -1124,7 +1125,7 @@ class ExecutionContext(object): :attr:`.ExecutionContext.is_disconnect` - :meth:`.ConnectionEvents.dbapi_error` + :meth:`_events.ConnectionEvents.dbapi_error` """ @@ -1133,7 +1134,7 @@ class ExecutionContext(object): is caught when this ExecutionContext attempted to execute a statement. This attribute is meaningful only within the - :meth:`.ConnectionEvents.dbapi_error` event. + :meth:`_events.ConnectionEvents.dbapi_error` event. .. versionadded:: 0.9.7 @@ -1141,7 +1142,7 @@ class ExecutionContext(object): :attr:`.ExecutionContext.exception` - :meth:`.ConnectionEvents.dbapi_error` + :meth:`_events.ConnectionEvents.dbapi_error` """ @@ -1207,7 +1208,7 @@ class ExecutionContext(object): """Return the DBAPI ``cursor.rowcount`` value, or in some cases an interpreted value. - See :attr:`.ResultProxy.rowcount` for details on this. + See :attr:`_engine.ResultProxy.rowcount` for details on this. """ @@ -1299,7 +1300,8 @@ class Connectable(object): raise NotImplementedError() def execute(self, object_, *multiparams, **params): - """Executes the given construct and returns a :class:`.ResultProxy`.""" + """Executes the given construct and returns a """ + """:class:`_engine.ResultProxy`.""" raise NotImplementedError() def scalar(self, object_, *multiparams, **params): @@ -1320,7 +1322,8 @@ class ExceptionContext(object): """Encapsulate information about an error condition in progress. This object exists solely to be passed to the - :meth:`.ConnectionEvents.handle_error` event, supporting an interface that + :meth:`_events.ConnectionEvents.handle_error` event, + supporting an interface that can be extended without backwards-incompatibility. .. versionadded:: 0.9.7 @@ -1412,7 +1415,7 @@ class ExceptionContext(object): :attr:`.ExceptionContext.parameters` members may represent a different value than that of the :class:`.ExecutionContext`, potentially in the case where a - :meth:`.ConnectionEvents.before_cursor_execute` event or similar + :meth:`_events.ConnectionEvents.before_cursor_execute` event or similar modified the statement/parameters to be sent. May be None. @@ -1424,7 +1427,7 @@ class ExceptionContext(object): condition. This flag will always be True or False within the scope of the - :meth:`.ConnectionEvents.handle_error` handler. + :meth:`_events.ConnectionEvents.handle_error` handler. SQLAlchemy will defer to this flag in order to determine whether or not the connection should be invalidated subsequently. That is, by @@ -1439,7 +1442,8 @@ class ExceptionContext(object): when a "disconnect" condition is in effect. Setting this flag to False within the scope of the - :meth:`.ConnectionEvents.handle_error` event will have the effect such + :meth:`_events.ConnectionEvents.handle_error` + event will have the effect such that the full collection of connections in the pool will not be invalidated during a disconnect; only the current connection that is the subject of the error will actually be invalidated. 
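As an illustrative sketch only, a :meth:`_events.ConnectionEvents.handle_error` listener can opt out of the pool-wide invalidation described above on a per-error basis; the URL here is hypothetical::

    from sqlalchemy import create_engine, event

    engine = create_engine("postgresql://scott:tiger@localhost/test")

    @event.listens_for(engine, "handle_error")
    def _limit_invalidation(context):
        # invalidate only the connection that raised the error, rather
        # than every connection currently held in the pool
        if context.is_disconnect:
            context.invalidate_pool_on_disconnect = False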
diff --git a/lib/sqlalchemy/engine/result.py b/lib/sqlalchemy/engine/result.py index 3a93a89402..17abb37225 100644 --- a/lib/sqlalchemy/engine/result.py +++ b/lib/sqlalchemy/engine/result.py @@ -5,7 +5,7 @@ # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php -"""Define result set constructs including :class:`.ResultProxy` +"""Define result set constructs including :class:`_engine.ResultProxy` and :class:`.RowProxy`.""" @@ -119,8 +119,8 @@ class RowProxy(BaseRowProxy): """Represent a single result row. The :class:`.RowProxy` object is retrieved from a database result, from the - :class:`.ResultProxy` object using methods like - :meth:`.ResultProxy.fetchall`. + :class:`_engine.ResultProxy` object using methods like + :meth:`_engine.ResultProxy.fetchall`. The :class:`.RowProxy` object seeks to act mostly like a Python named tuple, but also provides some Python dictionary behaviors at the same time. @@ -753,7 +753,7 @@ class ResultProxy(object): .. seealso:: :ref:`coretutorial_selecting` - introductory material for accessing - :class:`.ResultProxy` and :class:`.RowProxy` objects. + :class:`_engine.ResultProxy` and :class:`.RowProxy` objects. """ @@ -828,7 +828,7 @@ class ResultProxy(object): .. note:: - Notes regarding :attr:`.ResultProxy.rowcount`: + Notes regarding :attr:`_engine.ResultProxy.rowcount`: * This attribute returns the number of rows *matched*, @@ -841,18 +841,20 @@ class ResultProxy(object): rowcount is configured by default to return the match count in all cases. - * :attr:`.ResultProxy.rowcount` is *only* useful in conjunction + * :attr:`_engine.ResultProxy.rowcount` + is *only* useful in conjunction with an UPDATE or DELETE statement. Contrary to what the Python DBAPI says, it does *not* return the number of rows available from the results of a SELECT statement as DBAPIs cannot support this functionality when rows are unbuffered. - * :attr:`.ResultProxy.rowcount` may not be fully implemented by + * :attr:`_engine.ResultProxy.rowcount` + may not be fully implemented by all dialects. In particular, most DBAPIs do not support an aggregate rowcount result from an executemany call. - The :meth:`.ResultProxy.supports_sane_rowcount` and - :meth:`.ResultProxy.supports_sane_multi_rowcount` methods + The :meth:`_engine.ResultProxy.supports_sane_rowcount` and + :meth:`_engine.ResultProxy.supports_sane_multi_rowcount` methods will report from the dialect if each usage is known to be supported. @@ -892,19 +894,19 @@ class ResultProxy(object): @property def returns_rows(self): - """True if this :class:`.ResultProxy` returns rows. + """True if this :class:`_engine.ResultProxy` returns rows. I.e. if it is legal to call the methods - :meth:`~.ResultProxy.fetchone`, - :meth:`~.ResultProxy.fetchmany` - :meth:`~.ResultProxy.fetchall`. + :meth:`_engine.ResultProxy.fetchone`, + :meth:`_engine.ResultProxy.fetchmany` + :meth:`_engine.ResultProxy.fetchall`. """ return self._metadata is not None @property def is_insert(self): - """True if this :class:`.ResultProxy` is the result + """True if this :class:`_engine.ResultProxy` is the result of a executing an expression language compiled :func:`_expression.insert` construct. @@ -922,7 +924,7 @@ class ResultProxy(object): return self._saved_cursor.description def _soft_close(self): - """Soft close this :class:`.ResultProxy`. + """Soft close this :class:`_engine.ResultProxy`. 
This releases all DBAPI cursor resources, but leaves the ResultProxy "open" from a semantic perspective, meaning the @@ -940,7 +942,7 @@ class ResultProxy(object): .. seealso:: - :meth:`.ResultProxy.close` + :meth:`_engine.ResultProxy.close` """ @@ -958,10 +960,13 @@ class ResultProxy(object): This closes out the underlying DBAPI cursor corresponding to the statement execution, if one is still present. Note that the - DBAPI cursor is automatically released when the :class:`.ResultProxy` - exhausts all available rows. :meth:`.ResultProxy.close` is generally + DBAPI cursor is automatically released when the + :class:`_engine.ResultProxy` + exhausts all available rows. :meth:`_engine.ResultProxy.close` + is generally an optional method except in the case when discarding a - :class:`.ResultProxy` that still has additional rows pending for fetch. + :class:`_engine.ResultProxy` + that still has additional rows pending for fetch. In the case of a result that is the product of :ref:`connectionless execution `, @@ -973,15 +978,19 @@ class ResultProxy(object): the fetch methods, which will raise a :class:`.ResourceClosedError` on subsequent use. - .. versionchanged:: 1.0.0 - the :meth:`.ResultProxy.close` method + .. versionchanged:: 1.0.0 - the :meth:`_engine.ResultProxy.close` + method has been separated out from the process that releases the underlying DBAPI cursor resource. The "auto close" feature of the :class:`_engine.Connection` now performs a so-called "soft close", which releases the underlying DBAPI cursor, but allows the - :class:`.ResultProxy` to still behave as an open-but-exhausted - result set; the actual :meth:`.ResultProxy.close` method is never - called. It is still safe to discard a :class:`.ResultProxy` + :class:`_engine.ResultProxy` + to still behave as an open-but-exhausted + result set; the actual :meth:`_engine.ResultProxy.close` + method is never + called. It is still safe to discard a + :class:`_engine.ResultProxy` that has been fully exhausted without calling this method. .. seealso:: @@ -1186,7 +1195,7 @@ class ResultProxy(object): def supports_sane_rowcount(self): """Return ``supports_sane_rowcount`` from the dialect. - See :attr:`.ResultProxy.rowcount` for background. + See :attr:`_engine.ResultProxy.rowcount` for background. """ @@ -1195,7 +1204,7 @@ class ResultProxy(object): def supports_sane_multi_rowcount(self): """Return ``supports_sane_multi_rowcount`` from the dialect. - See :attr:`.ResultProxy.rowcount` for background. + See :attr:`_engine.ResultProxy.rowcount` for background. """ @@ -1263,8 +1272,8 @@ class ResultProxy(object): cursor resource is released, and the object may be safely discarded. - Subsequent calls to :meth:`.ResultProxy.fetchall` will return - an empty list. After the :meth:`.ResultProxy.close` method is + Subsequent calls to :meth:`_engine.ResultProxy.fetchall` will return + an empty list. After the :meth:`_engine.ResultProxy.close` method is called, the method will raise :class:`.ResourceClosedError`. :return: a list of :class:`.RowProxy` objects @@ -1288,9 +1297,10 @@ class ResultProxy(object): cursor resource is released, and the object may be safely discarded. - Calls to :meth:`.ResultProxy.fetchmany` after all rows have been + Calls to :meth:`_engine.ResultProxy.fetchmany` + after all rows have been exhausted will return - an empty list. After the :meth:`.ResultProxy.close` method is + an empty list. After the :meth:`_engine.ResultProxy.close` method is called, the method will raise :class:`.ResourceClosedError`. 
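A typical batching loop, sketched below with a hypothetical table and an assumed :class:`_engine.Engine` named ``engine``, keeps calling :meth:`_engine.ResultProxy.fetchmany` until an empty list is returned::

    from sqlalchemy import text

    result = engine.execute(text("SELECT x, y FROM some_table"))
    while True:
        rows = result.fetchmany(100)
        if not rows:
            break
        for row in rows:
            ...  # handle each RowProxy here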
:return: a list of :class:`.RowProxy` objects @@ -1314,9 +1324,9 @@ class ResultProxy(object): cursor resource is released, and the object may be safely discarded. - Calls to :meth:`.ResultProxy.fetchone` after all rows have + Calls to :meth:`_engine.ResultProxy.fetchone` after all rows have been exhausted will return ``None``. - After the :meth:`.ResultProxy.close` method is + After the :meth:`_engine.ResultProxy.close` method is called, the method will raise :class:`.ResourceClosedError`. :return: a :class:`.RowProxy` object, or None if no rows remain @@ -1338,7 +1348,8 @@ class ResultProxy(object): """Fetch the first row and then close the result set unconditionally. After calling this method, the object is fully closed, - e.g. the :meth:`.ResultProxy.close` method will have been called. + e.g. the :meth:`_engine.ResultProxy.close` + method will have been called. :return: a :class:`.RowProxy` object, or None if no rows remain @@ -1365,7 +1376,8 @@ class ResultProxy(object): """Fetch the first column of the first row, and close the result set. After calling this method, the object is fully closed, - e.g. the :meth:`.ResultProxy.close` method will have been called. + e.g. the :meth:`_engine.ResultProxy.close` + method will have been called. :return: a Python scalar value , or None if no rows remain @@ -1541,7 +1553,7 @@ class BufferedColumnResultProxy(ResultProxy): databases where result rows contain "live" results that fall out of scope unless explicitly fetched. - .. versionchanged:: 1.2 This :class:`.ResultProxy` is not used by + .. versionchanged:: 1.2 This :class:`_engine.ResultProxy` is not used by any SQLAlchemy-included dialects. """ diff --git a/lib/sqlalchemy/events.py b/lib/sqlalchemy/events.py index bf2e39358d..572488ed7b 100644 --- a/lib/sqlalchemy/events.py +++ b/lib/sqlalchemy/events.py @@ -396,7 +396,8 @@ class PoolEvents(event.Events): Processing of all checkout listeners will abort and restart using the new connection. - .. seealso:: :meth:`.ConnectionEvents.engine_connect` - a similar event + .. seealso:: :meth:`_events.ConnectionEvents.engine_connect` + - a similar event which occurs upon creation of a new :class:`_engine.Connection`. """ @@ -436,9 +437,9 @@ class PoolEvents(event.Events): .. seealso:: - :meth:`.ConnectionEvents.rollback` + :meth:`_events.ConnectionEvents.rollback` - :meth:`.ConnectionEvents.commit` + :meth:`_events.ConnectionEvents.commit` """ @@ -574,7 +575,7 @@ class ConnectionEvents(event.Events): statement = statement + " -- some comment" return statement, parameters - .. note:: :class:`.ConnectionEvents` can be established on any + .. note:: :class:`_events.ConnectionEvents` can be established on any combination of :class:`_engine.Engine`, :class:`_engine.Connection`, as well as instances of each of those classes. Events across all @@ -696,7 +697,8 @@ class ConnectionEvents(event.Events): :meth:`_engine.Connection.execute`. :param multiparams: Multiple parameter sets, a list of dictionaries. :param params: Single parameter set, a single dictionary. - :param result: :class:`.ResultProxy` generated by the execution. + :param result: :class:`_engine.ResultProxy` generated by the execution + . """ @@ -721,7 +723,7 @@ class ConnectionEvents(event.Events): # do something with statement, parameters return statement, parameters - See the example at :class:`.ConnectionEvents`. + See the example at :class:`_events.ConnectionEvents`. 
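For comparison, an observe-only listener, sketched below against an assumed :class:`_engine.Engine` named ``engine``, omits ``retval=True``; its return value is ignored and the statement proceeds unchanged::

    import logging

    from sqlalchemy import event

    log = logging.getLogger("myapp.sql")   # hypothetical logger name

    @event.listens_for(engine, "before_cursor_execute")
    def _log_sql(conn, cursor, statement, parameters, context, executemany):
        # purely observational; statement and parameters are not modified
        log.debug("%s %r", statement, parameters)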
:param conn: :class:`_engine.Connection` object :param cursor: DBAPI cursor object @@ -750,7 +752,7 @@ class ConnectionEvents(event.Events): :param conn: :class:`_engine.Connection` object :param cursor: DBAPI cursor object. Will have results pending if the statement was a SELECT, but these should not be consumed - as they will be needed by the :class:`.ResultProxy`. + as they will be needed by the :class:`_engine.ResultProxy`. :param statement: string SQL statement, as passed to the DBAPI :param parameters: Dictionary, tuple, or list of parameters being passed to the ``execute()`` or ``executemany()`` method of the @@ -764,9 +766,9 @@ class ConnectionEvents(event.Events): @util.deprecated( "0.9", - "The :meth:`.ConnectionEvents.dbapi_error` " + "The :meth:`_events.ConnectionEvents.dbapi_error` " "event is deprecated and will be removed in a future release. " - "Please refer to the :meth:`.ConnectionEvents.handle_error` " + "Please refer to the :meth:`_events.ConnectionEvents.handle_error` " "event.", ) def dbapi_error( @@ -791,7 +793,7 @@ class ConnectionEvents(event.Events): any state or throw any exceptions here as this will interfere with SQLAlchemy's cleanup and error handling routines. For exception modification, please refer to the - new :meth:`.ConnectionEvents.handle_error` event. + new :meth:`_events.ConnectionEvents.handle_error` event. Subsequent to this hook, SQLAlchemy may attempt any number of operations on the connection/cursor, including @@ -882,7 +884,8 @@ class ConnectionEvents(event.Events): "failed" in str(context.original_exception): raise MySpecialException("failed operation") - .. warning:: Because the :meth:`.ConnectionEvents.handle_error` + .. warning:: Because the + :meth:`_events.ConnectionEvents.handle_error` event specifically provides for exceptions to be re-thrown as the ultimate exception raised by the failed statement, **stack traces will be misleading** if the user-defined event @@ -923,7 +926,7 @@ class ConnectionEvents(event.Events): class for details on all available members. .. versionadded:: 0.9.7 Added the - :meth:`.ConnectionEvents.handle_error` hook. + :meth:`_events.ConnectionEvents.handle_error` hook. .. versionchanged:: 1.1 The :meth:`.handle_error` event will now receive all exceptions that inherit from ``BaseException``, @@ -994,7 +997,7 @@ class ConnectionEvents(event.Events): .. seealso:: :ref:`pool_disconnects_pessimistic` - illustrates how to use - :meth:`.ConnectionEvents.engine_connect` + :meth:`_events.ConnectionEvents.engine_connect` to transparently ensure pooled connections are connected to the database. @@ -1002,7 +1005,8 @@ class ConnectionEvents(event.Events): the lower-level pool checkout event for an individual DBAPI connection - :meth:`.ConnectionEvents.set_connection_execution_options` - a copy + :meth:`_events.ConnectionEvents.set_connection_execution_options` + - a copy of a :class:`_engine.Connection` is also made when the :meth:`_engine.Connection.execution_options` method is called. @@ -1021,7 +1025,7 @@ class ConnectionEvents(event.Events): :class:`_engine.Connection` is produced which is inheriting execution options from its parent :class:`_engine.Engine`; to intercept this condition, use the - :meth:`.ConnectionEvents.engine_connect` event. + :meth:`_events.ConnectionEvents.engine_connect` event. :param conn: The newly copied :class:`_engine.Connection` object @@ -1032,7 +1036,8 @@ class ConnectionEvents(event.Events): .. 
seealso:: - :meth:`.ConnectionEvents.set_engine_execution_options` - event + :meth:`_events.ConnectionEvents.set_engine_execution_options` + - event which is called when :meth:`_engine.Engine.execution_options` is called. @@ -1048,7 +1053,8 @@ class ConnectionEvents(event.Events): That new :class:`_engine.Engine` is passed here. A particular application of this - method is to add a :meth:`.ConnectionEvents.engine_connect` event + method is to add a :meth:`_events.ConnectionEvents.engine_connect` + event handler to the given :class:`_engine.Engine` which will perform some per- :class:`_engine.Connection` task specific to these execution options. @@ -1062,7 +1068,8 @@ class ConnectionEvents(event.Events): .. seealso:: - :meth:`.ConnectionEvents.set_connection_execution_options` - event + :meth:`_events.ConnectionEvents.set_connection_execution_options` + - event which is called when :meth:`_engine.Connection.execution_options` is called. @@ -1201,17 +1208,17 @@ class DialectEvents(event.Events): These hooks are not for general use and are only for those situations where intricate re-statement of DBAPI mechanics must be injected onto an existing dialect. For general-use statement-interception events, - please use the :class:`.ConnectionEvents` interface. + please use the :class:`_events.ConnectionEvents` interface. .. seealso:: - :meth:`.ConnectionEvents.before_cursor_execute` + :meth:`_events.ConnectionEvents.before_cursor_execute` - :meth:`.ConnectionEvents.before_execute` + :meth:`_events.ConnectionEvents.before_execute` - :meth:`.ConnectionEvents.after_cursor_execute` + :meth:`_events.ConnectionEvents.after_cursor_execute` - :meth:`.ConnectionEvents.after_execute` + :meth:`_events.ConnectionEvents.after_execute` .. versionadded:: 0.9.4 diff --git a/lib/sqlalchemy/ext/horizontal_shard.py b/lib/sqlalchemy/ext/horizontal_shard.py index 994476f6d3..a9c0581df9 100644 --- a/lib/sqlalchemy/ext/horizontal_shard.py +++ b/lib/sqlalchemy/ext/horizontal_shard.py @@ -154,13 +154,15 @@ class ShardedQuery(Query): class ShardedResult(object): - """A value object that represents multiple :class:`.ResultProxy` objects. + """A value object that represents multiple :class:`_engine.ResultProxy` + objects. This is used by the :meth:`.ShardedQuery._execute_crud` hook to return - an object that takes the place of the single :class:`.ResultProxy`. + an object that takes the place of the single :class:`_engine.ResultProxy`. Attribute include ``result_proxies``, which is a sequence of the - actual :class:`.ResultProxy` objects, as well as ``aggregate_rowcount`` + actual :class:`_engine.ResultProxy` objects, + as well as ``aggregate_rowcount`` or ``rowcount``, which is the sum of all the individual rowcount values. .. versionadded:: 1.3 diff --git a/lib/sqlalchemy/interfaces.py b/lib/sqlalchemy/interfaces.py index d8a085cc66..19110073ee 100644 --- a/lib/sqlalchemy/interfaces.py +++ b/lib/sqlalchemy/interfaces.py @@ -178,7 +178,7 @@ class ConnectionProxy(object): :class:`.ConnectionProxy` is deprecated and will be removed in a future release. Please refer to :func:`.event.listen` in conjunction with - the :class:`.ConnectionEvents` listener interface. + the :class:`_events.ConnectionEvents` listener interface. 
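As a hedged sketch of that migration, a ``cursor_execute()``-style interception is typically re-expressed as an event listener registered with :func:`.event.listen`; the function body here is illustrative only and an existing :class:`_engine.Engine` named ``engine`` is assumed::

    from sqlalchemy import event

    def _before_cursor_execute(conn, cursor, statement, parameters,
                               context, executemany):
        # inspect or log the statement before it reaches the DBAPI
        pass

    event.listen(engine, "before_cursor_execute", _before_cursor_execute)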
Either or both of the ``execute()`` and ``cursor_execute()`` may be implemented to intercept compiled statement and diff --git a/lib/sqlalchemy/orm/events.py b/lib/sqlalchemy/orm/events.py index 147bdf1d49..cede830644 100644 --- a/lib/sqlalchemy/orm/events.py +++ b/lib/sqlalchemy/orm/events.py @@ -1743,7 +1743,8 @@ class SessionEvents(event.Events): :meth:`_query.Query.update`. * ``context`` The :class:`.QueryContext` object, corresponding to the invocation of an ORM query. - * ``result`` the :class:`.ResultProxy` returned as a result of the + * ``result`` the :class:`_engine.ResultProxy` + returned as a result of the bulk UPDATE operation. .. seealso:: @@ -1778,7 +1779,8 @@ class SessionEvents(event.Events): was called upon. * ``context`` The :class:`.QueryContext` object, corresponding to the invocation of an ORM query. - * ``result`` the :class:`.ResultProxy` returned as a result of the + * ``result`` the :class:`_engine.ResultProxy` + returned as a result of the bulk DELETE operation. .. seealso:: diff --git a/lib/sqlalchemy/orm/session.py b/lib/sqlalchemy/orm/session.py index 059ee4caf7..530ccdd9cf 100644 --- a/lib/sqlalchemy/orm/session.py +++ b/lib/sqlalchemy/orm/session.py @@ -1156,7 +1156,7 @@ class Session(_SessionClassMethods): r"""Execute a SQL expression construct or string statement within the current transaction. - Returns a :class:`.ResultProxy` representing + Returns a :class:`_engine.ResultProxy` representing results of the statement execution, in the same manner as that of an :class:`_engine.Engine` or :class:`_engine.Connection`. @@ -1224,14 +1224,16 @@ class Session(_SessionClassMethods): The :meth:`.Session.execute` method does *not* invoke autoflush. - The :class:`.ResultProxy` returned by the :meth:`.Session.execute` + The :class:`_engine.ResultProxy` returned by the + :meth:`.Session.execute` method is returned with the "close_with_result" flag set to true; the significance of this flag is that if this :class:`.Session` is autocommitting and does not have a transaction-dedicated :class:`_engine.Connection` available, a temporary :class:`_engine.Connection` is established for the statement execution, which is closed (meaning, - returned to the connection pool) when the :class:`.ResultProxy` has + returned to the connection pool) when the :class:`_engine.ResultProxy` + has consumed all available data. This applies *only* when the :class:`.Session` is configured with autocommit=True and no transaction has been started. diff --git a/lib/sqlalchemy/orm/util.py b/lib/sqlalchemy/orm/util.py index 9ce6605346..d6bd407bec 100644 --- a/lib/sqlalchemy/orm/util.py +++ b/lib/sqlalchemy/orm/util.py @@ -309,7 +309,8 @@ first() (, (1, 2), None) :param class: mapped class (must be a positional argument) - :param row: :class:`.RowProxy` row returned by a :class:`.ResultProxy` + :param row: :class:`.RowProxy` row returned by a + :class:`_engine.ResultProxy` (must be given as a keyword arg) :param identity_token: optional identity token diff --git a/lib/sqlalchemy/pool/base.py b/lib/sqlalchemy/pool/base.py index 0ef199965a..5c4d4e5198 100644 --- a/lib/sqlalchemy/pool/base.py +++ b/lib/sqlalchemy/pool/base.py @@ -112,8 +112,8 @@ class Pool(log.Identified): ``"debug"``, the logging will include pool checkouts and checkins. The :paramref:`_pool.Pool.echo` parameter can also be set from the - :func:`.create_engine` call by using the - :paramref:`.create_engine.echo_pool` parameter. + :func:`_sa.create_engine` call by using the + :paramref:`_sa.create_engine.echo_pool` parameter. .. 
seealso:: @@ -164,7 +164,7 @@ class Pool(log.Identified): :param events: a list of 2-tuples, each of the form ``(callable, target)`` which will be passed to :func:`.event.listen` upon construction. Provided here so that event listeners - can be assigned via :func:`.create_engine` before dialect-level + can be assigned via :func:`_sa.create_engine` before dialect-level listeners are applied. :param listeners: A list of :class:`.PoolListener`-like objects or @@ -175,7 +175,7 @@ class Pool(log.Identified): :param dialect: a :class:`.Dialect` that will handle the job of calling rollback(), close(), or commit() on DBAPI connections. If omitted, a built-in "stub" dialect is used. Applications that - make use of :func:`~.create_engine` should not use this parameter + make use of :func:`_sa.create_engine` should not use this parameter as it is handled by the engine creation strategy. .. versionadded:: 1.1 - ``dialect`` is now a public parameter diff --git a/lib/sqlalchemy/sql/dml.py b/lib/sqlalchemy/sql/dml.py index 6d9bfee9e0..c3a9c8f9b6 100644 --- a/lib/sqlalchemy/sql/dml.py +++ b/lib/sqlalchemy/sql/dml.py @@ -138,7 +138,8 @@ class UpdateBase( Upon execution, the values of the columns to be returned are made available via the result set and can be iterated using - :meth:`.ResultProxy.fetchone` and similar. For DBAPIs which do not + :meth:`_engine.ResultProxy.fetchone` and similar. + For DBAPIs which do not natively support returning values (i.e. cx_oracle), SQLAlchemy will approximate this behavior at the result level so that a reasonable amount of behavioral neutrality is provided. @@ -314,7 +315,8 @@ class ValuesBase(UpdateBase): True, indicating that the statement will not attempt to fetch the "last inserted primary key" or other defaults. The statement deals with an arbitrary number of rows, so the - :attr:`.ResultProxy.inserted_primary_key` accessor does not + :attr:`_engine.ResultProxy.inserted_primary_key` + accessor does not apply. .. versionchanged:: 1.0.0 A multiple-VALUES INSERT now supports @@ -427,7 +429,8 @@ class ValuesBase(UpdateBase): added to any existing RETURNING clause, provided that :meth:`.UpdateBase.returning` is not used simultaneously. The column values will then be available on the result using the - :attr:`.ResultProxy.returned_defaults` accessor as a dictionary, + :attr:`_engine.ResultProxy.returned_defaults` accessor as a dictionary + , referring to values keyed to the :class:`_schema.Column` object as well as its ``.key``. @@ -457,7 +460,7 @@ class ValuesBase(UpdateBase): 3. It can be called against any backend. Backends that don't support RETURNING will skip the usage of the feature, rather than raising an exception. The return value of - :attr:`.ResultProxy.returned_defaults` will be ``None`` + :attr:`_engine.ResultProxy.returned_defaults` will be ``None`` :meth:`.ValuesBase.return_defaults` is used by the ORM to provide an efficient implementation for the ``eager_defaults`` feature of @@ -474,7 +477,7 @@ class ValuesBase(UpdateBase): :meth:`.UpdateBase.returning` - :attr:`.ResultProxy.returned_defaults` + :attr:`_engine.ResultProxy.returned_defaults` """ self._return_defaults = cols or True @@ -617,7 +620,8 @@ class Insert(ValuesBase): True, indicating that the statement will not attempt to fetch the "last inserted primary key" or other defaults. The statement deals with an arbitrary number of rows, so the - :attr:`.ResultProxy.inserted_primary_key` accessor does not apply. + :attr:`_engine.ResultProxy.inserted_primary_key` + accessor does not apply. 
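For a single-row INSERT, by contrast, these accessors are available; a minimal sketch, assuming an in-memory SQLite database and an illustrative ``users`` table::

    from sqlalchemy import (
        Column, Integer, MetaData, String, Table, create_engine
    )

    # illustrative in-memory SQLite setup
    engine = create_engine("sqlite://")
    metadata = MetaData()
    users = Table(
        "users",
        metadata,
        Column("id", Integer, primary_key=True),
        Column("name", String(50)),
    )
    metadata.create_all(engine)

    with engine.connect() as conn:
        # single-row INSERT: the primary key accessor is available
        result = conn.execute(users.insert().values(name="spongebob"))
        print(result.inserted_primary_key)

        # request server-generated defaults as well; on backends without
        # RETURNING support this attribute may simply be None, as noted above
        result = conn.execute(
            users.insert().return_defaults().values(name="sandy")
        )
        print(result.returned_defaults)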
""" if self.parameters: @@ -722,7 +726,7 @@ class Update(ValuesBase): the ``default`` keyword will be compiled 'inline' into the statement and not pre-executed. This means that their values will not be available in the dictionary returned from - :meth:`.ResultProxy.last_updated_params`. + :meth:`_engine.ResultProxy.last_updated_params`. :param preserve_parameter_order: if True, the update statement is expected to receive parameters **only** via the diff --git a/lib/sqlalchemy/sql/operators.py b/lib/sqlalchemy/sql/operators.py index de983fc3aa..f3482e3403 100644 --- a/lib/sqlalchemy/sql/operators.py +++ b/lib/sqlalchemy/sql/operators.py @@ -545,7 +545,8 @@ class ColumnOperators(Operators): This "false" expression has historically had different behaviors in older SQLAlchemy versions, see - :paramref:`.create_engine.empty_in_strategy` for behavioral options. + :paramref:`_sa.create_engine.empty_in_strategy` + for behavioral options. .. versionchanged:: 1.2 simplified the behavior of "empty in" expressions @@ -614,7 +615,7 @@ class ColumnOperators(Operators): In the case that ``other`` is an empty sequence, the compiler produces an "empty not in" expression. This defaults to the expression "1 = 1" to produce true in all cases. The - :paramref:`.create_engine.empty_in_strategy` may be used to + :paramref:`_sa.create_engine.empty_in_strategy` may be used to alter this behavior. .. versionchanged:: 1.2 The :meth:`.ColumnOperators.in_` and diff --git a/lib/sqlalchemy/sql/sqltypes.py b/lib/sqlalchemy/sql/sqltypes.py index e929cc2988..a8248c791e 100644 --- a/lib/sqlalchemy/sql/sqltypes.py +++ b/lib/sqlalchemy/sql/sqltypes.py @@ -190,8 +190,8 @@ class String(Concatenable, TypeEngine): In the rare circumstance that the DBAPI does not support Python unicode under Python 2, SQLAlchemy will use its own encoder/decoder functionality on strings, referring to the - value of the :paramref:`.create_engine.encoding` parameter - parameter passed to :func:`.create_engine` as the encoding. + value of the :paramref:`_sa.create_engine.encoding` parameter + parameter passed to :func:`_sa.create_engine` as the encoding. For the extremely rare case that Python Unicode is to be encoded/decoded by SQLAlchemy on a backend @@ -221,7 +221,7 @@ class String(Concatenable, TypeEngine): .. seealso:: - :paramref:`.create_engine.convert_unicode` - + :paramref:`_sa.create_engine.convert_unicode` - :class:`_engine.Engine`-wide parameter :param unicode_error: Optional, a method to use to handle Unicode @@ -380,7 +380,7 @@ class Unicode(String): directly, SQLAlchemy does the encoding and decoding outside of the DBAPI. The encoding in this scenario is determined by the ``encoding`` flag passed to - :func:`.create_engine`. + :func:`_sa.create_engine`. When using the :class:`.Unicode` type, it is only appropriate to pass Python ``unicode`` objects, and not plain ``str``. @@ -2112,9 +2112,9 @@ class JSON(Indexable, TypeEngine): psycopg2 dialect, psycopg2 may be using its own custom loader function. In order to affect the serializer / deserializer, they are currently - configurable at the :func:`.create_engine` level via the - :paramref:`.create_engine.json_serializer` and - :paramref:`.create_engine.json_deserializer` parameters. For example, + configurable at the :func:`_sa.create_engine` level via the + :paramref:`_sa.create_engine.json_serializer` and + :paramref:`_sa.create_engine.json_deserializer` parameters. For example, to turn off ``ensure_ascii``:: engine = create_engine( -- 2.39.5