From: Mike Bayer Date: Fri, 10 Aug 2012 15:22:37 +0000 (-0400) Subject: whitespace removal X-Git-Tag: rel_0_7_9~55 X-Git-Url: http://git.ipfire.org/cgi-bin/gitweb.cgi?a=commitdiff_plain;h=080d683c9604de561d50064299ba596734e2e03d;p=thirdparty%2Fsqlalchemy%2Fsqlalchemy.git whitespace removal --- diff --git a/README.rst b/README.rst index 376b5bc50a..17f9a3cb9f 100644 --- a/README.rst +++ b/README.rst @@ -16,47 +16,47 @@ language. Major SQLAlchemy features include: -* An industrial strength ORM, built +* An industrial strength ORM, built from the core on the identity map, unit of work, and data mapper patterns. These patterns - allow transparent persistence of objects + allow transparent persistence of objects using a declarative configuration system. Domain models can be constructed and manipulated naturally, and changes are synchronized with the current transaction automatically. * A relationally-oriented query system, exposing - the full range of SQL's capabilities - explicitly, including joins, subqueries, - correlation, and most everything else, + the full range of SQL's capabilities + explicitly, including joins, subqueries, + correlation, and most everything else, in terms of the object model. - Writing queries with the ORM uses the same - techniques of relational composition you use + Writing queries with the ORM uses the same + techniques of relational composition you use when writing SQL. While you can drop into literal SQL at any time, it's virtually never needed. -* A comprehensive and flexible system +* A comprehensive and flexible system of eager loading for related collections and objects. Collections are cached within a session, - and can be loaded on individual access, all + and can be loaded on individual access, all at once using joins, or by query per collection across the full result set. -* A Core SQL construction system and DBAPI +* A Core SQL construction system and DBAPI interaction layer. The SQLAlchemy Core is separate from the ORM and is a full database abstraction layer in its own right, and includes - an extensible Python-based SQL expression - language, schema metadata, connection pooling, + an extensible Python-based SQL expression + language, schema metadata, connection pooling, type coercion, and custom types. -* All primary and foreign key constraints are +* All primary and foreign key constraints are assumed to be composite and natural. Surrogate - integer primary keys are of course still the + integer primary keys are of course still the norm, but SQLAlchemy never assumes or hardcodes to this model. * Database introspection and generation. Database schemas can be "reflected" in one step into Python structures representing database metadata; - those same structures can then generate + those same structures can then generate CREATE statements right back out - all within the Core, independent of the ORM. @@ -73,7 +73,7 @@ SQLAlchemy's philosophy: that should be fully exposed. SQLAlchemy's ORM provides an open-ended set of patterns that allow a developer to construct a custom - mediation layer between a domain model and + mediation layer between a domain model and a relational schema, turning the so-called "object relational impedance" issue into a distant memory. @@ -82,18 +82,18 @@ SQLAlchemy's philosophy: of both the object model as well as the relational schema. SQLAlchemy only provides the means to automate the execution of these decisions. 
-* With SQLAlchemy, there's no such thing as - "the ORM generated a bad query" - you - retain full control over the structure of +* With SQLAlchemy, there's no such thing as + "the ORM generated a bad query" - you + retain full control over the structure of queries, including how joins are organized, - how subqueries and correlation is used, what + how subqueries and correlation is used, what columns are requested. Everything SQLAlchemy does is ultimately the result of a developer- initiated decision. * Don't use an ORM if the problem doesn't need one. SQLAlchemy consists of a Core and separate ORM component. The Core offers a full SQL expression - language that allows Pythonic construction + language that allows Pythonic construction of SQL constructs that render directly to SQL strings for a target database, returning result sets that are essentially enhanced DBAPI @@ -105,7 +105,7 @@ SQLAlchemy's philosophy: the start and end of a series of operations. * Never render a literal value in a SQL statement. Bound parameters are used to the greatest degree - possible, allowing query optimizers to cache + possible, allowing query optimizers to cache query plans effectively and making SQL injection attacks a non-issue. @@ -119,7 +119,7 @@ http://www.sqlalchemy.org/docs/ Installation / Requirements --------------------------- -Full documentation for installation is at +Full documentation for installation is at `Installation `_. Getting Help / Development / Bug reporting diff --git a/doc/build/builder/builders.py b/doc/build/builder/builders.py index 66ccf8dd19..be684f0394 100644 --- a/doc/build/builder/builders.py +++ b/doc/build/builder/builders.py @@ -25,7 +25,7 @@ class MakoBridge(TemplateBridge): builder.config.html_context['site_base'] = builder.config['site_base'] self.lookup = TemplateLookup(directories=builder.config.templates_path, - #format_exceptions=True, + #format_exceptions=True, imports=[ "from builder import util" ] @@ -46,7 +46,7 @@ class MakoBridge(TemplateBridge): # RTD layout if rtd: - # add variables if not present, such + # add variables if not present, such # as if local test of READTHEDOCS variable if 'MEDIA_URL' not in context: context['MEDIA_URL'] = "http://media.readthedocs.org/" @@ -107,14 +107,14 @@ class PyConWithSQLLexer(RegexLexer): 'sqlpopup':[ ( r'(.*?\n)((?:PRAGMA|BEGIN|SELECT|INSERT|DELETE|ROLLBACK|COMMIT|ALTER|UPDATE|CREATE|DROP|PRAGMA|DESCRIBE).*?(?:{stop}\n?|$))', - bygroups(using(PythonConsoleLexer), Token.Sql.Popup), + bygroups(using(PythonConsoleLexer), Token.Sql.Popup), "#pop" ) ], 'opensqlpopup':[ ( r'.*?(?:{stop}\n*|$)', - Token.Sql, + Token.Sql, "#pop" ) ] @@ -136,14 +136,14 @@ class PythonWithSQLLexer(RegexLexer): 'sqlpopup':[ ( r'(.*?\n)((?:PRAGMA|BEGIN|SELECT|INSERT|DELETE|ROLLBACK|COMMIT|ALTER|UPDATE|CREATE|DROP|PRAGMA|DESCRIBE).*?(?:{stop}\n?|$))', - bygroups(using(PythonLexer), Token.Sql.Popup), + bygroups(using(PythonLexer), Token.Sql.Popup), "#pop" ) ], 'opensqlpopup':[ ( r'.*?(?:{stop}\n*|$)', - Token.Sql, + Token.Sql, "#pop" ) ] diff --git a/doc/build/conf.py b/doc/build/conf.py index 552eddc155..1c6038f473 100644 --- a/doc/build/conf.py +++ b/doc/build/conf.py @@ -70,7 +70,7 @@ release_date = "June 16, 2012" site_base = "http://www.sqlalchemy.org" -# arbitrary number recognized by builders.py, incrementing this +# arbitrary number recognized by builders.py, incrementing this # will force a rebuild build_number = 3 diff --git a/doc/build/copyright.rst b/doc/build/copyright.rst index d56e9fad66..6047e96ce9 100644 --- 
a/doc/build/copyright.rst +++ b/doc/build/copyright.rst @@ -4,7 +4,7 @@ Appendix: Copyright This is the MIT license: ``_ -Copyright (c) 2005-2012 Michael Bayer and contributors. +Copyright (c) 2005-2012 Michael Bayer and contributors. SQLAlchemy is a trademark of Michael Bayer. Permission is hereby granted, free of charge, to any person obtaining a copy of this diff --git a/doc/build/core/connections.rst b/doc/build/core/connections.rst index 9f9a8f07d2..1564e6aca4 100644 --- a/doc/build/core/connections.rst +++ b/doc/build/core/connections.rst @@ -55,12 +55,12 @@ which is a **proxy** object for an actual DBAPI connection. The DBAPI connection is retrieved from the connection pool at the point at which :class:`.Connection` is created. -The returned result is an instance of :class:`.ResultProxy`, which +The returned result is an instance of :class:`.ResultProxy`, which references a DBAPI cursor and provides a largely compatible interface with that of the DBAPI cursor. The DBAPI cursor will be closed -by the :class:`.ResultProxy` when all of its result rows (if any) are +by the :class:`.ResultProxy` when all of its result rows (if any) are exhausted. A :class:`.ResultProxy` that returns no rows, such as that of -an UPDATE statement (without any returned rows), +an UPDATE statement (without any returned rows), releases cursor resources immediately upon construction. When the :meth:`~.Connection.close` method is called, the referenced DBAPI @@ -97,17 +97,17 @@ DBAPI connection resource to the pool (SQLAlchemy achieves this by the usage of weakref callbacks - *never* the ``__del__`` method) - however it's never a good idea to rely upon Python garbage collection to manage resources. -Our example above illustrated the execution of a textual SQL string. -The :meth:`~.Connection.execute` method can of course accommodate more than +Our example above illustrated the execution of a textual SQL string. +The :meth:`~.Connection.execute` method can of course accommodate more than that, including the variety of SQL expression constructs described in :ref:`sqlexpression_toplevel`. Using Transactions ================== -.. note:: +.. note:: - This section describes how to use transactions when working directly + This section describes how to use transactions when working directly with :class:`.Engine` and :class:`.Connection` objects. When using the SQLAlchemy ORM, the public API for transaction control is via the :class:`.Session` object, which makes usage of the :class:`.Transaction` @@ -201,15 +201,15 @@ CREATE TABLE, ALTER TABLE, and then issuing a COMMIT automatically if no transaction is in progress. The detection is based on the presence of the ``autocommit=True`` execution option on the statement. If the statement is a text-only statement and the flag is not set, a regular expression is used -to detect INSERT, UPDATE, DELETE, as well as a variety of other commands +to detect INSERT, UPDATE, DELETE, as well as a variety of other commands for a particular backend:: conn = engine.connect() conn.execute("INSERT INTO users VALUES (1, 'john')") # autocommits The "autocommit" feature is only in effect when no :class:`.Transaction` has -otherwise been declared. This means the feature is not generally used with -the ORM, as the :class:`.Session` object by default always maintains an +otherwise been declared. This means the feature is not generally used with +the ORM, as the :class:`.Session` object by default always maintains an ongoing :class:`.Transaction`. 
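With :class:`.Engine` and :class:`.Connection`, the equivalent control is gained by declaring the :class:`.Transaction` explicitly; a minimal sketch of that pattern, mirroring the examples earlier in this chapter (the ``users`` table and values are hypothetical)::

    from sqlalchemy import create_engine

    engine = create_engine('sqlite://')
    conn = engine.connect()
    trans = conn.begin()  # an explicit Transaction; "autocommit" no longer applies
    try:
        conn.execute("INSERT INTO users VALUES (1, 'john')")
        trans.commit()
    except:
        trans.rollback()  # undo the work on any failure
        raise
    finally:
        conn.close()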
Full control of the "autocommit" behavior is available using the generative @@ -232,8 +232,8 @@ refers to the usage of the ``execute()`` method on an object which is not a :class:`.Connection`. This was illustrated using the :meth:`~.Engine.execute` method of :class:`.Engine`. -In addition to "connectionless" execution, it is also possible -to use the :meth:`~.Executable.execute` method of +In addition to "connectionless" execution, it is also possible +to use the :meth:`~.Executable.execute` method of any :class:`.Executable` construct, which is a marker for SQL expression objects that support execution. The SQL expression object itself references an :class:`.Engine` or :class:`.Connection` known as the **bind**, which it uses @@ -274,7 +274,7 @@ Implicit execution is also connectionless, and calls the ``execute()`` method on the expression itself, utilizing the fact that either an :class:`~sqlalchemy.engine.base.Engine` or :class:`~sqlalchemy.engine.base.Connection` has been *bound* to the expression -object (binding is discussed further in +object (binding is discussed further in :ref:`metadata_toplevel`): .. sourcecode:: python+sql @@ -363,7 +363,7 @@ call :meth:`.Engine.contextual_connect`:: call_operation3(conn) conn.close() -Calling :meth:`~.Connection.close` on the "contextual" connection does not release +Calling :meth:`~.Connection.close` on the "contextual" connection does not release its resources until all other usages of that resource are closed as well, including that any ongoing transactions are rolled back or committed. diff --git a/doc/build/core/engines.rst b/doc/build/core/engines.rst index 1f01265a9d..eb55472655 100644 --- a/doc/build/core/engines.rst +++ b/doc/build/core/engines.rst @@ -45,11 +45,11 @@ applications. Supported Databases ==================== -SQLAlchemy includes many :class:`~sqlalchemy.engine.base.Dialect` implementations for various -backends; each is described as its own package in the :ref:`sqlalchemy.dialects_toplevel` package. A +SQLAlchemy includes many :class:`~sqlalchemy.engine.base.Dialect` implementations for various +backends; each is described as its own package in the :ref:`sqlalchemy.dialects_toplevel` package. A SQLAlchemy dialect always requires that an appropriate DBAPI driver is installed. -The table below summarizes the state of DBAPI support in SQLAlchemy 0.7. The values +The table below summarizes the state of DBAPI support in SQLAlchemy 0.7. The values translate as: * yes / Python platform - The SQLAlchemy dialect is mostly or fully operational on the target platform. @@ -272,7 +272,7 @@ connection pool, it follows that you should keep a single :class:`.Engine` per database established within an application, rather than creating a new one for each connection. -.. note:: +.. note:: :class:`.QueuePool` is not used by default for SQLite engines. See :ref:`sqlite_toplevel` for details on SQLite connection pool usage. @@ -335,7 +335,7 @@ been configured (i.e. such as via ``logging.basicConfig()``), the general namespace of SA loggers that can be turned on is as follows: * ``sqlalchemy.engine`` - controls SQL echoing. set to ``logging.INFO`` for SQL query output, ``logging.DEBUG`` for query + result set output. -* ``sqlalchemy.dialects`` - controls custom logging for SQL dialects. See the documentation of individual dialects for details. +* ``sqlalchemy.dialects`` - controls custom logging for SQL dialects. See the documentation of individual dialects for details. * ``sqlalchemy.pool`` - controls connection pool logging. 
set to ``logging.INFO`` or lower to log connection pool checkouts/checkins. * ``sqlalchemy.orm`` - controls logging of various ORM functions. set to ``logging.INFO`` for information on mapper configurations. @@ -373,11 +373,11 @@ string. To set this to a specific name, use the "logging_name" and The SQLAlchemy :class:`.Engine` conserves Python function call overhead by only emitting log statements when the current logging level is detected - as ``logging.INFO`` or ``logging.DEBUG``. It only checks this level when - a new connection is procured from the connection pool. Therefore when + as ``logging.INFO`` or ``logging.DEBUG``. It only checks this level when + a new connection is procured from the connection pool. Therefore when changing the logging configuration for an already-running application, any :class:`.Connection` that's currently active, or more commonly a :class:`~.orm.session.Session` object that's active in a transaction, won't log any - SQL according to the new configuration until a new :class:`.Connection` - is procured (in the case of :class:`~.orm.session.Session`, this is + SQL according to the new configuration until a new :class:`.Connection` + is procured (in the case of :class:`~.orm.session.Session`, this is after the current transaction ends and a new one begins). diff --git a/doc/build/core/event.rst b/doc/build/core/event.rst index f3433876c2..ff808f88a4 100644 --- a/doc/build/core/event.rst +++ b/doc/build/core/event.rst @@ -3,7 +3,7 @@ Events ====== -SQLAlchemy includes an event API which publishes a wide variety of hooks into +SQLAlchemy includes an event API which publishes a wide variety of hooks into the internals of both SQLAlchemy Core and ORM. .. versionadded:: 0.7 @@ -19,7 +19,7 @@ intercepted, and a target. Additional positional and keyword arguments may be supported by specific types of events, which may specify alternate interfaces for the given event function, or provide instructions regarding secondary event targets based on the given target. -The name of an event and the argument signature of a corresponding listener function are derived from +The name of an event and the argument signature of a corresponding listener function are derived from a class-bound specification method, which exists bound to a marker class that's described in the documentation. For example, the documentation for :meth:`.PoolEvents.connect` indicates that the event name is ``"connect"`` and that a user-defined listener function should receive two positional arguments:: diff --git a/doc/build/core/interfaces.rst b/doc/build/core/interfaces.rst index 7e76127f4d..7da9ecb129 100644 --- a/doc/build/core/interfaces.rst +++ b/doc/build/core/interfaces.rst @@ -5,7 +5,7 @@ Deprecated Event Interfaces .. module:: sqlalchemy.interfaces -This section describes the class-based core event interface introduced in +This section describes the class-based core event interface introduced in SQLAlchemy 0.5. The ORM analogue is described at :ref:`dep_interfaces_orm_toplevel`. .. deprecated:: 0.7 diff --git a/doc/build/core/internals.rst b/doc/build/core/internals.rst index 31f6cb063a..5ea4aecead 100644 --- a/doc/build/core/internals.rst +++ b/doc/build/core/internals.rst @@ -3,7 +3,7 @@ Core Internals ============== -Some key internal constructs are listed here. +Some key internal constructs are listed here. ..
currentmodule: sqlalchemy diff --git a/doc/build/core/pooling.rst b/doc/build/core/pooling.rst index 2906a6fea3..6b566639ff 100644 --- a/doc/build/core/pooling.rst +++ b/doc/build/core/pooling.rst @@ -6,7 +6,7 @@ Connection Pooling .. module:: sqlalchemy.pool A connection pool is a standard technique used to maintain -long running connections in memory for efficient re-use, +long running connections in memory for efficient re-use, as well as to provide management for the total number of connections an application might use simultaneously. @@ -16,7 +16,7 @@ server-side web applications, a connection pool is the standard way to maintain a "pool" of active database connections in memory which are reused across requests. -SQLAlchemy includes several connection pool implementations +SQLAlchemy includes several connection pool implementations which integrate with the :class:`.Engine`. They can also be used directly for applications that want to add pooling to an otherwise plain DBAPI approach. @@ -48,12 +48,12 @@ dataset within the scope of a single connection. All SQLAlchemy pool implementations have in common that none of them "pre create" connections - all implementations wait until first use before creating a connection. At that point, if -no additional concurrent checkout requests for more connections +no additional concurrent checkout requests for more connections are made, no additional connections are created. This is why it's perfectly fine for :func:`.create_engine` to default to using a :class:`.QueuePool` of size five without regard to whether or not the application really needs five connections queued up - the pool would only grow to that size if the application -actually used five connections concurrently, in which case the usage of a +actually used five connections concurrently, in which case the usage of a small pool is an entirely appropriate default behavior. Switching Pool Implementations ------------------------------ Disabling pooling using :class:`.NullPool`:: from sqlalchemy.pool import NullPool engine = create_engine( - 'postgresql+psycopg2://scott:tiger@localhost/test', + 'postgresql+psycopg2://scott:tiger@localhost/test', poolclass=NullPool) Using a Custom Connection Function ---------------------------------- -All :class:`.Pool` classes accept an argument ``creator`` which is +All :class:`.Pool` classes accept an argument ``creator`` which is a callable that creates a new connection. :func:`.create_engine` accepts this function to pass onto the pool via an argument of the same name:: @@ -127,14 +127,14 @@ within a transparent proxy:: cursor.execute("select foo") The purpose of the transparent proxy is to intercept the ``close()`` call, -such that instead of the DBAPI connection being closed, it's returned to the +pool:: # "close" the connection. Returns # it to the pool. conn.close() -The proxy also returns its contained DBAPI connection to the pool +The proxy also returns its contained DBAPI connection to the pool when it is garbage collected, though it's not deterministic in Python that this occurs immediately (though it is typical with CPython). @@ -148,24 +148,24 @@ Pool Events ----------- Connection pools support an event interface that allows hooks to execute -upon first connect, upon each new connection, and upon checkout and +upon first connect, upon each new connection, and upon checkout and checkin of connections. See :class:`.PoolEvents` for details.
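A minimal sketch of attaching such a hook through the event API described earlier; the listener signature follows :meth:`.PoolEvents.connect`, while the PRAGMA executed here is just an illustrative assumption::

    from sqlalchemy import create_engine, event

    engine = create_engine('sqlite://')

    @event.listens_for(engine.pool, 'connect')
    def on_connect(dbapi_connection, connection_record):
        # invoked once for each new DBAPI connection created by the pool
        cursor = dbapi_connection.cursor()
        cursor.execute("PRAGMA foreign_keys=ON")
        cursor.close()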
Dealing with Disconnects ------------------------ -The connection pool has the ability to refresh individual connections as well as +The connection pool has the ability to refresh individual connections as well as its entire set of connections, setting the previously pooled connections as -"invalid". A common use case is to allow the connection pool to gracefully recover +when the database server has been restarted, and all previously established connections are no longer functional. There are two approaches to this. Disconnect Handling - Optimistic ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -The most common approach is to let SQLAlchemy handle disconnects as they -occur, at which point the pool is refreshed. This assumes the :class:`.Pool` -is used in conjunction with a :class:`.Engine`. The :class:`.Engine` has +The most common approach is to let SQLAlchemy handle disconnects as they +occur, at which point the pool is refreshed. This assumes the :class:`.Pool` +is used in conjunction with a :class:`.Engine`. The :class:`.Engine` has logic which can detect disconnection events and refresh the pool automatically. When the :class:`.Connection` attempts to use a DBAPI connection, and an @@ -187,14 +187,14 @@ that they are replaced with new ones upon next checkout:: if e.connection_invalidated: print "Connection was invalidated!" - # after the invalidate event, a new connection + # after the invalidate event, a new connection # starts with a new Pool c = e.connect() c.execute("SELECT * FROM table") The above example illustrates that no special intervention is needed, the pool continues normally after a disconnection event is detected. However, an exception is -raised. In a typical web application using an ORM Session, the above condition would +raised. In a typical web application using an ORM Session, the above condition would correspond to a single request failing with a 500 error, then the web application continuing normally beyond that. Hence the approach is "optimistic" in that frequent database restarts are not anticipated. @@ -202,7 +202,7 @@ database restarts are not anticipated. Setting Pool Recycle ~~~~~~~~~~~~~~~~~~~~~~~ -An additional setting that can augment the "optimistic" approach is to set the +An additional setting that can augment the "optimistic" approach is to set the pool recycle parameter. This parameter prevents the pool from using a particular connection that has passed a certain age, and is appropriate for database backends such as MySQL that automatically close connections that have been stale after a particular @@ -219,8 +219,8 @@ of the :class:`.Pool` itself, independent of whether or not an :class:`.Engine` Disconnect Handling - Pessimistic ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -At the expense of some extra SQL emitted for each connection checked out from the pool, -a "ping" operation established by a checkout event handler +At the expense of some extra SQL emitted for each connection checked out from the pool, +a "ping" operation established by a checkout event handler can detect an invalid connection before it's used:: from sqlalchemy import exc @@ -244,7 +244,7 @@ can detect an invalid connection before it's used:: Above, the :class:`.Pool` object specifically catches :class:`~sqlalchemy.exc.DisconnectionError` and attempts to create a new DBAPI connection, up to three times, before giving up and then raising -:class:`~sqlalchemy.exc.InvalidRequestError`, failing the connection.
This recipe will ensure +:class:`~sqlalchemy.exc.InvalidRequestError`, failing the connection. This recipe will ensure that a new :class:`.Connection` will succeed even if connections in the pool have gone stale, provided that the database server is actually running. The expense is that of an additional execution performed per checkout. When using the ORM :class:`.Session`, @@ -252,7 +252,7 @@ there is one connection checkout per transaction, so the expense is fairly low. above also works with straight connection pool usage, that is, even if no :class:`.Engine` were involved. -The event handler can be tested using a script like the following, restarting the database +The event handler can be tested using a script like the following, restarting the database server at the point at which the script pauses for input:: from sqlalchemy import create_engine diff --git a/doc/build/core/schema.rst b/doc/build/core/schema.rst index ccaa89d0a6..dae9686a58 100644 --- a/doc/build/core/schema.rst +++ b/doc/build/core/schema.rst @@ -257,8 +257,8 @@ While SQLAlchemy directly supports emitting CREATE and DROP statements for schem constructs, the ability to alter those constructs, usually via the ALTER statement as well as other database-specific constructs, is outside of the scope of SQLAlchemy itself. While it's easy enough to emit ALTER statements and similar by hand, -such as by passing a string to :meth:`.Connection.execute` or by using the -:class:`.DDL` construct, it's a common practice to automate the maintenance of +such as by passing a string to :meth:`.Connection.execute` or by using the +:class:`.DDL` construct, it's a common practice to automate the maintenance of database schemas in relation to application code using schema migration tools. There are two major migration tools available for SQLAlchemy: @@ -266,12 +266,12 @@ There are two major migration tools available for SQLAlchemy: * `Alembic `_ - Written by the author of SQLAlchemy, Alembic features a highly customizable environment and a minimalistic usage pattern, supporting such features as transactional DDL, automatic generation of "candidate" - migrations, an "offline" mode which generates SQL scripts, and support for branch + migrations, an "offline" mode which generates SQL scripts, and support for branch resolution. * `SQLAlchemy-Migrate `_ - The original migration tool for SQLAlchemy, SQLAlchemy-Migrate is widely used and continues - under active development. SQLAlchemy-Migrate includes features such as - SQL script generation, ORM class generation, ORM model comparison, and extensive + under active development. SQLAlchemy-Migrate includes features such as + SQL script generation, ORM class generation, ORM model comparison, and extensive support for SQLite migrations. .. _metadata_binding: @@ -1092,11 +1092,11 @@ Setting up Constraints when using the Declarative ORM Extension The :class:`.Table` is the SQLAlchemy Core construct that allows one to define table metadata, which among other things can be used by the SQLAlchemy ORM as a target to map a class. The :ref:`Declarative ` -extension allows the :class:`.Table` object to be created automatically, given +extension allows the :class:`.Table` object to be created automatically, given the contents of the table primarily as a mapping of :class:`.Column` objects. 
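For instance, a minimal Declarative mapping along these lines (the class and column names are illustrative) is enough for the :class:`.Table` to be generated, after which it remains available as ``__table__``::

    from sqlalchemy import Column, Integer, String
    from sqlalchemy.ext.declarative import declarative_base

    Base = declarative_base()

    class User(Base):
        __tablename__ = 'user'
        id = Column(Integer, primary_key=True)
        name = Column(String(50))

    # the Table object that Declarative generated from the mapping
    print User.__table__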
To apply table-level constraint objects such as :class:`.ForeignKeyConstraint` -to a table defined using Declarative, use the ``__table_args__`` attribute, +to a table defined using Declarative, use the ``__table_args__`` attribute, described at :ref:`declarative_table_args`. Constraints API --------------- @@ -1179,9 +1179,9 @@ INDEX" is issued right after the create statements for the table: CREATE INDEX idx_col34 ON mytable (col3, col4){stop} Note in the example above, the :class:`.Index` construct is created -externally to the table to which it corresponds, using :class:`.Column` +externally to the table to which it corresponds, using :class:`.Column` objects directly. :class:`.Index` also supports -"inline" definition inside the :class:`.Table`, using string names to +"inline" definition inside the :class:`.Table`, using string names to identify columns:: meta = MetaData() @@ -1308,7 +1308,7 @@ constraint will be added via ALTER: event.listen( users, - "after_create", + "after_create", AddConstraint(constraint) ) event.listen( @@ -1331,11 +1331,11 @@ constraint will be added via ALTER: DROP TABLE users{stop} The real usefulness of the above becomes clearer once we illustrate the :meth:`.DDLEvent.execute_if` -method. This method returns a modified form of the DDL callable which will +method. This method returns a modified form of the DDL callable which will filter on criteria before responding to a received event. It accepts a parameter ``dialect``, which is the string name of a dialect or a tuple of such, which will limit the execution of the item to just those dialects. It also -accepts a ``callable_`` parameter which may reference a Python callable which will +accepts a ``callable_`` parameter which may reference a Python callable which will be invoked upon event reception, returning ``True`` or ``False`` indicating if the event should proceed. diff --git a/doc/build/core/tutorial.rst b/doc/build/core/tutorial.rst index 7c991888f7..96fa65f4a2 100644 --- a/doc/build/core/tutorial.rst +++ b/doc/build/core/tutorial.rst @@ -150,7 +150,7 @@ each table first before creating, so it's safe to call multiple times: () COMMIT -.. note:: +.. note:: Users familiar with the syntax of CREATE TABLE may notice that the VARCHAR columns were generated without a length; on SQLite and Postgresql, @@ -836,12 +836,12 @@ once for each address. We create two :class:`.Alias` constructs against ('jack@msn.com', 'jack@yahoo.com') {stop}[(1, u'jack', u'Jack Jones')] -Note that the :class:`.Alias` construct generated the names ``addresses_1`` and +Note that the :class:`.Alias` construct generated the names ``addresses_1`` and ``addresses_2`` in the final SQL result. The generation of these names is determined by the position of the construct within the statement. If we created a query using -only the second ``a2`` alias, the name would come out as ``addresses_1``. The -generation of the names is also *deterministic*, meaning the same SQLAlchemy -statement construct will produce the identical SQL string each time it is +only the second ``a2`` alias, the name would come out as ``addresses_1``. The +generation of the names is also *deterministic*, meaning the same SQLAlchemy +statement construct will produce the identical SQL string each time it is rendered for a particular dialect. Since on the outside, we refer to the alias using the :class:`.Alias` construct @@ -1027,7 +1027,7 @@ to arrive with a full statement.
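A condensed sketch of that generative style, assuming the ``users`` table and ``conn`` connection from earlier in this tutorial::

    from sqlalchemy import select

    stmt = select([users.c.id, users.c.name])
    stmt = stmt.where(users.c.name == 'jack')  # returns a new select()
    stmt = stmt.order_by(users.c.id)           # again a new select()
    result = conn.execute(stmt)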
Transforming a Statement ------------------------ -We've seen how methods like :meth:`.Select.where` and :meth:`._SelectBase.order_by` are +We've seen how methods like :meth:`.Select.where` and :meth:`._SelectBase.order_by` are part of the so-called *Generative* family of methods on the :func:`.select` construct, where one :func:`.select` copies itself to return a new one with modifications. SQL constructs also support another form of generative behavior which is @@ -1232,7 +1232,7 @@ OVER clause, using the :meth:`~.FunctionElement.over` method: >>> s = select([users.c.id, func.row_number().over(order_by=users.c.name)]) >>> print s # doctest: +NORMALIZE_WHITESPACE - SELECT users.id, row_number() OVER (ORDER BY users.name) AS anon_1 + SELECT users.id, row_number() OVER (ORDER BY users.name) AS anon_1 FROM users Unions and Other Set Operations @@ -1473,7 +1473,7 @@ that can be specified: {stop} >>> # with binds, you can also update many rows at once - {sql}>>> conn.execute(u, + {sql}>>> conn.execute(u, ... {'oldname':'jack', 'newname':'ed'}, ... {'oldname':'wendy', 'newname':'mary'}, ... {'oldname':'jim', 'newname':'jake'}, @@ -1520,7 +1520,7 @@ that refer to multiple tables. For PG and MSSQL, this is the "UPDATE FROM" syntax, which updates one table at a time, but can reference additional tables in an additional "FROM" clause that can then be referenced in the WHERE clause directly. On MySQL, multiple tables can be embedded into a single UPDATE statement separated by a comma. -The SQLAlchemy :func:`.update` construct supports both of these modes +The SQLAlchemy :func:`.update` construct supports both of these modes implicitly, by specifying multiple tables in the WHERE clause:: stmt = users.update().\ values(name='ed wood').\ where(users.c.id==addresses.c.id).\ where(addresses.c.email_address.startswith('ed%')) The resulting SQL from the above statement would render as:: - UPDATE users SET name=:name FROM addresses - WHERE users.id = addresses.id AND + UPDATE users SET name=:name FROM addresses + WHERE users.id = addresses.id AND addresses.email_address LIKE :email_address_1 || '%%' When using MySQL, columns from each table can be assigned to in the @@ -1540,7 +1540,7 @@ SET clause directly, using the dictionary form passed to :meth:`.Update.values`: stmt = users.update().\ values({ - users.c.name:'ed wood', + users.c.name:'ed wood', addresses.c.email_address:'ed.wood@foo.com' }).\ where(users.c.id==addresses.c.id).\ @@ -1548,11 +1548,11 @@ SET clause directly, using the dictionary form passed to :meth:`.Update.values`: The tables are referenced explicitly in the SET clause:: - UPDATE users, addresses SET addresses.email_address=%s, - users.name=%s WHERE users.id = addresses.id + UPDATE users, addresses SET addresses.email_address=%s, + users.name=%s WHERE users.id = addresses.id AND addresses.email_address LIKE concat(%s, '%%') -SQLAlchemy doesn't do anything special when these constructs are used on +SQLAlchemy doesn't do anything special when these constructs are used on a non-supporting database. The ``UPDATE FROM`` syntax is generated by default when multiple tables are present, and the statement will be rejected by the database if this syntax is not supported.
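To check ahead of time which form of the statement a given backend will receive, the construct can be compiled against a dialect explicitly, without any database connection; a sketch, assuming the multiple-table ``stmt`` from above::

    from sqlalchemy.dialects import postgresql, mysql

    print stmt.compile(dialect=postgresql.dialect())  # the "UPDATE FROM" form
    print stmt.compile(dialect=mysql.dialect())       # the "UPDATE users, addresses" form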
diff --git a/doc/build/core/types.rst b/doc/build/core/types.rst index b6144a4384..9ff5425879 100644 --- a/doc/build/core/types.rst +++ b/doc/build/core/types.rst @@ -218,7 +218,7 @@ Or some PostgreSQL types:: Each dialect provides the full set of typenames supported by that backend within its `__all__` collection, so that a simple -`import *` or similar will import all supported types as +`import *` or similar will import all supported types as implemented for that backend:: from sqlalchemy.dialects.postgresql import * @@ -229,7 +229,7 @@ implemented for that backend:: Column('inetaddr', INET) ) -Where above, the INTEGER and VARCHAR types are ultimately from +Where above, the INTEGER and VARCHAR types are ultimately from sqlalchemy.types, and INET is specific to the Postgresql dialect. Some dialect level types have the same name as the SQL standard type, @@ -256,10 +256,10 @@ Overriding Type Compilation ~~~~~~~~~~~~~~~~~~~~~~~~~~~ A frequent need is to force the "string" version of a type, that is -the one rendered in a CREATE TABLE statement or other SQL function +the one rendered in a CREATE TABLE statement or other SQL function like CAST, to be changed. For example, an application may want to force the rendering of ``BINARY`` for all platforms -except for one, in which it wants ``BLOB`` to be rendered. Usage +of an existing generic type, in this case :class:`.LargeBinary`, is preferred for most use cases. But to control types more accurately, a compilation directive that is per-dialect can be associated with any type:: @@ -273,10 +273,10 @@ can be associated with any type:: return "BLOB" The above code allows the usage of :class:`.types.BINARY`, which -will produce the string ``BINARY`` against all backends except SQLite, +will produce the string ``BINARY`` against all backends except SQLite, in which case it will produce ``BLOB``. -See the section :ref:`type_compilation_extension`, a subsection of +See the section :ref:`type_compilation_extension`, a subsection of :ref:`sqlalchemy.ext.compiler_toplevel`, for additional examples. Augmenting Existing Types @@ -306,10 +306,10 @@ A common source of confusion regarding the :class:`.Unicode` type is that it is intended to deal *only* with Python ``unicode`` objects on the Python side, meaning values passed to it as bind parameters must be of the form ``u'some string'`` if using Python 2 and not 3. -The encoding/decoding functions it performs are only to suit what the +The encoding/decoding functions it performs are only to suit what the DBAPI in use requires, and are primarily a private implementation detail. -The use case of a type that can safely receive Python bytestrings, +The use case of a type that can safely receive Python bytestrings, that is strings that contain non-ASCII characters and are not ``u''`` objects in Python 2, can be achieved using a :class:`.TypeDecorator` which coerces as needed:: @@ -355,9 +355,9 @@ many decimal places. Here's a recipe that rounds them down:: Backend-agnostic GUID Type ^^^^^^^^^^^^^^^^^^^^^^^^^^ -Receives and returns Python uuid() objects. Uses the PG UUID type +Receives and returns Python uuid() objects. Uses the PG UUID type when using Postgresql, CHAR(32) on other backends, storing them -in stringified hex format. Can be modified to store
Can be modified to store binary in CHAR(16) if desired:: from sqlalchemy.types import TypeDecorator, CHAR @@ -441,7 +441,7 @@ Creating New Types ~~~~~~~~~~~~~~~~~~ The :class:`.UserDefinedType` class is provided as a simple base class -for defining entirely new database types. Use this to represent native +for defining entirely new database types. Use this to represent native database types not known by SQLAlchemy. If only Python translation behavior is needed, use :class:`.TypeDecorator` instead. diff --git a/doc/build/dialects/drizzle.rst b/doc/build/dialects/drizzle.rst index 114c6f9c94..6991498d59 100644 --- a/doc/build/dialects/drizzle.rst +++ b/doc/build/dialects/drizzle.rst @@ -16,7 +16,7 @@ valid with Drizzle are importable from the top level dialect:: DECIMAL, DOUBLE, ENUM, FLOAT, INT, INTEGER, NUMERIC, TEXT, TIME, TIMESTAMP, VARBINARY, VARCHAR -Types which are specific to Drizzle, or have Drizzle-specific +Types which are specific to Drizzle, or have Drizzle-specific construction arguments, are as follows: .. currentmodule:: sqlalchemy.dialects.drizzle diff --git a/doc/build/dialects/index.rst b/doc/build/dialects/index.rst index 9865d37ec4..30c4883a16 100644 --- a/doc/build/dialects/index.rst +++ b/doc/build/dialects/index.rst @@ -4,7 +4,7 @@ Dialects ======== The **dialect** is the system SQLAlchemy uses to communicate with various types of DBAPIs and databases. -A compatibility chart of supported backends can be found at :ref:`supported_dbapis`. The sections that +A compatibility chart of supported backends can be found at :ref:`supported_dbapis`. The sections that follow contain reference documentation and notes specific to the usage of each backend, as well as notes for the various DBAPIs. diff --git a/doc/build/dialects/mssql.rst b/doc/build/dialects/mssql.rst index f969983328..9983bd394e 100644 --- a/doc/build/dialects/mssql.rst +++ b/doc/build/dialects/mssql.rst @@ -19,7 +19,7 @@ they originate from :mod:`sqlalchemy.types` or from the local dialect:: SMALLINT, SMALLMONEY, SQL_VARIANT, TEXT, TIME, \ TIMESTAMP, TINYINT, UNIQUEIDENTIFIER, VARBINARY, VARCHAR -Types which are specific to SQL Server, or have SQL Server-specific +Types which are specific to SQL Server, or have SQL Server-specific construction arguments, are as follows: .. currentmodule:: sqlalchemy.dialects.mssql diff --git a/doc/build/dialects/mysql.rst b/doc/build/dialects/mysql.rst index 916df525eb..47d48b7bf1 100644 --- a/doc/build/dialects/mysql.rst +++ b/doc/build/dialects/mysql.rst @@ -18,7 +18,7 @@ valid with MySQL are importable from the top level dialect:: NUMERIC, NVARCHAR, REAL, SET, SMALLINT, TEXT, TIME, TIMESTAMP, \ TINYBLOB, TINYINT, TINYTEXT, VARBINARY, VARCHAR, YEAR -Types which are specific to MySQL, or have MySQL-specific +Types which are specific to MySQL, or have MySQL-specific construction arguments, are as follows: .. currentmodule:: sqlalchemy.dialects.mysql diff --git a/doc/build/dialects/oracle.rst b/doc/build/dialects/oracle.rst index 5e259ead76..f3f3c1700b 100644 --- a/doc/build/dialects/oracle.rst +++ b/doc/build/dialects/oracle.rst @@ -18,7 +18,7 @@ they originate from :mod:`sqlalchemy.types` or from the local dialect:: NUMBER, NVARCHAR, NVARCHAR2, RAW, TIMESTAMP, VARCHAR, \ VARCHAR2 -Types which are specific to Oracle, or have Oracle-specific +Types which are specific to Oracle, or have Oracle-specific construction arguments, are as follows: .. 
currentmodule:: sqlalchemy.dialects.oracle diff --git a/doc/build/dialects/postgresql.rst b/doc/build/dialects/postgresql.rst index b34c076848..9fece47ffd 100644 --- a/doc/build/dialects/postgresql.rst +++ b/doc/build/dialects/postgresql.rst @@ -18,7 +18,7 @@ they originate from :mod:`sqlalchemy.types` or from the local dialect:: MACADDR, NUMERIC, REAL, SMALLINT, TEXT, TIME, TIMESTAMP, \ UUID, VARCHAR -Types which are specific to PostgreSQL, or have PostgreSQL-specific +Types which are specific to PostgreSQL, or have PostgreSQL-specific construction arguments, are as follows: .. currentmodule:: sqlalchemy.dialects.postgresql diff --git a/doc/build/index.rst b/doc/build/index.rst index 6cd8471533..e4c7d5f680 100644 --- a/doc/build/index.rst +++ b/doc/build/index.rst @@ -9,7 +9,7 @@ Getting Started A high level view and getting set up. -:ref:`Overview ` | +:ref:`Overview ` | :ref:`Installation Guide ` | :ref:`Migration from 0.6 ` @@ -57,7 +57,7 @@ SQLAlchemy Core =============== The breadth of SQLAlchemy's SQL rendering engine, DBAPI -integration, transaction integration, and schema description services +integration, transaction integration, and schema description services are documented here. In contrast to the ORM's domain-centric mode of usage, the SQL Expression Language provides a schema-centric usage paradigm. * **Read this first:** @@ -76,15 +76,15 @@ are documented here. In contrast to the ORM's domain-centric mode of usage, the :ref:`Database Introspection (Reflection) ` | :ref:`Insert/Update Defaults ` | :ref:`Constraints and Indexes ` | - :ref:`Using Data Definition Language (DDL) ` + :ref:`Using Data Definition Language (DDL) ` * **Datatypes:** - :ref:`Overview ` | - :ref:`Generic Types ` | + :ref:`Overview ` | + :ref:`Generic Types ` | :ref:`SQL Standard Types ` | :ref:`Vendor Specific Types ` | :ref:`Building Custom Types ` | - :ref:`API ` + :ref:`API ` * **Extending the Core:** :doc:`SQLAlchemy Events ` | diff --git a/doc/build/intro.rst b/doc/build/intro.rst index b5020b6ee3..2a539e8d84 100644 --- a/doc/build/intro.rst +++ b/doc/build/intro.rst @@ -88,18 +88,18 @@ SQLAlchemy supports installation using standard Python "distutils" or * **Plain Python Distutils** - SQLAlchemy can be installed with a clean Python install using the services provided via `Python Distutils `_, using the ``setup.py`` script. The C extensions as well as Python 3 builds are supported. -* **Standard Setuptools** - When using `setuptools `_, +* **Standard Setuptools** - When using `setuptools `_, SQLAlchemy can be installed via ``setup.py`` or ``easy_install``, and the C extensions are supported. setuptools is not supported on Python 3 at the time of this writing. -* **Distribute** - With `distribute `_, +* **Distribute** - With `distribute `_, SQLAlchemy can be installed via ``setup.py`` or ``easy_install``, and the C extensions as well as Python 3 builds are supported. * **pip** - `pip `_ is an installer that rides on top of ``setuptools`` or ``distribute``, replacing the usage of ``easy_install``. It is often preferred for its simpler mode of usage. -.. note:: +.. note:: It is strongly recommended that either ``setuptools`` or ``distribute`` be installed. Python's built-in ``distutils`` lacks many widely used installation features.
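Whichever installer is used, a quick way to confirm the result afterwards is to import the library and check its version string (the value shown here is just an example)::

    >>> import sqlalchemy
    >>> sqlalchemy.__version__
    '0.7.9'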
@@ -107,7 +107,7 @@ SQLAlchemy supports installation using standard Python "distutils" or Install via easy_install or pip ------------------------------- -When ``easy_install`` or ``pip`` is available, the distribution can be +When ``easy_install`` or ``pip`` is available, the distribution can be downloaded from Pypi and installed in one step:: easy_install SQLAlchemy @@ -143,7 +143,7 @@ pass the flag ``--without-cextensions`` to the ``setup.py`` script:: python setup.py --without-cextensions install -.. note:: +.. note:: The ``--without-cextensions`` flag is available **only** if ``setuptools`` or ``distribute`` is installed. It is not available on a plain Python ``distutils`` diff --git a/doc/build/orm/collections.rst b/doc/build/orm/collections.rst index 0456cd91ad..822fcd3be6 100644 --- a/doc/build/orm/collections.rst +++ b/doc/build/orm/collections.rst @@ -60,11 +60,11 @@ The dynamic relationship supports limited write operations, via the jack.posts.append(Post('new post')) -Since the read side of the dynamic relationship always queries the -database, changes to the underlying collection will not be visible -until the data has been flushed. However, as long as "autoflush" is -enabled on the :class:`.Session` in use, this will occur -automatically each time the collection is about to emit a +Since the read side of the dynamic relationship always queries the +database, changes to the underlying collection will not be visible +until the data has been flushed. However, as long as "autoflush" is +enabled on the :class:`.Session` in use, this will occur +automatically each time the collection is about to emit a query. To place a dynamic relationship on a backref, use the :func:`~.orm.backref` @@ -73,13 +73,13 @@ function in conjunction with ``lazy='dynamic'``:: class Post(Base): __table__ = posts_table - user = relationship(User, + user = relationship(User, backref=backref('posts', lazy='dynamic') ) Note that eager/lazy loading options cannot be used in conjunction with dynamic relationships at this time. -.. note:: +.. note:: The :func:`~.orm.dynamic_loader` function is essentially the same as :func:`~.orm.relationship` with the ``lazy='dynamic'`` argument specified. @@ -94,7 +94,7 @@ Note that eager/lazy loading options cannot be used in conjunction dynamic relat Setting Noload --------------- -A "noload" relationship never loads from the database, even when +A "noload" relationship never loads from the database, even when accessed. It is configured using ``lazy='noload'``:: class MyClass(Base): @@ -120,14 +120,14 @@ not supported on SQLite, and requires ``InnoDB`` tables when using MySQL:: class MyClass(Base): __tablename__ = 'mytable' id = Column(Integer, primary_key=True) - children = relationship("MyOtherClass", - cascade="all, delete-orphan", + children = relationship("MyOtherClass", + cascade="all, delete-orphan", passive_deletes=True) class MyOtherClass(Base): __tablename__ = 'myothertable' id = Column(Integer, primary_key=True) - parent_id = Column(Integer, + parent_id = Column(Integer, ForeignKey('mytable.id', ondelete='CASCADE') ) @@ -179,7 +179,7 @@ default list, by specifying the ``collection_class`` option on Dictionary Collections ----------------------- -A little extra detail is needed when using a dictionary as a collection. +A little extra detail is needed when using a dictionary as a collection. This is because objects are always loaded from the database as lists, and a key-generation strategy must be available to populate the dictionary correctly.
The :func:`.attribute_mapped_collection` function is by far the most common way @@ -197,8 +197,8 @@ a dictionary of ``Note`` items keyed to the ``Note.keyword`` attribute:: class Item(Base): __tablename__ = 'item' id = Column(Integer, primary_key=True) - notes = relationship("Note", - collection_class=attribute_mapped_collection('keyword'), + notes = relationship("Note", + collection_class=attribute_mapped_collection('keyword'), cascade="all, delete-orphan") class Note(Base): @@ -219,28 +219,28 @@ a dictionary of ``Note`` items keyed to the ``Note.keyword`` attribute:: >>> item.notes.items() {'a': <__main__.Note object at 0x2eaaf0>} -:func:`.attribute_mapped_collection` will ensure that +:func:`.attribute_mapped_collection` will ensure that the ``.keyword`` attribute of each ``Note`` complies with the key in the dictionary. Such as, when assigning to ``Item.notes``, the dictionary key we supply must match that of the actual ``Note`` object:: item = Item() item.notes = { - 'a': Note('a', 'atext'), + 'a': Note('a', 'atext'), 'b': Note('b', 'btext') } The attribute which :func:`.attribute_mapped_collection` uses as a key does not need to be mapped at all! Using a regular Python ``@property`` allows virtually -any detail or combination of details about the object to be used as the key, as +any detail or combination of details about the object to be used as the key, as below when we establish it as a tuple of ``Note.keyword`` and the first ten letters of the ``Note.text`` field:: class Item(Base): __tablename__ = 'item' id = Column(Integer, primary_key=True) - notes = relationship("Note", - collection_class=attribute_mapped_collection('note_key'), + notes = relationship("Note", + collection_class=attribute_mapped_collection('note_key'), backref="item", cascade="all, delete-orphan") @@ -277,8 +277,8 @@ object directly:: class Item(Base): __tablename__ = 'item' id = Column(Integer, primary_key=True) - notes = relationship("Note", - collection_class=column_mapped_collection(Note.__table__.c.keyword), + notes = relationship("Note", + collection_class=column_mapped_collection(Note.__table__.c.keyword), cascade="all, delete-orphan") as well as :func:`.mapped_collection` which is passed any callable function. @@ -290,12 +290,12 @@ with a ``@property`` as mentioned earlier:: class Item(Base): __tablename__ = 'item' id = Column(Integer, primary_key=True) - notes = relationship("Note", - collection_class=mapped_collection(lambda note: note.text[0:10]), + notes = relationship("Note", + collection_class=mapped_collection(lambda note: note.text[0:10]), cascade="all, delete-orphan") Dictionary mappings are often combined with the "Association Proxy" extension to produce -streamlined dictionary views. See :ref:`proxying_dictionaries` and :ref:`composite_association_proxy` +streamlined dictionary views. See :ref:`proxying_dictionaries` and :ref:`composite_association_proxy` for examples. .. autofunction:: attribute_mapped_collection @@ -322,7 +322,7 @@ about how the collection operates. For the first use case, the :func:`.orm.validates` decorator is by far the simplest way to intercept incoming values in all cases for the purposes - of validation and simple marshaling. See :ref:`simple_validators` + of validation and simple marshaling. See :ref:`simple_validators` for an example of this. For the second use case, the :ref:`associationproxy_toplevel` extension is a @@ -334,8 +334,8 @@ about how the collection operates. 
unaffected and avoids the need to carefully tailor collection behavior on a method-by-method basis. - Customized collections are useful when the collection needs to - have special behaviors upon access or mutation operations that can't + Customized collections are useful when the collection needs to + have special behaviors upon access or mutation operations that can't otherwise be modeled externally to the collection. They can of course be combined with the above two approaches. @@ -478,7 +478,7 @@ collection support to other classes. It uses a keying function to delegate to MappedCollection.__init__(self, keyfunc=lambda node: node.name) OrderedDict.__init__(self, *args, **kw) -When subclassing :class:`.MappedCollection`, user-defined versions +When subclassing :class:`.MappedCollection`, user-defined versions of ``__setitem__()`` or ``__delitem__()`` should be decorated with :meth:`.collection.internally_instrumented`, **if** they call down to those same methods on :class:`.MappedCollection`. This is because the methods @@ -491,7 +491,7 @@ rare cases:: collection class MyMappedCollection(MappedCollection): - """Use @internally_instrumented when your methods + """Use @internally_instrumented when your methods call down to already-instrumented methods. """ @@ -515,7 +515,7 @@ Iteration will go through ``itervalues()`` unless otherwise decorated. .. note:: - Due to a bug in MappedCollection prior to version 0.7.6, this + Due to a bug in MappedCollection prior to version 0.7.6, this workaround usually needs to be called before a custom subclass of :class:`.MappedCollection` which uses :meth:`.collection.internally_instrumented` can be used:: diff --git a/doc/build/orm/events.rst b/doc/build/orm/events.rst index 38cecf689d..8cef104beb 100644 --- a/doc/build/orm/events.rst +++ b/doc/build/orm/events.rst @@ -9,7 +9,7 @@ The ORM includes a wide variety of hooks available for subscription. The event API supersedes the previous system of "extension" classes. For an introduction to the event API, see :ref:`event_toplevel`. Non-ORM events -such as those regarding connections and low-level statement execution are described in +such as those regarding connections and low-level statement execution are described in :ref:`core_event_toplevel`. Attribute Events diff --git a/doc/build/orm/extensions/associationproxy.rst b/doc/build/orm/extensions/associationproxy.rst index fac1aa429e..177587c59a 100644 --- a/doc/build/orm/extensions/associationproxy.rst +++ b/doc/build/orm/extensions/associationproxy.rst @@ -7,11 +7,11 @@ Association Proxy ``associationproxy`` is used to create a read/write view of a target attribute across a relationship. It essentially conceals -the usage of a "middle" attribute between two endpoints, and +the usage of a "middle" attribute between two endpoints, and can be used to cherry-pick fields from a collection of related objects or to reduce the verbosity of using the association object pattern. Applied creatively, the association proxy allows -the construction of sophisticated collections and dictionary +the construction of sophisticated collections and dictionary views of virtually any geometry, persisted to the database using standard, transparently configured relational patterns. @@ -97,10 +97,10 @@ for us transparently:: The :class:`.AssociationProxy` object produced by the :func:`.association_proxy` function is an instance of a `Python descriptor `_.
-It is always declared with the user-defined class being mapped, regardless of +It is always declared with the user-defined class being mapped, regardless of whether Declarative or classical mappings via the :func:`.mapper` function are used. -The proxy functions by operating upon the underlying mapped attribute +The proxy functions by operating upon the underlying mapped attribute or collection in response to operations, and changes made via the proxy are immediately apparent in the mapped attribute, as well as vice versa. The underlying attribute remains fully accessible. @@ -129,7 +129,7 @@ Is translated by the association proxy into the operation:: The example works here because we have designed the constructor for ``Keyword`` to accept a single positional argument, ``keyword``. For those cases where a single-argument constructor isn't feasible, the association proxy's creational -behavior can be customized using the ``creator`` argument, which references a +behavior can be customized using the ``creator`` argument, which references a callable (i.e. Python function) that will produce a new object instance given the singular argument. Below we illustrate this using a lambda as is typical:: @@ -137,7 +137,7 @@ singular argument. Below we illustrate this using a lambda as is typical:: # ... # use Keyword(keyword=kw) on append() events - keywords = association_proxy('kw', 'keyword', + keywords = association_proxy('kw', 'keyword', creator=lambda kw: Keyword(keyword=kw)) The ``creator`` function accepts a single argument in the case of a list- @@ -154,15 +154,15 @@ proxies are useful for keeping "association objects" out of the way during regular use. Suppose our ``userkeywords`` table above had additional columns -which we'd like to map explicitly, but in most cases we don't +which we'd like to map explicitly, but in most cases we don't require direct access to these attributes. Below, we illustrate -a new mapping which introduces the ``UserKeyword`` class, which +a new mapping which introduces the ``UserKeyword`` class, which is mapped to the ``userkeywords`` table illustrated earlier. This class adds an additional column ``special_key``, a value which we occasionally want to access, but not in the usual case. We create an association proxy on the ``User`` class called ``keywords``, which will bridge the gap from the ``user_keywords`` -collection of ``User`` to the ``.keyword`` attribute present on each +collection of ``User`` to the ``.keyword`` attribute present on each ``UserKeyword``:: from sqlalchemy import Column, Integer, String, ForeignKey @@ -192,8 +192,8 @@ collection of ``User`` to the ``.keyword`` attribute present on each special_key = Column(String(50)) # bidirectional attribute/collection of "user"/"user_keywords" - user = relationship(User, - backref=backref("user_keywords", + user = relationship(User, + backref=backref("user_keywords", cascade="all, delete-orphan") ) @@ -216,14 +216,14 @@ collection of ``User`` to the ``.keyword`` attribute present on each def __repr__(self): return 'Keyword(%s)' % repr(self.keyword) -With the above configuration, we can operate upon the ``.keywords`` +With the above configuration, we can operate upon the ``.keywords`` collection of each ``User`` object, and the usage of ``UserKeyword`` is concealed:: >>> user = User('log') >>> for kw in (Keyword('new_from_blammo'), Keyword('its_big')): ... user.keywords.append(kw) ... 
>>> print(user.keywords) [Keyword('new_from_blammo'), Keyword('its_big')] @@ -234,12 +234,12 @@ Where above, each ``.keywords.append()`` operation is equivalent to:: The ``UserKeyword`` association object has two attributes here which are populated; the ``.keyword`` attribute is populated directly as a result of passing the ``Keyword`` object as the first argument. The ``.user`` argument is then -assigned as the ``UserKeyword`` object is appended to the ``User.user_keywords`` +assigned as the ``UserKeyword`` object is appended to the ``User.user_keywords`` collection, where the bidirectional relationship configured between ``User.user_keywords`` and ``UserKeyword.user`` results in a population of the ``UserKeyword.user`` attribute. The ``special_key`` argument above is left at its default value of ``None``. -For those cases where we do want ``special_key`` to have a value, we +For those cases where we do want ``special_key`` to have a value, we create the ``UserKeyword`` object explicitly. Below we assign all three attributes, where the assignment of ``.user`` has the effect of the ``UserKeyword`` being appended to the ``User.user_keywords`` collection:: @@ -259,7 +259,7 @@ Proxying to Dictionary Based Collections The association proxy can proxy to dictionary based collections as well. SQLAlchemy mappings usually use the :func:`.attribute_mapped_collection` collection type to -create dictionary collections, as well as the extended techniques described in +create dictionary collections, as well as the extended techniques described in :ref:`dictionary_collections`. The association proxy adjusts its behavior when it detects the usage of a @@ -269,7 +269,7 @@ arguments to the creation function instead of one, the key and the value. As always, this creation function defaults to the constructor of the intermediary class, and can be customized using the ``creator`` argument. -Below, we modify our ``UserKeyword`` example such that the ``User.user_keywords`` +Below, we modify our ``UserKeyword`` example such that the ``User.user_keywords`` collection will now be mapped using a dictionary, where the ``UserKeyword.special_key`` argument will be used as the key for the dictionary. We then apply a ``creator`` argument to the ``User.keywords`` proxy so that these values are assigned appropriately @@ -291,7 +291,7 @@ when new elements are added to the dictionary:: # proxy to 'user_keywords', instantiating UserKeyword # assigning the new key to 'special_key', values to # 'keyword'. - keywords = association_proxy('user_keywords', 'keyword', + keywords = association_proxy('user_keywords', 'keyword', creator=lambda k, v: UserKeyword(special_key=k, keyword=v) ) @@ -308,7 +308,7 @@ when new elements are added to the dictionary:: # bidirectional user/user_keywords relationships, mapping # user_keywords with a dictionary against "special_key" as key. 
user = relationship(User, backref=backref( - "user_keywords", + "user_keywords", collection_class=attribute_mapped_collection("special_key"), cascade="all, delete-orphan" ) @@ -344,8 +344,8 @@ Composite Association Proxies Given our previous examples of proxying from relationship to scalar attribute, proxying across an association object, and proxying dictionaries, -we can combine all three techniques together to give ``User`` -a ``keywords`` dictionary that deals strictly with the string value +we can combine all three techniques together to give ``User`` +a ``keywords`` dictionary that deals strictly with the string value of ``special_key`` mapped to the string ``keyword``. Both the ``UserKeyword`` and ``Keyword`` classes are entirely concealed. This is achieved by building an association proxy on ``User`` that refers to an association proxy @@ -365,11 +365,11 @@ present on ``UserKeyword``:: id = Column(Integer, primary_key=True) name = Column(String(64)) - # the same 'user_keywords'->'keyword' proxy as in + # the same 'user_keywords'->'keyword' proxy as in # the basic dictionary example keywords = association_proxy( - 'user_keywords', - 'keyword', + 'user_keywords', + 'keyword', creator=lambda k, v: UserKeyword(special_key=k, keyword=v) ) @@ -380,11 +380,11 @@ present on ``UserKeyword``:: class UserKeyword(Base): __tablename__ = 'user_keyword' user_id = Column(Integer, ForeignKey('user.id'), primary_key=True) - keyword_id = Column(Integer, ForeignKey('keyword.id'), + keyword_id = Column(Integer, ForeignKey('keyword.id'), primary_key=True) special_key = Column(String) user = relationship(User, backref=backref( - "user_keywords", + "user_keywords", collection_class=attribute_mapped_collection("special_key"), cascade="all, delete-orphan" ) @@ -394,7 +394,7 @@ present on ``UserKeyword``:: # 'kw' kw = relationship("Keyword") - # 'keyword' is changed to be a proxy to the + # 'keyword' is changed to be a proxy to the # 'keyword' attribute of 'Keyword' keyword = association_proxy('kw', 'keyword') @@ -432,8 +432,8 @@ association proxy, to apply a dictionary value to the collection at once:: One caveat with our example above is that because ``Keyword`` objects are created for each dictionary set operation, the example fails to maintain uniqueness for -the ``Keyword`` objects on their string name, which is a typical requirement for -a tagging scenario such as this one. For this use case the recipe +the ``Keyword`` objects on their string name, which is a typical requirement for +a tagging scenario such as this one. 
For this use case the recipe `UniqueObject `_, or a comparable creational strategy, is recommended, which will apply a "lookup first, then create" strategy to the constructor @@ -450,32 +450,32 @@ and :meth:`.RelationshipProperty.Comparator.has` operations are available, and w a "nested" EXISTS clause, such as in our basic association object example:: >>> print(session.query(User).filter(User.keywords.any(keyword='jek'))) - SELECT user.id AS user_id, user.name AS user_name - FROM user - WHERE EXISTS (SELECT 1 - FROM user_keyword - WHERE user.id = user_keyword.user_id AND (EXISTS (SELECT 1 - FROM keyword + SELECT user.id AS user_id, user.name AS user_name + FROM user + WHERE EXISTS (SELECT 1 + FROM user_keyword + WHERE user.id = user_keyword.user_id AND (EXISTS (SELECT 1 + FROM keyword WHERE keyword.id = user_keyword.keyword_id AND keyword.keyword = :keyword_1))) For a proxy to a scalar attribute, ``__eq__()`` is supported:: >>> print(session.query(UserKeyword).filter(UserKeyword.keyword == 'jek')) SELECT user_keyword.* - FROM user_keyword - WHERE EXISTS (SELECT 1 - FROM keyword + FROM user_keyword + WHERE EXISTS (SELECT 1 + FROM keyword WHERE keyword.id = user_keyword.keyword_id AND keyword.keyword = :keyword_1) and ``.contains()`` is available for a proxy to a scalar collection:: >>> print(session.query(User).filter(User.keywords.contains('jek'))) SELECT user.* - FROM user - WHERE EXISTS (SELECT 1 - FROM userkeywords, keyword - WHERE user.id = userkeywords.user_id - AND keyword.id = userkeywords.keyword_id + FROM user + WHERE EXISTS (SELECT 1 + FROM userkeywords, keyword + WHERE user.id = userkeywords.user_id + AND keyword.id = userkeywords.keyword_id AND keyword.keyword = :keyword_1) :class:`.AssociationProxy` can be used with :meth:`.Query.join` somewhat manually diff --git a/doc/build/orm/extensions/declarative.rst b/doc/build/orm/extensions/declarative.rst index aaa0261b6d..b9d06f8cea 100644 --- a/doc/build/orm/extensions/declarative.rst +++ b/doc/build/orm/extensions/declarative.rst @@ -4,7 +4,7 @@ Declarative =========== .. automodule:: sqlalchemy.ext.declarative - + API Reference ------------- diff --git a/doc/build/orm/inheritance.rst b/doc/build/orm/inheritance.rst index 97e99502d0..ce1d2db6d4 100644 --- a/doc/build/orm/inheritance.rst +++ b/doc/build/orm/inheritance.rst @@ -15,7 +15,7 @@ When mappers are configured in an inheritance relationship, SQLAlchemy has the ability to load elements "polymorphically", meaning that a single query can return objects of multiple types. -.. note:: +.. note:: This section currently uses classical mappings to illustrate inheritance configurations, and will soon be updated to standardize on Declarative. 
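In the meantime, readers who prefer Declarative can translate the classical mappings shown below along these lines; this is a condensed, hedged sketch of the same ``Employee`` hierarchy using joined table inheritance, with the column definitions abbreviated::

    from sqlalchemy import Column, ForeignKey, Integer, String
    from sqlalchemy.ext.declarative import declarative_base

    Base = declarative_base()

    class Employee(Base):
        __tablename__ = 'employees'
        employee_id = Column(Integer, primary_key=True)
        name = Column(String(50))
        type = Column(String(30))
        __mapper_args__ = {
            'polymorphic_on': type,
            'polymorphic_identity': 'employee'
        }

    class Engineer(Employee):
        __tablename__ = 'engineers'
        # joins to the parent table by foreign key
        employee_id = Column(Integer,
                            ForeignKey('employees.employee_id'),
                            primary_key=True)
        engineer_info = Column(String(50))
        __mapper_args__ = {'polymorphic_identity': 'engineer'}

    class Manager(Employee):
        __tablename__ = 'managers'
        employee_id = Column(Integer,
                            ForeignKey('employees.employee_id'),
                            primary_key=True)
        manager_data = Column(String(50))
        __mapper_args__ = {'polymorphic_identity': 'manager'}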
@@ -38,7 +38,7 @@ For the following sections, assume this class relationship: self.manager_data = manager_data def __repr__(self): return ( - self.__class__.__name__ + " " + + self.__class__.__name__ + " " + self.name + " " + self.manager_data ) @@ -48,7 +48,7 @@ For the following sections, assume this class relationship: self.engineer_info = engineer_info def __repr__(self): return ( - self.__class__.__name__ + " " + + self.__class__.__name__ + " " + self.name + " " + self.engineer_info ) @@ -89,15 +89,15 @@ parent, and even a custom join condition can be specified between parent and child tables instead of using a foreign key:: engineers = Table('engineers', metadata, - Column('employee_id', Integer, - ForeignKey('employees.employee_id'), + Column('employee_id', Integer, + ForeignKey('employees.employee_id'), primary_key=True), Column('engineer_info', String(50)), ) managers = Table('managers', metadata, - Column('employee_id', Integer, - ForeignKey('employees.employee_id'), + Column('employee_id', Integer, + ForeignKey('employees.employee_id'), primary_key=True), Column('manager_data', String(50)), ) @@ -123,11 +123,11 @@ stored in the polymorphic discriminator column. .. sourcecode:: python+sql - mapper(Employee, employees, polymorphic_on=employees.c.type, + mapper(Employee, employees, polymorphic_on=employees.c.type, polymorphic_identity='employee') - mapper(Engineer, engineers, inherits=Employee, + mapper(Engineer, engineers, inherits=Employee, polymorphic_identity='engineer') - mapper(Manager, managers, inherits=Employee, + mapper(Manager, managers, inherits=Employee, polymorphic_identity='manager') And that's it. Querying against ``Employee`` will return a combination of @@ -156,7 +156,7 @@ SQL such as this: .. sourcecode:: python+sql {opensql} - SELECT employees.employee_id AS employees_employee_id, + SELECT employees.employee_id AS employees_employee_id, employees.name AS employees_name, employees.type AS employees_type FROM employees [] @@ -170,12 +170,12 @@ issued along the lines of: .. sourcecode:: python+sql {opensql} - SELECT managers.employee_id AS managers_employee_id, + SELECT managers.employee_id AS managers_employee_id, managers.manager_data AS managers_manager_data FROM managers WHERE ? = managers.employee_id [5] - SELECT engineers.employee_id AS engineers_employee_id, + SELECT engineers.employee_id AS engineers_employee_id, engineers.engineer_info AS engineers_engineer_info FROM engineers WHERE ? 
= engineers.employee_id @@ -201,17 +201,17 @@ produces a query which joins the ``employees`` table to both the ``engineers`` a query.all() {opensql} - SELECT employees.employee_id AS employees_employee_id, - engineers.employee_id AS engineers_employee_id, - managers.employee_id AS managers_employee_id, - employees.name AS employees_name, - employees.type AS employees_type, - engineers.engineer_info AS engineers_engineer_info, + SELECT employees.employee_id AS employees_employee_id, + engineers.employee_id AS engineers_employee_id, + managers.employee_id AS managers_employee_id, + employees.name AS employees_name, + employees.type AS employees_type, + engineers.engineer_info AS engineers_engineer_info, managers.manager_data AS managers_manager_data - FROM employees - LEFT OUTER JOIN engineers - ON employees.employee_id = engineers.employee_id - LEFT OUTER JOIN managers + FROM employees + LEFT OUTER JOIN engineers + ON employees.employee_id = engineers.employee_id + LEFT OUTER JOIN managers ON employees.employee_id = managers.employee_id [] @@ -240,7 +240,7 @@ should be used to load polymorphically: # custom selectable query.with_polymorphic( - [Engineer, Manager], + [Engineer, Manager], employees.outerjoin(managers).outerjoin(engineers) ) @@ -264,12 +264,12 @@ followed by a selectable. .. sourcecode:: python+sql - mapper(Employee, employees, polymorphic_on=employees.c.type, - polymorphic_identity='employee', + mapper(Employee, employees, polymorphic_on=employees.c.type, + polymorphic_identity='employee', with_polymorphic='*') - mapper(Engineer, engineers, inherits=Employee, + mapper(Engineer, engineers, inherits=Employee, polymorphic_identity='engineer') - mapper(Manager, managers, inherits=Employee, + mapper(Manager, managers, inherits=Employee, polymorphic_identity='manager') The above mapping will produce a query similar to that of @@ -288,7 +288,7 @@ simplistic scenarios. However, it currently does not work with any classes - it also has to be called at the outset of a query. For total control of how :class:`.Query` joins along inheritance relationships, -use the :class:`.Table` objects directly and construct joins manually. For example, to +use the :class:`.Table` objects directly and construct joins manually. For example, to query the name of employees with particular criterion:: session.query(Employee.name).\ @@ -368,11 +368,11 @@ flexibility: session.query(Company).\ join( - (employees.outerjoin(engineers).outerjoin(managers), + (employees.outerjoin(engineers).outerjoin(managers), Company.employees) ).\ filter( - or_(Engineer.engineer_info=='someinfo', + or_(Engineer.engineer_info=='someinfo', Manager.manager_data=='somedata') ) @@ -395,7 +395,7 @@ EXISTS query. 
To build one by hand looks like: session.query(Company).filter( exists([1], - and_(Engineer.engineer_info=='someinfo', + and_(Engineer.engineer_info=='someinfo', employees.c.company_id==companies.c.company_id), from_obj=employees.join(engineers) ) @@ -429,9 +429,9 @@ for the inheriting classes, leave their ``table`` parameter blank: employee_mapper = mapper(Employee, employees_table, \ polymorphic_on=employees_table.c.type, polymorphic_identity='employee') - manager_mapper = mapper(Manager, inherits=employee_mapper, + manager_mapper = mapper(Manager, inherits=employee_mapper, polymorphic_identity='manager') - engineer_mapper = mapper(Engineer, inherits=employee_mapper, + engineer_mapper = mapper(Engineer, inherits=employee_mapper, polymorphic_identity='engineer') Note that the mappers for the derived classes Manager and Engineer omit the @@ -491,17 +491,17 @@ each subselect: 'engineer': engineers_table }, 'type', 'pjoin') - employee_mapper = mapper(Employee, employees_table, - with_polymorphic=('*', pjoin), - polymorphic_on=pjoin.c.type, + employee_mapper = mapper(Employee, employees_table, + with_polymorphic=('*', pjoin), + polymorphic_on=pjoin.c.type, polymorphic_identity='employee') - manager_mapper = mapper(Manager, managers_table, - inherits=employee_mapper, - concrete=True, + manager_mapper = mapper(Manager, managers_table, + inherits=employee_mapper, + concrete=True, polymorphic_identity='manager') - engineer_mapper = mapper(Engineer, engineers_table, - inherits=employee_mapper, - concrete=True, + engineer_mapper = mapper(Engineer, engineers_table, + inherits=employee_mapper, + concrete=True, polymorphic_identity='engineer') Upon select, the polymorphic union produces a query like this: @@ -510,22 +510,22 @@ Upon select, the polymorphic union produces a query like this: session.query(Employee).all() {opensql} - SELECT pjoin.type AS pjoin_type, - pjoin.manager_data AS pjoin_manager_data, + SELECT pjoin.type AS pjoin_type, + pjoin.manager_data AS pjoin_manager_data, pjoin.employee_id AS pjoin_employee_id, pjoin.name AS pjoin_name, pjoin.engineer_info AS pjoin_engineer_info FROM ( - SELECT employees.employee_id AS employee_id, + SELECT employees.employee_id AS employee_id, CAST(NULL AS VARCHAR(50)) AS manager_data, employees.name AS name, CAST(NULL AS VARCHAR(50)) AS engineer_info, 'employee' AS type FROM employees UNION ALL - SELECT managers.employee_id AS employee_id, + SELECT managers.employee_id AS employee_id, managers.manager_data AS manager_data, managers.name AS name, CAST(NULL AS VARCHAR(50)) AS engineer_info, 'manager' AS type FROM managers UNION ALL - SELECT engineers.employee_id AS employee_id, + SELECT engineers.employee_id AS employee_id, CAST(NULL AS VARCHAR(50)) AS manager_data, engineers.name AS name, engineers.engineer_info AS engineer_info, 'engineer' AS type FROM engineers @@ -606,19 +606,19 @@ to the parent: Column('company_id', Integer, ForeignKey('companies.id')) ) - mapper(Employee, employees_table, - with_polymorphic=('*', pjoin), - polymorphic_on=pjoin.c.type, + mapper(Employee, employees_table, + with_polymorphic=('*', pjoin), + polymorphic_on=pjoin.c.type, polymorphic_identity='employee') - mapper(Manager, managers_table, - inherits=employee_mapper, - concrete=True, + mapper(Manager, managers_table, + inherits=employee_mapper, + concrete=True, polymorphic_identity='manager') - mapper(Engineer, engineers_table, - inherits=employee_mapper, - concrete=True, + mapper(Engineer, engineers_table, + inherits=employee_mapper, + concrete=True, 
polymorphic_identity='engineer') mapper(Company, companies, properties={ @@ -650,7 +650,7 @@ bidirectionally reference ``C``:: 'some_c':relationship(C, back_populates='many_a') }) mapper(C, c_table, properties={ - 'many_a':relationship(A, collection_class=set, + 'many_a':relationship(A, collection_class=set, back_populates='some_c'), }) diff --git a/doc/build/orm/loading.rst b/doc/build/orm/loading.rst index 7c9001afdd..c8fe72bf4e 100644 --- a/doc/build/orm/loading.rst +++ b/doc/build/orm/loading.rst @@ -21,14 +21,14 @@ in order to load the related object or objects: .. sourcecode:: python+sql {sql}>>> jack.addresses - SELECT addresses.id AS addresses_id, addresses.email_address AS addresses_email_address, + SELECT addresses.id AS addresses_id, addresses.email_address AS addresses_email_address, addresses.user_id AS addresses_user_id FROM addresses WHERE ? = addresses.user_id [5] {stop}[, ] -The one case where SQL is not emitted is for a simple many-to-one relationship, when +The one case where SQL is not emitted is for a simple many-to-one relationship, when the related object can be identified by its primary key alone and that object is already present in the current :class:`.Session`. @@ -66,17 +66,17 @@ parent objects: {sql}>>> jack = session.query(User).\ ... options(subqueryload('addresses')).\ - ... filter_by(name='jack').all() - SELECT users.id AS users_id, users.name AS users_name, users.fullname AS users_fullname, - users.password AS users_password - FROM users + ... filter_by(name='jack').all() + SELECT users.id AS users_id, users.name AS users_name, users.fullname AS users_fullname, + users.password AS users_password + FROM users WHERE users.name = ? ('jack',) - SELECT addresses.id AS addresses_id, addresses.email_address AS addresses_email_address, - addresses.user_id AS addresses_user_id, anon_1.users_id AS anon_1_users_id - FROM (SELECT users.id AS users_id - FROM users - WHERE users.name = ?) AS anon_1 JOIN addresses ON anon_1.users_id = addresses.user_id + SELECT addresses.id AS addresses_id, addresses.email_address AS addresses_email_address, + addresses.user_id AS addresses_user_id, anon_1.users_id AS anon_1_users_id + FROM (SELECT users.id AS users_id + FROM users + WHERE users.name = ?) AS anon_1 JOIN addresses ON anon_1.users_id = addresses.user_id ORDER BY anon_1.users_id, addresses.id ('jack',) @@ -162,9 +162,9 @@ Default Loading Strategies .. versionadded:: 0.7.5 Default loader strategies as a new feature. -Each of :func:`.joinedload`, :func:`.subqueryload`, :func:`.lazyload`, +Each of :func:`.joinedload`, :func:`.subqueryload`, :func:`.lazyload`, and :func:`.noload` can be used to set the default style of -:func:`.relationship` loading +:func:`.relationship` loading for a particular query, affecting all :func:`.relationship` -mapped attributes not otherwise specified in the :class:`.Query`. This feature is available by passing @@ -174,19 +174,19 @@ the string ``'*'`` as the argument to any of these options:: Above, the ``lazyload('*')`` option will supersede the ``lazy`` setting of all :func:`.relationship` constructs in use for that query, -except for those which use the ``'dynamic'`` style of loading. +except for those which use the ``'dynamic'`` style of loading. If some relationships specify ``lazy='joined'`` or ``lazy='subquery'``, for example, using ``default_strategy(lazy='select')`` will unilaterally cause all those relationships to use ``'select'`` loading.
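As a hedged sketch of this behavior, assume a hypothetical mapping where ``Parent.children`` is configured with ``lazy='joined'``; the wildcard option restores plain lazy loading for one query only::

    from sqlalchemy.orm import lazyload

    # Parent.children is mapped with lazy='joined'; the wildcard
    # option makes this particular query load every relationship lazily
    parents = session.query(Parent).options(lazyload('*')).all()

    # elsewhere, the mapping-level default (joined loading) still applies
    parents = session.query(Parent).all()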
The option does not supersede loader options stated in the -query, such as :func:`.eagerload`, +query, such as :func:`.eagerload`, :func:`.subqueryload`, etc. The query below will still use joined loading for the ``widget`` relationship:: session.query(MyClass).options( - lazyload('*'), + lazyload('*'), joinedload(MyClass.widget) ) @@ -199,7 +199,7 @@ The Zen of Eager Loading ------------------------- The philosophy behind loader strategies is that any set of loading schemes can be -applied to a particular query, and *the results don't change* - only the number +applied to a particular query, and *the results don't change* - only the number of SQL statements required to fully load related objects and collections changes. A particular query might start out using all lazy loads. After using it in context, it might be revealed that particular attributes or collections are always accessed, and that it would be more @@ -220,7 +220,7 @@ is not valid - the ``Address`` entity is not named in the query: >>> jack = session.query(User).\ ... options(joinedload(User.addresses)).\ ... filter(User.name=='jack').\ - ... order_by(Address.email_address).all() + ... order_by(Address.email_address).all() {opensql}SELECT addresses_1.id AS addresses_1_id, addresses_1.email_address AS addresses_1_email_address, addresses_1.user_id AS addresses_1_user_id, users.id AS users_id, users.name AS users_name, users.fullname AS users_fullname, users.password AS users_password @@ -228,7 +228,7 @@ is not valid - the ``Address`` entity is not named in the query: WHERE users.name = ? ORDER BY addresses.email_address <-- this part is wrong ! ['jack'] -Above, ``ORDER BY addresses.email_address`` is not valid since ``addresses`` is not in the +Above, ``ORDER BY addresses.email_address`` is not valid since ``addresses`` is not in the FROM list. The correct way to load the ``User`` records and order by email address is to use :meth:`.Query.join`: @@ -237,7 +237,7 @@ address is to use :meth:`.Query.join`: >>> jack = session.query(User).\ ... join(User.addresses).\ ... filter(User.name=='jack').\ - ... order_by(Address.email_address).all() + ... order_by(Address.email_address).all() {opensql} SELECT users.id AS users_id, users.name AS users_name, users.fullname AS users_fullname, users.password AS users_password @@ -247,7 +247,7 @@ address is to use :meth:`.Query.join`: The statement above is of course not the same as the previous one, in that the columns from ``addresses`` are not included in the result at all. We can add :func:`.joinedload` back in, so that -there are two joins - one is that which we are ordering on, the other is used anonymously to +there are two joins - one is that which we are ordering on, the other is used anonymously to load the contents of the ``User.addresses`` collection: .. sourcecode:: python+sql >>> jack = session.query(User).\ ... join(User.addresses).\ ... options(joinedload(User.addresses)).\ ... filter(User.name=='jack').\ - ... order_by(Address.email_address).all() + ... 
order_by(Address.email_address).all() {opensql}SELECT addresses_1.id AS addresses_1_id, addresses_1.email_address AS addresses_1_email_address, addresses_1.user_id AS addresses_1_user_id, users.id AS users_id, users.name AS users_name, users.fullname AS users_fullname, users.password AS users_password @@ -269,8 +269,8 @@ What we see above is that our usage of :meth:`.Query.join` is to supply JOIN cla to use in subsequent query criterion, whereas our usage of :func:`.joinedload` only concerns itself with the loading of the ``User.addresses`` collection, for each ``User`` in the result. In this case, the two joins most probably appear redundant - which they are. If we -wanted to use just one JOIN for collection loading as well as ordering, we use the -:func:`.contains_eager` option, described in :ref:`contains_eager` below. But +wanted to use just one JOIN for collection loading as well as ordering, we use the +:func:`.contains_eager` option, described in :ref:`contains_eager` below. But to see why :func:`joinedload` does what it does, consider if we were **filtering** on a particular ``Address``: @@ -281,7 +281,7 @@ particular ``Address``: ... options(joinedload(User.addresses)).\ ... filter(User.name=='jack').\ ... filter(Address.email_address=='someaddress@foo.com').\ - ... all() + ... all() {opensql}SELECT addresses_1.id AS addresses_1_id, addresses_1.email_address AS addresses_1_email_address, addresses_1.user_id AS addresses_1_user_id, users.id AS users_id, users.name AS users_name, users.fullname AS users_fullname, users.password AS users_password @@ -308,14 +308,14 @@ the actual ``User`` rows we want. Below we change :func:`.joinedload` into ... options(subqueryload(User.addresses)).\ ... filter(User.name=='jack').\ ... filter(Address.email_address=='someaddress@foo.com').\ - ... all() + ... all() {opensql}SELECT users.id AS users_id, users.name AS users_name, users.fullname AS users_fullname, users.password AS users_password FROM users JOIN addresses ON users.id = addresses.user_id WHERE users.name = ? AND addresses.email_address = ? ['jack', 'someaddress@foo.com'] - # ... subqueryload() emits a SELECT in order + # ... subqueryload() emits a SELECT in order # to load all address records ... When using joined eager loading, if the @@ -323,8 +323,8 @@ query contains a modifier that impacts the rows returned externally to the joins, such as when using DISTINCT, LIMIT, OFFSET or equivalent, the completed statement is first wrapped inside a subquery, and the joins used specifically for joined eager -loading are applied to the subquery. SQLAlchemy's -joined eager loading goes the extra mile, and then ten miles further, to +loading are applied to the subquery. SQLAlchemy's +joined eager loading goes the extra mile, and then ten miles further, to absolutely ensure that it does not affect the end result of the query, only the way collections and related objects are loaded, no matter what the format of the query is. @@ -344,18 +344,18 @@ references a scalar many-to-one reference. simple SELECT without any joins. * When using joined loading, the load of 100 objects and their collections will emit only one SQL - statement. However, the - total number of rows fetched will be equal to the sum of the size of all the collections, plus one + statement. However, the + total number of rows fetched will be equal to the sum of the size of all the collections, plus one extra row for each parent object that has an empty collection. 
Each row will also contain the full set of columns represented by the parents, repeated for each collection item - SQLAlchemy does not - re-fetch these columns other than those of the primary key, however most DBAPIs (with some - exceptions) will transmit the full data of each parent over the wire to the client connection in - any case. Therefore joined eager loading only makes sense when the size of the collections are + re-fetch these columns other than those of the primary key, however most DBAPIs (with some + exceptions) will transmit the full data of each parent over the wire to the client connection in + any case. Therefore joined eager loading only makes sense when the size of the collections are relatively small. The LEFT OUTER JOIN can also be performance intensive compared to an INNER join. * When using subquery loading, the load of 100 objects will emit two SQL statements. The second statement will fetch a total number of rows equal to the sum of the size of all collections. An - INNER JOIN is used, and a minimum of parent columns are requested, only the primary keys. So a + INNER JOIN is used, and a minimum of parent columns are requested, only the primary keys. So a subquery load makes sense when the collections are larger. * When multiple levels of depth are used with joined or subquery loading, loading collections-within- @@ -367,7 +367,7 @@ references a scalar many-to-one reference. * When using the default lazy loading, a load of 100 objects will like in the case of the collection emit as many as 101 SQL statements. However - there is a significant exception to this, in that if the many-to-one reference is a simple foreign key reference to the target's primary key, each - reference will be checked first in the current identity map using :meth:`.Query.get`. So here, + reference will be checked first in the current identity map using :meth:`.Query.get`. So here, if the collection of objects references a relatively small set of target objects, or the full set of possible target objects have already been loaded into the session and are strongly referenced, using the default of `lazy='select'` is by far the most efficient way to go. @@ -393,7 +393,7 @@ Routing Explicit Joins/Statements into Eagerly Loaded Collections ------------------------------------------------------------------ The behavior of :func:`~sqlalchemy.orm.joinedload()` is such that joins are -created automatically, using anonymous aliases as targets, the results of which +created automatically, using anonymous aliases as targets, the results of which are routed into collections and scalar references on loaded objects. It is often the case that a query already includes the necessary joins which represent a particular collection or scalar diff --git a/doc/build/orm/mapper_config.rst b/doc/build/orm/mapper_config.rst index 7dc882d21e..9f6502bc6d 100644 --- a/doc/build/orm/mapper_config.rst +++ b/doc/build/orm/mapper_config.rst @@ -51,7 +51,7 @@ construct, then associated with the ``User`` class via the :func:`.mapper` funct mapper(User, user) Information about mapped attributes, such as relationships to other classes, are provided -via the ``properties`` dictionary. The example below illustrates a second :class:`.Table` +via the ``properties`` dictionary. 
The example below illustrates a second :class:`.Table` object, mapped to a class called ``Address``, then linked to ``User`` via :func:`.relationship`:: address = Table('address', metadata, @@ -73,8 +73,8 @@ for the ``Address`` relationship, and not ``Address.id``, as ``Address`` may not yet be linked to table metadata, nor can we specify a string here. Some examples in the documentation still use the classical approach, but note that -the classical as well as Declarative approaches are **fully interchangeable**. Both -systems ultimately create the same configuration, consisting of a :class:`.Table`, +the classical as well as Declarative approaches are **fully interchangeable**. Both +systems ultimately create the same configuration, consisting of a :class:`.Table`, user-defined class, linked together with a :func:`.mapper`. When we talk about "the behavior of :func:`.mapper`", this includes when using the Declarative system as well - it's still used, just behind the scenes. @@ -125,7 +125,7 @@ with the desired key:: Naming All Columns with a Prefix -------------------------------- -A way to automate the assignment of a prefix to +A way to automate the assignment of a prefix to the mapped attribute names relative to the column name is to use ``column_prefix``:: @@ -161,7 +161,7 @@ result in the former value being loaded first:: id = Column(Integer, primary_key=True) name = column_property(Column(String(50)), active_history=True) -:func:`.column_property` is also used to map a single attribute to +:func:`.column_property` is also used to map a single attribute to multiple columns. This use case arises when mapping to a :func:`~.expression.join` which has attributes which are equated to each other:: @@ -174,7 +174,7 @@ which has attributes which are equated to each other:: For more examples featuring this usage, see :ref:`maptojoin`. -Another place where :func:`.column_property` is needed is to specify SQL expressions as +Another place where :func:`.column_property` is needed is to specify SQL expressions as mapped attributes, such as below where we create an attribute ``fullname`` that is the string concatenation of the ``firstname`` and ``lastname`` columns:: @@ -195,11 +195,11 @@ See examples of this usage at :ref:`mapper_sql_expressions`. Mapping a Subset of Table Columns --------------------------------- -Sometimes, a :class:`.Table` object was made available using the -reflection process described at :ref:`metadata_reflection` to load +Sometimes, a :class:`.Table` object was made available using the +reflection process described at :ref:`metadata_reflection` to load the table's structure from the database. For such a table that has lots of columns that don't need to be referenced -in the application, the ``include_properties`` or ``exclude_properties`` +in the application, the ``include_properties`` or ``exclude_properties`` arguments can specify that only a subset of columns should be mapped. For example:: @@ -241,7 +241,7 @@ should be included or excluded:: 'primary_key' : [user_table.c.id] } -.. note:: +.. note:: insert and update defaults configured on individual :class:`.Column` objects, i.e. those described at :ref:`metadata_defaults` @@ -262,11 +262,11 @@ Deferred Column Loading ======================== This feature allows particular columns of a table be loaded only -upon direct access, instead of when the entity is queried using +upon direct access, instead of when the entity is queried using :class:`.Query`. 
This feature is useful when one wants to avoid loading a large text or binary field into memory when it's not needed. Individual columns can be lazy loaded by themselves or placed into groups that -lazy-load together, using the :func:`.orm.deferred` function to +lazy-load together, using the :func:`.orm.deferred` function to mark them as "deferred". In the example below, we define a mapping that will load each of ``.excerpt`` and ``.photo`` in separate, individual-row SELECT statements when each attribute is first referenced on the individual object instance:: @@ -341,8 +341,8 @@ Column Deferral API SQL Expressions as Mapped Attributes ===================================== -Attributes on a mapped class can be linked to SQL expressions, which can -be used in queries. +Attributes on a mapped class can be linked to SQL expressions, which can +be used in queries. Using a Hybrid -------------- @@ -350,7 +350,7 @@ Using a Hybrid The easiest and most flexible way to link relatively simple SQL expressions to a class is to use a so-called "hybrid attribute", described in the section :ref:`hybrids_toplevel`. The hybrid provides -for an expression that works at both the Python level as well as at the +for an expression that works at both the Python level as well as at the SQL expression level. For example, below we map a class ``User``, containing attributes ``firstname`` and ``lastname``, and include a hybrid that will provide for us the ``fullname``, which is the string concatenation of the two:: @@ -367,7 +367,7 @@ will provide for us the ``fullname``, which is the string concatenation of the t def fullname(self): return self.firstname + " " + self.lastname -Above, the ``fullname`` attribute is interpreted at both the instance and +Above, the ``fullname`` attribute is interpreted at both the instance and class level, so that it is available from an instance:: some_user = session.query(User).first() @@ -410,21 +410,21 @@ Using column_property --------------------- The :func:`.orm.column_property` function can be used to map a SQL -expression in a manner similar to a regularly mapped :class:`.Column`. +expression in a manner similar to a regularly mapped :class:`.Column`. With this technique, the attribute is loaded along with all other column-mapped attributes at load time. This is in some cases an advantage over the usage of hybrids, as the value can be loaded up front at the same time as the parent row of the object, particularly if the expression is one which links to other tables (typically as a correlated -subquery) to access data that wouldn't normally be +subquery) to access data that wouldn't normally be available on an already loaded object. -Disadvantages to using :func:`.orm.column_property` for SQL expressions include that -the expression must be compatible with the SELECT statement emitted for the class -as a whole, and there are also some configurational quirks which can occur +Disadvantages to using :func:`.orm.column_property` for SQL expressions include that +the expression must be compatible with the SELECT statement emitted for the class +as a whole, and there are also some configurational quirks which can occur when using :func:`.orm.column_property` from declarative mixins. 
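One such quirk, shown here as a hedged sketch: on a declarative mixin, a :func:`.orm.column_property` generally needs to be wrapped in ``declared_attr``, so that each mapped class receives its own copy of the expression::

    from sqlalchemy import Column, Integer
    from sqlalchemy.ext.declarative import declared_attr
    from sqlalchemy.orm import column_property

    class SomethingMixin(object):
        x = Column(Integer)
        y = Column(Integer)

        @declared_attr
        def x_plus_y(cls):
            # produce a distinct column_property for each mapped class
            return column_property(cls.x + cls.y)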
-Our "fullname" example can be expressed using :func:`.orm.column_property` as +Our "fullname" example can be expressed using :func:`.orm.column_property` as follows:: from sqlalchemy.orm import column_property @@ -469,7 +469,7 @@ to add an additional property after the fact:: User.address_count = column_property( select([func.count(Address.id)]).\ where(Address.user_id==User.id) - ) + ) For many-to-many relationships, use :func:`.and_` to join the fields of the association table to both tables in a relation, illustrated @@ -479,7 +479,7 @@ here with a classical mapping:: mapper(Author, authors, properties={ 'book_count': column_property( - select([func.count(books.c.id)], + select([func.count(books.c.id)], and_( book_authors.c.author_id==authors.c.id, book_authors.c.book_id==books.c.id @@ -490,9 +490,9 @@ Using a plain descriptor ------------------------- In cases where a SQL query more elaborate than what :func:`.orm.column_property` -or :class:`.hybrid_property` can provide must be emitted, a regular Python +or :class:`.hybrid_property` can provide must be emitted, a regular Python function accessed as an attribute can be used, assuming the expression -only needs to be available on an already-loaded instance. The function +only needs to be available on an already-loaded instance. The function is decorated with Python's own ``@property`` decorator to mark it as a read-only attribute. Within the function, :func:`.object_session` is used to locate the :class:`.Session` corresponding to the current object, @@ -562,7 +562,7 @@ collection:: assert '@' in address.email return address -Note that the :func:`~.validates` decorator is a convenience function built on +Note that the :func:`~.validates` decorator is a convenience function built on top of attribute events. An application that requires more control over configuration of attribute change behavior can make use of this system, described at :class:`~.AttributeEvents`. @@ -635,8 +635,8 @@ that is, from the ``EmailAddress`` class directly: {sql}address = session.query(EmailAddress).\ filter(EmailAddress.email == 'address@example.com').\ one() - SELECT address.email AS address_email, address.id AS address_id - FROM address + SELECT address.email AS address_email, address.id AS address_id + FROM address WHERE address.email = ? ('address@example.com',) {stop} @@ -664,21 +664,21 @@ logic:: @hybrid_property def email(self): - """Return the value of _email up until the last twelve + """Return the value of _email up until the last twelve characters.""" return self._email[:-12] @email.setter def email(self, email): - """Set the value of _email, tacking on the twelve character + """Set the value of _email, tacking on the twelve character value @example.com.""" self._email = email + "@example.com" @email.expression def email(cls): - """Produce a SQL expression that represents the value + """Produce a SQL expression that represents the value of the _email column, minus the last twelve characters.""" return func.substr(cls._email, 0, func.length(cls._email) - 12) @@ -691,8 +691,8 @@ attribute, a SQL function is rendered which produces the same effect: .. sourcecode:: python+sql {sql}address = session.query(EmailAddress).filter(EmailAddress.email == 'address').one() - SELECT address.email AS address_email, address.id AS address_id - FROM address + SELECT address.email AS address_email, address.id AS address_id + FROM address WHERE substr(address.email, ?, length(address.email) - ?) = ? 
(0, 12, 'address') {stop} @@ -717,20 +717,20 @@ Custom Comparators The expressions returned by comparison operations, such as ``User.name=='ed'``, can be customized, by implementing an object that -explicitly defines each comparison method needed. +explicitly defines each comparison method needed. -This is a relatively rare use case which generally applies only to -highly customized types. Usually, custom SQL behaviors can be +This is a relatively rare use case which generally applies only to +highly customized types. Usually, custom SQL behaviors can be associated with a mapped class by composing together the classes' -existing mapped attributes with other expression components, -using the techniques described in :ref:`mapper_sql_expressions`. +existing mapped attributes with other expression components, +using the techniques described in :ref:`mapper_sql_expressions`. Those approaches should be considered first before resorting to custom comparison objects. Each of :func:`.orm.column_property`, :func:`~.composite`, :func:`.relationship`, and :func:`.comparable_property` accept an argument called ``comparator_factory``. A subclass of :class:`.PropComparator` can be provided for this argument, which can then reimplement basic Python comparison methods -such as ``__eq__()``, ``__ne__()``, ``__lt__()``, and so on. +such as ``__eq__()``, ``__ne__()``, ``__lt__()``, and so on. It's best to subclass the :class:`.PropComparator` subclass provided by each type of property. For example, to allow a column-mapped attribute to @@ -758,7 +758,7 @@ function to produce case-insensitive matching:: lower(address.email) = lower(:lower_1) When building a :class:`.PropComparator`, the ``__clause_element__()`` method -should be used in order to acquire the underlying mapped column. This will +should be used in order to acquire the underlying mapped column. This will return a column that is appropriately wrapped in any kind of subquery or aliasing that has been applied in the context of the generated SQL statement. @@ -774,7 +774,7 @@ provides a single attribute which represents the group of columns using the class you provide. .. versionchanged:: 0.7 - Composites have been simplified such that + Composites have been simplified such that they no longer "conceal" the underlying column based attributes. Additionally, in-place mutation is no longer automatic; see the section below on enabling mutability to support tracking of in-place changes. @@ -851,12 +851,12 @@ using the ``.start`` and ``.end`` attributes against ad-hoc ``Point`` instances: BEGIN (implicit) INSERT INTO vertice (x1, y1, x2, y2) VALUES (?, ?, ?, ?) (3, 4, 5, 6) - SELECT vertice.id AS vertice_id, - vertice.x1 AS vertice_x1, - vertice.y1 AS vertice_y1, - vertice.x2 AS vertice_x2, - vertice.y2 AS vertice_y2 - FROM vertice + SELECT vertice.id AS vertice_id, + vertice.x1 AS vertice_x1, + vertice.y1 AS vertice_y1, + vertice.x2 AS vertice_x2, + vertice.y2 AS vertice_y2 + FROM vertice WHERE vertice.x1 = ? AND vertice.y1 = ? LIMIT ? OFFSET ? (3, 4, 1, 0) @@ -867,9 +867,9 @@ using the ``.start`` and ``.end`` attributes against ad-hoc ``Point`` instances: Tracking In-Place Mutations on Composites ----------------------------------------- -In-place changes to an existing composite value are +In-place changes to an existing composite value are not tracked automatically. Instead, the composite class needs to provide -events to its parent object explicitly. This task is largely automated +events to its parent object explicitly. 
This task is largely automated via the usage of the :class:`.MutableComposite` mixin, which uses events to associate each user-defined composite object with all parent associations. Please see the example in :ref:`mutable_composites`. @@ -883,7 +883,7 @@ Redefining Comparison Operations for Composites The "equals" comparison operation by default produces an AND of all corresponding columns equated to one another. This can be changed using the ``comparator_factory``, described in :ref:`custom_comparators`. -Below we illustrate the "greater than" operator, implementing +Below we illustrate the "greater than" operator, implementing the same expression that the base "greater than" does:: from sqlalchemy.orm.properties import CompositeProperty @@ -906,9 +906,9 @@ the same expression that the base "greater than" does:: x2 = Column(Integer) y2 = Column(Integer) - start = composite(Point, x1, y1, + start = composite(Point, x1, y1, comparator_factory=PointComparator) - end = composite(Point, x2, y2, + end = composite(Point, x2, y2, comparator_factory=PointComparator) .. _maptojoin: @@ -959,22 +959,22 @@ In the example above, the join expresses columns for both the ``user`` and the ``address`` table. The ``user.id`` and ``address.user_id`` columns are equated by foreign key, so in the mapping they are defined as one attribute, ``AddressUser.id``, using :func:`.column_property` to -indicate a specialized column mapping. Based on this part of the +indicate a specialized column mapping. Based on this part of the configuration, the mapping will copy new primary key values from ``user.id`` into the ``address.user_id`` column when a flush occurs. -Additionally, the ``address.id`` column is mapped explicitly to -an attribute named ``address_id``. This is to **disambiguate** the -mapping of the ``address.id`` column from the same-named ``AddressUser.id`` +Additionally, the ``address.id`` column is mapped explicitly to +an attribute named ``address_id``. This is to **disambiguate** the +mapping of the ``address.id`` column from the same-named ``AddressUser.id`` attribute, which here has been assigned to refer to the ``user`` table combined with the ``address.user_id`` foreign key. The natural primary key of the above mapping is the composite of ``(user.id, address.id)``, as these are the primary key columns of the -``user`` and ``address`` table combined together. The identity of an +``user`` and ``address`` table combined together. The identity of an ``AddressUser`` object will be in terms of these two values, and -is represented from an ``AddressUser`` object as +is represented from an ``AddressUser`` object as ``(AddressUser.id, AddressUser.address_id)``. @@ -983,14 +983,14 @@ Mapping a Class against Arbitrary Selects Similar to mapping against a join, a plain :func:`~.expression.select` object can be used with a mapper as well. 
The example fragment below illustrates mapping a class -called ``Customer`` to a :func:`~.expression.select` which includes a join to a +called ``Customer`` to a :func:`~.expression.select` which includes a join to a subquery:: from sqlalchemy import select, func subq = select([ - func.count(orders.c.id).label('order_count'), - func.max(orders.c.price).label('highest_order'), + func.count(orders.c.id).label('order_count'), + func.max(orders.c.price).label('highest_order'), orders.c.customer_id ]).group_by(orders.c.customer_id).alias() @@ -1002,12 +1002,12 @@ subquery:: Above, the full row represented by ``customer_select`` will be all the columns of the ``customers`` table, in addition to those columns -exposed by the ``subq`` subquery, which are ``order_count``, +exposed by the ``subq`` subquery, which are ``order_count``, ``highest_order``, and ``customer_id``. Mapping the ``Customer`` class to this selectable then creates a class which will contain those attributes. -When the ORM persists new instances of ``Customer``, only the +When the ORM persists new instances of ``Customer``, only the ``customers`` table will actually receive an INSERT. This is because the primary key of the ``orders`` table is not represented in the mapping; the ORM will only emit an INSERT into a table for which it has mapped the primary @@ -1022,19 +1022,19 @@ persisting it towards a particular :class:`.Table`, but also *instrumenting* attributes upon the class which are structured specifically according to the table metadata. -One potential use case for another mapper to exist at the same time is if we +One potential use case for another mapper to exist at the same time is if we wanted to load instances of our class not just from the immediate :class:`.Table` to which it is mapped, but from another selectable that is a derivation of that :class:`.Table`. While there technically is a way to create such a :func:`.mapper`, using the ``non_primary=True`` option, this approach is virtually never needed. -Instead, we use the functionality of the :class:`.Query` object to achieve this, +Instead, we use the functionality of the :class:`.Query` object to achieve this, using a method such as :meth:`.Query.select_from` or :meth:`.Query.from_statement` to specify a derived selectable. Another potential use is if we genuinely want instances of our class to -be persisted into different tables at different times; certain kinds of +be persisted into different tables at different times; certain kinds of data sharding configurations may persist a particular class into tables -that are identical in structure except for their name. For this kind of +that are identical in structure except for their name. For this kind of pattern, Python offers a better approach than the complexity of mapping the same class multiple times, which is to instead create new mapped classes for each target table. SQLAlchemy refers to this as the "entity name" @@ -1090,7 +1090,7 @@ next flush() operation, so the activity within a reconstructor should be conservative. :func:`~sqlalchemy.orm.reconstructor` is a shortcut into a larger system -of "instance level" events, which can be subscribed to using the +of "instance level" events, which can be subscribed to using the event API - see :class:`.InstanceEvents` for the full API description of these events. 
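For reference, a minimal sketch of the :func:`~sqlalchemy.orm.reconstructor` hook just described; the ``stack`` attribute is purely illustrative::

    from sqlalchemy import Column, Integer, String
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import reconstructor

    Base = declarative_base()

    class MyMappedClass(Base):
        __tablename__ = 'mytable'
        id = Column(Integer, primary_key=True)
        data = Column(String(50))

        def __init__(self, data):
            self.data = data
            self.init_on_load()

        @reconstructor
        def init_on_load(self):
            # invoked when instances are loaded from the database;
            # __init__ is not called in that case
            self.stack = []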
diff --git a/doc/build/orm/relationships.rst b/doc/build/orm/relationships.rst index 695ff7ad2b..790baa1b1d 100644 --- a/doc/build/orm/relationships.rst +++ b/doc/build/orm/relationships.rst @@ -15,7 +15,7 @@ of collections via :func:`relationship`. Basic Relational Patterns -------------------------- -A quick walkthrough of the basic relational patterns. +A quick walkthrough of the basic relational patterns. The imports used for each of the following sections is as follows:: @@ -134,7 +134,7 @@ directives can locate the remote tables with which to link:: class Parent(Base): __tablename__ = 'left' id = Column(Integer, primary_key=True) - children = relationship("Child", + children = relationship("Child", secondary=association_table) class Child(Base): @@ -153,8 +153,8 @@ the same ``secondary`` argument for the reverse relationship:: class Parent(Base): __tablename__ = 'left' id = Column(Integer, primary_key=True) - children = relationship("Child", - secondary=association_table, + children = relationship("Child", + secondary=association_table, backref="parents") class Child(Base): @@ -162,7 +162,7 @@ the same ``secondary`` argument for the reverse relationship:: id = Column(Integer, primary_key=True) The ``secondary`` argument of :func:`.relationship` also accepts a callable -that returns the ultimate argument, which is evaluated only when mappers are +that returns the ultimate argument, which is evaluated only when mappers are first used. Using this, we can define the ``association_table`` at a later point, as long as it's available to the callable after all module initialization is complete:: @@ -170,8 +170,8 @@ is complete:: class Parent(Base): __tablename__ = 'left' id = Column(Integer, primary_key=True) - children = relationship("Child", - secondary=lambda: association_table, + children = relationship("Child", + secondary=lambda: association_table, backref="parents") With the declarative extension in use, the traditional "string name of the table" @@ -180,17 +180,17 @@ is accepted as well, matching the name of the table as stored in ``Base.metadata class Parent(Base): __tablename__ = 'left' id = Column(Integer, primary_key=True) - children = relationship("Child", - secondary="association", + children = relationship("Child", + secondary="association", backref="parents") Deleting Rows from the Many to Many Table ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ A behavior which is unique to the ``secondary`` argument to :func:`.relationship` -is that the :class:`.Table` which is specified here is automatically subject +is that the :class:`.Table` which is specified here is automatically subject to INSERT and DELETE statements, as objects are added or removed from the collection. -There is **no need to delete from this table manually**. The act of removing a +There is **no need to delete from this table manually**. 
The act of removing a record from the collection will have the effect of the row being deleted on flush:: # row will be deleted from the "secondary" table @@ -204,23 +204,23 @@ when the child object is handed directly to :meth:`.Session.delete`:: There are several possibilities here: -* If there is a :func:`.relationship` from ``Parent`` to ``Child``, but there is +* If there is a :func:`.relationship` from ``Parent`` to ``Child``, but there is **not** a reverse-relationship that links a particular ``Child`` to each ``Parent``, SQLAlchemy will not have any awareness that when deleting this particular ``Child`` object, it needs to maintain the "secondary" table that links it to the ``Parent``. No delete of the "secondary" table will occur. * If there is a relationship that links a particular ``Child`` to each ``Parent``, - suppose it's called ``Child.parents``, SQLAlchemy by default will load in + suppose it's called ``Child.parents``, SQLAlchemy by default will load in the ``Child.parents`` collection to locate all ``Parent`` objects, and remove each row from the "secondary" table which establishes this link. Note that this relationship does not need to be bidirectional; SQLAlchemy is strictly looking at every :func:`.relationship` associated with the ``Child`` object being deleted. -* A higher performing option here is to use ON DELETE CASCADE directives +* A higher performing option here is to use ON DELETE CASCADE directives with the foreign keys used by the database. Assuming the database supports - this feature, the database itself can be made to automatically delete rows in the + this feature, the database itself can be made to automatically delete rows in the "secondary" table as referencing rows in "child" are deleted. SQLAlchemy - can be instructed to forego actively loading in the ``Child.parents`` + can be instructed to forego actively loading in the ``Child.parents`` collection in this case using the ``passive_deletes=True`` directive on :meth:`.relationship`; see :ref:`passive_deletes` for more details on this. @@ -238,13 +238,13 @@ deleted - see :ref:`unitofwork_cascades` for information on this feature. Association Object ~~~~~~~~~~~~~~~~~~ -The association object pattern is a variant on many-to-many: it's +The association object pattern is a variant on many-to-many: it's used when your association table contains additional columns beyond those which are foreign keys to the left and right tables. Instead of using the ``secondary`` argument, you map a new class directly to the association table. The left side of the relationship references the association object via one-to-many, and the association class references the right side via -many-to-one. Below we illustrate an association table mapped to the +many-to-one. Below we illustrate an association table mapped to the ``Association`` class which includes a column called ``extra_data``, which is a string value that is stored along with each association between ``Parent`` and ``Child``:: @@ -313,8 +313,8 @@ associated object, and a second to a target attribute. advisable that the association-mapped table not be used as the ``secondary`` argument on a :func:`.relationship` elsewhere, unless that :func:`.relationship` contains - the option ``viewonly=True``. 
SQLAlchemy otherwise + may attempt to emit redundant INSERT and DELETE statements on the same table, if similar state is detected on the related attribute as well as the associated object. @@ -323,7 +323,7 @@ Adjacency List Relationships ----------------------------- The **adjacency list** pattern is a common relational pattern whereby a table -contains a foreign key reference to itself. This is the most common +contains a foreign key reference to itself. This is the most common way to represent hierarchical data in flat tables. Other methods include **nested sets**, sometimes called "modified preorder", as well as **materialized path**. Despite the appeal that modified preorder @@ -362,7 +362,7 @@ Would be represented with data such as:: 6 1 child3 The :func:`.relationship` configuration here works in the -same way as a "normal" one-to-many relationship, with the +same way as a "normal" one-to-many relationship, with the exception that the "direction", i.e. whether the relationship is one-to-many or many-to-one, is assumed by default to be one-to-many. To establish the relationship as many-to-one, @@ -390,7 +390,7 @@ relationship using the :func:`.backref` function:: id = Column(Integer, primary_key=True) parent_id = Column(Integer, ForeignKey('node.id')) data = Column(String(50)) - children = relationship("Node", + children = relationship("Node", backref=backref('parent', remote_side=[id]) ) @@ -406,13 +406,13 @@ Querying of self-referential structures works like any other query:: # get all nodes named 'child2' session.query(Node).filter(Node.data=='child2') -However extra care is needed when attempting to join along +However extra care is needed when attempting to join along the foreign key from one level of the tree to the next. In SQL, a join from a table to itself requires that at least one side of the expression be "aliased" so that it can be unambiguously referred to. Recall from :ref:`ormtutorial_aliases` in the ORM tutorial that the -:class:`.orm.aliased` construct is normally used to provide an "alias" of +:class:`.orm.aliased` construct is normally used to provide an "alias" of an ORM entity. Joining from ``Node`` to itself using this technique looks like: @@ -425,20 +425,20 @@ looks like: {sql}>>> session.query(Node).\ filter(Node.data=='subchild1').\ join(nodealias, Node.parent).\ filter(nodealias.data=="child2").\ all() - SELECT node.id AS node_id, - node.parent_id AS node_parent_id, + SELECT node.id AS node_id, + node.parent_id AS node_parent_id, node.data AS node_data FROM node JOIN node AS node_1 - ON node.parent_id = node_1.id - WHERE node.data = ? + ON node.parent_id = node_1.id + WHERE node.data = ? AND node_1.data = ? ['subchild1', 'child2'] -:meth:`.Query.join` also includes a feature known as ``aliased=True`` that +:meth:`.Query.join` also includes a feature known as ``aliased=True`` that can shorten the verbosity of self-referential joins, at the expense of query flexibility. This feature -performs a similar "aliasing" step to that above, without the need for an -explicit entity. Calls to :meth:`.Query.filter` and similar subsequent to +performs a similar "aliasing" step to that above, without the need for an +explicit entity. Calls to :meth:`.Query.filter` and similar subsequent to the aliased join will **adapt** the ``Node`` entity to be that of the alias: .. 
sourcecode:: python+sql @@ -447,10 +447,10 @@ the aliased join will **adapt** the ``Node`` entity to be that of the alias: join(Node.parent, aliased=True).\ filter(Node.data=='child2').\ all() - SELECT node.id AS node_id, - node.parent_id AS node_parent_id, + SELECT node.id AS node_id, + node.parent_id AS node_parent_id, node.data AS node_data - FROM node + FROM node JOIN node AS node_1 ON node_1.id = node.parent_id WHERE node.data = ? AND node_1.data = ? ['subchild1', 'child2'] @@ -460,7 +460,7 @@ to the additional :meth:`~.Query.join` calls: .. sourcecode:: python+sql - # get all nodes named 'subchild1' with a + # get all nodes named 'subchild1' with a # parent named 'child2' and a grandparent 'root' {sql}session.query(Node).\ filter(Node.data=='subchild1').\ @@ -469,18 +469,18 @@ to the additional :meth:`~.Query.join` calls: join(Node.parent, aliased=True, from_joinpoint=True).\ filter(Node.data=='root').\ all() - SELECT node.id AS node_id, - node.parent_id AS node_parent_id, + SELECT node.id AS node_id, + node.parent_id AS node_parent_id, node.data AS node_data - FROM node - JOIN node AS node_1 ON node_1.id = node.parent_id + FROM node + JOIN node AS node_1 ON node_1.id = node.parent_id JOIN node AS node_2 ON node_2.id = node_1.parent_id - WHERE node.data = ? - AND node_1.data = ? + WHERE node.data = ? + AND node_1.data = ? AND node_2.data = ? ['subchild1', 'child2', 'root'] -:meth:`.Query.reset_joinpoint` will also remove the "aliasing" from filtering +:meth:`.Query.reset_joinpoint` will also remove the "aliasing" from filtering calls:: session.query(Node).\ @@ -518,19 +518,19 @@ configured via ``join_depth``: join_depth=2) {sql}session.query(Node).all() - SELECT node_1.id AS node_1_id, - node_1.parent_id AS node_1_parent_id, - node_1.data AS node_1_data, - node_2.id AS node_2_id, - node_2.parent_id AS node_2_parent_id, - node_2.data AS node_2_data, - node.id AS node_id, - node.parent_id AS node_parent_id, + SELECT node_1.id AS node_1_id, + node_1.parent_id AS node_1_parent_id, + node_1.data AS node_1_data, + node_2.id AS node_2_id, + node_2.parent_id AS node_2_parent_id, + node_2.data AS node_2_data, + node.id AS node_id, + node.parent_id AS node_parent_id, node.data AS node_data - FROM node - LEFT OUTER JOIN node AS node_2 - ON node.id = node_2.parent_id - LEFT OUTER JOIN node AS node_1 + FROM node + LEFT OUTER JOIN node AS node_2 + ON node.id = node_2.parent_id + LEFT OUTER JOIN node AS node_1 ON node_2.id = node_1.parent_id [] @@ -592,11 +592,11 @@ in both directions. The above configuration is equivalent to:: user = relationship("User", back_populates="addresses") -Above, we add a ``.user`` relationship to ``Address`` explicitly. On -both relationships, the ``back_populates`` directive tells each relationship +Above, we add a ``.user`` relationship to ``Address`` explicitly. On +both relationships, the ``back_populates`` directive tells each relationship about the other one, indicating that they should establish "bidirectional" behavior between each other. The primary effect of this configuration -is that the relationship adds event handlers to both attributes +is that the relationship adds event handlers to both attributes which have the behavior of "when an append or set event occurs here, set ourselves onto the incoming attribute using this particular attribute name". The behavior is illustrated as follows. 
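Start with a ``User`` and an ``Address``. The demonstration itself sits in the elided hunk that follows; a sketch of how it presumably reads (object reprs abbreviated, details assumed) is::

    >>> u1 = User()
    >>> a1 = Address()
    >>> u1.addresses.append(a1)   # appending on the collection side...
    >>> a1.user                   # ...populates the scalar side as well
    <User object at 0x...>
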
@@ -621,15 +621,15 @@ both the collection and the scalar attribute have been populated:: This behavior of course works in reverse for removal operations, as well as for equivalent operations on both sides. For example, -when ``.user`` is set again to ``None``, the ``Address`` object is removed +when ``.user`` is set again to ``None``, the ``Address`` object is removed from the reverse collection:: >>> a1.user = None >>> u1.addresses [] -The manipulation of the ``.addresses`` collection and the ``.user`` attribute -occurs entirely in Python without any interaction with the SQL database. +The manipulation of the ``.addresses`` collection and the ``.user`` attribute +occurs entirely in Python without any interaction with the SQL database. Without this behavior, the proper state would be apparent on both sides once the data has been flushed to the database, and later reloaded after a commit or expiration operation occurs. The ``backref``/``back_populates`` behavior has the advantage @@ -644,14 +644,14 @@ Backref Arguments ~~~~~~~~~~~~~~~~~~ We've established that the ``backref`` keyword is merely a shortcut for building -two individual :func:`.relationship` constructs that refer to each other. Part of -the behavior of this shortcut is that certain configurational arguments applied to +two individual :func:`.relationship` constructs that refer to each other. Part of +the behavior of this shortcut is that certain configurational arguments applied to the :func:`.relationship` will also be applied to the other direction - namely those arguments that describe the relationship at a schema level, and are unlikely to be different in the reverse direction. The usual case here is a many-to-many :func:`.relationship` that has a ``secondary`` argument, -or a one-to-many or many-to-one which has a ``primaryjoin`` argument (the +or a one-to-many or many-to-one which has a ``primaryjoin`` argument (the ``primaryjoin`` argument is discussed in :ref:`relationship_primaryjoin`). Such as if we limited the list of ``Address`` objects to those which start with "tony":: @@ -666,7 +666,7 @@ as if we limited the list of ``Address`` objects to those which start with "tony id = Column(Integer, primary_key=True) name = Column(String) - addresses = relationship("Address", + addresses = relationship("Address", primaryjoin="and_(User.id==Address.user_id, " "Address.email.startswith('tony'))", backref="user") @@ -682,19 +682,19 @@ of the relationship have this join condition applied:: >>> print User.addresses.property.primaryjoin "user".id = address.user_id AND address.email LIKE :email_1 || '%%' - >>> + >>> >>> print Address.user.property.primaryjoin "user".id = address.user_id AND address.email LIKE :email_1 || '%%' - >>> + >>> This reuse of arguments should pretty much do the "right thing" - it uses only arguments that are applicable, and in the case of a many-to-many relationship, will reverse the usage of ``primaryjoin`` and ``secondaryjoin`` -to correspond to the other direction (see the example in :ref:`self_referential_many_to_many` +to correspond to the other direction (see the example in :ref:`self_referential_many_to_many` for this). It's very often the case, however, that we'd like to specify arguments that -are specific to just the side where we happened to place the "backref". +are specific to just the side where we happened to place the "backref". This includes :func:`.relationship` arguments like ``lazy``, ``remote_side``, ``cascade`` and ``cascade_backrefs``. 
For this case we use the :func:`.backref` function in place of a string:: @@ -707,7 +707,7 @@ function in place of a string:: id = Column(Integer, primary_key=True) name = Column(String) - addresses = relationship("Address", + addresses = relationship("Address", backref=backref("user", lazy="joined")) Where above, we placed a ``lazy="joined"`` directive only on the ``Address.user`` @@ -723,10 +723,10 @@ One Way Backrefs An unusual case is that of the "one way backref". This is where the "back-populating" behavior of the backref is only desirable in one direction. An example of this is a collection which contains a filtering ``primaryjoin`` condition. We'd like to append -items to this collection as needed, and have them populate the "parent" object on the +items to this collection as needed, and have them populate the "parent" object on the incoming object. However, we'd also like to have items that are not part of the collection, -but still have the same "parent" association - these items should never be in the -collection. +but still have the same "parent" association - these items should never be in the +collection. Taking our previous example, where we established a ``primaryjoin`` that limited the collection only to ``Address`` objects whose email address started with the word ``tony``, @@ -744,7 +744,7 @@ is present in the ``addresses`` collection of ``u1``. After these objects are the transaction committed and their attributes expired for a re-load, the ``addresses`` collection will hit the database on next access and no longer have this ``Address`` object present, due to the filtering condition. But we can do away with this unwanted side -of the "backref" behavior on the Python side by using two separate :func:`.relationship` constructs, +of the "backref" behavior on the Python side by using two separate :func:`.relationship` constructs, placing ``back_populates`` only on one side:: from sqlalchemy import Integer, ForeignKey, String, Column @@ -757,7 +757,7 @@ placing ``back_populates`` only on one side:: __tablename__ = 'user' id = Column(Integer, primary_key=True) name = Column(String) - addresses = relationship("Address", + addresses = relationship("Address", primaryjoin="and_(User.id==Address.user_id, " "Address.email.startswith('tony'))", back_populates="user") @@ -801,7 +801,7 @@ these to a minimum overall. Setting the primaryjoin and secondaryjoin ----------------------------------------- -A common scenario arises when we attempt to relate two +A common scenario arises when we attempt to relate two classes together, where there exist multiple ways to join the two tables. @@ -844,11 +844,11 @@ What this error means is that if you have a ``Customer`` object, and wish to load in an associated ``Address``, there is the choice of retrieving the ``Address`` referred to by the ``billing_address_id`` column or the one referred to by the ``shipping_address_id`` column. The :func:`.relationship`, -as it is, cannot determine its full configuration. The examples at +as it is, cannot determine its full configuration. The examples at :ref:`relationship_patterns` didn't have this issue, because in each of those examples there was only **one** way to refer to the related table. 
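For reference, a mapping that reproduces this ambiguity would look something like the following sketch (the class and column names follow the error discussion above; the mapping itself is an assumption, with imports as in the earlier examples)::

    class Customer(Base):
        __tablename__ = 'customer'
        id = Column(Integer, primary_key=True)
        name = Column(String)

        billing_address_id = Column(Integer, ForeignKey("address.id"))
        shipping_address_id = Column(Integer, ForeignKey("address.id"))

        # ambiguous - two foreign key paths lead to Address, and neither
        # relationship() below says which one it should use
        billing_address = relationship("Address")
        shipping_address = relationship("Address")

    class Address(Base):
        __tablename__ = 'address'
        id = Column(Integer, primary_key=True)
        street = Column(String)
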
-To resolve this issue, :func:`.relationship` accepts an argument named +To resolve this issue, :func:`.relationship` accepts an argument named ``primaryjoin`` which accepts a Python-based SQL expression, using the system described at :ref:`sqlexpression_toplevel`, that describes how the two tables should be joined together. When using the declarative system, we often will specify this Python @@ -863,25 +863,25 @@ system so that it has access to the full namespace of available classes:: billing_address_id = Column(Integer, ForeignKey("address.id")) shipping_address_id = Column(Integer, ForeignKey("address.id")) - billing_address = relationship("Address", + billing_address = relationship("Address", primaryjoin="Address.id==Customer.billing_address_id") - shipping_address = relationship("Address", + shipping_address = relationship("Address", primaryjoin="Address.id==Customer.shipping_address_id") Above, loading the ``Customer.billing_address`` relationship from a ``Customer`` -object will use the value present in ``billing_address_id`` in order to +object will use the value present in ``billing_address_id`` in order to identify the row in ``Address`` to be loaded; similarly, ``shipping_address_id`` -is used for the ``shipping_address`` relationship. The linkage of the two +is used for the ``shipping_address`` relationship. The linkage of the two columns also plays a role during persistence; the newly generated primary key -of a just-inserted ``Address`` object will be copied into the appropriate +of a just-inserted ``Address`` object will be copied into the appropriate foreign key column of an associated ``Customer`` object during a flush. Specifying Alternate Join Conditions ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -The open-ended nature of ``primaryjoin`` also allows us to customize how -related items are loaded. In the example below, using the ``User`` class -as well as an ``Address`` class which stores a street address, we +The open-ended nature of ``primaryjoin`` also allows us to customize how +related items are loaded. In the example below, using the ``User`` class +as well as an ``Address`` class which stores a street address, we create a relationship ``boston_addresses`` which will only load those ``Address`` objects which specify a city of "Boston":: @@ -895,7 +895,7 @@ load those ``Address`` objects which specify a city of "Boston":: __tablename__ = 'user' id = Column(Integer, primary_key=True) name = Column(String) - addresses = relationship("Address", + addresses = relationship("Address", primaryjoin="and_(User.id==Address.user_id, " "Address.city=='Boston')") @@ -914,7 +914,7 @@ two distinct predicates for the join condition - joining both the ``User.id`` an ``Address.user_id`` columns to each other, as well as limiting rows in ``Address`` to just ``city='Boston'``. When using Declarative, rudimentary SQL functions like :func:`.and_` are automatically available in the evaluated namespace of a string -:func:`.relationship` argument. +:func:`.relationship` argument. When using classical mappings, we have the advantage of the :class:`.Table` objects already being present when the mapping is defined, so that the SQL expression can be created immediately::
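The classical-mapping version of the ``boston_addresses`` relationship is elided by the hunk that follows; a sketch of how it would typically read (the ``user_table`` and ``address_table`` :class:`.Table` objects are assumed to be defined already) is::

    from sqlalchemy import and_
    from sqlalchemy.orm import mapper, relationship

    mapper(User, user_table, properties={
        # the join condition is built directly against the Table columns,
        # rather than late-evaluated from a string
        'boston_addresses': relationship(Address, primaryjoin=
            and_(user_table.c.id == address_table.c.user_id,
                 address_table.c.city == 'Boston'))
    })

@@ -937,12 +937,12 @@ Note that the custom criterion we use in a ``primaryjoin`` is generally only significant when SQLAlchemy is rendering SQL in order to load or represent this relationship. 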
That is, it's used -in the SQL statement that's emitted in order to perform a per-attribute lazy load, or when a join is +in the SQL statement that's emitted in order to perform a per-attribute lazy load, or when a join is constructed at query time, such as via :meth:`.Query.join`, or via the eager "joined" or "subquery" styles of loading. When in-memory objects are being manipulated, we can place any ``Address`` object we'd like into the ``boston_addresses`` collection, regardless of what the value of the ``.city`` attribute is. The objects will remain present in the collection until the attribute is expired -and re-loaded from the database where the criterion is applied. When +and re-loaded from the database where the criterion is applied. When a flush occurs, the objects inside of ``boston_addresses`` will be flushed unconditionally, assigning the value of the primary key ``user.id`` column onto the foreign-key-holding ``address.user_id`` column for each row. The ``city`` criterion has no effect here, as the flush process only cares about synchronizing primary @@ -954,8 +954,8 @@ Self-Referential Many-to-Many Relationship ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Many to many relationships can be customized by one or both of ``primaryjoin`` -and ``secondaryjoin`` - the latter is significant for a relationship that -specifies a many-to-many reference using the ``secondary`` argument. +and ``secondaryjoin`` - the latter is significant for a relationship that +specifies a many-to-many reference using the ``secondary`` argument. A common situation which involves the usage of ``primaryjoin`` and ``secondaryjoin`` is when establishing a many-to-many relationship from a class to itself, as shown below:: @@ -1017,8 +1017,8 @@ to ``node.c.id``:: )}) -Note that in both examples, the ``backref`` keyword specifies a ``left_nodes`` -backref - when :func:`.relationship` creates the second relationship in the reverse +Note that in both examples, the ``backref`` keyword specifies a ``left_nodes`` +backref - when :func:`.relationship` creates the second relationship in the reverse direction, it's smart enough to reverse the ``primaryjoin`` and ``secondaryjoin`` arguments. Specifying Foreign Keys ~~~~~~~~~~~~~~~~~~~~~~~~ @@ -1041,7 +1041,7 @@ collection: class User(Base): __table__ = users_table - addresses = relationship(Address, + addresses = relationship(Address, primaryjoin= users_table.c.user_id==addresses_table.c.user_id, foreign_keys=[addresses_table.c.user_id]) @@ -1078,9 +1078,9 @@ second UPDATE in order to properly populate a row (and vice versa an UPDATE and DELETE in order to delete without violating foreign key constraints). The two use cases are: -* A table contains a foreign key to itself, and a single row will have a foreign key value pointing to its own primary key. -* Two tables each contain a foreign key referencing the other +* Two tables each contain a foreign key referencing the other table, with a row in each table referencing the other. For example::
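The example tables themselves are elided above; as a sketch (column names assumed, following the ``widget``/``entry`` naming that appears later in this section), such a mutually-dependent pair looks like::

    from sqlalchemy import MetaData, Table, Column, Integer, ForeignKey

    metadata = MetaData()

    # each table holds a foreign key to the other, so a row in each
    # can end up pointing at a row in the other
    widget = Table('widget', metadata,
        Column('widget_id', Integer, primary_key=True),
        Column('favorite_entry_id', Integer, ForeignKey('entry.entry_id')),
    )

    entry = Table('entry', metadata,
        Column('entry_id', Integer, primary_key=True),
        Column('widget_id', Integer, ForeignKey('widget.widget_id')),
    )

@@ -1115,13 +1115,13 @@ constraints fulfilled. The exception is if the foreign keys are configured as identifiers were populated manually (again essentially bypassing :func:`~sqlalchemy.orm.relationship`). -To enable the usage of a supplementary UPDATE statement, +To enable the usage of a supplementary UPDATE statement, we use the ``post_update`` option of :func:`.relationship`. 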
This specifies that the linkage between the two rows should be created using an UPDATE statement after both rows -have been INSERTED; it also causes the rows to be de-associated with +have been INSERTED; it also causes the rows to be de-associated with each other via UPDATE before a DELETE is emitted. The flag should -be placed on just *one* of the relationships, preferably the +be placed on just *one* of the relationships, preferably the many-to-one side. Below we illustrate a complete example, including two :class:`.ForeignKey` constructs, one which specifies ``use_alter=True`` to help with emitting CREATE TABLE statements:: @@ -1142,9 +1142,9 @@ specifies ``use_alter=True`` to help with emitting CREATE TABLE statements:: __tablename__ = 'widget' widget_id = Column(Integer, primary_key=True) - favorite_entry_id = Column(Integer, - ForeignKey('entry.entry_id', - use_alter=True, + favorite_entry_id = Column(Integer, + ForeignKey('entry.entry_id', + use_alter=True, name="fk_favorite_entry")) name = Column(String(50)) @@ -1210,7 +1210,7 @@ as illustrated below:: __table_args__ = ( ForeignKeyConstraint( - ["widget_id", "favorite_entry_id"], + ["widget_id", "favorite_entry_id"], ["entry.widget_id", "entry.entry_id"], name="fk_favorite_entry", use_alter=True ), @@ -1245,11 +1245,11 @@ which reference the primary key must also be updated as well. For databases which enforce referential integrity, it's required to use the database's ON UPDATE CASCADE functionality in order to propagate primary key changes -to referenced foreign keys - the values cannot be out +to referenced foreign keys - the values cannot be out of sync for any moment. For databases that don't support this, such as SQLite and -MySQL without their referential integrity options turned +MySQL without their referential integrity options turned on, the ``passive_updates`` flag can be set to ``False``, most preferably on a one-to-many or many-to-many :func:`.relationship`, which instructs @@ -1277,7 +1277,7 @@ A typical mutable primary key setup might look like:: __tablename__ = 'address' email = Column(String(50), primary_key=True) - username = Column(String(50), + username = Column(String(50), ForeignKey('user.username', onupdate="cascade") ) diff --git a/doc/build/orm/tutorial.rst b/doc/build/orm/tutorial.rst index 6fa77d48f3..a1990b39ab 100644 --- a/doc/build/orm/tutorial.rst +++ b/doc/build/orm/tutorial.rst @@ -69,7 +69,7 @@ the core interface to the database, adapted through a **dialect** that handles t of the database and DBAPI in use. In this case the SQLite dialect will interpret instructions to the Python built-in ``sqlite3`` module. -The :class:`.Engine` has not actually tried to connect to the database yet; that happens +The :class:`.Engine` has not actually tried to connect to the database yet; that happens only the first time it is asked to perform a task against the database. We can illustrate this by asking it to perform a simple SELECT statement: @@ -80,8 +80,8 @@ this by asking it to perform a simple SELECT statement: () {stop}1 -As the :meth:`.Engine.execute` method is called, the :class:`.Engine` establishes a connection to the -SQLite database, which is then used to emit the SQL. The connection is then returned to an internal +As the :meth:`.Engine.execute` method is called, the :class:`.Engine` establishes a connection to the +SQLite database, which is then used to emit the SQL. The connection is then returned to an internal connection pool where it will be reused on subsequent statement executions. 
While we illustrate direct usage of the :class:`.Engine` here, this isn't typically necessary when using the ORM, where the :class:`.Engine`, once created, is used behind the scenes by the ORM as we'll see shortly. @@ -91,13 +91,13 @@ Declare a Mapping When using the ORM, the configurational process starts by describing the database tables we'll be dealing with, and then by defining our own classes which will -be mapped to those tables. In modern SQLAlchemy, +be mapped to those tables. In modern SQLAlchemy, these two tasks are usually performed together, using a system known as :ref:`declarative_toplevel`, which allows us to create classes that include directives to describe the actual database table they will be mapped to. -Classes mapped using the Declarative system are defined in terms of a base class which +Classes mapped using the Declarative system are defined in terms of a base class which maintains a catalog of classes and tables relative to that base - this is known as the **declarative base class**. Our application will usually have just one instance of this base in a commonly imported module. We create this base class using the :func:`.declarative_base` function, as follows:: @@ -111,10 +111,10 @@ function, as follows:: Now that we have a "base", we can define any number of mapped classes in terms of it. We will start with just a single table called ``users``, which will store records for the end-users using our application. A new class called ``User`` will be the class to which we map this table. The imports we'll need to accomplish this include objects that represent the components -of our table, including the :class:`.Column` class which represents a database column, -as well as the :class:`.Integer` and :class:`.String` classes that +of our table, including the :class:`.Column` class which represents a database column, -as well as the :class:`.Integer` and :class:`.String` classes that represent basic datatypes used in columns:: >>> from sqlalchemy import Column, Integer, String
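The class definition itself sits between the hunks here; as a sketch, the mapped class this part of the tutorial builds reads along these lines (the exact ``__init__``/``__repr__`` bodies are assumed)::

    class User(Base):
        __tablename__ = 'users'

        id = Column(Integer, primary_key=True)
        name = Column(String)
        fullname = Column(String)
        password = Column(String)

        def __init__(self, name, fullname, password):
            self.name = name
            self.fullname = fullname
            self.password = password

        def __repr__(self):
            return "<User('%s','%s', '%s')>" % (self.name, self.fullname, self.password)

@@ -143,16 +143,16 @@ to be at least one column denoted as a primary key column; multiple-column, i.e. are of course entirely feasible as well. We define a constructor via ``__init__()`` and also a ``__repr__()`` method - both are optional. The -class of course can have any number of other methods and attributes as required by the application, +class of course can have any number of other methods and attributes as required by the application, as it's basically just a plain Python class. Inheriting from ``Base`` is also only a requirement -of the declarative configurational system, which itself is optional and relatively open ended; at its -core, the SQLAlchemy ORM only requires that a class be a so-called "new style class", that is, it inherits +of the declarative configurational system, which itself is optional and relatively open ended; at its +core, the SQLAlchemy ORM only requires that a class be a so-called "new style class", that is, it inherits from ``object`` in Python 2, in order to be mapped. All classes in Python 3 are "new style" classes. .. topic:: The Non Opinionated Philosophy In our ``User`` mapping example, it was required that we identify the name of the table - in use, as well as the names and characteristics of all columns which we care about, + in use, as well as the names and characteristics of all columns which we care about, including which column or columns represent the primary key, as well as some basic information about the types in use. 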
SQLAlchemy never makes assumptions about these decisions - the developer must @@ -164,19 +164,19 @@ from ``object`` in Python 2, in order to be mapped. All classes in Python 3 ar With our ``User`` class constructed via the Declarative system, we have defined information about our table, known as **table metadata**, as well as a user-defined class which is linked to this -table, known as a **mapped class**. Declarative has provided for us a shorthand system for what in SQLAlchemy is +table, known as a **mapped class**. Declarative has provided for us a shorthand system for what in SQLAlchemy is called a "Classical Mapping", which specifies these two units separately and is discussed -in :ref:`classical_mapping`. The table +in :ref:`classical_mapping`. The table is actually represented by a datastructure known as :class:`.Table`, and the mapping represented -by a :class:`.Mapper` object generated by a function called :func:`.mapper`. Declarative performs both of +by a :class:`.Mapper` object generated by a function called :func:`.mapper`. Declarative performs both of these steps for us, making available the :class:`.Table` it has created via the ``__table__`` attribute:: >>> User.__table__ # doctest: +NORMALIZE_WHITESPACE Table('users', MetaData(None), - Column('id', Integer(), table=<users>, primary_key=True, nullable=False), - Column('name', String(), table=<users>), - Column('fullname', String(), table=<users>), + Column('id', Integer(), table=<users>, primary_key=True, nullable=False), + Column('name', String(), table=<users>), + Column('fullname', String(), table=<users>), Column('password', String(), table=<users>), schema=None) and while rarely needed, making available the :class:`.Mapper` object via the ``__mapper__`` attribute:: @@ -191,7 +191,7 @@ new tables that have yet to be created in our SQLite database, so one helpful fe the :class:`.MetaData` object offers is the ability to issue CREATE TABLE statements to the database for all tables that don't yet exist. We illustrate this by calling the :meth:`.MetaData.create_all` method, passing in our :class:`.Engine` -as a source of database connectivity. We will see that special commands are +as a source of database connectivity. We will see that special commands are first emitted to check for the presence of the ``users`` table, and following that the actual ``CREATE TABLE`` statement: @@ -232,7 +232,7 @@ the actual ``CREATE TABLE`` statement: from sqlalchemy import Sequence Column(Integer, Sequence('user_id_seq'), primary_key=True) - A full, foolproof :class:`~sqlalchemy.schema.Table` generated via our declarative + A full, foolproof :class:`~sqlalchemy.schema.Table` generated via our declarative mapping is therefore:: class User(Base):
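        # the body of this class is elided by the hunk below; as a sketch
        # (the Sequence usage follows the preceding paragraph, the String
        # lengths are assumptions), it presumably continues:
        __tablename__ = 'users'
        id = Column(Integer, Sequence('user_id_seq'), primary_key=True)
        name = Column(String(50))
        fullname = Column(String(50))
        password = Column(String(12))

@@ -270,11 +270,11 @@ With mappings complete, let's now create and inspect a ``User`` object:: 'None' The ``id`` attribute, which while not defined by our ``__init__()`` method, -exists with a value of ``None`` on our ``User`` instance due to the ``id`` +exists with a value of ``None`` on our ``User`` instance due to the ``id`` column we declared in our mapping. By default, the ORM creates class attributes for all columns present in the table being mapped. These class attributes exist as -`Python descriptors `_, and +`Python descriptors `_, and define **instrumentation** for the mapped class. The functionality of this instrumentation includes the ability to fire on change events, track modifications, and to automatically load new data from the database when @@ -350,10 +350,10 @@ session object. 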
The business of acquiring a :class:`.Session` has a good deal of variety based on the variety of types of applications and frameworks out there. Keep in mind the :class:`.Session` is just a workspace for your objects, - local to a particular database connection - if you think of + local to a particular database connection - if you think of an application thread as a guest at a dinner party, the :class:`.Session` - is the guest's plate and the objects it holds are the food - (and the database...the kitchen?)! Hints on + is the guest's plate and the objects it holds are the food + (and the database...the kitchen?)! Hints on how :class:`.Session` is integrated into an application are at :ref:`session_faq`. @@ -384,9 +384,9 @@ added: BEGIN (implicit) INSERT INTO users (name, fullname, password) VALUES (?, ?, ?) ('ed', 'Ed Jones', 'edspassword') - SELECT users.id AS users_id, - users.name AS users_name, - users.fullname AS users_fullname, + SELECT users.id AS users_id, + users.name AS users_name, + users.fullname AS users_fullname, users.password AS users_password FROM users WHERE users.name = ? @@ -403,7 +403,7 @@ that which we just added:: >>> ed_user is our_user True -The ORM concept at work here is known as an `identity map `_ +The ORM concept at work here is known as an `identity map `_ and ensures that all operations upon a particular row within a :class:`~sqlalchemy.orm.session.Session` operate upon the same set of data. @@ -476,9 +476,9 @@ If we look at Ed's ``id`` attribute, which earlier was ``None``, it now has a va {sql}>>> ed_user.id # doctest: +NORMALIZE_WHITESPACE BEGIN (implicit) - SELECT users.id AS users_id, - users.name AS users_name, - users.fullname AS users_fullname, + SELECT users.id AS users_id, + users.name AS users_name, + users.fullname AS users_fullname, users.password AS users_password FROM users WHERE users.id = ? @@ -500,7 +500,7 @@ The level of reloading is configurable as is described in :ref:`session_toplevel inside the :class:`.Session` without a primary key, to actually being inserted, it moved between three out of four available "object states" - **transient**, **pending**, and **persistent**. - Being aware of these states and what they mean is always a good idea - + Being aware of these states and what they mean is always a good idea - be sure to read :ref:`session_object_states` for a quick overview. Rolling Back @@ -529,9 +529,9 @@ Querying the session, we can see that they're flushed into the current transacti ('Edwardo', 1) INSERT INTO users (name, fullname, password) VALUES (?, ?, ?) ('fakeuser', 'Invalid', '12345') - SELECT users.id AS users_id, - users.name AS users_name, - users.fullname AS users_fullname, + SELECT users.id AS users_id, + users.name AS users_name, + users.fullname AS users_fullname, users.password AS users_password FROM users WHERE users.name IN (?, ?) @@ -549,9 +549,9 @@ Rolling back, we can see that ``ed_user``'s name is back to ``ed``, and {sql}>>> ed_user.name #doctest: +NORMALIZE_WHITESPACE BEGIN (implicit) - SELECT users.id AS users_id, - users.name AS users_name, - users.fullname AS users_fullname, + SELECT users.id AS users_id, + users.name AS users_name, + users.fullname AS users_fullname, users.password AS users_password FROM users WHERE users.id = ? @@ -565,9 +565,9 @@ issuing a SELECT illustrates the changes made to the database: .. 
sourcecode:: python+sql {sql}>>> session.query(User).filter(User.name.in_(['ed', 'fakeuser'])).all() #doctest: +NORMALIZE_WHITESPACE - SELECT users.id AS users_id, - users.name AS users_name, - users.fullname AS users_fullname, + SELECT users.id AS users_id, + users.name AS users_name, + users.fullname AS users_fullname, users.password AS users_password FROM users WHERE users.name IN (?, ?) @@ -592,9 +592,9 @@ returned: {sql}>>> for instance in session.query(User).order_by(User.id): # doctest: +NORMALIZE_WHITESPACE ... print instance.name, instance.fullname - SELECT users.id AS users_id, + SELECT users.id AS users_id, users.name AS users_name, - users.fullname AS users_fullname, + users.fullname AS users_fullname, users.password AS users_password FROM users ORDER BY users.id () @@ -613,7 +613,7 @@ is expressed as tuples: {sql}>>> for name, fullname in session.query(User.name, User.fullname): # doctest: +NORMALIZE_WHITESPACE ... print name, fullname - SELECT users.name AS users_name, + SELECT users.name AS users_name, users.fullname AS users_fullname FROM users () @@ -631,9 +631,9 @@ class: {sql}>>> for row in session.query(User, User.name).all(): #doctest: +NORMALIZE_WHITESPACE ... print row.User, row.name - SELECT users.id AS users_id, - users.name AS users_name, - users.fullname AS users_fullname, + SELECT users.id AS users_id, + users.name AS users_name, + users.fullname AS users_fullname, users.password AS users_password FROM users () @@ -670,9 +670,9 @@ entities are present in the call to :meth:`~.Session.query`, can be controlled u {sql}>>> for row in session.query(user_alias, user_alias.name).all(): #doctest: +NORMALIZE_WHITESPACE ... print row.user_alias - SELECT user_alias.id AS user_alias_id, - user_alias.name AS user_alias_name, - user_alias.fullname AS user_alias_fullname, + SELECT user_alias.id AS user_alias_id, + user_alias.name AS user_alias_name, + user_alias.fullname AS user_alias_fullname, user_alias.password AS user_alias_password FROM users AS user_alias (){stop} @@ -689,9 +689,9 @@ conjunction with ORDER BY: {sql}>>> for u in session.query(User).order_by(User.id)[1:3]: #doctest: +NORMALIZE_WHITESPACE ... print u - SELECT users.id AS users_id, - users.name AS users_name, - users.fullname AS users_fullname, + SELECT users.id AS users_id, + users.name AS users_name, + users.fullname AS users_fullname, users.password AS users_password FROM users ORDER BY users.id LIMIT ? OFFSET ? @@ -739,9 +739,9 @@ users named "ed" with a full name of "Ed Jones", you can call ... filter(User.name=='ed').\ ... filter(User.fullname=='Ed Jones'): # doctest: +NORMALIZE_WHITESPACE ... print user - SELECT users.id AS users_id, - users.name AS users_name, - users.fullname AS users_fullname, + SELECT users.id AS users_id, + users.name AS users_name, + users.fullname AS users_fullname, users.password AS users_password FROM users WHERE users.name = ? AND users.fullname = ? @@ -818,9 +818,9 @@ non-iterator value. :meth:`~sqlalchemy.orm.query.Query.all()` returns a list: >>> query = session.query(User).filter(User.name.like('%ed')).order_by(User.id) {sql}>>> query.all() #doctest: +NORMALIZE_WHITESPACE - SELECT users.id AS users_id, - users.name AS users_name, - users.fullname AS users_fullname, + SELECT users.id AS users_id, + users.name AS users_name, + users.fullname AS users_fullname, users.password AS users_password FROM users WHERE users.name LIKE ? ORDER BY users.id @@ -833,9 +833,9 @@ the first result as a scalar: .. 
sourcecode:: python+sql {sql}>>> query.first() #doctest: +NORMALIZE_WHITESPACE - SELECT users.id AS users_id, - users.name AS users_name, - users.fullname AS users_fullname, + SELECT users.id AS users_id, + users.name AS users_name, + users.fullname AS users_fullname, users.password AS users_password FROM users WHERE users.name LIKE ? ORDER BY users.id @@ -854,9 +854,9 @@ an error: ... user = query.one() ... except MultipleResultsFound, e: ... print e - SELECT users.id AS users_id, - users.name AS users_name, - users.fullname AS users_fullname, + SELECT users.id AS users_id, + users.name AS users_name, + users.fullname AS users_fullname, users.password AS users_password FROM users WHERE users.name LIKE ? ORDER BY users.id @@ -870,9 +870,9 @@ an error: ... user = query.filter(User.id == 99).one() ... except NoResultFound, e: ... print e - SELECT users.id AS users_id, - users.name AS users_name, - users.fullname AS users_fullname, + SELECT users.id AS users_id, + users.name AS users_name, + users.fullname AS users_fullname, users.password AS users_password FROM users WHERE users.name LIKE ? AND users.id = ? ORDER BY users.id @@ -894,9 +894,9 @@ to SQLAlchemy clause constructs. For example, ... filter("id<224").\ ... order_by("id").all(): #doctest: +NORMALIZE_WHITESPACE ... print user.name - SELECT users.id AS users_id, - users.name AS users_name, - users.fullname AS users_fullname, + SELECT users.id AS users_id, + users.name AS users_name, + users.fullname AS users_fullname, users.password AS users_password FROM users WHERE id<224 ORDER BY id @@ -914,9 +914,9 @@ method: {sql}>>> session.query(User).filter("id<:value and name=:name").\ ... params(value=224, name='fred').order_by(User.id).one() # doctest: +NORMALIZE_WHITESPACE - SELECT users.id AS users_id, - users.name AS users_name, - users.fullname AS users_fullname, + SELECT users.id AS users_id, + users.name AS users_name, + users.fullname AS users_fullname, users.password AS users_password FROM users WHERE id>> q = session.query(User.id, User.name) {sql}>>> q.order_by("name").all() - SELECT users.id AS users_id, users.name AS users_name + SELECT users.id AS users_id, users.name AS users_name FROM users ORDER BY name () {stop}[(1, u'ed'), (4, u'fred'), (3, u'mary'), (2, u'wendy')] @@ -976,7 +976,7 @@ completely "raw", using string names to identify desired columns: Perfectly fine. But suppose, before we got a hold of the :class:`.Query`, some sophisticated transformations were applied to it, such as below where we use :meth:`~.Query.from_self`, a particularly advanced - method, to retrieve pairs of user names with + method, to retrieve pairs of user names with different numbers of characters:: >>> from sqlalchemy import func @@ -985,53 +985,53 @@ completely "raw", using string names to identify desired columns: ... filter(User.name < ua.name).\ ... filter(func.length(ua.name) != func.length(User.name)) - The :class:`.Query` now represents a select from a subquery, where + The :class:`.Query` now represents a select from a subquery, where ``User`` is represented twice both inside and outside of the subquery. Telling the :class:`.Query` to order by "name" doesn't really give - us much guarantee which "name" it's going to order on. In this + us much guarantee which "name" it's going to order on. In this case it assumes "name" is against the outer "aliased" ``User`` construct: .. 
sourcecode:: python+sql {sql}>>> q.order_by("name").all() #doctest: +NORMALIZE_WHITESPACE - SELECT anon_1.users_id AS anon_1_users_id, - anon_1.users_name AS anon_1_users_name, - users_1.name AS users_1_name - FROM (SELECT users.id AS users_id, users.name AS users_name - FROM users) AS anon_1, users AS users_1 - WHERE anon_1.users_name < users_1.name - AND length(users_1.name) != length(anon_1.users_name) + SELECT anon_1.users_id AS anon_1_users_id, + anon_1.users_name AS anon_1_users_name, + users_1.name AS users_1_name + FROM (SELECT users.id AS users_id, users.name AS users_name + FROM users) AS anon_1, users AS users_1 + WHERE anon_1.users_name < users_1.name + AND length(users_1.name) != length(anon_1.users_name) ORDER BY name () {stop}[(1, u'ed', u'fred'), (1, u'ed', u'mary'), (1, u'ed', u'wendy'), (3, u'mary', u'wendy'), (4, u'fred', u'wendy')] Only if we use the SQL element directly, in this case ``User.name`` - or ``ua.name``, do we give :class:`.Query` enough information to know + or ``ua.name``, do we give :class:`.Query` enough information to know for sure which "name" we'd like to order on, where we can see we get different results for each: .. sourcecode:: python+sql {sql}>>> q.order_by(ua.name).all() #doctest: +NORMALIZE_WHITESPACE - SELECT anon_1.users_id AS anon_1_users_id, - anon_1.users_name AS anon_1_users_name, - users_1.name AS users_1_name - FROM (SELECT users.id AS users_id, users.name AS users_name - FROM users) AS anon_1, users AS users_1 - WHERE anon_1.users_name < users_1.name - AND length(users_1.name) != length(anon_1.users_name) + SELECT anon_1.users_id AS anon_1_users_id, + anon_1.users_name AS anon_1_users_name, + users_1.name AS users_1_name + FROM (SELECT users.id AS users_id, users.name AS users_name + FROM users) AS anon_1, users AS users_1 + WHERE anon_1.users_name < users_1.name + AND length(users_1.name) != length(anon_1.users_name) ORDER BY users_1.name () {stop}[(1, u'ed', u'fred'), (1, u'ed', u'mary'), (1, u'ed', u'wendy'), (3, u'mary', u'wendy'), (4, u'fred', u'wendy')] {sql}>>> q.order_by(User.name).all() #doctest: +NORMALIZE_WHITESPACE - SELECT anon_1.users_id AS anon_1_users_id, - anon_1.users_name AS anon_1_users_name, - users_1.name AS users_1_name - FROM (SELECT users.id AS users_id, users.name AS users_name - FROM users) AS anon_1, users AS users_1 - WHERE anon_1.users_name < users_1.name - AND length(users_1.name) != length(anon_1.users_name) + SELECT anon_1.users_id AS anon_1_users_id, + anon_1.users_name AS anon_1_users_name, + users_1.name AS users_1_name + FROM (SELECT users.id AS users_id, users.name AS users_name + FROM users) AS anon_1, users AS users_1 + WHERE anon_1.users_name < users_1.name + AND length(users_1.name) != length(anon_1.users_name) ORDER BY anon_1.users_name () {stop}[(1, u'ed', u'wendy'), (1, u'ed', u'mary'), (1, u'ed', u'fred'), (4, u'fred', u'wendy'), (3, u'mary', u'wendy')] @@ -1045,26 +1045,26 @@ counting called :meth:`~sqlalchemy.orm.query.Query.count()`: .. sourcecode:: python+sql {sql}>>> session.query(User).filter(User.name.like('%ed')).count() #doctest: +NORMALIZE_WHITESPACE - SELECT count(*) AS count_1 - FROM (SELECT users.id AS users_id, - users.name AS users_name, - users.fullname AS users_fullname, - users.password AS users_password - FROM users + SELECT count(*) AS count_1 + FROM (SELECT users.id AS users_id, + users.name AS users_name, + users.fullname AS users_fullname, + users.password AS users_password + FROM users WHERE users.name LIKE ?) 
AS anon_1 ('%ed',) {stop}2 The :meth:`~.Query.count()` method is used to determine how many rows the SQL statement would return. Looking -at the generated SQL above, SQLAlchemy always places whatever it is we are +at the generated SQL above, SQLAlchemy always places whatever it is we are querying into a subquery, then counts the rows from that. In some cases this can be reduced to a simpler ``SELECT count(*) FROM table``; however, modern versions of SQLAlchemy don't try to guess when this is appropriate, as the exact SQL can be emitted using more explicit means. For situations where the "thing to be counted" needs -to be indicated specifically, we can specify the "count" function +to be indicated specifically, we can specify the "count" function directly using the expression ``func.count()``, available from the :attr:`~sqlalchemy.sql.expression.func` construct. Below we use it to return the count of each distinct user name:
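The doctest itself falls in the elided region above; a sketch of the kind of grouped count being described (the result values here are assumed) would be::

    >>> from sqlalchemy import func
    >>> session.query(func.count(User.name), User.name).\
    ...     group_by(User.name).all()
    [(1, u'ed'), (1, u'fred'), (1, u'mary'), (1, u'wendy')]

@@ -1083,7 +1083,7 @@ To achieve our simple ``SELECT count(*) FROM table``, we can apply it as: .. sourcecode:: python+sql {sql}>>> session.query(func.count('*')).select_from(User).scalar() - SELECT count(?) AS count_1 + SELECT count(?) AS count_1 FROM users ('*',) {stop}4 @@ -1103,7 +1103,7 @@ Building a Relationship ======================= Let's consider how a second table, related to ``User``, can be mapped and -queried. Users in our system +queried. Users in our system can store any number of email addresses associated with their username. This implies a basic one-to-many association from the ``users`` to a new table which stores email addresses, which we will call ``addresses``. Using @@ -1139,7 +1139,7 @@ those values in the ``users.id`` column, i.e. its primary key. A second directive, known as :func:`.relationship`, tells the ORM that the ``Address`` class itself should be linked -to the ``User`` class, using the attribute ``Address.user``. +to the ``User`` class, using the attribute ``Address.user``. :func:`.relationship` uses the foreign key relationships between the two tables to determine the nature of this linkage, determining that ``Address.user`` will be **many-to-one**. @@ -1156,14 +1156,14 @@ are referred to as a **bidirectional relationship**, and is a key feature of the SQLAlchemy ORM. The section :ref:`relationships_backref` discusses the "backref" feature in detail. -Arguments to :func:`.relationship` which concern the remote class -can be specified using strings, assuming the Declarative system is in +Arguments to :func:`.relationship` which concern the remote class +can be specified using strings, assuming the Declarative system is in use. Once all mappings are complete, these strings are evaluated -as Python expressions in order to produce the actual argument, in the -above case the ``User`` class. The names which are allowed during +as Python expressions in order to produce the actual argument, in the +above case the ``User`` class. The names which are allowed during this evaluation include, among other things, the names of all classes which have been created in terms of the declared base. Below we illustrate creation -of the same "addresses/user" bidirectional relationship in terms of ``User`` instead of +of the same "addresses/user" bidirectional relationship in terms of ``User`` instead of ``Address``:: class User(Base): @@ -1174,7 +1174,7 @@ See the docstring for :func:`.relationship` for more detail on argument style. .. topic:: Did you know ? 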
- * a FOREIGN KEY constraint in most (though not all) relational databases can + * a FOREIGN KEY constraint in most (though not all) relational databases can only link to a primary key column, or a column that has a UNIQUE constraint. * a FOREIGN KEY constraint that refers to a multiple column primary key, and itself has multiple columns, is known as a "composite foreign key". It can also @@ -1227,7 +1227,7 @@ just assign a full list directly: .. sourcecode:: python+sql >>> jack.addresses = [ - ... Address(email_address='jack@google.com'), + ... Address(email_address='jack@google.com'), ... Address(email_address='j25@yahoo.com')] When using a bidirectional relationship, elements added in one direction @@ -1266,9 +1266,9 @@ Querying for Jack, we get just Jack back. No SQL is yet issued for Jack's addre {sql}>>> jack = session.query(User).\ ... filter_by(name='jack').one() #doctest: +NORMALIZE_WHITESPACE BEGIN (implicit) - SELECT users.id AS users_id, - users.name AS users_name, - users.fullname AS users_fullname, + SELECT users.id AS users_id, + users.name AS users_name, + users.fullname AS users_fullname, users.password AS users_password FROM users WHERE users.name = ? @@ -1282,9 +1282,9 @@ Let's look at the ``addresses`` collection. Watch the SQL: .. sourcecode:: python+sql {sql}>>> jack.addresses #doctest: +NORMALIZE_WHITESPACE - SELECT addresses.id AS addresses_id, - addresses.email_address AS - addresses_email_address, + SELECT addresses.id AS addresses_id, + addresses.email_address AS + addresses_email_address, addresses.user_id AS addresses_user_id FROM addresses WHERE ? = addresses.user_id ORDER BY addresses.id @@ -1303,12 +1303,12 @@ Querying with Joins Now that we have two tables, we can show some more features of :class:`.Query`, specifically how to create queries that deal with both tables at the same time. -The `Wikipedia page on SQL JOIN -`_ offers a good introduction to +The `Wikipedia page on SQL JOIN +`_ offers a good introduction to join techniques, several of which we'll illustrate here. To construct a simple implicit join between ``User`` and ``Address``, -we can use :meth:`.Query.filter()` to equate their related columns together. +we can use :meth:`.Query.filter()` to equate their related columns together. Below we load the ``User`` and ``Address`` entities at once using this method: .. sourcecode:: python+sql @@ -1318,15 +1318,15 @@ Below we load the ``User`` and ``Address`` entities at once using this method: ... filter(Address.email_address=='jack@google.com').\ ... all(): # doctest: +NORMALIZE_WHITESPACE ... print u, a - SELECT users.id AS users_id, - users.name AS users_name, + SELECT users.id AS users_id, + users.name AS users_name, users.fullname AS users_fullname, - users.password AS users_password, + users.password AS users_password, addresses.id AS addresses_id, - addresses.email_address AS addresses_email_address, + addresses.email_address AS addresses_email_address, addresses.user_id AS addresses_user_id FROM users, addresses - WHERE users.id = addresses.user_id + WHERE users.id = addresses.user_id AND addresses.email_address = ? ('jack@google.com',) {stop} @@ -1339,9 +1339,9 @@ method: {sql}>>> session.query(User).join(Address).\ ... filter(Address.email_address=='jack@google.com').\ ... 
all() #doctest: +NORMALIZE_WHITESPACE - SELECT users.id AS users_id, - users.name AS users_name, - users.fullname AS users_fullname, + SELECT users.id AS users_id, + users.name AS users_name, + users.fullname AS users_fullname, users.password AS users_password FROM users JOIN addresses ON users.id = addresses.user_id WHERE addresses.email_address = ? @@ -1358,7 +1358,7 @@ works better when one of the following forms is used:: query.join(Address, User.addresses) # same, with explicit target query.join('addresses') # same, using a string -As you would expect, the same idea is used for "outer" joins, using the +As you would expect, the same idea is used for "outer" joins, using the :meth:`~.Query.outerjoin` function:: query.outerjoin(User.addresses) # LEFT OUTER JOIN @@ -1392,14 +1392,14 @@ same time: ... filter(adalias1.email_address=='jack@google.com').\ ... filter(adalias2.email_address=='j25@yahoo.com'): ... print username, email1, email2 # doctest: +NORMALIZE_WHITESPACE - SELECT users.name AS users_name, + SELECT users.name AS users_name, addresses_1.email_address AS addresses_1_email_address, addresses_2.email_address AS addresses_2_email_address - FROM users JOIN addresses AS addresses_1 + FROM users JOIN addresses AS addresses_1 ON users.id = addresses_1.user_id - JOIN addresses AS addresses_2 + JOIN addresses AS addresses_2 ON users.id = addresses_2.user_id - WHERE addresses_1.email_address = ? + WHERE addresses_1.email_address = ? AND addresses_2.email_address = ? ('jack@google.com', 'j25@yahoo.com') {stop}jack jack@google.com j25@yahoo.com @@ -1415,7 +1415,7 @@ ids, and JOIN to the parent. In this case we use a LEFT OUTER JOIN so that we get rows back for those users who don't have any addresses, e.g.:: SELECT users.*, adr_count.address_count FROM users LEFT OUTER JOIN - (SELECT user_id, count(*) AS address_count + (SELECT user_id, count(*) AS address_count FROM addresses GROUP BY user_id) AS adr_count ON users.id=adr_count.user_id
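The construction of ``stmt`` itself is elided above; a sketch of how such a subquery is typically built (mirroring the SQL just shown; the label name is an assumption) is::

    >>> from sqlalchemy import func
    >>> stmt = session.query(Address.user_id,
    ...         func.count('*').label('address_count')).\
    ...         group_by(Address.user_id).subquery()

@@ -1445,14 +1445,14 @@ accessible through an attribute called ``c``: {sql}>>> for u, count in session.query(User, stmt.c.address_count).\ ... outerjoin(stmt, User.id==stmt.c.user_id).order_by(User.id): # doctest: +NORMALIZE_WHITESPACE ... print u, count - SELECT users.id AS users_id, + SELECT users.id AS users_id, users.name AS users_name, - users.fullname AS users_fullname, + users.fullname AS users_fullname, users.password AS users_password, anon_1.address_count AS anon_1_address_count - FROM users LEFT OUTER JOIN + FROM users LEFT OUTER JOIN (SELECT addresses.user_id AS user_id, count(?) AS address_count - FROM addresses GROUP BY addresses.user_id) AS anon_1 + FROM addresses GROUP BY addresses.user_id) AS anon_1 ON users.id = anon_1.user_id ORDER BY users.id ('*',) @@ -1478,19 +1478,19 @@ to associate an "alias" of a mapped class to a subquery: >>> for user, address in session.query(User, adalias).\ ... join(adalias, User.addresses): # doctest: +NORMALIZE_WHITESPACE ... 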
print user, address - SELECT users.id AS users_id, - users.name AS users_name, + SELECT users.id AS users_id, + users.name AS users_name, users.fullname AS users_fullname, - users.password AS users_password, + users.password AS users_password, anon_1.id AS anon_1_id, - anon_1.email_address AS anon_1_email_address, + anon_1.email_address AS anon_1_email_address, anon_1.user_id AS anon_1_user_id - FROM users JOIN - (SELECT addresses.id AS id, - addresses.email_address AS email_address, + FROM users JOIN + (SELECT addresses.id AS id, + addresses.email_address AS email_address, addresses.user_id AS user_id FROM addresses - WHERE addresses.email_address != ?) AS anon_1 + WHERE addresses.email_address != ?) AS anon_1 ON users.id = anon_1.user_id ('j25@yahoo.com',) {stop} @@ -1559,7 +1559,7 @@ usage of EXISTS automatically. Above, the statement can be expressed along the {sql}>>> session.query(Address).\ ... filter(~Address.user.has(User.name=='jack')).all() # doctest: +NORMALIZE_WHITESPACE - SELECT addresses.id AS addresses_id, + SELECT addresses.id AS addresses_id, addresses.email_address AS addresses_email_address, addresses.user_id AS addresses_user_id FROM addresses @@ -1612,7 +1612,7 @@ Eager Loading Recall earlier that we illustrated a **lazy loading** operation, when we accessed the ``User.addresses`` collection of a ``User`` and SQL -was emitted. If you want to reduce the number of queries (dramatically, in many cases), +was emitted. If you want to reduce the number of queries (dramatically, in many cases), we can apply an **eager load** to the query operation. SQLAlchemy offers three types of eager loading, two of which are automatic, and a third which involves custom criterion. All three are usually invoked via functions known @@ -1626,9 +1626,9 @@ In this case we'd like to indicate that ``User.addresses`` should load eagerly. A good choice for loading a set of objects as well as their related collections is the :func:`.orm.subqueryload` option, which emits a second SELECT statement that fully loads the collections associated with the results just loaded. -The name "subquery" originates from the fact that the SELECT statement +The name "subquery" originates from the fact that the SELECT statement constructed directly via the :class:`.Query` is re-used, embedded as a subquery -into a SELECT against the related table. This is a little elaborate but +into a SELECT against the related table. This is a little elaborate but very easy to use: .. sourcecode:: python+sql @@ -1637,20 +1637,20 @@ very easy to use: {sql}>>> jack = session.query(User).\ ... options(subqueryload(User.addresses)).\ ... filter_by(name='jack').one() #doctest: +NORMALIZE_WHITESPACE - SELECT users.id AS users_id, - users.name AS users_name, - users.fullname AS users_fullname, - users.password AS users_password - FROM users + SELECT users.id AS users_id, + users.name AS users_name, + users.fullname AS users_fullname, + users.password AS users_password + FROM users WHERE users.name = ? ('jack',) - SELECT addresses.id AS addresses_id, - addresses.email_address AS addresses_email_address, - addresses.user_id AS addresses_user_id, - anon_1.users_id AS anon_1_users_id - FROM (SELECT users.id AS users_id - FROM users WHERE users.name = ?) 
AS anon_1 - JOIN addresses ON anon_1.users_id = addresses.user_id + SELECT addresses.id AS addresses_id, + addresses.email_address AS addresses_email_address, + addresses.user_id AS addresses_user_id, + anon_1.users_id AS anon_1_users_id + FROM (SELECT users.id AS users_id + FROM users WHERE users.name = ?) AS anon_1 + JOIN addresses ON anon_1.users_id = addresses.user_id ORDER BY anon_1.users_id, addresses.id ('jack',) {stop}>>> jack @@ -1667,7 +1667,7 @@ The other automatic eager loading function is more well known and is called a LEFT OUTER JOIN, so that the lead object as well as the related object or collection is loaded in one step. We illustrate loading the same ``addresses`` collection in this way - note that even though the ``User.addresses`` -collection on ``jack`` is actually populated right now, the query +collection on ``jack`` is actually populated right now, the query will emit the extra join regardless: .. sourcecode:: python+sql @@ -1677,14 +1677,14 @@ will emit the extra join regardless: {sql}>>> jack = session.query(User).\ ... options(joinedload(User.addresses)).\ ... filter_by(name='jack').one() #doctest: +NORMALIZE_WHITESPACE - SELECT users.id AS users_id, - users.name AS users_name, + SELECT users.id AS users_id, + users.name AS users_name, users.fullname AS users_fullname, - users.password AS users_password, - addresses_1.id AS addresses_1_id, - addresses_1.email_address AS addresses_1_email_address, + users.password AS users_password, + addresses_1.id AS addresses_1_id, + addresses_1.email_address AS addresses_1_email_address, addresses_1.user_id AS addresses_1_user_id - FROM users + FROM users LEFT OUTER JOIN addresses AS addresses_1 ON users.id = addresses_1.user_id WHERE users.name = ? ORDER BY addresses_1.id ('jack',) @@ -1711,11 +1711,11 @@ for both the lead and the related object. The join created by :func:`.joinedload` is anonymously aliased such that it **does not affect the query results**. An :meth:`.Query.order_by` or :meth:`.Query.filter` call **cannot** reference these aliased - tables - so-called "user space" joins are constructed using + tables - so-called "user space" joins are constructed using :meth:`.Query.join`. The rationale for this is that :func:`.joinedload` is only applied in order to affect how related objects or collections are loaded as an optimizing detail - it can be added or removed with no impact - on actual results. See the section :ref:`zen_of_eager_loading` for + on actual results. See the section :ref:`zen_of_eager_loading` for a detailed description of how this is used. Explicit Join + Eagerload @@ -1739,14 +1739,14 @@ attribute: ... filter(User.name=='jack').\ ... options(contains_eager(Address.user)).\ ... all() #doctest: +NORMALIZE_WHITESPACE - SELECT users.id AS users_id, - users.name AS users_name, - users.fullname AS users_fullname, - users.password AS users_password, - addresses.id AS addresses_id, - addresses.email_address AS addresses_email_address, - addresses.user_id AS addresses_user_id - FROM addresses JOIN users ON users.id = addresses.user_id + SELECT users.id AS users_id, + users.name AS users_name, + users.fullname AS users_fullname, + users.password AS users_password, + addresses.id AS addresses_id, + addresses.email_address AS addresses_email_address, + addresses.user_id AS addresses_user_id + FROM addresses JOIN users ON users.id = addresses.user_id WHERE users.name = ? 
('jack',) @@ -1775,12 +1775,12 @@ the session, then we'll issue a ``count`` query to see that no rows remain: (None, 2) DELETE FROM users WHERE users.id = ? (5,) - SELECT count(*) AS count_1 - FROM (SELECT users.id AS users_id, - users.name AS users_name, - users.fullname AS users_fullname, - users.password AS users_password - FROM users + SELECT count(*) AS count_1 + FROM (SELECT users.id AS users_id, + users.name AS users_name, + users.fullname AS users_fullname, + users.password AS users_password + FROM users WHERE users.name = ?) AS anon_1 ('jack',) {stop}0 @@ -1792,11 +1792,11 @@ So far, so good. How about Jack's ``Address`` objects ? {sql}>>> session.query(Address).filter( ... Address.email_address.in_(['jack@google.com', 'j25@yahoo.com']) ... ).count() # doctest: +NORMALIZE_WHITESPACE - SELECT count(*) AS count_1 - FROM (SELECT addresses.id AS addresses_id, - addresses.email_address AS addresses_email_address, - addresses.user_id AS addresses_user_id - FROM addresses + SELECT count(*) AS count_1 + FROM (SELECT addresses.id AS addresses_id, + addresses.email_address AS addresses_email_address, + addresses.user_id AS addresses_user_id + FROM addresses WHERE addresses.email_address IN (?, ?)) AS anon_1 ('jack@google.com', 'j25@yahoo.com') {stop}2 @@ -1858,9 +1858,9 @@ removing an address from his ``addresses`` collection will result in that # load Jack by primary key {sql}>>> jack = session.query(User).get(5) #doctest: +NORMALIZE_WHITESPACE BEGIN (implicit) - SELECT users.id AS users_id, - users.name AS users_name, - users.fullname AS users_fullname, + SELECT users.id AS users_id, + users.name AS users_name, + users.fullname AS users_fullname, users.password AS users_password FROM users WHERE users.id = ? @@ -1869,8 +1869,8 @@ removing an address from his ``addresses`` collection will result in that # remove one Address (lazy load fires off) {sql}>>> del jack.addresses[1] #doctest: +NORMALIZE_WHITESPACE - SELECT addresses.id AS addresses_id, - addresses.email_address AS addresses_email_address, + SELECT addresses.id AS addresses_id, + addresses.email_address AS addresses_email_address, addresses.user_id AS addresses_user_id FROM addresses WHERE ? = addresses.user_id @@ -1883,11 +1883,11 @@ removing an address from his ``addresses`` collection will result in that ... ).count() # doctest: +NORMALIZE_WHITESPACE DELETE FROM addresses WHERE addresses.id = ? (2,) - SELECT count(*) AS count_1 - FROM (SELECT addresses.id AS addresses_id, - addresses.email_address AS addresses_email_address, - addresses.user_id AS addresses_user_id - FROM addresses + SELECT count(*) AS count_1 + FROM (SELECT addresses.id AS addresses_id, + addresses.email_address AS addresses_email_address, + addresses.user_id AS addresses_user_id + FROM addresses WHERE addresses.email_address IN (?, ?)) AS anon_1 ('jack@google.com', 'j25@yahoo.com') {stop}1 @@ -1903,12 +1903,12 @@ Deleting Jack will delete both Jack and his remaining ``Address``: (1,) DELETE FROM users WHERE users.id = ? (5,) - SELECT count(*) AS count_1 - FROM (SELECT users.id AS users_id, - users.name AS users_name, - users.fullname AS users_fullname, - users.password AS users_password - FROM users + SELECT count(*) AS count_1 + FROM (SELECT users.id AS users_id, + users.name AS users_name, + users.fullname AS users_fullname, + users.password AS users_password + FROM users WHERE users.name = ?) AS anon_1 ('jack',) {stop}0 @@ -1916,11 +1916,11 @@ Deleting Jack will delete both Jack and his remaining ``Address``: {sql}>>> session.query(Address).filter( ... 
Address.email_address.in_(['jack@google.com', 'j25@yahoo.com']) ... ).count() # doctest: +NORMALIZE_WHITESPACE - SELECT count(*) AS count_1 - FROM (SELECT addresses.id AS addresses_id, - addresses.email_address AS addresses_email_address, - addresses.user_id AS addresses_user_id - FROM addresses + SELECT count(*) AS count_1 + FROM (SELECT addresses.id AS addresses_id, + addresses.email_address AS addresses_email_address, + addresses.user_id AS addresses_user_id + FROM addresses WHERE addresses.email_address IN (?, ?)) AS anon_1 ('jack@google.com', 'j25@yahoo.com') {stop}0 @@ -1928,7 +1928,7 @@ Deleting Jack will delete both Jack and his remaining ``Address``: .. topic:: More on Cascades Further detail on configuration of cascades is at :ref:`unitofwork_cascades`. - The cascade functionality can also integrate smoothly with + The cascade functionality can also integrate smoothly with the ``ON DELETE CASCADE`` functionality of the relational database. See :ref:`passive_deletes` for details. @@ -1952,7 +1952,7 @@ to serve as the association table. This looks like the following:: Above, we can see declaring a :class:`.Table` directly is a little different than declaring a mapped class. :class:`.Table` is a constructor function, so -each individual :class:`.Column` argument is separated by a comma. The +each individual :class:`.Column` argument is separated by a comma. The :class:`.Column` object is also given its name explicitly, rather than it being taken from an assigned attribute name. @@ -2062,9 +2062,9 @@ Usage is not too different from what we've been doing. Let's give Wendy some bl {sql}>>> wendy = session.query(User).\ ... filter_by(name='wendy').\ ... one() #doctest: +NORMALIZE_WHITESPACE - SELECT users.id AS users_id, - users.name AS users_name, - users.fullname AS users_fullname, + SELECT users.id AS users_id, + users.name AS users_name, + users.fullname AS users_fullname, users.password AS users_password FROM users WHERE users.name = ? @@ -2098,15 +2098,15 @@ keyword string 'firstpost'": (2, "Wendy's Blog Post", 'This is a test') INSERT INTO post_keywords (post_id, keyword_id) VALUES (?, ?) ((1, 1), (1, 2)) - SELECT posts.id AS posts_id, - posts.user_id AS posts_user_id, - posts.headline AS posts_headline, + SELECT posts.id AS posts_id, + posts.user_id AS posts_user_id, + posts.headline AS posts_headline, posts.body AS posts_body FROM posts WHERE EXISTS (SELECT 1 FROM post_keywords, keywords - WHERE posts.id = post_keywords.post_id - AND keywords.id = post_keywords.keyword_id + WHERE posts.id = post_keywords.post_id + AND keywords.id = post_keywords.keyword_id AND keywords.keyword = ?) ('firstpost',) {stop}[BlogPost("Wendy's Blog Post", 'This is a test', )] @@ -2120,15 +2120,15 @@ to her as a parent: ... filter(BlogPost.author==wendy).\ ... filter(BlogPost.keywords.any(keyword='firstpost')).\ ... all() #doctest: +NORMALIZE_WHITESPACE - SELECT posts.id AS posts_id, - posts.user_id AS posts_user_id, - posts.headline AS posts_headline, + SELECT posts.id AS posts_id, + posts.user_id AS posts_user_id, + posts.headline AS posts_headline, posts.body AS posts_body FROM posts WHERE ? 
= posts.user_id AND (EXISTS (SELECT 1 FROM post_keywords, keywords - WHERE posts.id = post_keywords.post_id - AND keywords.id = post_keywords.keyword_id + WHERE posts.id = post_keywords.post_id + AND keywords.id = post_keywords.keyword_id AND keywords.keyword = ?)) (2, 'firstpost') {stop}[BlogPost("Wendy's Blog Post", 'This is a test', )] @@ -2141,15 +2141,15 @@ relationship, to query straight from there: {sql}>>> wendy.posts.\ ... filter(BlogPost.keywords.any(keyword='firstpost')).\ ... all() #doctest: +NORMALIZE_WHITESPACE - SELECT posts.id AS posts_id, - posts.user_id AS posts_user_id, - posts.headline AS posts_headline, + SELECT posts.id AS posts_id, + posts.user_id AS posts_user_id, + posts.headline AS posts_headline, posts.body AS posts_body FROM posts WHERE ? = posts.user_id AND (EXISTS (SELECT 1 FROM post_keywords, keywords - WHERE posts.id = post_keywords.post_id - AND keywords.id = post_keywords.keyword_id + WHERE posts.id = post_keywords.post_id + AND keywords.id = post_keywords.keyword_id AND keywords.keyword = ?)) (2, 'firstpost') {stop}[BlogPost("Wendy's Blog Post", 'This is a test', )] diff --git a/doc/build/testdocs.py b/doc/build/testdocs.py index a07bcd77ab..815aa86694 100644 --- a/doc/build/testdocs.py +++ b/doc/build/testdocs.py @@ -20,8 +20,8 @@ handler.setFormatter(logging.Formatter('%(message)s')) rootlogger.addHandler(handler) -def teststring(s, name, globs=None, verbose=None, report=True, - optionflags=0, extraglobs=None, raise_on_error=False, +def teststring(s, name, globs=None, verbose=None, report=True, + optionflags=0, extraglobs=None, raise_on_error=False, parser=doctest.DocTestParser()): from doctest import DebugRunner, DocTestRunner, master diff --git a/examples/adjacency_list/adjacency_list.py b/examples/adjacency_list/adjacency_list.py index 91c6e1ccfb..1020cc57d7 100644 --- a/examples/adjacency_list/adjacency_list.py +++ b/examples/adjacency_list/adjacency_list.py @@ -15,13 +15,13 @@ class TreeNode(Base): parent_id = Column(Integer, ForeignKey(id)) name = Column(String(50), nullable=False) - children = relationship("TreeNode", + children = relationship("TreeNode", # cascade deletions cascade="all", # many to one + adjacency list - remote_side - # is required to reference the 'remote' + # is required to reference the 'remote' # column in the join condition. backref=backref("parent", remote_side=id), @@ -46,7 +46,7 @@ class TreeNode(Base): return " " * _indent + repr(self) + \ "\n" + \ "".join([ - c.dump(_indent +1) + c.dump(_indent +1) for c in self.children.values()] ) @@ -107,7 +107,7 @@ if __name__ == '__main__': "selecting tree on root, using eager loading to join four levels deep.") session.expunge_all() node = session.query(TreeNode).\ - options(joinedload_all("children", "children", + options(joinedload_all("children", "children", "children", "children")).\ filter(TreeNode.name=="rootnode").\ first() diff --git a/examples/association/__init__.py b/examples/association/__init__.py index 12d2ea6975..df736f4fbd 100644 --- a/examples/association/__init__.py +++ b/examples/association/__init__.py @@ -6,7 +6,7 @@ classes that are associated in a many-to-many pattern. 
This directory includes the following examples: -* basic_association.py - illustrate a many-to-many relationship between an +* basic_association.py - illustrate a many-to-many relationship between an "Order" and a collection of "Item" objects, associating a purchase price with each via an association object called "OrderItem" * proxied_association.py - same example as basic_association, adding in diff --git a/examples/association/basic_association.py b/examples/association/basic_association.py index cd86aa504f..29a473fced 100644 --- a/examples/association/basic_association.py +++ b/examples/association/basic_association.py @@ -3,7 +3,7 @@ The association object pattern is a form of many-to-many which associates additional data with each association between parent/child. -The example illustrates an "order", referencing a collection +The example illustrates an "order", referencing a collection of "items", with a particular price paid associated with each "item". """ @@ -83,7 +83,7 @@ if __name__ == '__main__': # query the order, print items order = session.query(Order).filter_by(customer_name='john smith').one() - print [(order_item.item.description, order_item.price) + print [(order_item.item.description, order_item.price) for order_item in order.order_items] # print customers who bought 'MySQL Crowbar' on sale diff --git a/examples/association/dict_of_sets_with_default.py b/examples/association/dict_of_sets_with_default.py index 0720fdab99..63c0f45318 100644 --- a/examples/association/dict_of_sets_with_default.py +++ b/examples/association/dict_of_sets_with_default.py @@ -44,7 +44,7 @@ class B(Base): key = Column(String) values = association_proxy("elements", "value") - """Bridge the association from 'elements' over to the + """Bridge the association from 'elements' over to the 'value' element of C.""" def __init__(self, key, values=None): diff --git a/examples/beaker_caching/__init__.py b/examples/beaker_caching/__init__.py index cc9f71d8b8..7e7b627917 100644 --- a/examples/beaker_caching/__init__.py +++ b/examples/beaker_caching/__init__.py @@ -1,18 +1,18 @@ """ Illustrates how to embed Beaker cache functionality within the Query object, allowing full cache control as well as the -ability to pull "lazy loaded" attributes from long term cache +ability to pull "lazy loaded" attributes from long term cache as well. In this demo, the following techniques are illustrated: * Using custom subclasses of Query -* Basic technique of circumventing Query to pull from a +* Basic technique of circumventing Query to pull from a custom cache source instead of the database. * Rudimental caching with Beaker, using "regions" which allow global control over a fixed set of configurations. -* Using custom MapperOption objects to configure options on - a Query, including the ability to invoke the options +* Using custom MapperOption objects to configure options on + a Query, including the ability to invoke the options deep within an object graph when lazy loads occur. E.g.:: @@ -49,10 +49,10 @@ The demo scripts themselves, in order of complexity, are run as follows:: Listing of files: environment.py - Establish the Session, the Beaker cache - manager, data / cache file paths, and configurations, + manager, data / cache file paths, and configurations, bootstrap fixture data if necessary. - caching_query.py - Represent functions and classes + caching_query.py - Represent functions and classes which allow the usage of Beaker caching with SQLAlchemy. Introduces a query option called FromCache. 
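A minimal sketch of FromCache in use, following the helloworld.py example
quoted further down in this patch (import paths are assumed from the file
listing above; Session and Person come from the example's environment.py
and model.py)::

    from environment import Session      # assumed layout per the listing
    from model import Person             # assumed layout per the listing
    from caching_query import FromCache

    # first call hits the database; results are cached in the
    # "default" region under the namespace "all_people"
    people = Session.query(Person).\
        options(FromCache("default", "all_people")).all()

    # same region, namespace and bind parameters - served from cache,
    # no SQL is emitted
    people = Session.query(Person).\
        options(FromCache("default", "all_people")).all()

    # recreate the same query and invalidate its cached results
    Session.query(Person).\
        options(FromCache("default", "all_people")).invalidate()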
diff --git a/examples/beaker_caching/advanced.py b/examples/beaker_caching/advanced.py index c16e02f33a..31beeff6f9 100644 --- a/examples/beaker_caching/advanced.py +++ b/examples/beaker_caching/advanced.py @@ -1,6 +1,6 @@ """advanced.py -Illustrate usage of Query combined with the FromCache option, +Illustrate usage of Query combined with the FromCache option, including front-end loading, cache invalidation, namespace techniques and collection caching. @@ -17,12 +17,12 @@ def load_name_range(start, end, invalidate=False): start/end are integers, range is then "person " - "person ". - The cache option we set up is called "name_range", indicating + The cache option we set up is called "name_range", indicating a range of names for the Person class. The `Person.addresses` collections are also cached. Its basically another level of tuning here, as that particular cache option - can be transparently replaced with joinedload(Person.addresses). + can be transparently replaced with joinedload(Person.addresses). The effect is that each Person and his/her Address collection is cached either together or separately, affecting the kind of SQL that emits for unloaded Person objects as well as the distribution @@ -63,13 +63,13 @@ print ", ".join([p.name for p in load_name_range(2, 12)]) print "\ntwenty five through forty, invalidate first:\n" print ", ".join([p.name for p in load_name_range(25, 40, True)]) -# illustrate the address loading from either cache/already +# illustrate the address loading from either cache/already # on the Person print "\n\nPeople plus addresses, two through twelve, addresses possibly from cache" for p in load_name_range(2, 12): print p.format_full() -# illustrate the address loading from either cache/already +# illustrate the address loading from either cache/already # on the Person print "\n\nPeople plus addresses, two through twelve, addresses from cache" for p in load_name_range(2, 12): diff --git a/examples/beaker_caching/caching_query.py b/examples/beaker_caching/caching_query.py index a6a1261113..ae0c9c903d 100644 --- a/examples/beaker_caching/caching_query.py +++ b/examples/beaker_caching/caching_query.py @@ -11,7 +11,7 @@ The three new concepts introduced here are: parameters on a Query * RelationshipCache - a variant of FromCache which is specific to a query invoked during a lazy load. - * _params_from_query - extracts value parameters from + * _params_from_query - extracts value parameters from a Query. The rest of what's here are standard SQLAlchemy and @@ -23,30 +23,30 @@ from sqlalchemy.orm.query import Query from sqlalchemy.sql import visitors class CachingQuery(Query): - """A Query subclass which optionally loads full results from a Beaker + """A Query subclass which optionally loads full results from a Beaker cache region. The CachingQuery stores additional state that allows it to consult a Beaker cache before accessing the database: - * A "region", which is a cache region argument passed to a + * A "region", which is a cache region argument passed to a Beaker CacheManager, specifies a particular cache configuration (including backend implementation, expiration times, etc.) * A "namespace", which is a qualifying name that identifies a - group of keys within the cache. A query that filters on a name - might use the name "by_name", a query that filters on a date range + group of keys within the cache. A query that filters on a name + might use the name "by_name", a query that filters on a date range to a joined table might use the name "related_date_range". 
When the above state is present, a Beaker cache is retrieved. - The "namespace" name is first concatenated with - a string composed of the individual entities and columns the Query + The "namespace" name is first concatenated with + a string composed of the individual entities and columns the Query requests, i.e. such as ``Query(User.id, User.name)``. The Beaker cache is then loaded from the cache manager based on the region and composed namespace. The key within the cache itself is then constructed against the bind parameters specified - by this query, which are usually literals defined in the + by this query, which are usually literals defined in the WHERE clause. The FromCache and RelationshipCache mapper options below represent @@ -137,7 +137,7 @@ def _get_cache_parameters(query): return cache, cache_key def _namespace_from_query(namespace, query): - # cache namespace - the token handed in by the + # cache namespace - the token handed in by the # option + class we're querying against namespace = " ".join([namespace] + [str(x) for x in query._entities]) @@ -151,7 +151,7 @@ def _set_cache_parameters(query, region, namespace, cache_key): if hasattr(query, '_cache_parameters'): region, namespace, cache_key = query._cache_parameters raise ValueError("This query is already configured " - "for region %r namespace %r" % + "for region %r namespace %r" % (region, namespace) ) query._cache_parameters = region, namespace, cache_key @@ -171,10 +171,10 @@ class FromCache(MapperOption): be a name uniquely describing the target Query's lexical structure. - :param cache_key: optional. A string cache key + :param cache_key: optional. A string cache key that will serve as the key to the query. Use this if your query has a huge amount of parameters (such - as when using in_()) which correspond more simply to + as when using in_()) which correspond more simply to some other identifier. """ @@ -188,7 +188,7 @@ class FromCache(MapperOption): _set_cache_parameters(query, self.region, self.namespace, self.cache_key) class RelationshipCache(MapperOption): - """Specifies that a Query as called within a "lazy load" + """Specifies that a Query as called within a "lazy load" should load results from a cache.""" propagate_to_loaders = True @@ -228,9 +228,9 @@ class RelationshipCache(MapperOption): if (cls, key) in self._relationship_options: relationship_option = self._relationship_options[(cls, key)] _set_cache_parameters( - query, - relationship_option.region, - relationship_option.namespace, + query, + relationship_option.region, + relationship_option.namespace, None) def and_(self, option): diff --git a/examples/beaker_caching/environment.py b/examples/beaker_caching/environment.py index 740c5977ac..ccc625117d 100644 --- a/examples/beaker_caching/environment.py +++ b/examples/beaker_caching/environment.py @@ -1,6 +1,6 @@ """environment.py -Establish data / cache file paths, and configurations, +Establish data / cache file paths, and configurations, bootstrap fixture data if necessary. 
""" diff --git a/examples/beaker_caching/fixture_data.py b/examples/beaker_caching/fixture_data.py index 09f020cea8..b77bbcb954 100644 --- a/examples/beaker_caching/fixture_data.py +++ b/examples/beaker_caching/fixture_data.py @@ -37,7 +37,7 @@ def install(): person = Person( "person %.2d" % i, Address( - street="street %.2d" % i, + street="street %.2d" % i, postal_code=all_post_codes[random.randint(0, len(all_post_codes) - 1)] ) ) diff --git a/examples/beaker_caching/helloworld.py b/examples/beaker_caching/helloworld.py index f64fcdd2e4..6f696c502b 100644 --- a/examples/beaker_caching/helloworld.py +++ b/examples/beaker_caching/helloworld.py @@ -15,12 +15,12 @@ people = Session.query(Person).options(FromCache("default", "all_people")).all() # remove the Session. next query starts from scratch. Session.remove() -# load again, using the same FromCache option. now they're cached +# load again, using the same FromCache option. now they're cached # under "all_people", no SQL is emitted. print "loading people....again!" people = Session.query(Person).options(FromCache("default", "all_people")).all() -# want to load on some different kind of query ? change the namespace +# want to load on some different kind of query ? change the namespace # you send to FromCache print "loading people two through twelve" people_two_through_twelve = Session.query(Person).\ @@ -30,7 +30,7 @@ people_two_through_twelve = Session.query(Person).\ # the data is cached under the "namespace" you send to FromCache, *plus* # the bind parameters of the query. So this query, having -# different literal parameters under "Person.name.between()" than the +# different literal parameters under "Person.name.between()" than the # previous one, issues new SQL... print "loading people five through fifteen" people_five_through_fifteen = Session.query(Person).\ @@ -48,8 +48,8 @@ people_two_through_twelve = Session.query(Person).\ # invalidate the cache for the three queries we've done. Recreate -# each Query, which includes at the very least the same FromCache, -# same list of objects to be loaded, and the same parameters in the +# each Query, which includes at the very least the same FromCache, +# same list of objects to be loaded, and the same parameters in the # same order, then call invalidate(). print "invalidating everything" Session.query(Person).options(FromCache("default", "all_people")).invalidate() diff --git a/examples/beaker_caching/local_session_caching.py b/examples/beaker_caching/local_session_caching.py index b638583626..2d80355786 100644 --- a/examples/beaker_caching/local_session_caching.py +++ b/examples/beaker_caching/local_session_caching.py @@ -12,7 +12,7 @@ from beaker import cache, container import collections class ScopedSessionNamespace(container.MemoryNamespaceManager): - """A Beaker cache type which will cache objects locally on + """A Beaker cache type which will cache objects locally on the current session. When used with the query_cache system, the effect is that the objects @@ -86,10 +86,10 @@ if __name__ == '__main__': # identity is preserved - person10 is the *same* object that's # ultimately inside the cache. 
So it is safe to manipulate - # the not-queried-for attributes of objects when using such a - # cache without the need to invalidate - however, any change - # that would change the results of a cached query, such as - # inserts, deletes, or modification to attributes that are + # the not-queried-for attributes of objects when using such a + # cache without the need to invalidate - however, any change + # that would change the results of a cached query, such as + # inserts, deletes, or modification to attributes that are # part of query criterion, still require careful invalidation. from caching_query import _get_cache_parameters cache, key = _get_cache_parameters(q) diff --git a/examples/beaker_caching/model.py b/examples/beaker_caching/model.py index 629b263a79..a6733962db 100644 --- a/examples/beaker_caching/model.py +++ b/examples/beaker_caching/model.py @@ -1,5 +1,5 @@ """Model. We are modeling Person objects with a collection -of Address objects. Each Address has a PostalCode, which +of Address objects. Each Address has a PostalCode, which in turn references a City and then a Country: Person --(1..n)--> Address @@ -70,7 +70,7 @@ class Address(Base): def __str__(self): return "%s\t"\ "%s, %s\t"\ - "%s" % (self.street, self.city.name, + "%s" % (self.street, self.city.name, self.postal_code.code, self.country.name) class Person(Base): diff --git a/examples/beaker_caching/relation_caching.py b/examples/beaker_caching/relation_caching.py index 1691b071b6..f1e5c7886f 100644 --- a/examples/beaker_caching/relation_caching.py +++ b/examples/beaker_caching/relation_caching.py @@ -1,7 +1,7 @@ """relationship_caching.py -Load a set of Person and Address objects, specifying that -related PostalCode, City, Country objects should be pulled from long +Load a set of Person and Address objects, specifying that +related PostalCode, City, Country objects should be pulled from long term cache. 
""" diff --git a/examples/custom_attributes/custom_management.py b/examples/custom_attributes/custom_management.py index 50b65a37ee..ebd18a6fa0 100644 --- a/examples/custom_attributes/custom_management.py +++ b/examples/custom_attributes/custom_management.py @@ -138,7 +138,7 @@ class MyCollectionAdapter(object): def fire_pre_remove_event(self, initiator=None): self.state.get_impl(self.key).\ - fire_pre_remove_event(self.state, self.state.dict, + fire_pre_remove_event(self.state, self.state.dict, initiator) class MyCollection(object): @@ -161,12 +161,12 @@ class MyCollection(object): if __name__ == '__main__': meta = MetaData(create_engine('sqlite://')) - table1 = Table('table1', meta, - Column('id', Integer, primary_key=True), + table1 = Table('table1', meta, + Column('id', Integer, primary_key=True), Column('name', Text)) - table2 = Table('table2', meta, - Column('id', Integer, primary_key=True), - Column('name', Text), + table2 = Table('table2', meta, + Column('id', Integer, primary_key=True), + Column('name', Text), Column('t1id', Integer, ForeignKey('table1.id'))) meta.create_all() diff --git a/examples/declarative_reflection/__init__.py b/examples/declarative_reflection/__init__.py index 81b14b1ba8..20c7b85e6c 100644 --- a/examples/declarative_reflection/__init__.py +++ b/examples/declarative_reflection/__init__.py @@ -28,7 +28,7 @@ Usage example:: class Bar(Base): __tablename__ = 'bar' - # illustrate overriding of "bar.foo_id" to have + # illustrate overriding of "bar.foo_id" to have # a foreign key constraint otherwise not # reflected, such as when using MySQL foo_id = Column(Integer, ForeignKey('foo.id')) @@ -42,6 +42,6 @@ Usage example:: Foo(bars=[Bar(data='b3'), Bar(data='b4')], data='f2') ]) s.commit() - + """ diff --git a/examples/declarative_reflection/declarative_reflection.py b/examples/declarative_reflection/declarative_reflection.py index 3721493172..a1f9ef08dd 100644 --- a/examples/declarative_reflection/declarative_reflection.py +++ b/examples/declarative_reflection/declarative_reflection.py @@ -8,9 +8,9 @@ class DeclarativeReflectedBase(object): @classmethod def __mapper_cls__(cls, *args, **kw): - """Declarative will use this function in lieu of + """Declarative will use this function in lieu of calling mapper() directly. - + Collect each series of arguments and invoke them when prepare() is called. """ @@ -29,16 +29,16 @@ class DeclarativeReflectedBase(object): # into the existing Table object. if args[1] is not None: table = args[1] - Table(table.name, - cls.metadata, + Table(table.name, + cls.metadata, extend_existing=True, autoload_replace=False, - autoload=True, + autoload=True, autoload_with=engine, schema=table.schema) # see if we need 'inherits' in the - # mapper args. Declarative will have + # mapper args. Declarative will have # skipped this since mappings weren't # available yet. 
for c in klass.__bases__: @@ -64,7 +64,7 @@ if __name__ == '__main__': class Bar(Reflected): __tablename__ = 'bar' - # illustrate overriding of "bar.foo_id" to have + # illustrate overriding of "bar.foo_id" to have # a foreign key constraint otherwise not # reflected, such as when using MySQL foo_id = Column(Integer, ForeignKey('foo.id')) diff --git a/examples/dynamic_dict/__init__.py b/examples/dynamic_dict/__init__.py index 69ac409522..3df907cc54 100644 --- a/examples/dynamic_dict/__init__.py +++ b/examples/dynamic_dict/__init__.py @@ -1,5 +1,5 @@ """Illustrates how to place a dictionary-like facade on top of a "dynamic" relation, so -that dictionary operations (assuming simple string keys) can operate upon a large +that dictionary operations (assuming simple string keys) can operate upon a large collection without loading the full collection at once. """ \ No newline at end of file diff --git a/examples/elementtree/__init__.py b/examples/elementtree/__init__.py index 8d47f4acea..ee1e9e193a 100644 --- a/examples/elementtree/__init__.py +++ b/examples/elementtree/__init__.py @@ -15,8 +15,8 @@ In order of complexity: represented in a separate table. The nodes are associated in a hierarchy using an adjacency list structure. A query function is introduced which can search for nodes along any path with a given structure of attributes, basically a (very narrow) subset of xpath. -* ``optimized_al.py`` - Uses the same strategy as ``adjacency_list.py``, but associates each - DOM row with its owning document row, so that a full document of DOM nodes can be +* ``optimized_al.py`` - Uses the same strategy as ``adjacency_list.py``, but associates each + DOM row with its owning document row, so that a full document of DOM nodes can be loaded using O(1) queries - the construction of the "hierarchy" is performed after the load in a non-recursive fashion and is much more efficient. @@ -27,7 +27,7 @@ E.g.:: session.add(Document(file, doc)) session.commit() - # locate documents with a certain path/attribute structure + # locate documents with a certain path/attribute structure for document in find_document('/somefile/header/field2[@attr=foo]'): # dump the XML print document diff --git a/examples/elementtree/optimized_al.py b/examples/elementtree/optimized_al.py index 102f6c3739..1cec613661 100644 --- a/examples/elementtree/optimized_al.py +++ b/examples/elementtree/optimized_al.py @@ -1,6 +1,6 @@ """This script duplicates adjacency_list.py, but optimizes the loading -of XML nodes to be based on a "flattened" datamodel. Any number of XML documents, -each of arbitrary complexity, can be loaded in their entirety via a single query +of XML nodes to be based on a "flattened" datamodel. Any number of XML documents, +each of arbitrary complexity, can be loaded in their entirety via a single query which joins on only three tables. """ @@ -25,7 +25,7 @@ documents = Table('documents', meta, Column('filename', String(30), unique=True), ) -# stores XML nodes in an adjacency list model. This corresponds to +# stores XML nodes in an adjacency list model. This corresponds to # Element and SubElement objects. elements = Table('elements', meta, Column('element_id', Integer, primary_key=True), @@ -61,15 +61,15 @@ class Document(object): ########################## PART IV - Persistence Mapping ##################### -# Node class. a non-public class which will represent +# Node class. a non-public class which will represent # the DB-persisted Element/SubElement object. 
We cannot create mappers for -# ElementTree elements directly because they are at the very least not new-style +# ElementTree elements directly because they are at the very least not new-style # classes, and also may be backed by native implementations. # so here we construct an adapter. class _Node(object): pass -# Attribute class. also internal, this will represent the key/value attributes stored for +# Attribute class. also internal, this will represent the key/value attributes stored for # a particular Node. class _Attribute(object): def __init__(self, name, value): diff --git a/examples/elementtree/pickle.py b/examples/elementtree/pickle.py index 28bee4672a..d40af275bd 100644 --- a/examples/elementtree/pickle.py +++ b/examples/elementtree/pickle.py @@ -1,6 +1,6 @@ """illustrates a quick and dirty way to persist an XML document expressed using ElementTree and pickle. -This is a trivial example using PickleType to marshal/unmarshal the ElementTree +This is a trivial example using PickleType to marshal/unmarshal the ElementTree document into a binary column. Compare to explicit.py which stores the individual components of the ElementTree structure in distinct rows using two additional mapped entities. Note that the usage of both styles of persistence are identical, as is the structure of the main Document class. diff --git a/examples/generic_associations/__init__.py b/examples/generic_associations/__init__.py index b166d91610..36d50266e2 100644 --- a/examples/generic_associations/__init__.py +++ b/examples/generic_associations/__init__.py @@ -1,8 +1,8 @@ """ -Illustrates various methods of associating multiple types of +Illustrates various methods of associating multiple types of parents with a particular child object. -The examples all use the declarative extension along with +The examples all use the declarative extension along with declarative mixins. Each one presents the identical use case at the end - two classes, ``Customer`` and ``Supplier``, both subclassing the ``HasAddresses`` mixin, which ensures that the diff --git a/examples/generic_associations/discriminator_on_association.py b/examples/generic_associations/discriminator_on_association.py index a73b4df1dc..3c170d5c88 100644 --- a/examples/generic_associations/discriminator_on_association.py +++ b/examples/generic_associations/discriminator_on_association.py @@ -12,8 +12,8 @@ that refers to a particular table is present, the extra association table is used so that traditional foreign key constraints may be used. This configuration has the advantage that a fixed set of tables -are used, with no extra-table-per-parent needed. The individual -Address record can also locate its parent with no need to scan +are used, with no extra-table-per-parent needed. The individual +Address record can also locate its parent with no need to scan amongst many tables. """ @@ -26,7 +26,7 @@ from sqlalchemy.ext.associationproxy import association_proxy class Base(object): """Base class which provides automated table name and surrogate primary key column. - + """ @declared_attr def __tablename__(cls): @@ -37,17 +37,17 @@ Base = declarative_base(cls=Base) class AddressAssociation(Base): """Associates a collection of Address objects with a particular parent. 
- + """ __tablename__ = "address_association" @classmethod def creator(cls, discriminator): - """Provide a 'creator' function to use with + """Provide a 'creator' function to use with the association proxy.""" return lambda addresses:AddressAssociation( - addresses=addresses, + addresses=addresses, discriminator=discriminator) discriminator = Column(String) @@ -59,37 +59,37 @@ class AddressAssociation(Base): return getattr(self, "%s_parent" % self.discriminator) class Address(Base): - """The Address class. - - This represents all address records in a + """The Address class. + + This represents all address records in a single table. - + """ - association_id = Column(Integer, + association_id = Column(Integer, ForeignKey("address_association.id") ) street = Column(String) city = Column(String) zip = Column(String) association = relationship( - "AddressAssociation", + "AddressAssociation", backref="addresses") parent = association_proxy("association", "parent") def __repr__(self): return "%s(street=%r, city=%r, zip=%r)" % \ - (self.__class__.__name__, self.street, + (self.__class__.__name__, self.street, self.city, self.zip) class HasAddresses(object): """HasAddresses mixin, creates a relationship to the address_association table for each parent. - + """ @declared_attr def address_association_id(cls): - return Column(Integer, + return Column(Integer, ForeignKey("address_association.id")) @declared_attr @@ -99,8 +99,8 @@ class HasAddresses(object): "address_association", "addresses", creator=AddressAssociation.creator(discriminator) ) - return relationship("AddressAssociation", - backref=backref("%s_parent" % discriminator, + return relationship("AddressAssociation", + backref=backref("%s_parent" % discriminator, uselist=False)) @@ -117,7 +117,7 @@ session = Session(engine) session.add_all([ Customer( - name='customer 1', + name='customer 1', addresses=[ Address( street='123 anywhere street', diff --git a/examples/generic_associations/table_per_association.py b/examples/generic_associations/table_per_association.py index 86ee212dc6..e1ff2be5bb 100644 --- a/examples/generic_associations/table_per_association.py +++ b/examples/generic_associations/table_per_association.py @@ -6,7 +6,7 @@ for all parents. This configuration has the advantage that all Address rows are in one table, so that the definition of "Address" -can be maintained in one place. The association table +can be maintained in one place. The association table contains the foreign key to Address so that Address has no dependency on the system. @@ -20,7 +20,7 @@ from sqlalchemy.orm import Session, relationship class Base(object): """Base class which provides automated table name and surrogate primary key column. - + """ @declared_attr def __tablename__(cls): @@ -29,11 +29,11 @@ class Base(object): Base = declarative_base(cls=Base) class Address(Base): - """The Address class. - - This represents all address records in a + """The Address class. + + This represents all address records in a single table. - + """ street = Column(String) city = Column(String) @@ -41,23 +41,23 @@ class Address(Base): def __repr__(self): return "%s(street=%r, city=%r, zip=%r)" % \ - (self.__class__.__name__, self.street, + (self.__class__.__name__, self.street, self.city, self.zip) class HasAddresses(object): """HasAddresses mixin, creates a new address_association table for each parent. 
- + """ @declared_attr def addresses(cls): address_association = Table( "%s_addresses" % cls.__tablename__, cls.metadata, - Column("address_id", ForeignKey("address.id"), + Column("address_id", ForeignKey("address.id"), primary_key=True), - Column("%s_id" % cls.__tablename__, - ForeignKey("%s.id" % cls.__tablename__), + Column("%s_id" % cls.__tablename__, + ForeignKey("%s.id" % cls.__tablename__), primary_key=True), ) return relationship(Address, secondary=address_association) @@ -75,7 +75,7 @@ session = Session(engine) session.add_all([ Customer( - name='customer 1', + name='customer 1', addresses=[ Address( street='123 anywhere street', diff --git a/examples/generic_associations/table_per_related.py b/examples/generic_associations/table_per_related.py index 3130960b00..693908189f 100644 --- a/examples/generic_associations/table_per_related.py +++ b/examples/generic_associations/table_per_related.py @@ -17,7 +17,7 @@ from sqlalchemy.orm import Session, relationship class Base(object): """Base class which provides automated table name and surrogate primary key column. - + """ @declared_attr def __tablename__(cls): @@ -26,13 +26,13 @@ class Base(object): Base = declarative_base(cls=Base) class Address(object): - """Define columns that will be present in each + """Define columns that will be present in each 'Address' table. - + This is a declarative mixin, so additional mapped attributes beyond simple columns specified here should be set up using @declared_attr. - + """ street = Column(String) city = Column(String) @@ -40,13 +40,13 @@ class Address(object): def __repr__(self): return "%s(street=%r, city=%r, zip=%r)" % \ - (self.__class__.__name__, self.street, + (self.__class__.__name__, self.street, self.city, self.zip) class HasAddresses(object): """HasAddresses mixin, creates a new Address class for each parent. 
- + """ @declared_attr def addresses(cls): @@ -54,9 +54,9 @@ class HasAddresses(object): "%sAddress" % cls.__name__, (Address, Base,), dict( - __tablename__ = "%s_address" % + __tablename__ = "%s_address" % cls.__tablename__, - parent_id = Column(Integer, + parent_id = Column(Integer, ForeignKey("%s.id" % cls.__tablename__)), parent = relationship(cls) ) @@ -76,7 +76,7 @@ session = Session(engine) session.add_all([ Customer( - name='customer 1', + name='customer 1', addresses=[ Customer.Address( street='123 anywhere street', diff --git a/examples/graphs/directed_graph.py b/examples/graphs/directed_graph.py index 3ba602f002..b822cda89f 100644 --- a/examples/graphs/directed_graph.py +++ b/examples/graphs/directed_graph.py @@ -29,19 +29,19 @@ class Node(Base): class Edge(Base): __tablename__ = 'edge' - lower_id = Column(Integer, - ForeignKey('node.node_id'), + lower_id = Column(Integer, + ForeignKey('node.node_id'), primary_key=True) - higher_id = Column(Integer, - ForeignKey('node.node_id'), + higher_id = Column(Integer, + ForeignKey('node.node_id'), primary_key=True) lower_node = relationship(Node, - primaryjoin=lower_id==Node.node_id, + primaryjoin=lower_id==Node.node_id, backref='lower_edges') higher_node = relationship(Node, - primaryjoin=higher_id==Node.node_id, + primaryjoin=higher_id==Node.node_id, backref='higher_edges') # here we have lower.node_id <= higher.node_id diff --git a/examples/inheritance/concrete.py b/examples/inheritance/concrete.py index 84fc79cd5e..75741df6d5 100644 --- a/examples/inheritance/concrete.py +++ b/examples/inheritance/concrete.py @@ -4,13 +4,13 @@ from sqlalchemy.orm import mapper, sessionmaker, polymorphic_union metadata = MetaData() -managers_table = Table('managers', metadata, +managers_table = Table('managers', metadata, Column('employee_id', Integer, primary_key=True), Column('name', String(50)), Column('manager_data', String(40)) ) -engineers_table = Table('engineers', metadata, +engineers_table = Table('engineers', metadata, Column('employee_id', Integer, primary_key=True), Column('name', String(50)), Column('engineer_info', String(40)) diff --git a/examples/inheritance/polymorph.py b/examples/inheritance/polymorph.py index 316671bed5..004db10ca4 100644 --- a/examples/inheritance/polymorph.py +++ b/examples/inheritance/polymorph.py @@ -8,29 +8,29 @@ from sqlalchemy.orm import mapper, relationship, sessionmaker metadata = MetaData() # a table to store companies -companies = Table('companies', metadata, +companies = Table('companies', metadata, Column('company_id', Integer, primary_key=True), Column('name', String(50))) # we will define an inheritance relationship between the table "people" and # "engineers", and a second inheritance relationship between the table # "people" and "managers" -people = Table('people', metadata, +people = Table('people', metadata, Column('person_id', Integer, primary_key=True), Column('company_id', Integer, ForeignKey('companies.company_id')), Column('name', String(50)), Column('type', String(30))) -engineers = Table('engineers', metadata, - Column('person_id', Integer, ForeignKey('people.person_id'), +engineers = Table('engineers', metadata, + Column('person_id', Integer, ForeignKey('people.person_id'), primary_key=True), Column('status', String(30)), Column('engineer_name', String(50)), Column('primary_language', String(50)), ) -managers = Table('managers', metadata, - Column('person_id', Integer, ForeignKey('people.person_id'), +managers = Table('managers', metadata, + Column('person_id', Integer, 
ForeignKey('people.person_id'), primary_key=True), Column('status', String(30)), Column('manager_name', String(50)) @@ -47,7 +47,7 @@ class Engineer(Person): def __repr__(self): return "Engineer %s, status %s, engineer_name %s, "\ "primary_language %s" % \ - (self.name, self.status, + (self.name, self.status, self.engineer_name, self.primary_language) class Manager(Person): def __repr__(self): diff --git a/examples/inheritance/single.py b/examples/inheritance/single.py index a7883fcbff..b2f9341208 100644 --- a/examples/inheritance/single.py +++ b/examples/inheritance/single.py @@ -5,11 +5,11 @@ from sqlalchemy.orm import mapper, relationship, sessionmaker metadata = MetaData() # a table to store companies -companies = Table('companies', metadata, +companies = Table('companies', metadata, Column('company_id', Integer, primary_key=True), Column('name', String(50))) -employees_table = Table('employees', metadata, +employees_table = Table('employees', metadata, Column('employee_id', Integer, primary_key=True), Column('company_id', Integer, ForeignKey('companies.company_id')), Column('name', String(50)), @@ -31,7 +31,7 @@ class Engineer(Person): def __repr__(self): return "Engineer %s, status %s, engineer_name %s, "\ "primary_language %s" % \ - (self.name, self.status, + (self.name, self.status, self.engineer_name, self.primary_language) class Manager(Person): def __repr__(self): diff --git a/examples/large_collection/large_collection.py b/examples/large_collection/large_collection.py index 20c3b52185..b3aa5299d6 100644 --- a/examples/large_collection/large_collection.py +++ b/examples/large_collection/large_collection.py @@ -6,7 +6,7 @@ from sqlalchemy.orm import (mapper, relationship, sessionmaker) meta = MetaData() -org_table = Table('organizations', meta, +org_table = Table('organizations', meta, Column('org_id', Integer, primary_key=True), Column('org_name', String(50), nullable=False, key='name'), mysql_engine='InnoDB') @@ -27,20 +27,20 @@ class Member(object): self.name = name mapper(Organization, org_table, properties = { - 'members' : relationship(Member, + 'members' : relationship(Member, # Organization.members will be a Query object - no loading # of the entire collection occurs unless requested - lazy="dynamic", + lazy="dynamic", - # Member objects "belong" to their parent, are deleted when + # Member objects "belong" to their parent, are deleted when # removed from the collection cascade="all, delete-orphan", # "delete, delete-orphan" cascade does not load in objects on delete, # allows ON DELETE CASCADE to handle it. - # this only works with a database that supports ON DELETE CASCADE - + # this only works with a database that supports ON DELETE CASCADE - # *not* sqlite or MySQL with MyISAM - passive_deletes=True, + passive_deletes=True, ) }) @@ -65,7 +65,7 @@ if __name__ == '__main__': print "-------------------------\nflush one - save org + 3 members\n" sess.commit() - # the 'members' collection is a Query. it issues + # the 'members' collection is a Query. it issues # SQL as needed to load subsets of the collection. print "-------------------------\nload subset of members\n" members = org.members.filter(member_table.c.name.like('%member t%')).all() @@ -80,8 +80,8 @@ if __name__ == '__main__': print "-------------------------\nflush two - save 3 more members\n" sess.commit() - # delete the object. Using ON DELETE CASCADE - # SQL is only emitted for the head row - the Member rows + # delete the object. 
Using ON DELETE CASCADE + # SQL is only emitted for the head row - the Member rows # disappear automatically without the need for additional SQL. sess.delete(org) print "-------------------------\nflush three - delete org, delete members in one statement\n" diff --git a/examples/nested_sets/nested_sets.py b/examples/nested_sets/nested_sets.py index 55d734d4ed..e35ea61c37 100644 --- a/examples/nested_sets/nested_sets.py +++ b/examples/nested_sets/nested_sets.py @@ -45,7 +45,7 @@ class NestedSetExtension(MapperExtension): class Employee(Base): __tablename__ = 'personnel' __mapper_args__ = { - 'extension':NestedSetExtension(), + 'extension':NestedSetExtension(), 'batch':False # allows extension to fire for each instance before going to the next. } diff --git a/examples/postgis/__init__.py b/examples/postgis/__init__.py index 3eb4ed3bcb..e8f10e59d3 100644 --- a/examples/postgis/__init__.py +++ b/examples/postgis/__init__.py @@ -1,11 +1,11 @@ -"""A naive example illustrating techniques to help +"""A naive example illustrating techniques to help embed PostGIS functionality. This example was originally developed in the hopes that it would be extrapolated into a comprehensive PostGIS integration layer. We are pleased to announce that this has come to fruition as `GeoAlchemy `_. The example illustrates: -* a DDL extension which allows CREATE/DROP to work in +* a DDL extension which allows CREATE/DROP to work in conjunction with AddGeometryColumn/DropGeometryColumn * a Geometry type, as well as a few subtypes, which @@ -24,7 +24,7 @@ The example illustrates: * a standalone operator example. The implementation is limited to only public, well known -and simple to use extension points. +and simple to use extension points. E.g.:: diff --git a/examples/postgis/postgis.py b/examples/postgis/postgis.py index a1a93c732a..247265e17c 100644 --- a/examples/postgis/postgis.py +++ b/examples/postgis/postgis.py @@ -32,7 +32,7 @@ class PersistentGisElement(GisElement): class TextualGisElement(GisElement, expression.Function): """Represents a Geometry value as expressed within application code; i.e. in wkt format. - Extends expression.Function so that the value is interpreted as + Extends expression.Function so that the value is interpreted as GeomFromText(value) in a SQL expression context. """ @@ -74,7 +74,7 @@ class Geometry(TypeEngine): return value return process -# other datatypes can be added as needed, which +# other datatypes can be added as needed, which # currently only affect DDL statements. class Point(Geometry): @@ -92,7 +92,7 @@ class LineString(Curve): # DDL integration class GISDDL(object): - """A DDL extension which integrates SQLAlchemy table create/drop + """A DDL extension which integrates SQLAlchemy table create/drop methods with PostGis' AddGeometryColumn/DropGeometryColumn functions. Usage:: @@ -162,7 +162,7 @@ def _to_postgis(value): class GisAttribute(AttributeExtension): - """Intercepts 'set' events on a mapped instance attribute and + """Intercepts 'set' events on a mapped instance attribute and converts the incoming value to a GIS expression. 
""" @@ -198,8 +198,8 @@ def GISColumn(*args, **kw): """ return column_property( - Column(*args, **kw), - extension=GisAttribute(), + Column(*args, **kw), + extension=GisAttribute(), comparator_factory=GisComparator ) diff --git a/examples/sharding/__init__.py b/examples/sharding/__init__.py index d4b4639494..dacc815f9b 100644 --- a/examples/sharding/__init__.py +++ b/examples/sharding/__init__.py @@ -10,8 +10,8 @@ The basic components of a "sharded" mapping are: * a function which can return a list of shard ids which apply to a particular instance identifier; this is called "id_chooser". If it returns all shard ids, all shards will be searched. -* a function which can return a list of shard ids to try, given a particular - Query ("query_chooser"). If it returns all shard ids, all shards will be +* a function which can return a list of shard ids to try, given a particular + Query ("query_chooser"). If it returns all shard ids, all shards will be queried and the results joined together. In this example, four sqlite databases will store information about weather @@ -22,9 +22,9 @@ single shard being requested. The construction of generic sharding routines is an ambitious approach to the issue of organizing instances among multiple databases. For a -more plain-spoken alternative, the "distinct entity" approach +more plain-spoken alternative, the "distinct entity" approach is a simple method of assigning objects to different tables (and potentially -database nodes) in an explicit way - described on the wiki at +database nodes) in an explicit way - described on the wiki at `EntityName `_. """ diff --git a/examples/sharding/attribute_shard.py b/examples/sharding/attribute_shard.py index 5831d7ee39..410346838f 100644 --- a/examples/sharding/attribute_shard.py +++ b/examples/sharding/attribute_shard.py @@ -9,7 +9,7 @@ from sqlalchemy.sql import operators, visitors import datetime # step 2. databases. -# db1 is used for id generation. The "pool_threadlocal" +# db1 is used for id generation. The "pool_threadlocal" # causes the id_generator() to use the same connection as that # of an ongoing transaction within db1. echo = True @@ -36,7 +36,7 @@ meta = MetaData() # we need a way to create identifiers which are unique across all # databases. one easy way would be to just use a composite primary key, where one -# value is the shard id. but here, we'll show something more "generic", an +# value is the shard id. but here, we'll show something more "generic", an # id generation function. we'll use a simplistic "id table" stored in database # #1. Any other method will do just as well; UUID, hilo, application-specific, etc. @@ -53,7 +53,7 @@ def id_generator(ctx): # table setup. we'll store a lead table of continents/cities, # and a secondary table storing locations. # a particular row will be placed in the database whose shard id corresponds to the -# 'continent'. in this setup, secondary rows in 'weather_reports' will +# 'continent'. in this setup, secondary rows in 'weather_reports' will # be placed in the same DB as that of the parent, but this can be changed # if you're willing to write more complex sharding functions. @@ -81,7 +81,7 @@ db1.execute(ids.insert(), nextid=1) # step 5. define sharding functions. -# we'll use a straight mapping of a particular set of "country" +# we'll use a straight mapping of a particular set of "country" # attributes to shard id. shard_lookup = { 'North America':'north_america', @@ -94,7 +94,7 @@ def shard_chooser(mapper, instance, clause=None): """shard chooser. 
looks at the given instance and returns a shard id - note that we need to define conditions for + note that we need to define conditions for the WeatherLocation class, as well as our secondary Report class which will point back to its WeatherLocation via its 'location' attribute. @@ -109,8 +109,8 @@ def id_chooser(query, ident): given a primary key, returns a list of shards to search. here, we don't have any particular information from a - pk so we just return all shard ids. often, youd want to do some - kind of round-robin strategy here so that requests are evenly + pk so we just return all shard ids. often, youd want to do some + kind of round-robin strategy here so that requests are evenly distributed among DBs. """ @@ -132,8 +132,8 @@ def query_chooser(query): # "shares_lineage()" returns True if both columns refer to the same # statement column, adjusting for any annotations present. # (an annotation is an internal clone of a Column object - # and occur when using ORM-mapped attributes like - # "WeatherLocation.continent"). A simpler comparison, though less accurate, + # and occur when using ORM-mapped attributes like + # "WeatherLocation.continent"). A simpler comparison, though less accurate, # would be "column.key == 'continent'". if column.shares_lineage(weather_locations.c.continent): if operator == operators.eq: @@ -150,7 +150,7 @@ def _get_query_comparisons(query): """Search an orm.Query object for binary expressions. Returns expressions which match a Column against one or more - literal values as a list of tuples of the form + literal values as a list of tuples of the form (column, operator, values). "values" is a single value or tuple of values depending on the operator. @@ -160,15 +160,15 @@ def _get_query_comparisons(query): comparisons = [] def visit_bindparam(bind): - # visit a bind parameter. + # visit a bind parameter. # check in _params for it first if bind.key in query._params: value = query._params[bind.key] elif bind.callable: - # some ORM functions (lazy loading) - # place the bind's value as a - # callable for deferred evaulation. + # some ORM functions (lazy loading) + # place the bind's value as a + # callable for deferred evaulation. value = bind.callable() else: # just use .value @@ -185,7 +185,7 @@ def _get_query_comparisons(query): binary.operator == operators.in_op and \ hasattr(binary.right, 'clauses'): comparisons.append( - (binary.left, binary.operator, + (binary.left, binary.operator, tuple(binds[bind] for bind in binary.right.clauses) ) ) @@ -213,8 +213,8 @@ def _get_query_comparisons(query): # further configure create_session to use these functions create_session.configure( - shard_chooser=shard_chooser, - id_chooser=id_chooser, + shard_chooser=shard_chooser, + id_chooser=id_chooser, query_chooser=query_chooser ) diff --git a/examples/versioning/__init__.py b/examples/versioning/__init__.py index 9909009402..72b5afe966 100644 --- a/examples/versioning/__init__.py +++ b/examples/versioning/__init__.py @@ -9,7 +9,7 @@ Usage is illustrated via a unit test module ``test_versioning.py``, which can be run via nose:: cd examples/versioning - nosetests -v + nosetests -v A fragment of example usage, using declarative:: diff --git a/examples/versioning/_lib.py b/examples/versioning/_lib.py index d5f2cb0b75..ec0da47090 100644 --- a/examples/versioning/_lib.py +++ b/examples/versioning/_lib.py @@ -1,7 +1,7 @@ """copy of ComparableEntity and eq_() from test.lib. 
This is just to support running the example outside of -the SQLA testing environment which is no longer part of +the SQLA testing environment which is no longer part of SQLAlchemy as of 0.7. """ diff --git a/examples/versioning/history_meta.py b/examples/versioning/history_meta.py index 1226a8f62e..5335993941 100644 --- a/examples/versioning/history_meta.py +++ b/examples/versioning/history_meta.py @@ -71,9 +71,9 @@ def _history_mapper(local_mapper): versioned_cls = type.__new__(type, "%sHistory" % cls.__name__, bases, {}) m = mapper( - versioned_cls, - table, - inherits=super_history_mapper, + versioned_cls, + table, + inherits=super_history_mapper, polymorphic_on=polymorphic_on, polymorphic_identity=local_mapper.polymorphic_identity ) @@ -129,9 +129,9 @@ def create_version(obj, session, deleted = False): try: prop = obj_mapper.get_property_by_column(obj_col) except UnmappedColumnError: - # in the case of single table inheritance, there may be + # in the case of single table inheritance, there may be # columns on the mapped table intended for the subclass only. - # the "unmapped" status of the subclass column on the + # the "unmapped" status of the subclass column on the # base class is a feature of the declarative module as of sqla 0.5.2. continue diff --git a/examples/versioning/test_versioning.py b/examples/versioning/test_versioning.py index 389dba9187..9781fdc5d3 100644 --- a/examples/versioning/test_versioning.py +++ b/examples/versioning/test_versioning.py @@ -185,8 +185,8 @@ class TestVersioning(TestCase): eq_( sess.query(BaseClassHistory).order_by(BaseClassHistory.id).all(), [ - SubClassSeparatePkHistory(id=1, name=u'sep1', type=u'sep', version=1), - BaseClassHistory(id=2, name=u'base1', type=u'base', version=1), + SubClassSeparatePkHistory(id=1, name=u'sep1', type=u'sep', version=1), + BaseClassHistory(id=2, name=u'base1', type=u'base', version=1), SubClassSamePkHistory(id=3, name=u'same1', type=u'same', version=1) ] ) @@ -196,9 +196,9 @@ class TestVersioning(TestCase): eq_( sess.query(BaseClassHistory).order_by(BaseClassHistory.id, BaseClassHistory.version).all(), [ - SubClassSeparatePkHistory(id=1, name=u'sep1', type=u'sep', version=1), - BaseClassHistory(id=2, name=u'base1', type=u'base', version=1), - SubClassSamePkHistory(id=3, name=u'same1', type=u'same', version=1), + SubClassSeparatePkHistory(id=1, name=u'sep1', type=u'sep', version=1), + BaseClassHistory(id=2, name=u'base1', type=u'base', version=1), + SubClassSamePkHistory(id=3, name=u'same1', type=u'same', version=1), SubClassSamePkHistory(id=3, name=u'same1', type=u'same', version=2) ] ) @@ -207,10 +207,10 @@ class TestVersioning(TestCase): eq_( sess.query(BaseClassHistory).order_by(BaseClassHistory.id, BaseClassHistory.version).all(), [ - SubClassSeparatePkHistory(id=1, name=u'sep1', type=u'sep', version=1), - BaseClassHistory(id=2, name=u'base1', type=u'base', version=1), - BaseClassHistory(id=2, name=u'base1mod', type=u'base', version=2), - SubClassSamePkHistory(id=3, name=u'same1', type=u'same', version=1), + SubClassSeparatePkHistory(id=1, name=u'sep1', type=u'sep', version=1), + BaseClassHistory(id=2, name=u'base1', type=u'base', version=1), + BaseClassHistory(id=2, name=u'base1mod', type=u'base', version=2), + SubClassSamePkHistory(id=3, name=u'same1', type=u'same', version=1), SubClassSamePkHistory(id=3, name=u'same1', type=u'same', version=2) ] ) diff --git a/ez_setup.py b/ez_setup.py index b74adc0654..a8c797c3b3 100644 --- a/ez_setup.py +++ b/ez_setup.py @@ -100,7 +100,7 @@ def use_setuptools( try: import 
pkg_resources except ImportError: - return do_download() + return do_download() try: pkg_resources.require("setuptools>="+version); return except pkg_resources.VersionConflict, e: diff --git a/lib/sqlalchemy/connectors/mxodbc.py b/lib/sqlalchemy/connectors/mxodbc.py index e46253b49a..2054d0d1ac 100644 --- a/lib/sqlalchemy/connectors/mxodbc.py +++ b/lib/sqlalchemy/connectors/mxodbc.py @@ -117,7 +117,7 @@ class MxODBCConnector(Connector): return False def _get_server_version_info(self, connection): - # eGenix suggests using conn.dbms_version instead + # eGenix suggests using conn.dbms_version instead # of what we're doing here dbapi_con = connection.connection version = [] diff --git a/lib/sqlalchemy/connectors/pyodbc.py b/lib/sqlalchemy/connectors/pyodbc.py index a684a2dcb6..f584e0c9c4 100644 --- a/lib/sqlalchemy/connectors/pyodbc.py +++ b/lib/sqlalchemy/connectors/pyodbc.py @@ -70,7 +70,7 @@ class PyODBCConnector(Connector): if 'port' in keys and not 'port' in query: port = ',%d' % int(keys.pop('port')) - connectors = ["DRIVER={%s}" % + connectors = ["DRIVER={%s}" % keys.pop('driver', self.pyodbc_driver_name), 'Server=%s%s' % (keys.pop('host', ''), port), 'Database=%s' % keys.pop('database', '') ] @@ -83,9 +83,9 @@ class PyODBCConnector(Connector): connectors.append("Trusted_Connection=Yes") # if set to 'Yes', the ODBC layer will try to automagically - # convert textual data from your database encoding to your - # client encoding. This should obviously be set to 'No' if - # you query a cp1253 encoded database from a latin1 client... + # convert textual data from your database encoding to your + # client encoding. This should obviously be set to 'No' if + # you query a cp1253 encoded database from a latin1 client... if 'odbc_autotranslate' in keys: connectors.append("AutoTranslate=%s" % keys.pop("odbc_autotranslate")) @@ -126,7 +126,7 @@ class PyODBCConnector(Connector): if self._user_supports_unicode_binds is not None: self.supports_unicode_binds = self._user_supports_unicode_binds else: - self.supports_unicode_binds = (not self.freetds or + self.supports_unicode_binds = (not self.freetds or self.freetds_driver_version >= '0.91' ) and not self.easysoft # end Py2K diff --git a/lib/sqlalchemy/connectors/zxJDBC.py b/lib/sqlalchemy/connectors/zxJDBC.py index 5bc25f4aa8..61a9e565b4 100644 --- a/lib/sqlalchemy/connectors/zxJDBC.py +++ b/lib/sqlalchemy/connectors/zxJDBC.py @@ -33,7 +33,7 @@ class ZxJDBCConnector(Connector): def _create_jdbc_url(self, url): """Create a JDBC url from a :class:`~sqlalchemy.engine.url.URL`""" return 'jdbc:%s://%s%s/%s' % (self.jdbc_db_name, url.host, - url.port is not None + url.port is not None and ':%s' % url.port or '', url.database) @@ -41,8 +41,8 @@ class ZxJDBCConnector(Connector): opts = self._driver_kwargs() opts.update(url.query) return [ - [self._create_jdbc_url(url), - url.username, url.password, + [self._create_jdbc_url(url), + url.username, url.password, self.jdbc_driver_name], opts] diff --git a/lib/sqlalchemy/dialects/access/base.py b/lib/sqlalchemy/dialects/access/base.py index 29f10c5608..f107c9c8c1 100644 --- a/lib/sqlalchemy/dialects/access/base.py +++ b/lib/sqlalchemy/dialects/access/base.py @@ -11,7 +11,7 @@ Support for the Microsoft Access database. .. note:: - The Access dialect is **non-functional as of SQLAlchemy 0.6**, + The Access dialect is **non-functional as of SQLAlchemy 0.6**, pending development efforts to bring it up-to-date. 
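As a side note on the ``connectors/pyodbc.py`` hunk above: the hostname-based connection logic amounts to roughly the following self-contained sketch. The function name and the plain-dict input are illustrative assumptions; the actual connector derives these values from the SQLAlchemy URL object::

    def build_odbc_connect_string(keys):
        # 'keys' stands in for the URL-derived keyword dictionary
        port = ''
        if 'port' in keys:
            # ODBC uses a comma, not a colon, to join host and port
            port = ',%d' % int(keys.pop('port'))
        connectors = ["DRIVER={%s}" % keys.pop('driver', 'SQL Server'),
                      'Server=%s%s' % (keys.pop('host', ''), port),
                      'Database=%s' % keys.pop('database', '')]
        user = keys.pop('user', None)
        if user:
            connectors.append("UID=%s" % user)
            connectors.append("PWD=%s" % keys.pop('password', ''))
        else:
            # no username is taken to mean Windows integrated security
            connectors.append("Trusted_Connection=Yes")
        return ';'.join(connectors)

    print build_odbc_connect_string(
        {'driver': 'SQL Server', 'host': 'myhost', 'port': 1433,
         'database': 'mydb', 'user': 'scott', 'password': 'tiger'})
    # DRIVER={SQL Server};Server=myhost,1433;Database=mydb;UID=scott;PWD=tiger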
@@ -125,7 +125,7 @@ class AccessExecutionContext(default.DefaultExecutionContext): # self._last_inserted_ids[0] is None: self.cursor.execute("SELECT @@identity AS lastrowid") row = self.cursor.fetchone() - self._last_inserted_ids = [int(row[0])] + self._last_inserted_ids = [int(row[0])] #+ self._last_inserted_ids[1:] # print "LAST ROW ID", self._last_inserted_ids @@ -260,7 +260,7 @@ class AccessDialect(default.DefaultDialect): colargs = \ { - 'nullable': not(col.Required or + 'nullable': not(col.Required or col.Attributes & const.dbAutoIncrField), } default = col.DefaultValue @@ -287,7 +287,7 @@ class AccessDialect(default.DefaultDialect): if isinstance(thecol.type, AcInteger) and \ not (thecol.default and isinstance( - thecol.default.arg, + thecol.default.arg, schema.Sequence )): thecol.autoincrement = False @@ -322,7 +322,7 @@ class AccessDialect(default.DefaultDialect): # This is necessary, so we get the latest updates dtbs = daoEngine.OpenDatabase(connection.engine.url.database) - names = [t.Name for t in dtbs.TableDefs + names = [t.Name for t in dtbs.TableDefs if t.Name[:4] != "MSys" and t.Name[:4] != "~TMP"] dtbs.Close() return names @@ -373,7 +373,7 @@ class AccessCompiler(compiler.SQLCompiler): 'length': 'len', } def visit_function(self, func): - """Access function names differ from the ANSI SQL names; + """Access function names differ from the ANSI SQL names; rewrite common ones""" func.name = self.function_rewrites.get(func.name, func.name) return super(AccessCompiler, self).visit_function(func) diff --git a/lib/sqlalchemy/dialects/firebird/__init__.py b/lib/sqlalchemy/dialects/firebird/__init__.py index 665e32267f..f79588d24d 100644 --- a/lib/sqlalchemy/dialects/firebird/__init__.py +++ b/lib/sqlalchemy/dialects/firebird/__init__.py @@ -14,7 +14,7 @@ from sqlalchemy.dialects.firebird.base import \ dialect __all__ = ( - 'SMALLINT', 'BIGINT', 'FLOAT', 'FLOAT', 'DATE', 'TIME', + 'SMALLINT', 'BIGINT', 'FLOAT', 'FLOAT', 'DATE', 'TIME', 'TEXT', 'NUMERIC', 'FLOAT', 'TIMESTAMP', 'VARCHAR', 'CHAR', 'BLOB', 'dialect' ) diff --git a/lib/sqlalchemy/dialects/firebird/base.py b/lib/sqlalchemy/dialects/firebird/base.py index 5ef30b36dd..e4458e70f6 100644 --- a/lib/sqlalchemy/dialects/firebird/base.py +++ b/lib/sqlalchemy/dialects/firebird/base.py @@ -135,7 +135,7 @@ class VARCHAR(_StringType, sqltypes.VARCHAR): __visit_name__ = 'VARCHAR' def __init__(self, length = None, **kwargs): - super(VARCHAR, self).__init__(length=length, **kwargs) + super(VARCHAR, self).__init__(length=length, **kwargs) class CHAR(_StringType, sqltypes.CHAR): """Firebird CHAR type""" @@ -164,7 +164,7 @@ ischema_names = { } -# TODO: date conversion types (should be implemented as _FBDateTime, +# TODO: date conversion types (should be implemented as _FBDateTime, # _FBDate, etc. 
as bind/result functionality is required) class FBTypeCompiler(compiler.GenericTypeCompiler): @@ -339,7 +339,7 @@ class FBExecutionContext(default.DefaultExecutionContext): """Get the next value from the sequence using ``gen_id()``.""" return self._execute_scalar( - "SELECT gen_id(%s, 1) FROM rdb$database" % + "SELECT gen_id(%s, 1) FROM rdb$database" % self.dialect.identifier_preparer.format_sequence(seq), type_ ) @@ -418,7 +418,7 @@ class FBDialect(default.DefaultDialect): return name def has_table(self, connection, table_name, schema=None): - """Return ``True`` if the given table exists, ignoring + """Return ``True`` if the given table exists, ignoring the `schema`.""" tblqry = """ @@ -489,8 +489,8 @@ class FBDialect(default.DefaultDialect): return pkfields @reflection.cache - def get_column_sequence(self, connection, - table_name, column_name, + def get_column_sequence(self, connection, + table_name, column_name, schema=None, **kw): tablename = self.denormalize_name(table_name) colname = self.denormalize_name(column_name) @@ -528,7 +528,7 @@ class FBDialect(default.DefaultDialect): COALESCE(cs.rdb$bytes_per_character,1) AS flen, f.rdb$field_precision AS fprec, f.rdb$field_scale AS fscale, - COALESCE(r.rdb$default_source, + COALESCE(r.rdb$default_source, f.rdb$default_source) AS fdefault FROM rdb$relation_fields r JOIN rdb$fields f ON r.rdb$field_source=f.rdb$field_name @@ -563,7 +563,7 @@ class FBDialect(default.DefaultDialect): coltype = sqltypes.NULLTYPE elif colspec == 'INT64': coltype = coltype( - precision=row['fprec'], + precision=row['fprec'], scale=row['fscale'] * -1) elif colspec in ('VARYING', 'CSTRING'): coltype = coltype(row['flen']) @@ -582,7 +582,7 @@ class FBDialect(default.DefaultDialect): if row['fdefault'] is not None: # the value comes down as "DEFAULT 'value'": there may be # more than one whitespace around the "DEFAULT" keyword - # and it may also be lower case + # and it may also be lower case # (see also http://tracker.firebirdsql.org/browse/CORE-356) defexpr = row['fdefault'].lstrip() assert defexpr[:8].rstrip().upper() == \ diff --git a/lib/sqlalchemy/dialects/firebird/kinterbasdb.py b/lib/sqlalchemy/dialects/firebird/kinterbasdb.py index d9d749b3cc..a5dc821be1 100644 --- a/lib/sqlalchemy/dialects/firebird/kinterbasdb.py +++ b/lib/sqlalchemy/dialects/firebird/kinterbasdb.py @@ -17,20 +17,20 @@ Kinterbasedb backend specific keyword arguments are: SQLAlchemy uses 200 with Unicode, datetime and decimal support (see details__). -* concurrency_level - set the backend policy with regards to threading +* concurrency_level - set the backend policy with regards to threading issues: by default SQLAlchemy uses policy 1 (see details__). -* enable_rowcount - True by default, setting this to False disables - the usage of "cursor.rowcount" with the +* enable_rowcount - True by default, setting this to False disables + the usage of "cursor.rowcount" with the Kinterbasdb dialect, which SQLAlchemy ordinarily calls upon automatically - after any UPDATE or DELETE statement. When disabled, SQLAlchemy's - ResultProxy will return -1 for result.rowcount. The rationale here is - that Kinterbasdb requires a second round trip to the database when - .rowcount is called - since SQLA's resultproxy automatically closes - the cursor after a non-result-returning statement, rowcount must be + after any UPDATE or DELETE statement. When disabled, SQLAlchemy's + ResultProxy will return -1 for result.rowcount. 
The rationale here is + that Kinterbasdb requires a second round trip to the database when + .rowcount is called - since SQLA's resultproxy automatically closes + the cursor after a non-result-returning statement, rowcount must be called, if at all, before the result object is returned. Additionally, cursor.rowcount may not return correct results with older versions - of Firebird, and setting this flag to False will also cause the + of Firebird, and setting this flag to False will also cause the SQLAlchemy ORM to ignore its usage. The behavior can also be controlled on a per-execution basis using the `enable_rowcount` option with :meth:`execution_options()`:: @@ -64,7 +64,7 @@ class _FBNumeric_kinterbasdb(sqltypes.Numeric): class FBExecutionContext_kinterbasdb(FBExecutionContext): @property def rowcount(self): - if self.execution_options.get('enable_rowcount', + if self.execution_options.get('enable_rowcount', self.dialect.enable_rowcount): return self.cursor.rowcount else: @@ -135,7 +135,7 @@ class FBDialect_kinterbasdb(FBDialect): # that for backward compatibility reasons returns a string like # LI-V6.3.3.12981 Firebird 2.0 # where the first version is a fake one resembling the old - # Interbase signature. + # Interbase signature. fbconn = connection.connection version = fbconn.server_version @@ -159,7 +159,7 @@ class FBDialect_kinterbasdb(FBDialect): msg = str(e) return ('Unable to complete network request to host' in msg or 'Invalid connection state' in msg or - 'Invalid cursor state' in msg or + 'Invalid cursor state' in msg or 'connection shutdown' in msg) else: return False diff --git a/lib/sqlalchemy/dialects/informix/base.py b/lib/sqlalchemy/dialects/informix/base.py index 6f96246efc..fd737cb65c 100644 --- a/lib/sqlalchemy/dialects/informix/base.py +++ b/lib/sqlalchemy/dialects/informix/base.py @@ -10,7 +10,7 @@ .. note:: The Informix dialect functions on current SQLAlchemy versions - but is not regularly tested, and may have many issues and + but is not regularly tested, and may have many issues and caveats not currently handled. """ @@ -466,7 +466,7 @@ class InformixDialect(default.DefaultDialect): c = connection.execute( """select t1.constrname as cons_name, t4.colname as local_column, t7.tabname as remote_table, - t6.colname as remote_column, t7.owner as remote_owner + t6.colname as remote_column, t7.owner as remote_owner from sysconstraints as t1 , systables as t2 , sysindexes as t3 , syscolumns as t4 , sysreferences as t5 , syscolumns as t6 , systables as t7 , @@ -475,7 +475,7 @@ class InformixDialect(default.DefaultDialect): and t3.tabid = t2.tabid and t3.idxname = t1.idxname and t4.tabid = t2.tabid and t4.colno in (t3.part1, t3.part2, t3.part3, t3.part4, t3.part5, t3.part6, t3.part7, t3.part8, t3.part9, t3.part10, - t3.part11, t3.part11, t3.part12, t3.part13, t3.part4, t3.part15, t3.part16) + t3.part11, t3.part11, t3.part12, t3.part13, t3.part4, t3.part15, t3.part16) and t5.constrid = t1.constrid and t8.constrid = t5.primary and t6.tabid = t5.ptabid and t6.colno in (t9.part1, t9.part2, t9.part3, t9.part4, t9.part5, t9.part6, t9.part7, t9.part8, t9.part9, t9.part10, @@ -522,7 +522,7 @@ class InformixDialect(default.DefaultDialect): # Select the column positions from sysindexes for sysconstraints data = connection.execute( - """select t2.* + """select t2.* from systables as t1, sysindexes as t2, sysconstraints as t3 where t1.tabid=t2.tabid and t1.tabname=? and t1.owner=? 
and t2.idxname=t3.idxname and t3.constrtype='P'""", @@ -544,7 +544,7 @@ class InformixDialect(default.DefaultDialect): c = connection.execute( """select t1.colname from syscolumns as t1, systables as t2 - where t2.tabname=? and t1.tabid = t2.tabid and + where t2.tabname=? and t1.tabid = t2.tabid and t1.colno in (%s)""" % place_holder, table_name, *colpositions ).fetchall() @@ -568,7 +568,7 @@ class InformixDialect(default.DefaultDialect): c = connection.execute( """select t1.colname from syscolumns as t1, systables as t2 - where t2.tabname=? and t1.tabid = t2.tabid and + where t2.tabname=? and t1.tabid = t2.tabid and t1.colno in (%s)""" % place_holder, table_name, *colnames ).fetchall() diff --git a/lib/sqlalchemy/dialects/maxdb/base.py b/lib/sqlalchemy/dialects/maxdb/base.py index cde839db9b..861cca0fcc 100644 --- a/lib/sqlalchemy/dialects/maxdb/base.py +++ b/lib/sqlalchemy/dialects/maxdb/base.py @@ -8,7 +8,7 @@ .. note:: - The MaxDB dialect is **non-functional as of SQLAlchemy 0.6**, + The MaxDB dialect is **non-functional as of SQLAlchemy 0.6**, pending development efforts to bring it up-to-date. Overview @@ -255,7 +255,7 @@ class MaxTimestamp(sqltypes.DateTime): value[20:])]) else: raise exc.InvalidRequestError( - "datetimeformat '%s' is not supported." % + "datetimeformat '%s' is not supported." % dialect.datetimeformat) return process @@ -283,18 +283,18 @@ class MaxDate(sqltypes.Date): if value is None: return None else: - return datetime.date(int(value[0:4]), int(value[4:6]), + return datetime.date(int(value[0:4]), int(value[4:6]), int(value[6:8])) elif dialect.datetimeformat == 'iso': def process(value): if value is None: return None else: - return datetime.date(int(value[0:4]), int(value[5:7]), + return datetime.date(int(value[0:4]), int(value[5:7]), int(value[8:10])) else: raise exc.InvalidRequestError( - "datetimeformat '%s' is not supported." % + "datetimeformat '%s' is not supported." % dialect.datetimeformat) return process @@ -322,7 +322,7 @@ class MaxTime(sqltypes.Time): if value is None: return None else: - return datetime.time(int(value[0:4]), int(value[4:6]), + return datetime.time(int(value[0:4]), int(value[4:6]), int(value[6:8])) elif dialect.datetimeformat == 'iso': def process(value): @@ -333,7 +333,7 @@ class MaxTime(sqltypes.Time): int(value[8:10])) else: raise exc.InvalidRequestError( - "datetimeformat '%s' is not supported." % + "datetimeformat '%s' is not supported." 
% dialect.datetimeformat) return process diff --git a/lib/sqlalchemy/dialects/mssql/__init__.py b/lib/sqlalchemy/dialects/mssql/__init__.py index 8a2101c51f..e262d208b7 100644 --- a/lib/sqlalchemy/dialects/mssql/__init__.py +++ b/lib/sqlalchemy/dialects/mssql/__init__.py @@ -18,9 +18,9 @@ from sqlalchemy.dialects.mssql.base import \ __all__ = ( - 'INTEGER', 'BIGINT', 'SMALLINT', 'TINYINT', 'VARCHAR', 'NVARCHAR', 'CHAR', + 'INTEGER', 'BIGINT', 'SMALLINT', 'TINYINT', 'VARCHAR', 'NVARCHAR', 'CHAR', 'NCHAR', 'TEXT', 'NTEXT', 'DECIMAL', 'NUMERIC', 'FLOAT', 'DATETIME', - 'DATETIME2', 'DATETIMEOFFSET', 'DATE', 'TIME', 'SMALLDATETIME', + 'DATETIME2', 'DATETIMEOFFSET', 'DATE', 'TIME', 'SMALLDATETIME', 'BINARY', 'VARBINARY', 'BIT', 'REAL', 'IMAGE', 'TIMESTAMP', 'MONEY', 'SMALLMONEY', 'UNIQUEIDENTIFIER', 'SQL_VARIANT', 'dialect' ) \ No newline at end of file diff --git a/lib/sqlalchemy/dialects/mssql/adodbapi.py b/lib/sqlalchemy/dialects/mssql/adodbapi.py index 21e6328804..5b23282692 100644 --- a/lib/sqlalchemy/dialects/mssql/adodbapi.py +++ b/lib/sqlalchemy/dialects/mssql/adodbapi.py @@ -16,7 +16,7 @@ import sys class MSDateTime_adodbapi(MSDateTime): def result_processor(self, dialect, coltype): def process(value): - # adodbapi will return datetimes with empty time + # adodbapi will return datetimes with empty time # values as datetime.date() objects. # Promote them back to full datetime.datetime() if type(value) is datetime.date: @@ -49,7 +49,7 @@ class MSDialect_adodbapi(MSDialect): connectors = ["Provider=SQLOLEDB"] if 'port' in keys: - connectors.append ("Data Source=%s, %s" % + connectors.append ("Data Source=%s, %s" % (keys.get("host"), keys.get("port"))) else: connectors.append ("Data Source=%s" % keys.get("host")) diff --git a/lib/sqlalchemy/dialects/mssql/base.py b/lib/sqlalchemy/dialects/mssql/base.py index 25b3695bfc..063fc2c36d 100644 --- a/lib/sqlalchemy/dialects/mssql/base.py +++ b/lib/sqlalchemy/dialects/mssql/base.py @@ -74,7 +74,7 @@ will yield:: SELECT TOP n If using SQL Server 2005 or above, LIMIT with OFFSET -support is available through the ``ROW_NUMBER OVER`` construct. +support is available through the ``ROW_NUMBER OVER`` construct. For versions below 2005, LIMIT with OFFSET usage will fail. Nullability @@ -119,14 +119,14 @@ Triggers SQLAlchemy by default uses OUTPUT INSERTED to get at newly generated primary key values via IDENTITY columns or other -server side defaults. MS-SQL does not +server side defaults. MS-SQL does not allow the usage of OUTPUT INSERTED on tables that have triggers. To disable the usage of OUTPUT INSERTED on a per-table basis, specify ``implicit_returning=False`` for each :class:`.Table` which has triggers:: - Table('mytable', metadata, - Column('id', Integer, primary_key=True), + Table('mytable', metadata, + Column('id', Integer, primary_key=True), # ..., implicit_returning=False ) @@ -144,11 +144,11 @@ This option can also be specified engine-wide using the Enabling Snapshot Isolation --------------------------- -Not necessarily specific to SQLAlchemy, SQL Server has a default transaction +Not necessarily specific to SQLAlchemy, SQL Server has a default transaction isolation mode that locks entire tables, and causes even mildly concurrent applications to have long held locks and frequent deadlocks. -Enabling snapshot isolation for the database as a whole is recommended -for modern levels of concurrency support. 
This is accomplished via the +Enabling snapshot isolation for the database as a whole is recommended +for modern levels of concurrency support. This is accomplished via the following ALTER DATABASE commands executed at the SQL prompt:: ALTER DATABASE MyDatabase SET ALLOW_SNAPSHOT_ISOLATION ON @@ -165,7 +165,7 @@ Scalar Select Comparisons The MSSQL dialect contains a legacy behavior whereby comparing a scalar select to a value using the ``=`` or ``!=`` operator will resolve to IN or NOT IN, respectively. This behavior - will be removed in 0.8 - the ``s.in_()``/``~s.in_()`` operators + will be removed in 0.8 - the ``s.in_()``/``~s.in_()`` operators should be used when IN/NOT IN are desired. For the time being, the existing behavior prevents a comparison @@ -273,7 +273,7 @@ class _MSDate(sqltypes.Date): return value.date() elif isinstance(value, basestring): return datetime.date(*[ - int(x or 0) + int(x or 0) for x in self._reg.match(value).groups() ]) else: @@ -304,7 +304,7 @@ class TIME(sqltypes.TIME): return value.time() elif isinstance(value, basestring): return datetime.time(*[ - int(x or 0) + int(x or 0) for x in self._reg.match(value).groups()]) else: return value @@ -609,7 +609,7 @@ class MSTypeCompiler(compiler.GenericTypeCompiler): return self._extend("TEXT", type_) def visit_VARCHAR(self, type_): - return self._extend("VARCHAR", type_, + return self._extend("VARCHAR", type_, length = type_.length or 'max') def visit_CHAR(self, type_): @@ -619,7 +619,7 @@ class MSTypeCompiler(compiler.GenericTypeCompiler): return self._extend("NCHAR", type_) def visit_NVARCHAR(self, type_): - return self._extend("NVARCHAR", type_, + return self._extend("NVARCHAR", type_, length = type_.length or 'max') def visit_date(self, type_): @@ -642,8 +642,8 @@ class MSTypeCompiler(compiler.GenericTypeCompiler): def visit_VARBINARY(self, type_): return self._extend( - "VARBINARY", - type_, + "VARBINARY", + type_, length=type_.length or 'max') def visit_boolean(self, type_): @@ -690,8 +690,8 @@ class MSExecutionContext(default.DefaultExecutionContext): not self.executemany if self._enable_identity_insert: - self.root_connection._cursor_execute(self.cursor, - "SET IDENTITY_INSERT %s ON" % + self.root_connection._cursor_execute(self.cursor, + "SET IDENTITY_INSERT %s ON" % self.dialect.identifier_preparer.format_table(tbl), ()) @@ -701,10 +701,10 @@ class MSExecutionContext(default.DefaultExecutionContext): conn = self.root_connection if self._select_lastrowid: if self.dialect.use_scope_identity: - conn._cursor_execute(self.cursor, + conn._cursor_execute(self.cursor, "SELECT scope_identity() AS lastrowid", ()) else: - conn._cursor_execute(self.cursor, + conn._cursor_execute(self.cursor, "SELECT @@identity AS lastrowid", ()) # fetchall() ensures the cursor is consumed without closing it row = self.cursor.fetchall()[0] @@ -715,7 +715,7 @@ class MSExecutionContext(default.DefaultExecutionContext): self._result_proxy = base.FullyBufferedResultProxy(self) if self._enable_identity_insert: - conn._cursor_execute(self.cursor, + conn._cursor_execute(self.cursor, "SET IDENTITY_INSERT %s OFF" % self.dialect.identifier_preparer. 
format_table(self.compiled.statement.table), @@ -729,7 +729,7 @@ class MSExecutionContext(default.DefaultExecutionContext): if self._enable_identity_insert: try: self.cursor.execute( - "SET IDENTITY_INSERT %s OFF" % + "SET IDENTITY_INSERT %s OFF" % self.dialect.identifier_preparer.\ format_table(self.compiled.statement.table) ) @@ -772,12 +772,12 @@ class MSSQLCompiler(compiler.SQLCompiler): def visit_concat_op(self, binary, **kw): return "%s + %s" % \ - (self.process(binary.left, **kw), + (self.process(binary.left, **kw), self.process(binary.right, **kw)) def visit_match_op(self, binary, **kw): return "CONTAINS (%s, %s)" % ( - self.process(binary.left, **kw), + self.process(binary.left, **kw), self.process(binary.right, **kw)) def get_select_precolumns(self, select): @@ -867,7 +867,7 @@ class MSSQLCompiler(compiler.SQLCompiler): return "SAVE TRANSACTION %s" % self.preparer.format_savepoint(savepoint_stmt) def visit_rollback_to_savepoint(self, savepoint_stmt): - return ("ROLLBACK TRANSACTION %s" + return ("ROLLBACK TRANSACTION %s" % self.preparer.format_savepoint(savepoint_stmt)) def visit_column(self, column, result_map=None, **kwargs): @@ -881,15 +881,15 @@ class MSSQLCompiler(compiler.SQLCompiler): if result_map is not None: result_map[column.name.lower()] = \ - (column.name, (column, ), + (column.name, (column, ), column.type) return super(MSSQLCompiler, self).\ - visit_column(converted, + visit_column(converted, result_map=None, **kwargs) - return super(MSSQLCompiler, self).visit_column(column, - result_map=result_map, + return super(MSSQLCompiler, self).visit_column(column, + result_map=result_map, **kwargs) def visit_binary(self, binary, **kwargs): @@ -898,27 +898,27 @@ class MSSQLCompiler(compiler.SQLCompiler): """ if ( - isinstance(binary.left, expression._BindParamClause) + isinstance(binary.left, expression._BindParamClause) and binary.operator == operator.eq and not isinstance(binary.right, expression._BindParamClause) ): return self.process( - expression._BinaryExpression(binary.right, - binary.left, - binary.operator), + expression._BinaryExpression(binary.right, + binary.left, + binary.operator), **kwargs) else: if ( - (binary.operator is operator.eq or - binary.operator is operator.ne) + (binary.operator is operator.eq or + binary.operator is operator.ne) and ( - (isinstance(binary.left, expression._FromGrouping) - and isinstance(binary.left.element, - expression._ScalarSelect)) - or (isinstance(binary.right, expression._FromGrouping) - and isinstance(binary.right.element, - expression._ScalarSelect)) - or isinstance(binary.left, expression._ScalarSelect) + (isinstance(binary.left, expression._FromGrouping) + and isinstance(binary.left.element, + expression._ScalarSelect)) + or (isinstance(binary.right, expression._FromGrouping) + and isinstance(binary.right.element, + expression._ScalarSelect)) + or isinstance(binary.left, expression._ScalarSelect) or isinstance(binary.right, expression._ScalarSelect) ) ): @@ -950,10 +950,10 @@ class MSSQLCompiler(compiler.SQLCompiler): columns = [ self.process( - col_label(c), - within_columns_clause=True, + col_label(c), + within_columns_clause=True, result_map=self.result_map - ) + ) for c in expression._select_iterables(returning_cols) ] return 'OUTPUT ' + ', '.join(columns) @@ -973,7 +973,7 @@ class MSSQLCompiler(compiler.SQLCompiler): label_select_column(select, column, asfrom) def for_update_clause(self, select): - # "FOR UPDATE" is only allowed on "DECLARE CURSOR" which + # "FOR UPDATE" is only allowed on "DECLARE CURSOR" which # 
SQLAlchemy doesn't use return '' @@ -991,11 +991,11 @@ class MSSQLCompiler(compiler.SQLCompiler): from_hints, **kw): """Render the UPDATE..FROM clause specific to MSSQL. - + In MSSQL, if the UPDATE statement involves an alias of the table to be updated, then the table itself must be added to the FROM list as well. Otherwise, it is optional. Here, we add it regardless. - + """ return "FROM " + ', '.join( t._compiler_dispatch(self, asfrom=True, @@ -1015,14 +1015,14 @@ class MSSQLStrictCompiler(MSSQLCompiler): def visit_in_op(self, binary, **kw): kw['literal_binds'] = True return "%s IN %s" % ( - self.process(binary.left, **kw), + self.process(binary.left, **kw), self.process(binary.right, **kw) ) def visit_notin_op(self, binary, **kw): kw['literal_binds'] = True return "%s NOT IN %s" % ( - self.process(binary.left, **kw), + self.process(binary.left, **kw), self.process(binary.right, **kw) ) @@ -1051,7 +1051,7 @@ class MSSQLStrictCompiler(MSSQLCompiler): class MSDDLCompiler(compiler.DDLCompiler): def get_column_specification(self, column, **kwargs): - colspec = (self.preparer.format_column(column) + " " + colspec = (self.preparer.format_column(column) + " " + self.dialect.type_compiler.process(column.type)) if column.nullable is not None: @@ -1062,7 +1062,7 @@ class MSDDLCompiler(compiler.DDLCompiler): if column.table is None: raise exc.CompileError( - "mssql requires Table-bound columns " + "mssql requires Table-bound columns " "in order to generate DDL") seq_col = column.table._autoincrement_column @@ -1097,7 +1097,7 @@ class MSIdentifierPreparer(compiler.IdentifierPreparer): reserved_words = RESERVED_WORDS def __init__(self, dialect): - super(MSIdentifierPreparer, self).__init__(dialect, initial_quote='[', + super(MSIdentifierPreparer, self).__init__(dialect, initial_quote='[', final_quote=']') def _escape_identifier(self, value): @@ -1162,7 +1162,7 @@ class MSDialect(default.DefaultDialect): super(MSDialect, self).initialize(connection) if self.server_version_info[0] not in range(8, 17): # FreeTDS with version 4.2 seems to report here - # a number like "95.10.255". Don't know what + # a number like "95.10.255". Don't know what # that is. So emit warning. util.warn( "Unrecognized server version info '%s'. 
Version specific " @@ -1263,11 +1263,11 @@ class MSDialect(default.DefaultDialect): "join sys.schemas as sch on sch.schema_id=tab.schema_id " "where tab.name = :tabname " "and sch.name=:schname " - "and ind.is_primary_key=0", + "and ind.is_primary_key=0", bindparams=[ - sql.bindparam('tabname', tablename, + sql.bindparam('tabname', tablename, sqltypes.String(convert_unicode=True)), - sql.bindparam('schname', current_schema, + sql.bindparam('schname', current_schema, sqltypes.String(convert_unicode=True)) ], typemap = { @@ -1294,9 +1294,9 @@ class MSDialect(default.DefaultDialect): "where tab.name=:tabname " "and sch.name=:schname", bindparams=[ - sql.bindparam('tabname', tablename, + sql.bindparam('tabname', tablename, sqltypes.String(convert_unicode=True)), - sql.bindparam('schname', current_schema, + sql.bindparam('schname', current_schema, sqltypes.String(convert_unicode=True)) ], typemap = { @@ -1324,9 +1324,9 @@ class MSDialect(default.DefaultDialect): "views.schema_id=sch.schema_id and " "views.name=:viewname and sch.name=:schname", bindparams=[ - sql.bindparam('viewname', viewname, + sql.bindparam('viewname', viewname, sqltypes.String(convert_unicode=True)), - sql.bindparam('schname', current_schema, + sql.bindparam('schname', current_schema, sqltypes.String(convert_unicode=True)) ] ) @@ -1354,7 +1354,7 @@ class MSDialect(default.DefaultDialect): row = c.fetchone() if row is None: break - (name, type, nullable, charlen, + (name, type, nullable, charlen, numericprec, numericscale, default, collation) = ( row[columns.c.column_name], row[columns.c.data_type], @@ -1368,7 +1368,7 @@ class MSDialect(default.DefaultDialect): coltype = self.ischema_names.get(type, None) kwargs = {} - if coltype in (MSString, MSChar, MSNVarchar, MSNChar, MSText, + if coltype in (MSString, MSChar, MSNVarchar, MSNChar, MSText, MSNText, MSBinary, MSVarBinary, sqltypes.LargeBinary): kwargs['length'] = charlen @@ -1380,7 +1380,7 @@ class MSDialect(default.DefaultDialect): if coltype is None: util.warn( - "Did not recognize type '%s' of column '%s'" % + "Did not recognize type '%s' of column '%s'" % (type, name)) coltype = sqltypes.NULLTYPE else: @@ -1404,7 +1404,7 @@ class MSDialect(default.DefaultDialect): colmap[col['name']] = col # We also run an sp_columns to check for identity columns: cursor = connection.execute("sp_columns @table_name = '%s', " - "@table_owner = '%s'" + "@table_owner = '%s'" % (tablename, current_schema)) ic = None while True: @@ -1423,7 +1423,7 @@ class MSDialect(default.DefaultDialect): if ic is not None and self.server_version_info >= MS_2005_VERSION: table_fullname = "%s.%s" % (current_schema, tablename) cursor = connection.execute( - "select ident_seed('%s'), ident_incr('%s')" + "select ident_seed('%s'), ident_incr('%s')" % (table_fullname, table_fullname) ) @@ -1443,12 +1443,12 @@ class MSDialect(default.DefaultDialect): RR = ischema.ref_constraints # information_schema.table_constraints TC = ischema.constraints - # information_schema.constraint_column_usage: + # information_schema.constraint_column_usage: # the constrained column - C = ischema.key_constraints.alias('C') - # information_schema.constraint_column_usage: + C = ischema.key_constraints.alias('C') + # information_schema.constraint_column_usage: # the referenced column - R = ischema.key_constraints.alias('R') + R = ischema.key_constraints.alias('R') # Primary key constraints s = sql.select([C.c.column_name, TC.c.constraint_type], @@ -1470,12 +1470,12 @@ class MSDialect(default.DefaultDialect): RR = ischema.ref_constraints # 
information_schema.table_constraints TC = ischema.constraints - # information_schema.constraint_column_usage: + # information_schema.constraint_column_usage: # the constrained column - C = ischema.key_constraints.alias('C') - # information_schema.constraint_column_usage: + C = ischema.key_constraints.alias('C') + # information_schema.constraint_column_usage: # the referenced column - R = ischema.key_constraints.alias('R') + R = ischema.key_constraints.alias('R') # Foreign key constraints s = sql.select([C.c.column_name, diff --git a/lib/sqlalchemy/dialects/mssql/mxodbc.py b/lib/sqlalchemy/dialects/mssql/mxodbc.py index 94f0a2cbb3..15ebad1f8f 100644 --- a/lib/sqlalchemy/dialects/mssql/mxodbc.py +++ b/lib/sqlalchemy/dialects/mssql/mxodbc.py @@ -41,7 +41,7 @@ simplistic statements. For this reason, the mxODBC dialect uses the "native" mode by default only for INSERT, UPDATE, and DELETE statements, and uses the escaped string mode for -all other statements. +all other statements. This behavior can be controlled via :meth:`~sqlalchemy.sql.expression.Executable.execution_options` using the @@ -55,7 +55,7 @@ of ``False`` will unconditionally use string-escaped parameters. from sqlalchemy import types as sqltypes from sqlalchemy.connectors.mxodbc import MxODBCConnector from sqlalchemy.dialects.mssql.pyodbc import MSExecutionContext_pyodbc -from sqlalchemy.dialects.mssql.base import (MSDialect, +from sqlalchemy.dialects.mssql.base import (MSDialect, MSSQLStrictCompiler, _MSDateTime, _MSDate, TIME) diff --git a/lib/sqlalchemy/dialects/mssql/pymssql.py b/lib/sqlalchemy/dialects/mssql/pymssql.py index 9cc42c093b..f9f2e7a48e 100644 --- a/lib/sqlalchemy/dialects/mssql/pymssql.py +++ b/lib/sqlalchemy/dialects/mssql/pymssql.py @@ -21,8 +21,8 @@ Sample connect string:: mssql+pymssql://:@ Adding "?charset=utf8" or similar will cause pymssql to return -strings as Python unicode objects. This can potentially improve -performance in some scenarios as decoding of strings is +strings as Python unicode objects. This can potentially improve +performance in some scenarios as decoding of strings is handled natively. Limitations diff --git a/lib/sqlalchemy/dialects/mssql/pyodbc.py b/lib/sqlalchemy/dialects/mssql/pyodbc.py index 17dcbfecd7..b3b1641e0f 100644 --- a/lib/sqlalchemy/dialects/mssql/pyodbc.py +++ b/lib/sqlalchemy/dialects/mssql/pyodbc.py @@ -35,14 +35,14 @@ Examples of pyodbc connection string URLs: dsn=mydsn;UID=user;PWD=pass;LANGUAGE=us_english -* ``mssql+pyodbc://user:pass@host/db`` - connects using a connection +* ``mssql+pyodbc://user:pass@host/db`` - connects using a connection that would appear like:: DRIVER={SQL Server};Server=host;Database=db;UID=user;PWD=pass * ``mssql+pyodbc://user:pass@host:123/db`` - connects using a connection string which includes the port - information using the comma syntax. This will create the following + information using the comma syntax. This will create the following connection string:: DRIVER={SQL Server};Server=host,123;Database=db;UID=user;PWD=pass @@ -83,9 +83,9 @@ the python shell. For example:: Unicode Binds ^^^^^^^^^^^^^ -The current state of PyODBC on a unix backend with FreeTDS and/or +The current state of PyODBC on a unix backend with FreeTDS and/or EasySoft is poor regarding unicode; different OS platforms and versions of UnixODBC -versus IODBC versus FreeTDS/EasySoft versus PyODBC itself dramatically +versus IODBC versus FreeTDS/EasySoft versus PyODBC itself dramatically alter how strings are received. 
The PyODBC dialect attempts to use all the information it knows to determine whether or not a Python unicode literal can be passed directly to the PyODBC driver or not; while SQLAlchemy can encode @@ -93,13 +93,13 @@ these to bytestrings first, some users have reported that PyODBC mis-handles bytestrings for certain encodings and requires a Python unicode object, while the author has observed widespread cases where a Python unicode is completely misinterpreted by PyODBC, particularly when dealing with -the information schema tables used in table reflection, and the value +the information schema tables used in table reflection, and the value must first be encoded to a bytestring. It is for this reason that whether or not unicode literals for bound -parameters be sent to PyODBC can be controlled using the -``supports_unicode_binds`` parameter to ``create_engine()``. When -left at its default of ``None``, the PyODBC dialect will use its +parameters be sent to PyODBC can be controlled using the +``supports_unicode_binds`` parameter to ``create_engine()``. When +left at its default of ``None``, the PyODBC dialect will use its best guess as to whether or not the driver deals with unicode literals well. When ``False``, unicode literals will be encoded first, and when ``True`` unicode literals will be passed straight through. This is an interim @@ -199,7 +199,7 @@ class MSExecutionContext_pyodbc(MSExecutionContext): super(MSExecutionContext_pyodbc, self).pre_exec() - # don't embed the scope_identity select into an + # don't embed the scope_identity select into an # "INSERT .. DEFAULT VALUES" if self._select_lastrowid and \ self.dialect.use_scope_identity and \ @@ -211,11 +211,11 @@ class MSExecutionContext_pyodbc(MSExecutionContext): def post_exec(self): if self._embedded_scope_identity: # Fetch the last inserted id from the manipulated statement - # We may have to skip over a number of result sets with + # We may have to skip over a number of result sets with # no data (due to triggers, etc.) while True: try: - # fetchall() ensures the cursor is consumed + # fetchall() ensures the cursor is consumed # without closing it (FreeTDS particularly) row = self.cursor.fetchall()[0] break diff --git a/lib/sqlalchemy/dialects/mssql/zxjdbc.py b/lib/sqlalchemy/dialects/mssql/zxjdbc.py index bfa358c0c3..4bbd82c07e 100644 --- a/lib/sqlalchemy/dialects/mssql/zxjdbc.py +++ b/lib/sqlalchemy/dialects/mssql/zxjdbc.py @@ -68,7 +68,7 @@ class MSDialect_zxjdbc(ZxJDBCConnector, MSDialect): def _get_server_version_info(self, connection): return tuple( - int(x) + int(x) for x in connection.connection.dbversion.split('.') ) diff --git a/lib/sqlalchemy/dialects/mysql/base.py b/lib/sqlalchemy/dialects/mysql/base.py index 448c8d631a..f2daec564a 100644 --- a/lib/sqlalchemy/dialects/mysql/base.py +++ b/lib/sqlalchemy/dialects/mysql/base.py @@ -44,7 +44,7 @@ Connection Timeouts ------------------- MySQL features an automatic connection close behavior, for connections that have -been idle for eight hours or more. To circumvent having this issue, use the +been idle for eight hours or more. To circumvent having this issue, use the ``pool_recycle`` option which controls the maximum age of any connection:: engine = create_engine('mysql+mysqldb://...', pool_recycle=3600) @@ -87,15 +87,15 @@ to be used. 
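Building on the connection timeout note above, one possible refinement is to derive ``pool_recycle`` from the server's actual ``wait_timeout`` rather than hardcoding an hour. This is only a sketch; the credentials are placeholders and the probe engine is an illustrative device::

    from sqlalchemy import create_engine

    url = 'mysql+mysqldb://scott:tiger@localhost/test'

    # ask the server for its idle-connection timeout, then recycle
    # pooled connections comfortably before that deadline
    probe = create_engine(url)
    wait_timeout = int(
        probe.execute("SHOW VARIABLES LIKE 'wait_timeout'").fetchone()[1])
    engine = create_engine(url, pool_recycle=max(wait_timeout - 60, 60))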
Transaction Isolation Level --------------------------- -:func:`.create_engine` accepts an ``isolation_level`` -parameter which results in the command ``SET SESSION -TRANSACTION ISOLATION LEVEL `` being invoked for +:func:`.create_engine` accepts an ``isolation_level`` +parameter which results in the command ``SET SESSION +TRANSACTION ISOLATION LEVEL `` being invoked for every new connection. Valid values for this parameter are -``READ COMMITTED``, ``READ UNCOMMITTED``, +``READ COMMITTED``, ``READ UNCOMMITTED``, ``REPEATABLE READ``, and ``SERIALIZABLE``:: engine = create_engine( - "mysql://scott:tiger@localhost/test", + "mysql://scott:tiger@localhost/test", isolation_level="READ UNCOMMITTED" ) @@ -193,7 +193,7 @@ usual definition of "number of rows matched by an UPDATE or DELETE" statement. This is in contradiction to the default setting on most MySQL DBAPI drivers, which is "number of rows actually modified/deleted". For this reason, the SQLAlchemy MySQL dialects always set the ``constants.CLIENT.FOUND_ROWS`` flag, -or whatever is equivalent for the DBAPI in use, on connect, unless the flag value +or whatever is equivalent for the DBAPI in use, on connect, unless the flag value is overridden using DBAPI-specific options (such as ``client_flag`` for the MySQL-Python driver, ``found_rows`` for the OurSQL driver). @@ -260,7 +260,7 @@ Index Types ~~~~~~~~~~~~~ Some MySQL storage engines permit you to specify an index type when creating -an index or primary key constraint. SQLAlchemy provides this feature via the +an index or primary key constraint. SQLAlchemy provides this feature via the ``mysql_using`` parameter on :class:`.Index`:: Index('my_index', my_table.c.data, mysql_using='hash') @@ -270,7 +270,7 @@ As well as the ``mysql_using`` parameter on :class:`.PrimaryKeyConstraint`:: PrimaryKeyConstraint("data", mysql_using='hash') The value passed to the keyword argument will be simply passed through to the -underlying CREATE INDEX or PRIMARY KEY clause, so it *must* be a valid index +underlying CREATE INDEX or PRIMARY KEY clause, so it *must* be a valid index type for your MySQL storage engine. More information can be found at: @@ -1307,13 +1307,13 @@ class MySQLCompiler(compiler.SQLCompiler): def get_select_precolumns(self, select): """Add special MySQL keywords in place of DISTINCT. - - .. note:: - + + .. note:: + this usage is deprecated. :meth:`.Select.prefix_with` should be used for special keywords at the start of a SELECT. - + """ if isinstance(select._distinct, basestring): return select._distinct.upper() + " " @@ -1361,16 +1361,16 @@ class MySQLCompiler(compiler.SQLCompiler): if limit is None: # hardwire the upper limit. 
Currently # needed by OurSQL with Python 3 - # (https://bugs.launchpad.net/oursql/+bug/686232), + # (https://bugs.launchpad.net/oursql/+bug/686232), # but also is consistent with the usage of the upper # bound as part of MySQL's "syntax" for OFFSET with # no LIMIT return ' \n LIMIT %s, %s' % ( - self.process(sql.literal(offset)), + self.process(sql.literal(offset)), "18446744073709551615") else: return ' \n LIMIT %s, %s' % ( - self.process(sql.literal(offset)), + self.process(sql.literal(offset)), self.process(sql.literal(limit))) else: # No offset provided, so just use the limit @@ -1384,10 +1384,10 @@ class MySQLCompiler(compiler.SQLCompiler): return None def update_tables_clause(self, update_stmt, from_table, extra_froms, **kw): - return ', '.join(t._compiler_dispatch(self, asfrom=True, **kw) + return ', '.join(t._compiler_dispatch(self, asfrom=True, **kw) for t in [from_table] + list(extra_froms)) - def update_from_clause(self, update_stmt, from_table, + def update_from_clause(self, update_stmt, from_table, extra_froms, from_hints, **kw): return None @@ -1416,7 +1416,7 @@ class MySQLDDLCompiler(compiler.DDLCompiler): constraint_string += "KEY %s (%s)" % ( self.preparer.quote( "idx_autoinc_%s" % auto_inc_column.name, None - ), + ), self.preparer.format_column(auto_inc_column) ) @@ -1453,7 +1453,7 @@ class MySQLDDLCompiler(compiler.DDLCompiler): opts = dict( ( - k[len(self.dialect.name)+1:].upper(), + k[len(self.dialect.name)+1:].upper(), v ) for k, v in table.kwargs.items() @@ -1469,7 +1469,7 @@ class MySQLDDLCompiler(compiler.DDLCompiler): arg = "'%s'" % arg.replace("\\", "\\\\").replace("'", "''") if opt in ('DATA_DIRECTORY', 'INDEX_DIRECTORY', - 'DEFAULT_CHARACTER_SET', 'CHARACTER_SET', + 'DEFAULT_CHARACTER_SET', 'CHARACTER_SET', 'DEFAULT_CHARSET', 'DEFAULT_COLLATE'): opt = opt.replace('_', ' ') @@ -1489,7 +1489,7 @@ class MySQLDDLCompiler(compiler.DDLCompiler): table = preparer.format_table(index.table) columns = [preparer.quote(c.name, c.quote) for c in index.columns] name = preparer.quote( - self._index_identifier(index.name), + self._index_identifier(index.name), index.quote) text = "CREATE " @@ -1598,24 +1598,24 @@ class MySQLTypeCompiler(compiler.GenericTypeCompiler): if type_.precision is None: return self._extend_numeric(type_, "NUMERIC") elif type_.scale is None: - return self._extend_numeric(type_, - "NUMERIC(%(precision)s)" % + return self._extend_numeric(type_, + "NUMERIC(%(precision)s)" % {'precision': type_.precision}) else: - return self._extend_numeric(type_, - "NUMERIC(%(precision)s, %(scale)s)" % + return self._extend_numeric(type_, + "NUMERIC(%(precision)s, %(scale)s)" % {'precision': type_.precision, 'scale' : type_.scale}) def visit_DECIMAL(self, type_): if type_.precision is None: return self._extend_numeric(type_, "DECIMAL") elif type_.scale is None: - return self._extend_numeric(type_, - "DECIMAL(%(precision)s)" % + return self._extend_numeric(type_, + "DECIMAL(%(precision)s)" % {'precision': type_.precision}) else: - return self._extend_numeric(type_, - "DECIMAL(%(precision)s, %(scale)s)" % + return self._extend_numeric(type_, + "DECIMAL(%(precision)s, %(scale)s)" % {'precision': type_.precision, 'scale' : type_.scale}) def visit_DOUBLE(self, type_): @@ -1638,7 +1638,7 @@ class MySQLTypeCompiler(compiler.GenericTypeCompiler): if self._mysql_type(type_) and \ type_.scale is not None and \ type_.precision is not None: - return self._extend_numeric(type_, + return self._extend_numeric(type_, "FLOAT(%s, %s)" % (type_.precision, type_.scale)) elif type_.precision 
is not None: return self._extend_numeric(type_, "FLOAT(%s)" % (type_.precision,)) @@ -1647,24 +1647,24 @@ class MySQLTypeCompiler(compiler.GenericTypeCompiler): def visit_INTEGER(self, type_): if self._mysql_type(type_) and type_.display_width is not None: - return self._extend_numeric(type_, - "INTEGER(%(display_width)s)" % + return self._extend_numeric(type_, + "INTEGER(%(display_width)s)" % {'display_width': type_.display_width}) else: return self._extend_numeric(type_, "INTEGER") def visit_BIGINT(self, type_): if self._mysql_type(type_) and type_.display_width is not None: - return self._extend_numeric(type_, - "BIGINT(%(display_width)s)" % + return self._extend_numeric(type_, + "BIGINT(%(display_width)s)" % {'display_width': type_.display_width}) else: return self._extend_numeric(type_, "BIGINT") def visit_MEDIUMINT(self, type_): if self._mysql_type(type_) and type_.display_width is not None: - return self._extend_numeric(type_, - "MEDIUMINT(%(display_width)s)" % + return self._extend_numeric(type_, + "MEDIUMINT(%(display_width)s)" % {'display_width': type_.display_width}) else: return self._extend_numeric(type_, "MEDIUMINT") @@ -1677,8 +1677,8 @@ class MySQLTypeCompiler(compiler.GenericTypeCompiler): def visit_SMALLINT(self, type_): if self._mysql_type(type_) and type_.display_width is not None: - return self._extend_numeric(type_, - "SMALLINT(%(display_width)s)" % + return self._extend_numeric(type_, + "SMALLINT(%(display_width)s)" % {'display_width': type_.display_width} ) else: @@ -1728,7 +1728,7 @@ class MySQLTypeCompiler(compiler.GenericTypeCompiler): return self._extend_string(type_, {}, "VARCHAR(%d)" % type_.length) else: raise exc.CompileError( - "VARCHAR requires a length on dialect %s" % + "VARCHAR requires a length on dialect %s" % self.dialect.name) def visit_CHAR(self, type_): @@ -1744,7 +1744,7 @@ class MySQLTypeCompiler(compiler.GenericTypeCompiler): return self._extend_string(type_, {'national':True}, "VARCHAR(%(length)s)" % {'length': type_.length}) else: raise exc.CompileError( - "NVARCHAR requires a length on dialect %s" % + "NVARCHAR requires a length on dialect %s" % self.dialect.name) def visit_NCHAR(self, type_): @@ -1805,8 +1805,8 @@ class MySQLIdentifierPreparer(compiler.IdentifierPreparer): quote = '"' super(MySQLIdentifierPreparer, self).__init__( - dialect, - initial_quote=quote, + dialect, + initial_quote=quote, escape_quote=quote) def _quote_free_identifiers(self, *ids): @@ -1839,7 +1839,7 @@ class MySQLDialect(default.DefaultDialect): preparer = MySQLIdentifierPreparer # default SQL compilation settings - - # these are modified upon initialize(), + # these are modified upon initialize(), # i.e. first connect _backslash_escapes = True _server_ansiquotes = False @@ -1856,7 +1856,7 @@ class MySQLDialect(default.DefaultDialect): else: return None - _isolation_lookup = set(['SERIALIZABLE', + _isolation_lookup = set(['SERIALIZABLE', 'READ UNCOMMITTED', 'READ COMMITTED', 'REPEATABLE READ']) def set_isolation_level(self, connection, level): @@ -1864,7 +1864,7 @@ class MySQLDialect(default.DefaultDialect): if level not in self._isolation_lookup: raise exc.ArgumentError( "Invalid value '%s' for isolation_level. 
" - "Valid isolation levels for %s are %s" % + "Valid isolation levels for %s are %s" % (level, self.name, ", ".join(self._isolation_lookup)) ) cursor = connection.cursor() @@ -1936,7 +1936,7 @@ class MySQLDialect(default.DefaultDialect): return self._extract_error_code(e) in \ (2006, 2013, 2014, 2045, 2055) elif isinstance(e, self.dbapi.InterfaceError): - # if underlying connection is closed, + # if underlying connection is closed, # this is the error you get return "(0, '')" in str(e) else: @@ -2150,9 +2150,9 @@ class MySQLDialect(default.DefaultDialect): def _parsed_state_or_create(self, connection, table_name, schema=None, **kw): return self._setup_parser( - connection, - table_name, - schema, + connection, + table_name, + schema, info_cache=kw.get('info_cache', None) ) @@ -2160,7 +2160,7 @@ class MySQLDialect(default.DefaultDialect): def _tabledef_parser(self): """return the MySQLTableDefinitionParser, generate if needed. - The deferred creation ensures that the dialect has + The deferred creation ensures that the dialect has retrieved server version information first. """ diff --git a/lib/sqlalchemy/dialects/mysql/gaerdbms.py b/lib/sqlalchemy/dialects/mysql/gaerdbms.py index f84a5d2dd9..0fe5d635a6 100644 --- a/lib/sqlalchemy/dialects/mysql/gaerdbms.py +++ b/lib/sqlalchemy/dialects/mysql/gaerdbms.py @@ -27,7 +27,7 @@ Pooling Google App Engine connections appear to be randomly recycled, so the dialect does not pool connections. The :class:`.NullPool` -implementation is installed within the :class:`.Engine` by +implementation is installed within the :class:`.Engine` by default. """ @@ -37,10 +37,10 @@ from sqlalchemy.pool import NullPool import re -class MySQLDialect_gaerdbms(MySQLDialect_mysqldb): +class MySQLDialect_gaerdbms(MySQLDialect_mysqldb): - @classmethod - def dbapi(cls): + @classmethod + def dbapi(cls): from google.appengine.api import rdbms return rdbms diff --git a/lib/sqlalchemy/dialects/mysql/mysqldb.py b/lib/sqlalchemy/dialects/mysql/mysqldb.py index 656e105a70..240f30251b 100644 --- a/lib/sqlalchemy/dialects/mysql/mysqldb.py +++ b/lib/sqlalchemy/dialects/mysql/mysqldb.py @@ -26,20 +26,20 @@ MySQLdb will accommodate Python ``unicode`` objects if the ``use_unicode=1`` parameter, or the ``charset`` parameter, is passed as a connection argument. -Without this setting, many MySQL server installations default to +Without this setting, many MySQL server installations default to a ``latin1`` encoding for client connections, which has the effect -of all data being converted into ``latin1``, even if you have ``utf8`` +of all data being converted into ``latin1``, even if you have ``utf8`` or another character set configured on your tables and columns. With versions 4.1 and higher, you can change the connection character set either through server configuration or by including the ``charset`` parameter. The ``charset`` -parameter as received by MySQL-Python also has the side-effect of +parameter as received by MySQL-Python also has the side-effect of enabling ``use_unicode=1``:: # set client encoding to utf8; all strings come back as unicode create_engine('mysql+mysqldb:///mydb?charset=utf8') -Manually configuring ``use_unicode=0`` will cause MySQL-python to +Manually configuring ``use_unicode=0`` will cause MySQL-python to return encoded strings:: # set client encoding to utf8; all strings come back as utf8 str @@ -57,9 +57,9 @@ It is strongly advised to use the latest version of MySQL-Python. 
from sqlalchemy.dialects.mysql.base import (MySQLDialect, MySQLExecutionContext, MySQLCompiler, MySQLIdentifierPreparer) from sqlalchemy.connectors.mysqldb import ( - MySQLDBExecutionContext, - MySQLDBCompiler, - MySQLDBIdentifierPreparer, + MySQLDBExecutionContext, + MySQLDBCompiler, + MySQLDBIdentifierPreparer, MySQLDBConnector ) diff --git a/lib/sqlalchemy/dialects/mysql/oursql.py b/lib/sqlalchemy/dialects/mysql/oursql.py index 2a3c6b09c7..8f7bebe9c5 100644 --- a/lib/sqlalchemy/dialects/mysql/oursql.py +++ b/lib/sqlalchemy/dialects/mysql/oursql.py @@ -108,9 +108,9 @@ class MySQLDialect_oursql(MySQLDialect): arg = "'%s'" % arg connection.execution_options(_oursql_plain_query=True).execute(query % arg) - # Because mysql is bad, these methods have to be + # Because mysql is bad, these methods have to be # reimplemented to use _PlainQuery. Basically, some queries - # refuse to return any data if they're run through + # refuse to return any data if they're run through # the parameterized query API, or refuse to be parameterized # in the first place. def do_begin_twophase(self, connection, xid): @@ -135,7 +135,7 @@ class MySQLDialect_oursql(MySQLDialect): # Q: why didn't we need all these "plain_query" overrides earlier ? # am i on a newer/older version of OurSQL ? def has_table(self, connection, table_name, schema=None): - return MySQLDialect.has_table(self, + return MySQLDialect.has_table(self, connection.connect().\ execution_options(_oursql_plain_query=True), table_name, schema) @@ -183,7 +183,7 @@ class MySQLDialect_oursql(MySQLDialect): def initialize(self, connection): return MySQLDialect.initialize( - self, + self, connection.execution_options(_oursql_plain_query=True) ) @@ -222,7 +222,7 @@ class MySQLDialect_oursql(MySQLDialect): opts.setdefault('found_rows', True) ssl = {} - for key in ['ssl_ca', 'ssl_key', 'ssl_cert', + for key in ['ssl_ca', 'ssl_key', 'ssl_cert', 'ssl_capath', 'ssl_cipher']: if key in opts: ssl[key[4:]] = opts[key] diff --git a/lib/sqlalchemy/dialects/mysql/pymysql.py b/lib/sqlalchemy/dialects/mysql/pymysql.py index e05d50d307..8387dfecbb 100644 --- a/lib/sqlalchemy/dialects/mysql/pymysql.py +++ b/lib/sqlalchemy/dialects/mysql/pymysql.py @@ -20,20 +20,20 @@ Connect string:: MySQL-Python Compatibility -------------------------- -The pymysql DBAPI is a pure Python port of the MySQL-python (MySQLdb) driver, -and targets 100% compatibility. Most behavioral notes for MySQL-python apply to +The pymysql DBAPI is a pure Python port of the MySQL-python (MySQLdb) driver, +and targets 100% compatibility. Most behavioral notes for MySQL-python apply to the pymysql driver as well. 
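Since the dialect subclasses ``MySQLDialect_mysqldb``, switching an application between the two drivers is usually just a change of URL scheme; a sketch, with placeholder credentials::

    from sqlalchemy import create_engine

    engine = create_engine('mysql+mysqldb://scott:tiger@localhost/test')
    # ...is expected to behave the same as...
    engine = create_engine('mysql+pymysql://scott:tiger@localhost/test')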
""" -from sqlalchemy.dialects.mysql.mysqldb import MySQLDialect_mysqldb +from sqlalchemy.dialects.mysql.mysqldb import MySQLDialect_mysqldb -class MySQLDialect_pymysql(MySQLDialect_mysqldb): +class MySQLDialect_pymysql(MySQLDialect_mysqldb): driver = 'pymysql' description_encoding = None - @classmethod - def dbapi(cls): - return __import__('pymysql') + @classmethod + def dbapi(cls): + return __import__('pymysql') -dialect = MySQLDialect_pymysql \ No newline at end of file +dialect = MySQLDialect_pymysql \ No newline at end of file diff --git a/lib/sqlalchemy/dialects/mysql/pyodbc.py b/lib/sqlalchemy/dialects/mysql/pyodbc.py index 20a16988aa..6271286f92 100644 --- a/lib/sqlalchemy/dialects/mysql/pyodbc.py +++ b/lib/sqlalchemy/dialects/mysql/pyodbc.py @@ -20,7 +20,7 @@ Connect string:: Limitations ----------- -The mysql-pyodbc dialect is subject to unresolved character encoding issues +The mysql-pyodbc dialect is subject to unresolved character encoding issues which exist within the current ODBC drivers available. (see http://code.google.com/p/pyodbc/issues/detail?id=25). Consider usage of OurSQL, MySQLdb, or MySQL-connector/Python. diff --git a/lib/sqlalchemy/dialects/oracle/base.py b/lib/sqlalchemy/dialects/oracle/base.py index 8e7bfe7581..87d433cfba 100644 --- a/lib/sqlalchemy/dialects/oracle/base.py +++ b/lib/sqlalchemy/dialects/oracle/base.py @@ -14,7 +14,7 @@ for that driver. Connect Arguments ----------------- -The dialect supports several :func:`~sqlalchemy.create_engine()` arguments which +The dialect supports several :func:`~sqlalchemy.create_engine()` arguments which affect the behavior of the dialect regardless of driver in use. * *use_ansi* - Use ANSI JOIN constructs (see the section on Oracle 8). Defaults @@ -29,32 +29,32 @@ Auto Increment Behavior SQLAlchemy Table objects which include integer primary keys are usually assumed to have "autoincrementing" behavior, meaning they can generate their own primary key values upon -INSERT. Since Oracle has no "autoincrement" feature, SQLAlchemy relies upon sequences +INSERT. Since Oracle has no "autoincrement" feature, SQLAlchemy relies upon sequences to produce these values. With the Oracle dialect, *a sequence must always be explicitly -specified to enable autoincrement*. This is divergent with the majority of documentation +specified to enable autoincrement*. This is divergent with the majority of documentation examples which assume the usage of an autoincrement-capable database. To specify sequences, use the sqlalchemy.schema.Sequence object which is passed to a Column construct:: - t = Table('mytable', metadata, + t = Table('mytable', metadata, Column('id', Integer, Sequence('id_seq'), primary_key=True), Column(...), ... ) This step is also required when using table reflection, i.e. autoload=True:: - t = Table('mytable', metadata, + t = Table('mytable', metadata, Column('id', Integer, Sequence('id_seq'), primary_key=True), autoload=True - ) + ) Identifier Casing ----------------- -In Oracle, the data dictionary represents all case insensitive identifier names +In Oracle, the data dictionary represents all case insensitive identifier names using UPPERCASE text. SQLAlchemy on the other hand considers an all-lower case identifier name to be case insensitive. The Oracle dialect converts all case insensitive identifiers to and from those two formats during schema level communication, such as reflection of -tables and indexes. Using an UPPERCASE name on the SQLAlchemy side indicates a +tables and indexes. 
Using an UPPERCASE name on the SQLAlchemy side indicates a case sensitive identifier, and SQLAlchemy will quote the name - this will cause mismatches against data dictionary data received from Oracle, so unless identifier names have been truly created as case sensitive (i.e. using quoted names), all lowercase names should be @@ -72,16 +72,16 @@ Unicode Also note that Oracle supports unicode data through the NVARCHAR and NCLOB data types. When using the SQLAlchemy Unicode and UnicodeText types, these DDL types will be used -within CREATE TABLE statements. Usage of VARCHAR2 and CLOB with unicode text still +within CREATE TABLE statements. Usage of VARCHAR2 and CLOB with unicode text still requires NLS_LANG to be set. LIMIT/OFFSET Support -------------------- -Oracle has no support for the LIMIT or OFFSET keywords. SQLAlchemy uses -a wrapped subquery approach in conjunction with ROWNUM. The exact methodology +Oracle has no support for the LIMIT or OFFSET keywords. SQLAlchemy uses +a wrapped subquery approach in conjunction with ROWNUM. The exact methodology is taken from -http://www.oracle.com/technology/oramag/oracle/06-sep/o56asktom.html . +http://www.oracle.com/technology/oramag/oracle/06-sep/o56asktom.html . There are two options which affect its behavior: @@ -89,13 +89,13 @@ There are two options which affect its behavior: optimization directive, specify ``optimize_limits=True`` to :func:`.create_engine`. * the values passed for the limit/offset are sent as bound parameters. Some users have observed that Oracle produces a poor query plan when the values are sent as binds and not - rendered literally. To render the limit/offset values literally within the SQL + rendered literally. To render the limit/offset values literally within the SQL statement, specify ``use_binds_for_limits=False`` to :func:`.create_engine`. -Some users have reported better performance when the entirely different approach of a -window query is used, i.e. ROW_NUMBER() OVER (ORDER BY), to provide LIMIT/OFFSET (note -that the majority of users don't observe this). To suit this case the -method used for LIMIT/OFFSET can be replaced entirely. See the recipe at +Some users have reported better performance when the entirely different approach of a +window query is used, i.e. ROW_NUMBER() OVER (ORDER BY), to provide LIMIT/OFFSET (note +that the majority of users don't observe this). To suit this case the +method used for LIMIT/OFFSET can be replaced entirely. See the recipe at http://www.sqlalchemy.org/trac/wiki/UsageRecipes/WindowFunctionsByDefault which installs a select compiler that overrides the generation of limit/offset with a window function. @@ -103,11 +103,11 @@ a window function. ON UPDATE CASCADE ----------------- -Oracle doesn't have native ON UPDATE CASCADE functionality. A trigger based solution +Oracle doesn't have native ON UPDATE CASCADE functionality. A trigger based solution is available at http://asktom.oracle.com/tkyte/update_cascade/index.html . When using the SQLAlchemy ORM, the ORM has limited ability to manually issue -cascading updates - specify ForeignKey objects using the +cascading updates - specify ForeignKey objects using the "deferrable=True, initially='deferred'" keyword arguments, and specify "passive_updates=False" on each relationship(). @@ -121,21 +121,21 @@ behaviors: JOIN phrases into the WHERE clause, and in the case of LEFT OUTER JOIN makes use of Oracle's (+) operator. 
-* the NVARCHAR2 and NCLOB datatypes are no longer generated as DDL when - the :class:`~sqlalchemy.types.Unicode` is used - VARCHAR2 and CLOB are issued +* the NVARCHAR2 and NCLOB datatypes are no longer generated as DDL when + the :class:`~sqlalchemy.types.Unicode` is used - VARCHAR2 and CLOB are issued instead. This because these types don't seem to work correctly on Oracle 8 - even though they are available. The :class:`~sqlalchemy.types.NVARCHAR` + even though they are available. The :class:`~sqlalchemy.types.NVARCHAR` and :class:`~sqlalchemy.dialects.oracle.NCLOB` types will always generate NVARCHAR2 and NCLOB. -* the "native unicode" mode is disabled when using cx_oracle, i.e. SQLAlchemy +* the "native unicode" mode is disabled when using cx_oracle, i.e. SQLAlchemy encodes all Python unicode objects to "string" before passing in as bind parameters. Synonym/DBLINK Reflection ------------------------- When using reflection with Table objects, the dialect can optionally search for tables -indicated by synonyms that reference DBLINK-ed tables by passing the flag -oracle_resolve_synonyms=True as a keyword argument to the Table construct. If DBLINK +indicated by synonyms that reference DBLINK-ed tables by passing the flag +oracle_resolve_synonyms=True as a keyword argument to the Table construct. If DBLINK is not in use this flag should be left off. """ @@ -217,8 +217,8 @@ class LONG(sqltypes.Text): class INTERVAL(sqltypes.TypeEngine): __visit_name__ = 'INTERVAL' - def __init__(self, - day_precision=None, + def __init__(self, + day_precision=None, second_precision=None): """Construct an INTERVAL. @@ -303,10 +303,10 @@ class OracleTypeCompiler(compiler.GenericTypeCompiler): def visit_INTERVAL(self, type_): return "INTERVAL DAY%s TO SECOND%s" % ( - type_.day_precision is not None and + type_.day_precision is not None and "(%d)" % type_.day_precision or "", - type_.second_precision is not None and + type_.second_precision is not None and "(%d)" % type_.second_precision or "", ) @@ -340,7 +340,7 @@ class OracleTypeCompiler(compiler.GenericTypeCompiler): else: return "%(name)s(%(precision)s, %(scale)s)" % {'name':name,'precision': precision, 'scale' : scale} - def visit_string(self, type_): + def visit_string(self, type_): return self.visit_VARCHAR2(type_) def visit_VARCHAR2(self, type_): @@ -356,10 +356,10 @@ class OracleTypeCompiler(compiler.GenericTypeCompiler): def _visit_varchar(self, type_, n, num): if not n and self.dialect._supports_char_length: return "VARCHAR%(two)s(%(length)s CHAR)" % { - 'length' : type_.length, + 'length' : type_.length, 'two':num} else: - return "%(n)sVARCHAR%(two)s(%(length)s)" % {'length' : type_.length, + return "%(n)sVARCHAR%(two)s(%(length)s)" % {'length' : type_.length, 'two':num, 'n':n} def visit_text(self, type_): @@ -431,7 +431,7 @@ class OracleCompiler(compiler.SQLCompiler): return "" def default_from(self): - """Called when a ``SELECT`` statement has no froms, + """Called when a ``SELECT`` statement has no froms, and no ``FROM`` clause is to be appended. The Oracle compiler tacks a "FROM DUAL" to the statement. 
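The ``default_from()`` behavior can be seen by compiling a FROM-less SELECT
against the Oracle dialect; a hedged sketch, with the output approximated::

    from sqlalchemy import select, literal
    from sqlalchemy.dialects.oracle.base import OracleDialect

    stmt = select([literal(1)])
    print(stmt.compile(dialect=OracleDialect()))
    # renders along the lines of "SELECT :param_1 FROM DUAL"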
@@ -613,7 +613,7 @@ class OracleDDLCompiler(compiler.DDLCompiler): if constraint.ondelete is not None: text += " ON DELETE %s" % constraint.ondelete - # oracle has no ON UPDATE CASCADE - + # oracle has no ON UPDATE CASCADE - # its only available via triggers http://asktom.oracle.com/tkyte/update_cascade/index.html if constraint.onupdate is not None: util.warn( @@ -643,8 +643,8 @@ class OracleIdentifierPreparer(compiler.IdentifierPreparer): class OracleExecutionContext(default.DefaultExecutionContext): def fire_sequence(self, seq, type_): - return self._execute_scalar("SELECT " + - self.dialect.identifier_preparer.format_sequence(seq) + + return self._execute_scalar("SELECT " + + self.dialect.identifier_preparer.format_sequence(seq) + ".nextval FROM DUAL", type_) class OracleDialect(default.DefaultDialect): @@ -676,9 +676,9 @@ class OracleDialect(default.DefaultDialect): reflection_options = ('oracle_resolve_synonyms', ) - def __init__(self, - use_ansi=True, - optimize_limits=False, + def __init__(self, + use_ansi=True, + optimize_limits=False, use_binds_for_limits=True, **kwargs): default.DefaultDialect.__init__(self, **kwargs) @@ -808,8 +808,8 @@ class OracleDialect(default.DefaultDialect): if resolve_synonyms: actual_name, owner, dblink, synonym = self._resolve_synonym( - connection, - desired_owner=self.denormalize_name(schema), + connection, + desired_owner=self.denormalize_name(schema), desired_synonym=self.denormalize_name(table_name) ) else: @@ -876,11 +876,11 @@ class OracleDialect(default.DefaultDialect): char_length_col = 'char_length' else: char_length_col = 'data_length' - + c = connection.execute(sql.text( "SELECT column_name, data_type, %(char_length_col)s, data_precision, data_scale, " "nullable, data_default FROM ALL_TAB_COLUMNS%(dblink)s " - "WHERE table_name = :table_name AND owner = :owner " + "WHERE table_name = :table_name AND owner = :owner " "ORDER BY column_id" % {'dblink': dblink, 'char_length_col':char_length_col}), table_name=table_name, owner=schema) @@ -892,7 +892,7 @@ class OracleDialect(default.DefaultDialect): coltype = NUMBER(precision, scale) elif coltype in ('VARCHAR2', 'NVARCHAR2', 'CHAR'): coltype = self.ischema_names.get(coltype)(length) - elif 'WITH TIME ZONE' in coltype: + elif 'WITH TIME ZONE' in coltype: coltype = TIMESTAMP(timezone=True) else: coltype = re.sub(r'\(\d+\)', '', coltype) @@ -929,8 +929,8 @@ class OracleDialect(default.DefaultDialect): indexes = [] q = sql.text(""" SELECT a.index_name, a.column_name, b.uniqueness - FROM ALL_IND_COLUMNS%(dblink)s a, - ALL_INDEXES%(dblink)s b + FROM ALL_IND_COLUMNS%(dblink)s a, + ALL_INDEXES%(dblink)s b WHERE a.index_name = b.index_name AND a.table_owner = b.table_owner @@ -1112,8 +1112,8 @@ class OracleDialect(default.DefaultDialect): if resolve_synonyms: ref_remote_name, ref_remote_owner, ref_dblink, ref_synonym = \ self._resolve_synonym( - connection, - desired_owner=self.denormalize_name(remote_owner), + connection, + desired_owner=self.denormalize_name(remote_owner), desired_table=self.denormalize_name(remote_table) ) if ref_synonym: diff --git a/lib/sqlalchemy/dialects/oracle/cx_oracle.py b/lib/sqlalchemy/dialects/oracle/cx_oracle.py index 06b27b7104..116f6ada78 100644 --- a/lib/sqlalchemy/dialects/oracle/cx_oracle.py +++ b/lib/sqlalchemy/dialects/oracle/cx_oracle.py @@ -9,19 +9,19 @@ Driver ------ -The Oracle dialect uses the cx_oracle driver, available at -http://cx-oracle.sourceforge.net/ . 
The dialect has several behaviors +The Oracle dialect uses the cx_oracle driver, available at +http://cx-oracle.sourceforge.net/ . The dialect has several behaviors which are specifically tailored towards compatibility with this module. Version 5.0 or greater is **strongly** recommended, as SQLAlchemy makes -extensive use of the cx_oracle output converters for numeric and +extensive use of the cx_oracle output converters for numeric and string conversions. Connecting ---------- -Connecting with create_engine() uses the standard URL approach of -``oracle://user:pass@host:port/dbname[?key=value&key=value...]``. If dbname is present, the -host, port, and dbname tokens are converted to a TNS name using the cx_oracle +Connecting with create_engine() uses the standard URL approach of +``oracle://user:pass@host:port/dbname[?key=value&key=value...]``. If dbname is present, the +host, port, and dbname tokens are converted to a TNS name using the cx_oracle :func:`makedsn()` function. Otherwise, the host token is taken directly as a TNS name. Additional arguments which may be specified either as query string arguments on the @@ -53,7 +53,7 @@ handler so that all string based result values are returned as unicode as well. Generally, the ``NLS_LANG`` environment variable determines the nature of the encoding to be used. -Note that this behavior is disabled when Oracle 8 is detected, as it has been +Note that this behavior is disabled when Oracle 8 is detected, as it has been observed that issues remain when passing Python unicodes to cx_oracle with Oracle 8. LOB Objects @@ -71,7 +71,7 @@ To disable this processing, pass ``auto_convert_lobs=False`` to :func:`create_en Two Phase Transaction Support ----------------------------- -Two Phase transactions are implemented using XA transactions. Success has been reported +Two Phase transactions are implemented using XA transactions. Success has been reported with this feature but it should be regarded as experimental. Precision Numerics @@ -95,14 +95,14 @@ If precision numerics aren't required, the decimal handling can be disabled by passing the flag ``coerce_to_decimal=False`` to :func:`.create_engine`:: - engine = create_engine("oracle+cx_oracle://dsn", + engine = create_engine("oracle+cx_oracle://dsn", coerce_to_decimal=False) .. versionadded:: 0.7.6 Add the ``coerce_to_decimal`` flag. -Another alternative to performance is to use the -`cdecimal `_ library; +Another alternative to performance is to use the +`cdecimal `_ library; see :class:`.Numeric` for additional notes. The handler attempts to use the "precision" and "scale" @@ -160,7 +160,7 @@ class _OracleNumeric(sqltypes.Numeric): def result_processor(self, dialect, coltype): # we apply a cx_oracle type handler to all connections # that converts floating point strings to Decimal(). - # However, in some subquery situations, Oracle doesn't + # However, in some subquery situations, Oracle doesn't # give us enough information to determine int or Decimal. # It could even be int/Decimal differently on each row, # regardless of the scale given for the originating type. 
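The two URL forms from the Connecting section above, sketched with
placeholder names::

    from sqlalchemy import create_engine

    # host, port and dbname present: converted to a TNS name
    # via cx_oracle's makedsn()
    engine = create_engine(
        "oracle+cx_oracle://scott:tiger@localhost:1521/orcl")

    # no dbname: the host token is taken directly as a TNS name
    engine = create_engine("oracle+cx_oracle://scott:tiger@mytns")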
@@ -190,7 +190,7 @@ class _OracleNumeric(sqltypes.Numeric): else: return None else: - # cx_oracle 4 behavior, will assume + # cx_oracle 4 behavior, will assume # floats return super(_OracleNumeric, self).\ result_processor(dialect, coltype) @@ -237,7 +237,7 @@ class _NativeUnicodeMixin(object): # end Py2K # we apply a connection output handler that returns - # unicode in all cases, so the "native_unicode" flag + # unicode in all cases, so the "native_unicode" flag # will be set for the default String.result_processor. class _OracleChar(_NativeUnicodeMixin, sqltypes.CHAR): @@ -316,15 +316,15 @@ class OracleExecutionContext_cx_oracle(OracleExecutionContext): getattr(self.compiled, '_quoted_bind_names', None) if quoted_bind_names: if not self.dialect.supports_unicode_statements: - # if DBAPI doesn't accept unicode statements, + # if DBAPI doesn't accept unicode statements, # keys in self.parameters would have been encoded # here. so convert names in quoted_bind_names # to encoded as well. quoted_bind_names = \ dict( - (fromname.encode(self.dialect.encoding), - toname.encode(self.dialect.encoding)) - for fromname, toname in + (fromname.encode(self.dialect.encoding), + toname.encode(self.dialect.encoding)) + for fromname, toname in quoted_bind_names.items() ) for param in self.parameters: @@ -333,10 +333,10 @@ class OracleExecutionContext_cx_oracle(OracleExecutionContext): del param[fromname] if self.dialect.auto_setinputsizes: - # cx_oracle really has issues when you setinputsizes + # cx_oracle really has issues when you setinputsizes # on String, including that outparams/RETURNING # breaks for varchars - self.set_input_sizes(quoted_bind_names, + self.set_input_sizes(quoted_bind_names, exclude_types=self.dialect._cx_oracle_string_types ) @@ -369,7 +369,7 @@ class OracleExecutionContext_cx_oracle(OracleExecutionContext): def get_result_proxy(self): if hasattr(self, 'out_parameters') and self.compiled.returning: returning_params = dict( - (k, v.getvalue()) + (k, v.getvalue()) for k, v in self.out_parameters.items() ) return ReturningResultProxy(self, returning_params) @@ -395,7 +395,7 @@ class OracleExecutionContext_cx_oracle(OracleExecutionContext): impl_type = type.dialect_impl(self.dialect) dbapi_type = impl_type.get_dbapi_type(self.dialect.dbapi) result_processor = impl_type.\ - result_processor(self.dialect, + result_processor(self.dialect, dbapi_type) if result_processor is not None: out_parameters[name] = \ @@ -404,7 +404,7 @@ class OracleExecutionContext_cx_oracle(OracleExecutionContext): out_parameters[name] = self.out_parameters[name].getvalue() else: result.out_parameters = dict( - (k, v.getvalue()) + (k, v.getvalue()) for k, v in self.out_parameters.items() ) @@ -413,13 +413,13 @@ class OracleExecutionContext_cx_oracle(OracleExecutionContext): class OracleExecutionContext_cx_oracle_with_unicode(OracleExecutionContext_cx_oracle): """Support WITH_UNICODE in Python 2.xx. - WITH_UNICODE allows cx_Oracle's Python 3 unicode handling - behavior under Python 2.x. This mode in some cases disallows - and in other cases silently passes corrupted data when - non-Python-unicode strings (a.k.a. plain old Python strings) - are passed as arguments to connect(), the statement sent to execute(), + WITH_UNICODE allows cx_Oracle's Python 3 unicode handling + behavior under Python 2.x. This mode in some cases disallows + and in other cases silently passes corrupted data when + non-Python-unicode strings (a.k.a. 
plain old Python strings) + are passed as arguments to connect(), the statement sent to execute(), or any of the bind parameter keys or values sent to execute(). - This optional context therefore ensures that all statements are + This optional context therefore ensures that all statements are passed as Python unicode objects. """ @@ -450,7 +450,7 @@ class ReturningResultProxy(base.FullyBufferedResultProxy): return ret def _buffer_rows(self): - return collections.deque([tuple(self._returning_params["ret_%d" % i] + return collections.deque([tuple(self._returning_params["ret_%d" % i] for i, c in enumerate(self._returning_params))]) class OracleDialect_cx_oracle(OracleDialect): @@ -482,11 +482,11 @@ class OracleDialect_cx_oracle(OracleDialect): execute_sequence_format = list - def __init__(self, - auto_setinputsizes=True, - auto_convert_lobs=True, - threaded=True, - allow_twophase=True, + def __init__(self, + auto_setinputsizes=True, + auto_convert_lobs=True, + threaded=True, + allow_twophase=True, coerce_to_decimal=True, arraysize=50, **kwargs): OracleDialect.__init__(self, **kwargs) @@ -509,11 +509,11 @@ class OracleDialect_cx_oracle(OracleDialect): self._cx_oracle_string_types = types("STRING", "UNICODE", "NCLOB", "CLOB") self._cx_oracle_unicode_types = types("UNICODE", "NCLOB") - self._cx_oracle_binary_types = types("BFILE", "CLOB", "NCLOB", "BLOB") + self._cx_oracle_binary_types = types("BFILE", "CLOB", "NCLOB", "BLOB") self.supports_unicode_binds = self.cx_oracle_ver >= (5, 0) self.supports_native_decimal = ( - self.cx_oracle_ver >= (5, 0) and + self.cx_oracle_ver >= (5, 0) and coerce_to_decimal ) @@ -571,12 +571,12 @@ class OracleDialect_cx_oracle(OracleDialect): self._detect_decimal_char(connection) def _detect_decimal_char(self, connection): - """detect if the decimal separator character is not '.', as + """detect if the decimal separator character is not '.', as is the case with european locale settings for NLS_LANG. cx_oracle itself uses similar logic when it formats Python - Decimal objects to strings on the bind side (as of 5.0.3), - as Oracle sends/receives string numerics only in the + Decimal objects to strings on the bind side (as of 5.0.3), + as Oracle sends/receives string numerics only in the current locale. """ @@ -587,14 +587,14 @@ class OracleDialect_cx_oracle(OracleDialect): cx_Oracle = self.dbapi conn = connection.connection - # override the output_type_handler that's - # on the cx_oracle connection with a plain + # override the output_type_handler that's + # on the cx_oracle connection with a plain # one on the cursor - def output_type_handler(cursor, name, defaultType, + def output_type_handler(cursor, name, defaultType, size, precision, scale): return cursor.var( - cx_Oracle.STRING, + cx_Oracle.STRING, 255, arraysize=cursor.arraysize) cursor = conn.cursor() @@ -624,7 +624,7 @@ class OracleDialect_cx_oracle(OracleDialect): return cx_Oracle = self.dbapi - def output_type_handler(cursor, name, defaultType, + def output_type_handler(cursor, name, defaultType, size, precision, scale): # convert all NUMBER with precision + positive scale to Decimal # this almost allows "native decimal" mode. 
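The keyword arguments visible in the ``__init__`` hunk above all pass
through :func:`.create_engine`; an illustrative sketch using their
documented defaults (the TNS name is hypothetical)::

    from sqlalchemy import create_engine

    engine = create_engine(
        "oracle+cx_oracle://scott:tiger@mytns",
        auto_setinputsizes=True,  # setinputsizes(), excluding string types
        auto_convert_lobs=True,   # read LOB values up front
        threaded=True,            # passed to cx_Oracle.connect()
        allow_twophase=True,      # enable XA two-phase support
        coerce_to_decimal=True,   # NUMBER with scale -> Decimal
        arraysize=50)             # cursor arraysize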
@@ -632,22 +632,22 @@ class OracleDialect_cx_oracle(OracleDialect): defaultType == cx_Oracle.NUMBER and \ precision and scale > 0: return cursor.var( - cx_Oracle.STRING, - 255, - outconverter=self._to_decimal, + cx_Oracle.STRING, + 255, + outconverter=self._to_decimal, arraysize=cursor.arraysize) # if NUMBER with zero precision and 0 or neg scale, this appears - # to indicate "ambiguous". Use a slower converter that will - # make a decision based on each value received - the type + # to indicate "ambiguous". Use a slower converter that will + # make a decision based on each value received - the type # may change from row to row (!). This kills # off "native decimal" mode, handlers still needed. elif self.supports_native_decimal and \ defaultType == cx_Oracle.NUMBER \ and not precision and scale <= 0: return cursor.var( - cx_Oracle.STRING, - 255, - outconverter=self._detect_decimal, + cx_Oracle.STRING, + 255, + outconverter=self._detect_decimal, arraysize=cursor.arraysize) # allow all strings to come back natively as Unicode elif defaultType in (cx_Oracle.STRING, cx_Oracle.FIXED_CHAR): @@ -711,7 +711,7 @@ class OracleDialect_cx_oracle(OracleDialect): def _get_server_version_info(self, connection): return tuple( - int(x) + int(x) for x in connection.connection.version.split('.') ) diff --git a/lib/sqlalchemy/dialects/postgresql/__init__.py b/lib/sqlalchemy/dialects/postgresql/__init__.py index bc0c31275a..339634020f 100644 --- a/lib/sqlalchemy/dialects/postgresql/__init__.py +++ b/lib/sqlalchemy/dialects/postgresql/__init__.py @@ -14,7 +14,7 @@ from sqlalchemy.dialects.postgresql.base import \ DATE, BYTEA, BOOLEAN, INTERVAL, ARRAY, ENUM, dialect __all__ = ( -'INTEGER', 'BIGINT', 'SMALLINT', 'VARCHAR', 'CHAR', 'TEXT', 'NUMERIC', 'FLOAT', 'REAL', 'INET', +'INTEGER', 'BIGINT', 'SMALLINT', 'VARCHAR', 'CHAR', 'TEXT', 'NUMERIC', 'FLOAT', 'REAL', 'INET', 'CIDR', 'UUID', 'BIT', 'MACADDR', 'DOUBLE_PRECISION', 'TIMESTAMP', 'TIME', 'DATE', 'BYTEA', 'BOOLEAN', 'INTERVAL', 'ARRAY', 'ENUM', 'dialect' ) diff --git a/lib/sqlalchemy/dialects/postgresql/base.py b/lib/sqlalchemy/dialects/postgresql/base.py index 0ff54f8a2c..0cfb4dc477 100644 --- a/lib/sqlalchemy/dialects/postgresql/base.py +++ b/lib/sqlalchemy/dialects/postgresql/base.py @@ -21,7 +21,7 @@ default corresponding to the column. To specify a specific named sequence to be used for primary key generation, use the :func:`~sqlalchemy.schema.Sequence` construct:: - Table('sometable', metadata, + Table('sometable', metadata, Column('id', Integer, Sequence('some_id_seq'), primary_key=True) ) @@ -51,7 +51,7 @@ parameter are ``READ COMMITTED``, ``READ UNCOMMITTED``, ``REPEATABLE READ``, and ``SERIALIZABLE``:: engine = create_engine( - "postgresql+pg8000://scott:tiger@localhost/test", + "postgresql+pg8000://scott:tiger@localhost/test", isolation_level="READ UNCOMMITTED" ) @@ -75,7 +75,7 @@ the current ``search_path``, the "schema" attribute of the resulting remote table matches that of the referencing table, and the "schema" argument was explicitly stated on the referencing table. -The best practice here is to not use the ``schema`` argument +The best practice here is to not use the ``schema`` argument on :class:`.Table` for any schemas that are present in ``search_path``. ``search_path`` defaults to "public", but care should be taken to inspect the actual value using:: @@ -87,7 +87,7 @@ to inspect the actual value using:: were also in the ``search_path`` could make an incorrect assumption if the schemas were explicitly stated on each :class:`.Table`. 
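A sketch of the best practice stated above: omit ``schema`` for anything on
``search_path``, and name only schemas outside of it (all names here are
hypothetical)::

    from sqlalchemy import Table, Column, Integer, MetaData

    metadata = MetaData()

    # "public" is on the default search_path: no schema argument
    users = Table('users', metadata,
                  Column('id', Integer, primary_key=True))

    # a schema not on search_path is stated explicitly
    archived = Table('users_archive', metadata,
                     Column('id', Integer, primary_key=True),
                     schema='archive')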
-Background on PG's ``search_path`` is at: +Background on PG's ``search_path`` is at: http://www.postgresql.org/docs/9.0/static/ddl-schemas.html#DDL-SCHEMAS-PATH INSERT/UPDATE...RETURNING @@ -126,7 +126,7 @@ to the PostgreSQL dialect. Partial Indexes ^^^^^^^^^^^^^^^^ -Partial indexes add criterion to the index definition so that the index is +Partial indexes add criterion to the index definition so that the index is applied to a subset of rows. These can be specified on :class:`.Index` using the ``postgresql_where`` keyword argument:: @@ -140,11 +140,11 @@ an index (see http://www.postgresql.org/docs/8.3/interactive/indexes-opclass.htm The :class:`.Index` construct allows these to be specified via the ``postgresql_ops`` keyword argument:: - Index('my_index', my_table.c.id, my_table.c.data, + Index('my_index', my_table.c.id, my_table.c.data, postgresql_ops={ - 'data': 'text_pattern_ops', + 'data': 'text_pattern_ops', 'id': 'int4_ops' - }) + }) .. versionadded:: 0.7.2 ``postgresql_ops`` keyword argument to :class:`.Index` construct. @@ -349,9 +349,9 @@ class ARRAY(sqltypes.MutableType, sqltypes.Concatenable, sqltypes.TypeEngine): the fly :param mutable=False: Specify whether lists passed to this - class should be considered mutable - this enables - "mutable types" mode in the ORM. Be sure to read the - notes for :class:`.MutableType` regarding ORM + class should be considered mutable - this enables + "mutable types" mode in the ORM. Be sure to read the + notes for :class:`.MutableType` regarding ORM performance implications. .. versionchanged:: 0.7.0 @@ -359,7 +359,7 @@ class ARRAY(sqltypes.MutableType, sqltypes.Concatenable, sqltypes.TypeEngine): .. versionchanged:: 0.7 This functionality is now superseded by the - ``sqlalchemy.ext.mutable`` extension described in + ``sqlalchemy.ext.mutable`` extension described in :ref:`mutable_toplevel`. :param as_tuple=False: Specify whether return results @@ -451,37 +451,37 @@ PGArray = ARRAY class ENUM(sqltypes.Enum): """Postgresql ENUM type. - + This is a subclass of :class:`.types.Enum` which includes support for PG's ``CREATE TYPE``. - - :class:`~.postgresql.ENUM` is used automatically when + + :class:`~.postgresql.ENUM` is used automatically when using the :class:`.types.Enum` type on PG assuming - the ``native_enum`` is left as ``True``. However, the + the ``native_enum`` is left as ``True``. However, the :class:`~.postgresql.ENUM` class can also be instantiated directly in order to access some additional Postgresql-specific - options, namely finer control over whether or not + options, namely finer control over whether or not ``CREATE TYPE`` should be emitted. - - Note that both :class:`.types.Enum` as well as + + Note that both :class:`.types.Enum` as well as :class:`~.postgresql.ENUM` feature create/drop methods; the base :class:`.types.Enum` type ultimately delegates to the :meth:`~.postgresql.ENUM.create` and :meth:`~.postgresql.ENUM.drop` methods present here. - + """ def __init__(self, *enums, **kw): """Construct an :class:`~.postgresql.ENUM`. - + Arguments are the same as that of :class:`.types.Enum`, but also including the following parameters. - - :param create_type: Defaults to True. - Indicates that ``CREATE TYPE`` should be - emitted, after optionally checking for the - presence of the type, when the parent + + :param create_type: Defaults to True. 
+ Indicates that ``CREATE TYPE`` should be + emitted, after optionally checking for the + presence of the type, when the parent table is being created; and additionally that ``DROP TYPE`` is called when the table is dropped. When ``False``, no check @@ -492,7 +492,7 @@ class ENUM(sqltypes.Enum): are called directly. Setting to ``False`` is helpful when invoking a creation scheme to a SQL file - without access to the actual database - + without access to the actual database - the :meth:`~.postgresql.ENUM.create` and :meth:`~.postgresql.ENUM.drop` methods can be used to emit SQL to a target bind. @@ -504,20 +504,20 @@ class ENUM(sqltypes.Enum): super(ENUM, self).__init__(*enums, **kw) def create(self, bind=None, checkfirst=True): - """Emit ``CREATE TYPE`` for this + """Emit ``CREATE TYPE`` for this :class:`~.postgresql.ENUM`. - + If the underlying dialect does not support Postgresql CREATE TYPE, no action is taken. - + :param bind: a connectable :class:`.Engine`, :class:`.Connection`, or similar object to emit SQL. - :param checkfirst: if ``True``, a query against + :param checkfirst: if ``True``, a query against the PG catalog will be first performed to see if the type does not exist already before creating. - + """ if not bind.dialect.supports_native_enum: return @@ -527,19 +527,19 @@ class ENUM(sqltypes.Enum): bind.execute(CreateEnumType(self)) def drop(self, bind=None, checkfirst=True): - """Emit ``DROP TYPE`` for this + """Emit ``DROP TYPE`` for this :class:`~.postgresql.ENUM`. - + If the underlying dialect does not support Postgresql DROP TYPE, no action is taken. - + :param bind: a connectable :class:`.Engine`, :class:`.Connection`, or similar object to emit SQL. - :param checkfirst: if ``True``, a query against + :param checkfirst: if ``True``, a query against the PG catalog will be first performed to see if the type actually exists before dropping. - + """ if not bind.dialect.supports_native_enum: return @@ -551,7 +551,7 @@ class ENUM(sqltypes.Enum): def _check_for_name_in_memos(self, checkfirst, kw): """Look in the 'ddl runner' for 'memos', then note our name in that collection. - + This to ensure a particular named enum is operated upon only once within any kind of create/drop sequence without relying upon "checkfirst". 
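A hedged usage sketch for the ``create_type`` flag and the
:meth:`~.postgresql.ENUM.create` / :meth:`~.postgresql.ENUM.drop` methods
documented above; the engine URL and type name are placeholders::

    from sqlalchemy import create_engine
    from sqlalchemy.dialects.postgresql import ENUM

    engine = create_engine("postgresql://scott:tiger@localhost/test")

    # create_type=False: CREATE TYPE is not tied to table create/drop;
    # we emit it ourselves against a bind
    status = ENUM('pending', 'done', name='status_enum', create_type=False)
    status.create(engine, checkfirst=True)  # queries the PG catalog first
    # ... use the type, then later:
    status.drop(engine, checkfirst=True)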
@@ -628,14 +628,14 @@ class PGCompiler(compiler.SQLCompiler): def visit_match_op(self, binary, **kw): return "%s @@ to_tsquery(%s)" % ( - self.process(binary.left), + self.process(binary.left), self.process(binary.right)) def visit_ilike_op(self, binary, **kw): escape = binary.modifiers.get("escape", None) return '%s ILIKE %s' % \ (self.process(binary.left), self.process(binary.right)) \ - + (escape and + + (escape and (' ESCAPE ' + self.render_literal_value(escape, None)) or '') @@ -643,7 +643,7 @@ class PGCompiler(compiler.SQLCompiler): escape = binary.modifiers.get("escape", None) return '%s NOT ILIKE %s' % \ (self.process(binary.left), self.process(binary.right)) \ - + (escape and + + (escape and (' ESCAPE ' + self.render_literal_value(escape, None)) or '') @@ -694,9 +694,9 @@ class PGCompiler(compiler.SQLCompiler): columns = [ self.process( - self.label_select_column(None, c, asfrom=False), - within_columns_clause=True, - result_map=self.result_map) + self.label_select_column(None, c, asfrom=False), + within_columns_clause=True, + result_map=self.result_map) for c in expression._select_iterables(returning_cols) ] @@ -710,8 +710,8 @@ class PGCompiler(compiler.SQLCompiler): affinity = None casts = { - sqltypes.Date:'date', - sqltypes.DateTime:'timestamp', + sqltypes.Date:'date', + sqltypes.DateTime:'timestamp', sqltypes.Interval:'interval', sqltypes.Time:'time' } cast = casts.get(affinity, None) @@ -730,7 +730,7 @@ class PGDDLCompiler(compiler.DDLCompiler): column is column.table._autoincrement_column and \ not isinstance(impl_type, sqltypes.SmallInteger) and \ ( - column.default is None or + column.default is None or ( isinstance(column.default, schema.Sequence) and column.default.optional @@ -785,7 +785,7 @@ class PGDDLCompiler(compiler.DDLCompiler): text += "(%s)" \ % ( ', '.join([ - preparer.format_column(c) + + preparer.format_column(c) + (c.key in ops and (' ' + ops[c.key]) or '') for c in index.columns]) ) @@ -843,14 +843,14 @@ class PGTypeCompiler(compiler.GenericTypeCompiler): def visit_TIMESTAMP(self, type_): return "TIMESTAMP%s %s" % ( - getattr(type_, 'precision', None) and "(%d)" % + getattr(type_, 'precision', None) and "(%d)" % type_.precision or "", (type_.timezone and "WITH" or "WITHOUT") + " TIME ZONE" ) def visit_TIME(self, type_): return "TIME%s %s" % ( - getattr(type_, 'precision', None) and "(%d)" % + getattr(type_, 'precision', None) and "(%d)" % type_.precision or "", (type_.timezone and "WITH" or "WITHOUT") + " TIME ZONE" ) @@ -932,21 +932,21 @@ class PGExecutionContext(default.DefaultExecutionContext): return self._execute_scalar("select %s" % column.server_default.arg, column.type) - elif (column.default is None or + elif (column.default is None or (column.default.is_sequence and column.default.optional)): - # execute the sequence associated with a SERIAL primary + # execute the sequence associated with a SERIAL primary # key column. for non-primary-key SERIAL, the ID just # generates server side. 
try: seq_name = column._postgresql_seq_name except AttributeError: - tab = column.table.name - col = column.name - tab = tab[0:29 + max(0, (29 - len(col)))] - col = col[0:29 + max(0, (29 - len(tab)))] + tab = column.table.name + col = column.name + tab = tab[0:29 + max(0, (29 - len(col)))] + col = col[0:29 + max(0, (29 - len(tab)))] column._postgresql_seq_name = seq_name = "%s_%s_seq" % (tab, col) sch = column.table.schema @@ -1016,7 +1016,7 @@ class PGDialect(default.DefaultDialect): else: return None - _isolation_lookup = set(['SERIALIZABLE', + _isolation_lookup = set(['SERIALIZABLE', 'READ UNCOMMITTED', 'READ COMMITTED', 'REPEATABLE READ']) def set_isolation_level(self, connection, level): @@ -1024,9 +1024,9 @@ class PGDialect(default.DefaultDialect): if level not in self._isolation_lookup: raise exc.ArgumentError( "Invalid value '%s' for isolation_level. " - "Valid isolation levels for %s are %s" % + "Valid isolation levels for %s are %s" % (level, self.name, ", ".join(self._isolation_lookup)) - ) + ) cursor = connection.cursor() cursor.execute( "SET SESSION CHARACTERISTICS AS TRANSACTION " @@ -1047,13 +1047,13 @@ class PGDialect(default.DefaultDialect): def do_prepare_twophase(self, connection, xid): connection.execute("PREPARE TRANSACTION '%s'" % xid) - def do_rollback_twophase(self, connection, xid, + def do_rollback_twophase(self, connection, xid, is_prepared=True, recover=False): if is_prepared: if recover: - #FIXME: ugly hack to get out of transaction + #FIXME: ugly hack to get out of transaction # context when committing recoverable transactions - # Must find out a way how to make the dbapi not + # Must find out a way how to make the dbapi not # open a transaction. connection.execute("ROLLBACK") connection.execute("ROLLBACK PREPARED '%s'" % xid) @@ -1062,7 +1062,7 @@ class PGDialect(default.DefaultDialect): else: self.do_rollback(connection.connection) - def do_commit_twophase(self, connection, xid, + def do_commit_twophase(self, connection, xid, is_prepared=True, recover=False): if is_prepared: if recover: @@ -1114,10 +1114,10 @@ class PGDialect(default.DefaultDialect): "n.oid=c.relnamespace where n.nspname=:schema and " "relname=:name", bindparams=[ - sql.bindparam('name', + sql.bindparam('name', unicode(table_name), type_=sqltypes.Unicode), - sql.bindparam('schema', - unicode(schema), type_=sqltypes.Unicode)] + sql.bindparam('schema', + unicode(schema), type_=sqltypes.Unicode)] ) ) return bool(cursor.first()) @@ -1133,7 +1133,7 @@ class PGDialect(default.DefaultDialect): bindparams=[ sql.bindparam('name', unicode(sequence_name), type_=sqltypes.Unicode) - ] + ] ) ) else: @@ -1145,7 +1145,7 @@ class PGDialect(default.DefaultDialect): bindparams=[ sql.bindparam('name', unicode(sequence_name), type_=sqltypes.Unicode), - sql.bindparam('schema', + sql.bindparam('schema', unicode(schema), type_=sqltypes.Unicode) ] ) @@ -1273,13 +1273,13 @@ class PGDialect(default.DefaultDialect): SELECT relname FROM pg_class c WHERE relkind = 'v' - AND '%(schema)s' = (select nspname from pg_namespace n + AND '%(schema)s' = (select nspname from pg_namespace n where n.oid = c.relnamespace) """ % dict(schema=current_schema) # Py3K #view_names = [row[0] for row in connection.execute(s)] # Py2K - view_names = [row[0].decode(self.encoding) + view_names = [row[0].decode(self.encoding) for row in connection.execute(s)] # end Py2K return view_names @@ -1313,10 +1313,10 @@ class PGDialect(default.DefaultDialect): SQL_COLS = """ SELECT a.attname, pg_catalog.format_type(a.atttypid, a.atttypmod), - (SELECT 
substring(pg_catalog.pg_get_expr(d.adbin, d.adrelid) - for 128) + (SELECT substring(pg_catalog.pg_get_expr(d.adbin, d.adrelid) + for 128) FROM pg_catalog.pg_attrdef d - WHERE d.adrelid = a.attrelid AND d.adnum = a.attnum + WHERE d.adrelid = a.attrelid AND d.adnum = a.attnum AND a.atthasdef) AS DEFAULT, a.attnotnull, a.attnum, a.attrelid as table_oid @@ -1325,8 +1325,8 @@ class PGDialect(default.DefaultDialect): AND a.attnum > 0 AND NOT a.attisdropped ORDER BY a.attnum """ - s = sql.text(SQL_COLS, - bindparams=[sql.bindparam('table_oid', type_=sqltypes.Integer)], + s = sql.text(SQL_COLS, + bindparams=[sql.bindparam('table_oid', type_=sqltypes.Integer)], typemap={'attname':sqltypes.Unicode, 'default':sqltypes.Unicode} ) c = connection.execute(s, table_oid=table_oid) @@ -1337,7 +1337,7 @@ class PGDialect(default.DefaultDialect): # format columns columns = [] for name, format_type, default, notnull, attnum, table_oid in rows: - ## strip (5) from character varying(5), timestamp(5) + ## strip (5) from character varying(5), timestamp(5) # with time zone, etc attype = re.sub(r'\([\d,]+\)', '', format_type) @@ -1362,13 +1362,13 @@ class PGDialect(default.DefaultDialect): args = (53, ) elif attype == 'integer': args = () - elif attype in ('timestamp with time zone', + elif attype in ('timestamp with time zone', 'time with time zone'): kwargs['timezone'] = True if charlen: kwargs['precision'] = int(charlen) args = () - elif attype in ('timestamp without time zone', + elif attype in ('timestamp without time zone', 'time without time zone', 'time'): kwargs['timezone'] = False if charlen: @@ -1409,7 +1409,7 @@ class PGDialect(default.DefaultDialect): # A table can't override whether the domain is nullable. nullable = domain['nullable'] if domain['default'] and not default: - # It can, however, override the default + # It can, however, override the default # value, but can't set it to null. default = domain['default'] continue @@ -1435,7 +1435,7 @@ class PGDialect(default.DefaultDialect): sch = schema if '.' not in match.group(2) and sch is not None: # unconditionally quote the schema name. this could - # later be enhanced to obey quoting rules / + # later be enhanced to obey quoting rules / # "quote schema" default = match.group(1) + \ ('"%s"' % sch) + '.' 
+ \ @@ -1453,10 +1453,10 @@ class PGDialect(default.DefaultDialect): PK_SQL = """ SELECT a.attname - FROM + FROM pg_class t join pg_index ix on t.oid = ix.indrelid - join pg_attribute a + join pg_attribute a on t.oid=a.attrelid and a.attnum=ANY(ix.indkey) WHERE t.oid = :table_oid and @@ -1471,7 +1471,7 @@ class PGDialect(default.DefaultDialect): @reflection.cache def get_pk_constraint(self, connection, table_name, schema=None, **kw): - cols = self.get_primary_keys(connection, table_name, + cols = self.get_primary_keys(connection, table_name, schema=schema, **kw) table_oid = self.get_table_oid(connection, table_name, schema, @@ -1498,14 +1498,14 @@ class PGDialect(default.DefaultDialect): info_cache=kw.get('info_cache')) FK_SQL = """ - SELECT r.conname, + SELECT r.conname, pg_catalog.pg_get_constraintdef(r.oid, true) as condef, n.nspname as conschema FROM pg_catalog.pg_constraint r, pg_namespace n, pg_class c - WHERE r.conrelid = :table AND + WHERE r.conrelid = :table AND r.contype = 'f' AND c.oid = confrelid AND n.oid = c.relnamespace @@ -1522,7 +1522,7 @@ class PGDialect(default.DefaultDialect): '(?:(.*?)\.)?(.*?)\((.*?)\)', condef).groups() constrained_columns, referred_schema, \ referred_table, referred_columns = m - constrained_columns = [preparer._unquote_identifier(x) + constrained_columns = [preparer._unquote_identifier(x) for x in re.split(r'\s*,\s*', constrained_columns)] if referred_schema: @@ -1537,7 +1537,7 @@ class PGDialect(default.DefaultDialect): # and an explicit schema was given for the referencing table. referred_schema = schema referred_table = preparer._unquote_identifier(referred_table) - referred_columns = [preparer._unquote_identifier(x) + referred_columns = [preparer._unquote_identifier(x) for x in re.split(r'\s*,\s', referred_columns)] fkey_d = { 'name' : conname, @@ -1560,11 +1560,11 @@ class PGDialect(default.DefaultDialect): ix.indisunique, ix.indexprs, ix.indpred, a.attname FROM - pg_class t + pg_class t join pg_index ix on t.oid = ix.indrelid join pg_class i on i.oid=ix.indexrelid - left outer join - pg_attribute a + left outer join + pg_attribute a on t.oid=a.attrelid and a.attnum=ANY(ix.indkey) WHERE t.relkind = 'r' @@ -1616,7 +1616,7 @@ class PGDialect(default.DefaultDialect): SQL_ENUMS = """ SELECT t.typname as "name", -- no enum defaults in 8.4 at least - -- t.typdefault as "default", + -- t.typdefault as "default", pg_catalog.pg_type_is_visible(t.oid) as "visible", n.nspname as "schema", e.enumlabel as "label" @@ -1683,8 +1683,8 @@ class PGDialect(default.DefaultDialect): name = "%s.%s" % (domain['schema'], domain['name']) domains[name] = { - 'attype':attype, - 'nullable': domain['nullable'], + 'attype':attype, + 'nullable': domain['nullable'], 'default': domain['default'] } diff --git a/lib/sqlalchemy/dialects/postgresql/psycopg2.py b/lib/sqlalchemy/dialects/postgresql/psycopg2.py index ec8d0f219e..f9f992d04b 100644 --- a/lib/sqlalchemy/dialects/postgresql/psycopg2.py +++ b/lib/sqlalchemy/dialects/postgresql/psycopg2.py @@ -10,7 +10,7 @@ Driver ------ The psycopg2 driver is available at http://pypi.python.org/pypi/psycopg2/ . -The dialect has several behaviors which are specifically tailored towards compatibility +The dialect has several behaviors which are specifically tailored towards compatibility with this module. Note that psycopg1 is **not** supported. 
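A minimal connection sketch for this dialect, with placeholder credentials::

    from sqlalchemy import create_engine

    engine = create_engine(
        "postgresql+psycopg2://scott:tiger@localhost/test")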
@@ -48,7 +48,7 @@ which specifies Unix-domain communication rather than TCP/IP communication:: create_engine("postgresql+psycopg2://user:password@/dbname") By default, the socket file used is to connect to a Unix-domain socket -in ``/tmp``, or whatever socket directory was specified when PostgreSQL +in ``/tmp``, or whatever socket directory was specified when PostgreSQL was built. This value can be overridden by passing a pathname to psycopg2, using ``host`` as an additional keyword argument:: @@ -61,11 +61,11 @@ See also: Per-Statement/Connection Execution Options ------------------------------------------- -The following DBAPI-specific options are respected when used with +The following DBAPI-specific options are respected when used with :meth:`.Connection.execution_options`, :meth:`.Executable.execution_options`, :meth:`.Query.execution_options`, in addition to those not specific to DBAPIs: -* isolation_level - Set the transaction isolation level for the lifespan of a +* isolation_level - Set the transaction isolation level for the lifespan of a :class:`.Connection` (can only be set on a connection, not a statement or query). This includes the options ``SERIALIZABLE``, ``READ COMMITTED``, ``READ UNCOMMITTED`` and ``REPEATABLE READ``. @@ -79,8 +79,8 @@ By default, the psycopg2 driver uses the ``psycopg2.extensions.UNICODE`` extension, such that the DBAPI receives and returns all strings as Python Unicode objects directly - SQLAlchemy passes these values through without change. Psycopg2 here will encode/decode string values based on the -current "client encoding" setting; by default this is the value in -the ``postgresql.conf`` file, which often defaults to ``SQL_ASCII``. +current "client encoding" setting; by default this is the value in +the ``postgresql.conf`` file, which often defaults to ``SQL_ASCII``. Typically, this can be changed to ``utf-8``, as a more useful default:: #client_encoding = sql_ascii # actually, defaults to database @@ -90,7 +90,7 @@ Typically, this can be changed to ``utf-8``, as a more useful default:: A second way to affect the client encoding is to set it within Psycopg2 locally. SQLAlchemy will call psycopg2's ``set_client_encoding()`` method (see: http://initd.org/psycopg/docs/connection.html#connection.set_client_encoding) -on all new connections based on the value passed to +on all new connections based on the value passed to :func:`.create_engine` using the ``client_encoding`` parameter:: engine = create_engine("postgresql://user:pass@host/dbname", client_encoding='utf8') @@ -102,15 +102,15 @@ This overrides the encoding specified in the Postgresql client configuration. SQLAlchemy can also be instructed to skip the usage of the psycopg2 ``UNICODE`` extension and to instead utilize it's own unicode encode/decode -services, which are normally reserved only for those DBAPIs that don't -fully support unicode directly. Passing ``use_native_unicode=False`` +services, which are normally reserved only for those DBAPIs that don't +fully support unicode directly. Passing ``use_native_unicode=False`` to :func:`.create_engine` will disable usage of ``psycopg2.extensions.UNICODE``. -SQLAlchemy will instead encode data itself into Python bytestrings on the way +SQLAlchemy will instead encode data itself into Python bytestrings on the way in and coerce from bytes on the way back, -using the value of the :func:`.create_engine` ``encoding`` parameter, which +using the value of the :func:`.create_engine` ``encoding`` parameter, which defaults to ``utf-8``. 
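The two encoding-related switches just described, sketched with a
placeholder URL::

    from sqlalchemy import create_engine

    # have psycopg2's set_client_encoding() called on each new connection
    engine = create_engine(
        "postgresql+psycopg2://scott:tiger@localhost/test",
        client_encoding='utf8')

    # bypass psycopg2's UNICODE extension; SQLAlchemy encodes/decodes
    # itself using the ``encoding`` parameter (utf-8 by default)
    engine = create_engine(
        "postgresql+psycopg2://scott:tiger@localhost/test",
        use_native_unicode=False, encoding='utf-8')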
SQLAlchemy's own unicode encode/decode functionality is steadily becoming -obsolete as more DBAPIs support unicode fully along with the approach of +obsolete as more DBAPIs support unicode fully along with the approach of Python 3; in modern usage psycopg2 should be relied upon to handle unicode. Transactions @@ -132,7 +132,7 @@ at the API level what level should be used. NOTICE logging --------------- -The psycopg2 dialect will log Postgresql NOTICE messages via the +The psycopg2 dialect will log Postgresql NOTICE messages via the ``sqlalchemy.dialects.postgresql`` logger:: import logging @@ -220,8 +220,8 @@ class PGExecutionContext_psycopg2(PGExecutionContext): (self.compiled and isinstance(self.compiled.statement, expression.Selectable) \ or \ ( - (not self.compiled or - isinstance(self.compiled.statement, expression._TextClause)) + (not self.compiled or + isinstance(self.compiled.statement, expression._TextClause)) and self.statement and SERVER_SIDE_CURSOR_RE.match(self.statement)) ) ) @@ -249,7 +249,7 @@ class PGExecutionContext_psycopg2(PGExecutionContext): def _log_notices(self, cursor): for notice in cursor.connection.notices: - # NOTICE messages have a + # NOTICE messages have a # newline character at the end logger.info(notice.rstrip()) @@ -291,7 +291,7 @@ class PGDialect_psycopg2(PGDialect): } ) - def __init__(self, server_side_cursors=False, use_native_unicode=True, + def __init__(self, server_side_cursors=False, use_native_unicode=True, client_encoding=None, **kwargs): PGDialect.__init__(self, **kwargs) self.server_side_cursors = server_side_cursors @@ -299,12 +299,12 @@ class PGDialect_psycopg2(PGDialect): self.supports_unicode_binds = use_native_unicode self.client_encoding = client_encoding if self.dbapi and hasattr(self.dbapi, '__version__'): - m = re.match(r'(\d+)\.(\d+)(?:\.(\d+))?', + m = re.match(r'(\d+)\.(\d+)(?:\.(\d+))?', self.dbapi.__version__) if m: self.psycopg2_version = tuple( - int(x) - for x in m.group(1, 2, 3) + int(x) + for x in m.group(1, 2, 3) if x is not None) @classmethod @@ -316,8 +316,8 @@ class PGDialect_psycopg2(PGDialect): def _isolation_lookup(self): extensions = __import__('psycopg2.extensions').extensions return { - 'READ COMMITTED':extensions.ISOLATION_LEVEL_READ_COMMITTED, - 'READ UNCOMMITTED':extensions.ISOLATION_LEVEL_READ_UNCOMMITTED, + 'READ COMMITTED':extensions.ISOLATION_LEVEL_READ_COMMITTED, + 'READ UNCOMMITTED':extensions.ISOLATION_LEVEL_READ_UNCOMMITTED, 'REPEATABLE READ':extensions.ISOLATION_LEVEL_REPEATABLE_READ, 'SERIALIZABLE':extensions.ISOLATION_LEVEL_SERIALIZABLE } @@ -328,9 +328,9 @@ class PGDialect_psycopg2(PGDialect): except KeyError: raise exc.ArgumentError( "Invalid value '%s' for isolation_level. " - "Valid isolation levels for %s are %s" % + "Valid isolation levels for %s are %s" % (level, self.name, ", ".join(self._isolation_lookup)) - ) + ) connection.set_isolation_level(level) @@ -370,8 +370,8 @@ class PGDialect_psycopg2(PGDialect): def is_disconnect(self, e, connection, cursor): if isinstance(e, self.dbapi.OperationalError): # these error messages from libpq: interfaces/libpq/fe-misc.c. 
- # TODO: these are sent through gettext in libpq and we can't - # check within other locales - consider using connection.closed + # TODO: these are sent through gettext in libpq and we can't + # check within other locales - consider using connection.closed return 'closed the connection' in str(e) or \ 'connection not open' in str(e) or \ 'could not receive data from server' in str(e) @@ -380,7 +380,7 @@ class PGDialect_psycopg2(PGDialect): return 'connection already closed' in str(e) or \ 'cursor already closed' in str(e) elif isinstance(e, self.dbapi.ProgrammingError): - # not sure where this path is originally from, it may + # not sure where this path is originally from, it may # be obsolete. It really says "losed", not "closed". return "losed the connection unexpectedly" in str(e) else: diff --git a/lib/sqlalchemy/dialects/sqlite/base.py b/lib/sqlalchemy/dialects/sqlite/base.py index 063ac9bc89..793d0c49b3 100644 --- a/lib/sqlalchemy/dialects/sqlite/base.py +++ b/lib/sqlalchemy/dialects/sqlite/base.py @@ -12,7 +12,7 @@ section regarding that driver. Date and Time Types ------------------- -SQLite does not have built-in DATE, TIME, or DATETIME types, and pysqlite does not provide +SQLite does not have built-in DATE, TIME, or DATETIME types, and pysqlite does not provide out of the box functionality for translating values between Python `datetime` objects and a SQLite-supported format. SQLAlchemy's own :class:`~sqlalchemy.types.DateTime` and related types provide date formatting and parsing functionality when SQlite is used. @@ -36,19 +36,19 @@ Two things to note: This is regardless of the AUTOINCREMENT keyword being present or not. To specifically render the AUTOINCREMENT keyword on the primary key -column when rendering DDL, add the flag ``sqlite_autoincrement=True`` +column when rendering DDL, add the flag ``sqlite_autoincrement=True`` to the Table construct:: Table('sometable', metadata, - Column('id', Integer, primary_key=True), + Column('id', Integer, primary_key=True), sqlite_autoincrement=True) Transaction Isolation Level --------------------------- -:func:`.create_engine` accepts an ``isolation_level`` parameter which results in -the command ``PRAGMA read_uncommitted `` being invoked for every new -connection. Valid values for this parameter are ``SERIALIZABLE`` and +:func:`.create_engine` accepts an ``isolation_level`` parameter which results in +the command ``PRAGMA read_uncommitted `` being invoked for every new +connection. Valid values for this parameter are ``SERIALIZABLE`` and ``READ UNCOMMITTED`` corresponding to a value of 0 and 1, respectively. See the section :ref:`pysqlite_serializable` for an important workaround when using serializable isolation with Pysqlite. @@ -57,31 +57,31 @@ Database Locking Behavior / Concurrency --------------------------------------- Note that SQLite is not designed for a high level of concurrency. The database -itself, being a file, is locked completely during write operations and within +itself, being a file, is locked completely during write operations and within transactions, meaning exactly one connection has exclusive access to the database during this period - all other connections will be blocked during this time. The Python DBAPI specification also calls for a connection model that is always in a transaction; there is no BEGIN method, only commit and rollback. 
This implies -that a SQLite DBAPI driver would technically allow only serialized access to a +that a SQLite DBAPI driver would technically allow only serialized access to a particular database file at all times. The pysqlite driver attempts to ameliorate this by deferring the actual BEGIN statement until the first DML (INSERT, UPDATE, or DELETE) is received within a transaction. While this breaks serializable isolation, it at least delays the exclusive locking inherent in SQLite's design. -SQLAlchemy's default mode of usage with the ORM is known -as "autocommit=False", which means the moment the :class:`.Session` begins to be +SQLAlchemy's default mode of usage with the ORM is known +as "autocommit=False", which means the moment the :class:`.Session` begins to be used, a transaction is begun. As the :class:`.Session` is used, the autoflush -feature, also on by default, will flush out pending changes to the database +feature, also on by default, will flush out pending changes to the database before each query. The effect of this is that a :class:`.Session` used in its default mode will often emit DML early on, long before the transaction is actually -committed. This again will have the effect of serializing access to the SQLite +committed. This again will have the effect of serializing access to the SQLite database. If highly concurrent reads are desired against the SQLite database, it is advised that the autoflush feature be disabled, and potentially even that autocommit be re-enabled, which has the effect of each SQL statement and flush committing changes immediately. -For more information on SQLite's lack of concurrency by design, please +For more information on SQLite's lack of concurrency by design, please see `Situations Where Another RDBMS May Work Better - High Concurrency `_ near the bottom of the page. @@ -112,35 +112,35 @@ class _DateTimeMixin(object): class DATETIME(_DateTimeMixin, sqltypes.DateTime): """Represent a Python datetime object in SQLite using a string. - + The default string storage format is:: - - "%04d-%02d-%02d %02d:%02d:%02d.%06d" % (value.year, + + "%04d-%02d-%02d %02d:%02d:%02d.%06d" % (value.year, value.month, value.day, - value.hour, value.minute, + value.hour, value.minute, value.second, value.microsecond) - + e.g.:: - + 2011-03-15 12:05:57.10558 - - The storage format can be customized to some degree using the + + The storage format can be customized to some degree using the ``storage_format`` and ``regexp`` parameters, such as:: - + import re from sqlalchemy.dialects.sqlite import DATETIME - + dt = DATETIME( storage_format="%04d/%02d/%02d %02d-%02d-%02d-%06d", regexp=re.compile("(\d+)/(\d+)/(\d+) (\d+)-(\d+)-(\d+)(?:-(\d+))?") ) - - :param storage_format: format string which will be applied to the + + :param storage_format: format string which will be applied to the tuple ``(value.year, value.month, value.day, value.hour, value.minute, value.second, value.microsecond)``, given a Python datetime.datetime() object. - - :param regexp: regular expression which will be applied to + + :param regexp: regular expression which will be applied to incoming result rows. The resulting match object is applied to the Python datetime() constructor via ``*map(int, match_obj.groups(0))``. @@ -178,16 +178,16 @@ class DATE(_DateTimeMixin, sqltypes.Date): """Represent a Python date object in SQLite using a string. 
The default string storage format is:: - + "%04d-%02d-%02d" % (value.year, value.month, value.day) - + e.g.:: - + 2011-03-15 - - The storage format can be customized to some degree using the + + The storage format can be customized to some degree using the ``storage_format`` and ``regexp`` parameters, such as:: - + import re from sqlalchemy.dialects.sqlite import DATE @@ -195,16 +195,16 @@ class DATE(_DateTimeMixin, sqltypes.Date): storage_format="%02d/%02d/%02d", regexp=re.compile("(\d+)/(\d+)/(\d+)") ) - - :param storage_format: format string which will be applied to the + + :param storage_format: format string which will be applied to the tuple ``(value.year, value.month, value.day)``, given a Python datetime.date() object. - - :param regexp: regular expression which will be applied to + + :param regexp: regular expression which will be applied to incoming result rows. The resulting match object is applied to the Python date() constructor via ``*map(int, match_obj.groups(0))``. - + """ _storage_format = "%04d-%02d-%02d" @@ -231,20 +231,20 @@ class DATE(_DateTimeMixin, sqltypes.Date): class TIME(_DateTimeMixin, sqltypes.Time): """Represent a Python time object in SQLite using a string. - + The default string storage format is:: - - "%02d:%02d:%02d.%06d" % (value.hour, value.minute, + + "%02d:%02d:%02d.%06d" % (value.hour, value.minute, value.second, value.microsecond) - + e.g.:: - + 12:05:57.10558 - - The storage format can be customized to some degree using the + + The storage format can be customized to some degree using the ``storage_format`` and ``regexp`` parameters, such as:: - + import re from sqlalchemy.dialects.sqlite import TIME @@ -252,12 +252,12 @@ class TIME(_DateTimeMixin, sqltypes.Time): storage_format="%02d-%02d-%02d-%06d", regexp=re.compile("(\d+)-(\d+)-(\d+)-(?:-(\d+))?") ) - - :param storage_format: format string which will be applied + + :param storage_format: format string which will be applied to the tuple ``(value.hour, value.minute, value.second, value.microsecond)``, given a Python datetime.time() object. - - :param regexp: regular expression which will be applied to + + :param regexp: regular expression which will be applied to incoming result rows. The resulting match object is applied to the Python time() constructor via ``*map(int, match_obj.groups(0))``. @@ -405,7 +405,7 @@ class SQLiteDDLCompiler(compiler.DDLCompiler): issubclass(c.type._type_affinity, sqltypes.Integer) and \ not c.foreign_keys: return None - + return super(SQLiteDDLCompiler, self).\ visit_primary_key_constraint(constraint) @@ -480,7 +480,7 @@ class SQLiteExecutionContext(default.DefaultExecutionContext): def _translate_colname(self, colname): # adjust for dotted column names. SQLite - # in the case of UNION may store col names as + # in the case of UNION may store col names as # "tablename.colname" # in cursor.description if not self._preserve_raw_colnames and "." in colname: @@ -517,7 +517,7 @@ class SQLiteDialect(default.DefaultDialect): # this flag used by pysqlite dialect, and perhaps others in the # future, to indicate the driver is handling date/timestamp - # conversions (and perhaps datetime/time as well on some + # conversions (and perhaps datetime/time as well on some # hypothetical driver ?) self.native_datetime = native_datetime @@ -537,9 +537,9 @@ class SQLiteDialect(default.DefaultDialect): except KeyError: raise exc.ArgumentError( "Invalid value '%s' for isolation_level. 
" - "Valid isolation levels for %s are %s" % + "Valid isolation levels for %s are %s" % (level, self.name, ", ".join(self._isolation_lookup)) - ) + ) cursor = connection.cursor() cursor.execute("PRAGMA read_uncommitted = %d" % isolation_level) cursor.close() @@ -550,11 +550,11 @@ class SQLiteDialect(default.DefaultDialect): res = cursor.fetchone() if res: value = res[0] - else: + else: # http://www.sqlite.org/changes.html#version_3_3_3 - # "Optional READ UNCOMMITTED isolation (instead of the - # default isolation level of SERIALIZABLE) and - # table level locking when database connections + # "Optional READ UNCOMMITTED isolation (instead of the + # default isolation level of SERIALIZABLE) and + # table level locking when database connections # share a common cache."" # pre-SQLite 3.3.0 default to 0 value = 0 @@ -670,7 +670,7 @@ class SQLiteDialect(default.DefaultDialect): pragma = "PRAGMA " qtable = quote(table_name) c = _pragma_cursor( - connection.execute("%stable_info(%s)" % + connection.execute("%stable_info(%s)" % (pragma, qtable))) found_table = False columns = [] @@ -679,7 +679,7 @@ class SQLiteDialect(default.DefaultDialect): if row is None: break (name, type_, nullable, default, has_default, primary_key) = \ - (row[1], row[2].upper(), not row[3], + (row[1], row[2].upper(), not row[3], row[4], row[4] is not None, row[5]) name = re.sub(r'^\"|\"$', '', name) match = re.match(r'(\w+)(\(.*?\))?', type_) @@ -796,7 +796,7 @@ class SQLiteDialect(default.DefaultDialect): def _pragma_cursor(cursor): - """work around SQLite issue whereby cursor.description + """work around SQLite issue whereby cursor.description is blank when PRAGMA returns no rows.""" if cursor.closed: diff --git a/lib/sqlalchemy/dialects/sqlite/pysqlite.py b/lib/sqlalchemy/dialects/sqlite/pysqlite.py index 71f91aa364..c18fd302f2 100644 --- a/lib/sqlalchemy/dialects/sqlite/pysqlite.py +++ b/lib/sqlalchemy/dialects/sqlite/pysqlite.py @@ -12,15 +12,15 @@ module included with the Python distribution. Driver ------ -When using Python 2.5 and above, the built in ``sqlite3`` driver is +When using Python 2.5 and above, the built in ``sqlite3`` driver is already installed and no additional installation is needed. Otherwise, the ``pysqlite2`` driver needs to be present. This is the same driver as ``sqlite3``, just with a different name. The ``pysqlite2`` driver will be loaded first, and if not found, ``sqlite3`` is loaded. This allows an explicitly installed pysqlite driver to take -precedence over the built in one. As with all dialects, a specific -DBAPI module may be provided to :func:`~sqlalchemy.create_engine()` to control +precedence over the built in one. As with all dialects, a specific +DBAPI module may be provided to :func:`~sqlalchemy.create_engine()` to control this explicitly:: from sqlite3 import dbapi2 as sqlite @@ -64,25 +64,25 @@ The sqlite ``:memory:`` identifier is the default if no filepath is present. Sp Compatibility with sqlite3 "native" date and datetime types ----------------------------------------------------------- -The pysqlite driver includes the sqlite3.PARSE_DECLTYPES and +The pysqlite driver includes the sqlite3.PARSE_DECLTYPES and sqlite3.PARSE_COLNAMES options, which have the effect of any column or expression explicitly cast as "date" or "timestamp" will be converted -to a Python date or datetime object. 
The date and datetime types provided -with the pysqlite dialect are not currently compatible with these options, -since they render the ISO date/datetime including microseconds, which +to a Python date or datetime object. The date and datetime types provided +with the pysqlite dialect are not currently compatible with these options, +since they render the ISO date/datetime including microseconds, which pysqlite's driver does not. Additionally, SQLAlchemy does not at -this time automatically render the "cast" syntax required for the +this time automatically render the "cast" syntax required for the freestanding functions "current_timestamp" and "current_date" to return -datetime/date types natively. Unfortunately, pysqlite +datetime/date types natively. Unfortunately, pysqlite does not provide the standard DBAPI types in ``cursor.description``, -leaving SQLAlchemy with no way to detect these types on the fly +leaving SQLAlchemy with no way to detect these types on the fly without expensive per-row type checks. Keeping in mind that pysqlite's parsing option is not recommended, -nor should be necessary, for use with SQLAlchemy, usage of PARSE_DECLTYPES +nor should be necessary, for use with SQLAlchemy, usage of PARSE_DECLTYPES can be forced if one configures "native_datetime=True" on create_engine():: - engine = create_engine('sqlite://', + engine = create_engine('sqlite://', connect_args={'detect_types': sqlite3.PARSE_DECLTYPES|sqlite3.PARSE_COLNAMES}, native_datetime=True ) @@ -99,7 +99,7 @@ Threading/Pooling Behavior Pysqlite's default behavior is to prohibit the usage of a single connection in more than one thread. This is controlled by the ``check_same_thread`` Pysqlite flag. This default is intended to work with older versions -of SQLite that did not support multithreaded operation under +of SQLite that did not support multithreaded operation under various circumstances. In particular, older SQLite versions did not allow a ``:memory:`` database to be used in multiple threads under any circumstances. @@ -109,9 +109,9 @@ SQLAlchemy sets up pooling to work with Pysqlite's default behavior: * When a ``:memory:`` SQLite database is specified, the dialect by default will use :class:`.SingletonThreadPool`. This pool maintains a single connection per thread, so that all access to the engine within the current thread use the - same ``:memory:`` database - other threads would access a different + same ``:memory:`` database - other threads would access a different ``:memory:`` database. -* When a file-based database is specified, the dialect will use :class:`.NullPool` +* When a file-based database is specified, the dialect will use :class:`.NullPool` as the source of connections. This pool closes and discards connections which are returned to the pool immediately. SQLite file-based connections have extremely low overhead, so pooling is not necessary. The scheme also @@ -141,7 +141,7 @@ can be passed to Pysqlite as ``False``:: connect_args={'check_same_thread':False}, poolclass=StaticPool) -Note that using a ``:memory:`` database in multiple threads requires a recent +Note that using a ``:memory:`` database in multiple threads requires a recent version of SQLite. Using Temporary Tables with SQLite @@ -175,8 +175,8 @@ Unicode The pysqlite driver only returns Python ``unicode`` objects in result sets, never plain strings, and accommodates ``unicode`` objects within bound parameter -values in all cases. 
Regardless of the SQLAlchemy string type in use, -string-based result values will by Python ``unicode`` in Python 2. +values in all cases. Regardless of the SQLAlchemy string type in use, +string-based result values will by Python ``unicode`` in Python 2. The :class:`.Unicode` type should still be used to indicate those columns that require unicode, however, so that non-``unicode`` values passed inadvertently will emit a warning. Pysqlite will emit an error if a non-``unicode`` string @@ -191,7 +191,7 @@ The pysqlite DBAPI driver has a long-standing bug in which transactional state is not begun until the first DML statement, that is INSERT, UPDATE or DELETE, is emitted. A SELECT statement will not cause transactional state to begin. While this mode of usage is fine for typical situations -and has the advantage that the SQLite database file is not prematurely +and has the advantage that the SQLite database file is not prematurely locked, it breaks serializable transaction isolation, which requires that the database file be locked upon any SQL being emitted. diff --git a/lib/sqlalchemy/dialects/sybase/base.py b/lib/sqlalchemy/dialects/sybase/base.py index affef974cc..f551bff991 100644 --- a/lib/sqlalchemy/dialects/sybase/base.py +++ b/lib/sqlalchemy/dialects/sybase/base.py @@ -1,8 +1,8 @@ # sybase/base.py # Copyright (C) 2010-2011 the SQLAlchemy authors and contributors # get_select_precolumns(), limit_clause() implementation -# copyright (C) 2007 Fisch Asset Management -# AG http://www.fam.ch, with coding by Alexander Houben +# copyright (C) 2007 Fisch Asset Management +# AG http://www.fam.ch, with coding by Alexander Houben # alexander.houben@thor-solutions.ch # # This module is part of SQLAlchemy and is released under @@ -13,7 +13,7 @@ .. note:: The Sybase dialect functions on current SQLAlchemy versions - but is not regularly tested, and may have many issues and + but is not regularly tested, and may have many issues and caveats not currently handled. In particular, the table and database reflection features are not implemented. @@ -130,7 +130,7 @@ class UNIQUEIDENTIFIER(sqltypes.TypeEngine): class IMAGE(sqltypes.LargeBinary): __visit_name__ = 'IMAGE' - + class SybaseTypeCompiler(compiler.GenericTypeCompiler): def visit_large_binary(self, type_): @@ -224,12 +224,12 @@ class SybaseExecutionContext(default.DefaultExecutionContext): self._enable_identity_insert = False if self._enable_identity_insert: - self.cursor.execute("SET IDENTITY_INSERT %s ON" % + self.cursor.execute("SET IDENTITY_INSERT %s ON" % self.dialect.identifier_preparer.format_table(tbl)) if self.isddl: # TODO: to enhance this, we can detect "ddl in tran" on the - # database settings. this error message should be improved to + # database settings. this error message should be improved to # include a note about that. 
if not self.should_autocommit: raise exc.InvalidRequestError( @@ -240,7 +240,7 @@ class SybaseExecutionContext(default.DefaultExecutionContext): "AUTOCOMMIT (Assuming no Sybase 'ddl in tran')") self.set_ddl_autocommit( - self.root_connection.connection.connection, + self.root_connection.connection.connection, True) @@ -304,7 +304,7 @@ class SybaseSQLCompiler(compiler.SQLCompiler): field, self.process(extract.expr, **kw)) def for_update_clause(self, select): - # "FOR UPDATE" is only allowed on "DECLARE CURSOR" + # "FOR UPDATE" is only allowed on "DECLARE CURSOR" # which SQLAlchemy doesn't use return '' diff --git a/lib/sqlalchemy/dialects/sybase/pyodbc.py b/lib/sqlalchemy/dialects/sybase/pyodbc.py index 35d8d15420..70bdd71a26 100644 --- a/lib/sqlalchemy/dialects/sybase/pyodbc.py +++ b/lib/sqlalchemy/dialects/sybase/pyodbc.py @@ -17,7 +17,7 @@ Connect strings are of the form:: Unicode Support --------------- -The pyodbc driver currently supports usage of these Sybase types with +The pyodbc driver currently supports usage of these Sybase types with Unicode or multibyte strings:: CHAR @@ -43,7 +43,7 @@ from sqlalchemy.util.compat import decimal class _SybNumeric_pyodbc(sqltypes.Numeric): """Turns Decimals with adjusted() < -6 into floats. - It's not yet known how to get decimals with many + It's not yet known how to get decimals with many significant digits or very large adjusted() into Sybase via pyodbc. diff --git a/lib/sqlalchemy/dialects/sybase/pysybase.py b/lib/sqlalchemy/dialects/sybase/pysybase.py index e3bfae06cd..bf8c2096b8 100644 --- a/lib/sqlalchemy/dialects/sybase/pysybase.py +++ b/lib/sqlalchemy/dialects/sybase/pysybase.py @@ -38,7 +38,7 @@ class SybaseExecutionContext_pysybase(SybaseExecutionContext): def set_ddl_autocommit(self, dbapi_connection, value): if value: # call commit() on the Sybase connection directly, - # to avoid any side effects of calling a Connection + # to avoid any side effects of calling a Connection # transactional method inside of pre_exec() dbapi_connection.commit() @@ -83,7 +83,7 @@ class SybaseDialect_pysybase(SybaseDialect): def _get_server_version_info(self, connection): vers = connection.scalar("select @@version_number") - # i.e. 15500, 15000, 12500 == (15, 5, 0, 0), (15, 0, 0, 0), + # i.e. 15500, 15000, 12500 == (15, 5, 0, 0), (15, 0, 0, 0), # (12, 5, 0, 0) return (vers / 1000, vers % 1000 / 100, vers % 100 / 10, vers % 10) diff --git a/lib/sqlalchemy/engine/__init__.py b/lib/sqlalchemy/engine/__init__.py index 90fed17078..00475cc697 100644 --- a/lib/sqlalchemy/engine/__init__.py +++ b/lib/sqlalchemy/engine/__init__.py @@ -101,8 +101,8 @@ default_strategy = 'plain' def create_engine(*args, **kwargs): """Create a new :class:`.Engine` instance. - The standard calling form is to send the URL as the - first positional argument, usually a string + The standard calling form is to send the URL as the + first positional argument, usually a string that indicates database dialect and connection arguments. Additional keyword arguments may then follow it which establish various options on the resulting :class:`.Engine` @@ -111,14 +111,14 @@ def create_engine(*args, **kwargs): The string form of the URL is ``dialect+driver://user:password@host/dbname[?key=value..]``, where - ``dialect`` is a database name such as ``mysql``, ``oracle``, - ``postgresql``, etc., and ``driver`` the name of a DBAPI, such as - ``psycopg2``, ``pyodbc``, ``cx_oracle``, etc. 
Alternatively, + ``dialect`` is a database name such as ``mysql``, ``oracle``, + ``postgresql``, etc., and ``driver`` the name of a DBAPI, such as + ``psycopg2``, ``pyodbc``, ``cx_oracle``, etc. Alternatively, the URL can be an instance of :class:`~sqlalchemy.engine.url.URL`. - ``**kwargs`` takes a wide variety of options which are routed - towards their appropriate components. Arguments may be - specific to the :class:`.Engine`, the underlying :class:`.Dialect`, as well as the + ``**kwargs`` takes a wide variety of options which are routed + towards their appropriate components. Arguments may be + specific to the :class:`.Engine`, the underlying :class:`.Dialect`, as well as the :class:`.Pool`. Specific dialects also accept keyword arguments that are unique to that dialect. Here, we describe the parameters that are common to most :func:`.create_engine()` usage. @@ -136,11 +136,11 @@ def create_engine(*args, **kwargs): :ref:`engines_toplevel` :ref:`connections_toplevel` - + :param assert_unicode: Deprecated. This flag sets an engine-wide default value for - the ``assert_unicode`` flag on the - :class:`.String` type - see that + the ``assert_unicode`` flag on the + :class:`.String` type - see that type for further details. :param connect_args: a dictionary of options which will be @@ -151,16 +151,16 @@ def create_engine(*args, **kwargs): :param convert_unicode=False: if set to True, sets the default behavior of ``convert_unicode`` on the :class:`.String` type to ``True``, regardless - of a setting of ``False`` on an individual + of a setting of ``False`` on an individual :class:`.String` type, thus causing all :class:`.String` -based columns to accommodate Python ``unicode`` objects. This flag - is useful as an engine-wide setting when using a + is useful as an engine-wide setting when using a DBAPI that does not natively support Python ``unicode`` objects and raises an error when one is received (such as pyodbc with FreeTDS). - - See :class:`.String` for further details on + + See :class:`.String` for further details on what this flag indicates. :param creator: a callable which returns a DBAPI connection. @@ -184,43 +184,43 @@ def create_engine(*args, **kwargs): :ref:`dbengine_logging` for information on how to configure logging directly. - :param encoding: Defaults to ``utf-8``. This is the string - encoding used by SQLAlchemy for string encode/decode - operations which occur within SQLAlchemy, **outside of - the DBAPI.** Most modern DBAPIs feature some degree of + :param encoding: Defaults to ``utf-8``. This is the string + encoding used by SQLAlchemy for string encode/decode + operations which occur within SQLAlchemy, **outside of + the DBAPI.** Most modern DBAPIs feature some degree of direct support for Python ``unicode`` objects, what you see in Python 2 as a string of the form - ``u'some string'``. For those scenarios where the + ``u'some string'``. For those scenarios where the DBAPI is detected as not supporting a Python ``unicode`` - object, this encoding is used to determine the + object, this encoding is used to determine the source/destination encoding. It is **not used** for those cases where the DBAPI handles unicode directly. - + To properly configure a system to accommodate Python - ``unicode`` objects, the DBAPI should be + ``unicode`` objects, the DBAPI should be configured to handle unicode to the greatest degree as is appropriate - see the notes on unicode pertaining to the specific - target database in use at :ref:`dialect_toplevel`. 
- - Areas where string encoding may need to be accommodated - outside of the DBAPI include zero or more of: - - * the values passed to bound parameters, corresponding to + target database in use at :ref:`dialect_toplevel`. + + Areas where string encoding may need to be accommodated + outside of the DBAPI include zero or more of: + + * the values passed to bound parameters, corresponding to the :class:`.Unicode` type or the :class:`.String` type when ``convert_unicode`` is ``True``; - * the values returned in result set columns corresponding - to the :class:`.Unicode` type or the :class:`.String` + * the values returned in result set columns corresponding + to the :class:`.Unicode` type or the :class:`.String` type when ``convert_unicode`` is ``True``; - * the string SQL statement passed to the DBAPI's - ``cursor.execute()`` method; - * the string names of the keys in the bound parameter - dictionary passed to the DBAPI's ``cursor.execute()`` + * the string SQL statement passed to the DBAPI's + ``cursor.execute()`` method; + * the string names of the keys in the bound parameter + dictionary passed to the DBAPI's ``cursor.execute()`` as well as ``cursor.setinputsizes()`` methods; - * the string column names retrieved from the DBAPI's + * the string column names retrieved from the DBAPI's ``cursor.description`` attribute. - + When using Python 3, the DBAPI is required to support *all* of the above values as Python ``unicode`` objects, which in Python 3 are just known as ``str``. In Python 2, @@ -236,9 +236,9 @@ def create_engine(*args, **kwargs): :param implicit_returning=True: When ``True``, a RETURNING- compatible construct, if available, will be used to fetch newly generated primary key values when a single row - INSERT statement is emitted with no existing returning() - clause. This applies to those backends which support RETURNING - or a compatible construct, including Postgresql, Firebird, Oracle, + INSERT statement is emitted with no existing returning() + clause. This applies to those backends which support RETURNING + or a compatible construct, including Postgresql, Firebird, Oracle, Microsoft SQL Server. Set this to ``False`` to disable the automatic usage of RETURNING. @@ -248,13 +248,13 @@ def create_engine(*args, **kwargs): "_(counter)". If ``None``, the value of ``dialect.max_identifier_length`` is used instead. - :param listeners: A list of one or more - :class:`~sqlalchemy.interfaces.PoolListener` objects which will + :param listeners: A list of one or more + :class:`~sqlalchemy.interfaces.PoolListener` objects which will receive connection pool events. :param logging_name: String identifier which will be used within the "name" field of logging records generated within the - "sqlalchemy.engine" logger. Defaults to a hexstring of the + "sqlalchemy.engine" logger. Defaults to a hexstring of the object's id. :param max_overflow=10: the number of connections to allow in @@ -286,8 +286,8 @@ def create_engine(*args, **kwargs): of pool to be used. :param pool_logging_name: String identifier which will be used within - the "name" field of logging records generated within the - "sqlalchemy.pool" logger. Defaults to a hexstring of the object's + the "name" field of logging records generated within the + "sqlalchemy.pool" logger. Defaults to a hexstring of the object's id. :param pool_size=5: the number of connections to keep open @@ -307,7 +307,7 @@ def create_engine(*args, **kwargs): server configuration as well). 
:param pool_reset_on_return='rollback': set the "reset on return" - behavior of the pool, which is whether ``rollback()``, + behavior of the pool, which is whether ``rollback()``, ``commit()``, or nothing is called upon connections being returned to the pool. See the docstring for ``reset_on_return`` at :class:`.Pool`. diff --git a/lib/sqlalchemy/engine/ddl.py b/lib/sqlalchemy/engine/ddl.py index d6fdaee2ee..c3b32505eb 100644 --- a/lib/sqlalchemy/engine/ddl.py +++ b/lib/sqlalchemy/engine/ddl.py @@ -28,7 +28,7 @@ class SchemaGenerator(DDLBase): if table.schema: self.dialect.validate_identifier(table.schema) return not self.checkfirst or \ - not self.dialect.has_table(self.connection, + not self.dialect.has_table(self.connection, table.name, schema=table.schema) def _can_create_sequence(self, sequence): @@ -39,8 +39,8 @@ class SchemaGenerator(DDLBase): ( not self.checkfirst or not self.dialect.has_sequence( - self.connection, - sequence.name, + self.connection, + sequence.name, schema=sequence.schema) ) ) @@ -50,9 +50,9 @@ class SchemaGenerator(DDLBase): tables = self.tables else: tables = metadata.tables.values() - collection = [t for t in sql_util.sort_tables(tables) + collection = [t for t in sql_util.sort_tables(tables) if self._can_create_table(t)] - seq_coll = [s for s in metadata._sequences.values() + seq_coll = [s for s in metadata._sequences.values() if s.column is None and self._can_create_sequence(s)] metadata.dispatch.before_create(metadata, self.connection, @@ -95,7 +95,7 @@ class SchemaGenerator(DDLBase): def visit_sequence(self, sequence, create_ok=False): if not create_ok and not self._can_create_sequence(sequence): - return + return self.connection.execute(schema.CreateSequence(sequence)) def visit_index(self, index): @@ -116,9 +116,9 @@ class SchemaDropper(DDLBase): tables = self.tables else: tables = metadata.tables.values() - collection = [t for t in reversed(sql_util.sort_tables(tables)) + collection = [t for t in reversed(sql_util.sort_tables(tables)) if self._can_drop_table(t)] - seq_coll = [s for s in metadata._sequences.values() + seq_coll = [s for s in metadata._sequences.values() if s.column is None and self._can_drop_sequence(s)] metadata.dispatch.before_drop(metadata, self.connection, @@ -141,7 +141,7 @@ class SchemaDropper(DDLBase): self.dialect.validate_identifier(table.name) if table.schema: self.dialect.validate_identifier(table.schema) - return not self.checkfirst or self.dialect.has_table(self.connection, + return not self.checkfirst or self.dialect.has_table(self.connection, table.name, schema=table.schema) def _can_drop_sequence(self, sequence): @@ -150,8 +150,8 @@ class SchemaDropper(DDLBase): not sequence.optional) and (not self.checkfirst or self.dialect.has_sequence( - self.connection, - sequence.name, + self.connection, + sequence.name, schema=sequence.schema)) ) diff --git a/lib/sqlalchemy/engine/default.py b/lib/sqlalchemy/engine/default.py index c93f390dba..bee4565ff7 100644 --- a/lib/sqlalchemy/engine/default.py +++ b/lib/sqlalchemy/engine/default.py @@ -89,7 +89,7 @@ class DefaultDialect(base.Dialect): server_version_info = None - # indicates symbol names are + # indicates symbol names are # UPPERCASEd if they are case insensitive # within the database. 
# if this is True, the methods normalize_name() @@ -186,7 +186,7 @@ class DefaultDialect(base.Dialect): self.returns_unicode_strings = self._check_unicode_returns(connection) self.do_rollback(connection.connection) - + def on_connect(self): """return a callable which sets up a newly created DBAPI connection. @@ -216,7 +216,7 @@ class DefaultDialect(base.Dialect): try: cursor.execute( cast_to( - expression.select( + expression.select( [expression.cast( expression.literal_column( "'test %s returns'" % formatstr), type_) @@ -266,14 +266,14 @@ class DefaultDialect(base.Dialect): """ return { 'constrained_columns': - self.get_primary_keys(conn, table_name, + self.get_primary_keys(conn, table_name, schema=schema, **kw) } def validate_identifier(self, ident): if len(ident) > self.max_identifier_length: raise exc.IdentifierError( - "Identifier '%s' exceeds maximum length of %d characters" % + "Identifier '%s' exceeds maximum length of %d characters" % (ident, self.max_identifier_length) ) @@ -337,8 +337,8 @@ class DefaultDialect(base.Dialect): return False def reset_isolation_level(self, dbapi_conn): - # default_isolation_level is read from the first connection - # after the initial set of 'isolation_level', if any, so is + # default_isolation_level is read from the first connection + # after the initial set of 'isolation_level', if any, so is # the configured default of this dialect. self.set_isolation_level(dbapi_conn, self.default_isolation_level) @@ -356,7 +356,7 @@ class DefaultExecutionContext(base.ExecutionContext): _is_implicit_returning = False _is_explicit_returning = False - # a hook for SQLite's translation of + # a hook for SQLite's translation of # result column names _translate_colname = None @@ -451,8 +451,8 @@ class DefaultExecutionContext(base.ExecutionContext): processors = compiled._bind_processors - # Convert the dictionary of bind parameter values - # into a dict or list to be sent to the DBAPI's + # Convert the dictionary of bind parameter values + # into a dict or list to be sent to the DBAPI's # execute() or executemany() method. parameters = [] if dialect.positional: @@ -515,7 +515,7 @@ class DefaultExecutionContext(base.ExecutionContext): for d in parameters ] or [{}] else: - self.parameters = [dialect.execute_sequence_format(p) + self.parameters = [dialect.execute_sequence_format(p) for p in parameters] self.executemany = len(parameters) > 1 @@ -552,10 +552,10 @@ class DefaultExecutionContext(base.ExecutionContext): @util.memoized_property def should_autocommit(self): - autocommit = self.execution_options.get('autocommit', - not self.compiled and + autocommit = self.execution_options.get('autocommit', + not self.compiled and self.statement and - expression.PARSE_AUTOCOMMIT + expression.PARSE_AUTOCOMMIT or False) if autocommit is expression.PARSE_AUTOCOMMIT: @@ -588,7 +588,7 @@ class DefaultExecutionContext(base.ExecutionContext): if type_ is not None: # apply type post processors to the result proc = type_._cached_result_processor( - self.dialect, + self.dialect, self.cursor.description[0][1] ) if proc: @@ -625,7 +625,7 @@ class DefaultExecutionContext(base.ExecutionContext): and when no explicit id value was bound to the statement. - The function is called once, directly after + The function is called once, directly after post_exec() and before the transaction is committed or ResultProxy is generated. 
If the post_exec() method assigns a value to `self._lastrowid`, the @@ -674,7 +674,7 @@ class DefaultExecutionContext(base.ExecutionContext): self.inserted_primary_key = [ c is autoinc_col and lastrowid or v for c, v in zip( - table.primary_key, + table.primary_key, self.inserted_primary_key) ] @@ -700,7 +700,7 @@ class DefaultExecutionContext(base.ExecutionContext): style of ``setinputsizes()`` on the cursor, using DB-API types from the bind parameter's ``TypeEngine`` objects. - This method only called by those dialects which require it, + This method only called by those dialects which require it, currently cx_oracle. """ @@ -745,7 +745,7 @@ class DefaultExecutionContext(base.ExecutionContext): elif default.is_callable: return default.arg(self) elif default.is_clause_element: - # TODO: expensive branching here should be + # TODO: expensive branching here should be # pulled into _exec_scalar() conn = self.connection c = expression.select([default.arg]).compile(bind=conn) @@ -811,7 +811,7 @@ class DefaultExecutionContext(base.ExecutionContext): if self.isinsert: self.inserted_primary_key = [ - self.compiled_parameters[0].get(c.key, None) + self.compiled_parameters[0].get(c.key, None) for c in self.compiled.\ statement.table.primary_key ] diff --git a/lib/sqlalchemy/engine/reflection.py b/lib/sqlalchemy/engine/reflection.py index 71d97e65f8..76cb5bdaa4 100644 --- a/lib/sqlalchemy/engine/reflection.py +++ b/lib/sqlalchemy/engine/reflection.py @@ -38,8 +38,8 @@ def cache(fn, self, con, *args, **kw): if info_cache is None: return fn(self, con, *args, **kw) key = ( - fn.__name__, - tuple(a for a in args if isinstance(a, basestring)), + fn.__name__, + tuple(a for a in args if isinstance(a, basestring)), tuple((k, v) for k, v in kw.iteritems() if isinstance(v, (basestring, int, float))) ) ret = info_cache.get(key) @@ -72,9 +72,9 @@ class Inspector(object): def __init__(self, bind): """Initialize a new :class:`.Inspector`. - :param bind: a :class:`~sqlalchemy.engine.base.Connectable`, - which is typically an instance of - :class:`~sqlalchemy.engine.base.Engine` or + :param bind: a :class:`~sqlalchemy.engine.base.Connectable`, + which is typically an instance of + :class:`~sqlalchemy.engine.base.Engine` or :class:`~sqlalchemy.engine.base.Connection`. For a dialect-specific instance of :class:`.Inspector`, see @@ -101,9 +101,9 @@ class Inspector(object): def from_engine(cls, bind): """Construct a new dialect-specific Inspector object from the given engine or connection. - :param bind: a :class:`~sqlalchemy.engine.base.Connectable`, - which is typically an instance of - :class:`~sqlalchemy.engine.base.Engine` or + :param bind: a :class:`~sqlalchemy.engine.base.Connectable`, + which is typically an instance of + :class:`~sqlalchemy.engine.base.Engine` or :class:`~sqlalchemy.engine.base.Connection`. This method differs from direct a direct constructor call of :class:`.Inspector` @@ -320,7 +320,7 @@ class Inspector(object): def reflecttable(self, table, include_columns, exclude_columns=()): """Given a Table object, load its internal constructs based on introspection. - This is the underlying method used by most dialects to produce + This is the underlying method used by most dialects to produce table reflection. 
Direct usage is like:: from sqlalchemy import create_engine, MetaData, Table @@ -414,11 +414,11 @@ class Inspector(object): # Primary keys pk_cons = self.get_pk_constraint(table_name, schema, **tblkw) if pk_cons: - pk_cols = [table.c[pk] - for pk in pk_cons['constrained_columns'] + pk_cols = [table.c[pk] + for pk in pk_cons['constrained_columns'] if pk in table.c and pk not in exclude_columns ] + [pk for pk in table.primary_key if pk.key in exclude_columns] - primary_key_constraint = sa_schema.PrimaryKeyConstraint(name=pk_cons.get('name'), + primary_key_constraint = sa_schema.PrimaryKeyConstraint(name=pk_cons.get('name'), *pk_cols ) @@ -452,7 +452,7 @@ class Inspector(object): table.append_constraint( sa_schema.ForeignKeyConstraint(constrained_columns, refspec, conname, link_to_name=True)) - # Indexes + # Indexes indexes = self.get_indexes(table_name, schema) for index_d in indexes: name = index_d['name'] @@ -465,5 +465,5 @@ class Inspector(object): "Omitting %s KEY for (%s), key covers omitted columns." % (flavor, ', '.join(columns))) continue - sa_schema.Index(name, *[table.columns[c] for c in columns], + sa_schema.Index(name, *[table.columns[c] for c in columns], **dict(unique=unique)) diff --git a/lib/sqlalchemy/engine/strategies.py b/lib/sqlalchemy/engine/strategies.py index a781cb451c..1e321603ed 100644 --- a/lib/sqlalchemy/engine/strategies.py +++ b/lib/sqlalchemy/engine/strategies.py @@ -80,7 +80,7 @@ class DefaultEngineStrategy(EngineStrategy): return dialect.connect(*cargs, **cparams) except Exception, e: # Py3K - #raise exc.DBAPIError.instance(None, None, + #raise exc.DBAPIError.instance(None, None, # e, dialect.dbapi.Error, # connection_invalidated= # dialect.is_disconnect(e, None, None) @@ -245,8 +245,8 @@ class MockEngineStrategy(EngineStrategy): from sqlalchemy.engine import ddl ddl.SchemaDropper(self.dialect, self, **kwargs).traverse_single(entity) - def _run_visitor(self, visitorcallable, element, - connection=None, + def _run_visitor(self, visitorcallable, element, + connection=None, **kwargs): kwargs['checkfirst'] = False visitorcallable(self.dialect, self, diff --git a/lib/sqlalchemy/engine/threadlocal.py b/lib/sqlalchemy/engine/threadlocal.py index f0d6803dcf..7def7dd9b9 100644 --- a/lib/sqlalchemy/engine/threadlocal.py +++ b/lib/sqlalchemy/engine/threadlocal.py @@ -7,7 +7,7 @@ """Provides a thread-local transactional wrapper around the root Engine class. The ``threadlocal`` module is invoked when using the ``strategy="threadlocal"`` flag -with :func:`~sqlalchemy.engine.create_engine`. This module is semi-private and is +with :func:`~sqlalchemy.engine.create_engine`. This module is semi-private and is invoked automatically when the threadlocal engine strategy is used. """ diff --git a/lib/sqlalchemy/engine/url.py b/lib/sqlalchemy/engine/url.py index 392ecda116..b3434b6cb3 100644 --- a/lib/sqlalchemy/engine/url.py +++ b/lib/sqlalchemy/engine/url.py @@ -110,7 +110,7 @@ class URL(object): module = self._load_entry_point() if module is None: raise exc.ArgumentError( - "Could not determine dialect for '%s'." % + "Could not determine dialect for '%s'." 
% self.drivername) return module.dialect diff --git a/lib/sqlalchemy/event.py b/lib/sqlalchemy/event.py index dfdda3d441..775cd9dc86 100644 --- a/lib/sqlalchemy/event.py +++ b/lib/sqlalchemy/event.py @@ -25,8 +25,8 @@ def listen(target, identifier, fn, *args, **kw): list(const.columns)[0].name ) event.listen( - UniqueConstraint, - "after_parent_attach", + UniqueConstraint, + "after_parent_attach", unique_constraint_name) """ @@ -90,12 +90,12 @@ class _UnpickleDispatch(object): raise AttributeError("No class with a 'dispatch' member present.") class _Dispatch(object): - """Mirror the event listening definitions of an Events class with + """Mirror the event listening definitions of an Events class with listener collections. - Classes which define a "dispatch" member will return a - non-instantiated :class:`._Dispatch` subclass when the member - is accessed at the class level. When the "dispatch" member is + Classes which define a "dispatch" member will return a + non-instantiated :class:`._Dispatch` subclass when the member + is accessed at the class level. When the "dispatch" member is accessed at the instance level of its owner, an instance of the :class:`._Dispatch` class is returned. @@ -103,7 +103,7 @@ class _Dispatch(object): class defined, by the :func:`._create_dispatcher_class` function. The original :class:`.Events` classes remain untouched. This decouples the construction of :class:`.Events` subclasses from - the implementation used by the event internals, and allows + the implementation used by the event internals, and allows inspecting tools like Sphinx to work in an unsurprising way against the public API. @@ -127,7 +127,7 @@ def _event_descriptors(target): return [getattr(target, k) for k in dir(target) if _is_event_name(k)] class _EventMeta(type): - """Intercept new Event subclasses and create + """Intercept new Event subclasses and create associated _Dispatch classes.""" def __init__(cls, classname, bases, dict_): @@ -135,14 +135,14 @@ class _EventMeta(type): return type.__init__(cls, classname, bases, dict_) def _create_dispatcher_class(cls, classname, bases, dict_): - """Create a :class:`._Dispatch` class corresponding to an + """Create a :class:`._Dispatch` class corresponding to an :class:`.Events` class.""" # there's all kinds of ways to do this, # i.e. make a Dispatch class that shares the '_listen' method # of the Event class, this is the straight monkeypatch. dispatch_base = getattr(cls, 'dispatch', _Dispatch) - cls.dispatch = dispatch_cls = type("%sDispatch" % classname, + cls.dispatch = dispatch_cls = type("%sDispatch" % classname, (dispatch_base, ), {}) dispatch_cls._listen = cls._listen dispatch_cls._clear = cls._clear @@ -236,8 +236,8 @@ class _DispatchDescriptor(object): for cls in target.__mro__[1:]: if cls in self._clslevel: clslevel.extend([ - fn for fn - in self._clslevel[cls] + fn for fn + in self._clslevel[cls] if fn not in clslevel ]) @@ -278,7 +278,7 @@ class _DispatchDescriptor(object): class _EmptyListener(object): """Serves as a class-level interface to the events - served by a _DispatchDescriptor, when there are no + served by a _DispatchDescriptor, when there are no instance-level events present. Is replaced by _ListenerCollection when instance-level @@ -298,7 +298,7 @@ class _EmptyListener(object): """Return an event collection which can be modified. For _EmptyListener at the instance level of - a dispatcher, this generates a new + a dispatcher, this generates a new _ListenerCollection, applies it to the instance, and returns it. 
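As a rough sketch of how these listener collections come into play (the ``Pool`` target and ``checkout`` identifier are those documented by :class:`.PoolEvents`; the handler name is illustrative)::

    from sqlalchemy import create_engine, event
    from sqlalchemy.pool import Pool

    def my_on_checkout(dbapi_conn, connection_rec, connection_proxy):
        "handle an on checkout event"

    # class-level registration: stored on the _DispatchDescriptor and
    # served to all Pool instances through _EmptyListener
    event.listen(Pool, 'checkout', my_on_checkout)

    # instance-level registration: for_modify() swaps the _EmptyListener
    # on this pool's dispatch for a mutable _ListenerCollection
    engine = create_engine('sqlite://')
    event.listen(engine.pool, 'checkout', my_on_checkout)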
@@ -379,7 +379,7 @@ class _ListenerCollection(object): # I'm not entirely thrilled about the overhead here, # but this allows class-level listeners to be added # at any point. - # + # # In the absense of instance-level listeners, # we stay with the _EmptyListener object when called # at the instance level. @@ -403,8 +403,8 @@ class _ListenerCollection(object): existing_listeners = self.listeners existing_listener_set = set(existing_listeners) self.propagate.update(other.propagate) - existing_listeners.extend([l for l - in other.listeners + existing_listeners.extend([l for l + in other.listeners if l not in existing_listener_set and not only_propagate or l in self.propagate ]) @@ -431,7 +431,7 @@ class _ListenerCollection(object): self.propagate.clear() class dispatcher(object): - """Descriptor used by target classes to + """Descriptor used by target classes to deliver the _Dispatch class at the class level and produce new _Dispatch instances for target instances. diff --git a/lib/sqlalchemy/events.py b/lib/sqlalchemy/events.py index 161b651ebc..a12f4ace15 100644 --- a/lib/sqlalchemy/events.py +++ b/lib/sqlalchemy/events.py @@ -17,11 +17,11 @@ class DDLEvents(event.Events): that is, :class:`.SchemaItem` and :class:`.SchemaEvent` subclasses, including :class:`.MetaData`, :class:`.Table`, :class:`.Column`. - + :class:`.MetaData` and :class:`.Table` support events specifically regarding when CREATE and DROP - DDL is emitted to the database. - + DDL is emitted to the database. + Attachment events are also provided to customize behavior whenever a child schema element is associated with a parent, such as, when a :class:`.Column` is associated @@ -37,14 +37,14 @@ class DDLEvents(event.Events): some_table = Table('some_table', m, Column('data', Integer)) def after_create(target, connection, **kw): - connection.execute("ALTER TABLE %s SET name=foo_%s" % + connection.execute("ALTER TABLE %s SET name=foo_%s" % (target.name, target.name)) event.listen(some_table, "after_create", after_create) - DDL events integrate closely with the + DDL events integrate closely with the :class:`.DDL` class and the :class:`.DDLElement` hierarchy - of DDL clause constructs, which are themselves appropriate + of DDL clause constructs, which are themselves appropriate as listener callables:: from sqlalchemy import DDL @@ -81,7 +81,7 @@ class DDLEvents(event.Events): to the event. The contents of this dictionary may vary across releases, and include the list of tables being generated for a metadata-level - event, the checkfirst flag, and other + event, the checkfirst flag, and other elements used by internal events. """ @@ -97,7 +97,7 @@ class DDLEvents(event.Events): to the event. The contents of this dictionary may vary across releases, and include the list of tables being generated for a metadata-level - event, the checkfirst flag, and other + event, the checkfirst flag, and other elements used by internal events. """ @@ -113,7 +113,7 @@ class DDLEvents(event.Events): to the event. The contents of this dictionary may vary across releases, and include the list of tables being generated for a metadata-level - event, the checkfirst flag, and other + event, the checkfirst flag, and other elements used by internal events. """ @@ -129,52 +129,52 @@ class DDLEvents(event.Events): to the event. 
The contents of this dictionary may vary across releases, and include the list of tables being generated for a metadata-level - event, the checkfirst flag, and other + event, the checkfirst flag, and other elements used by internal events. """ def before_parent_attach(self, target, parent): - """Called before a :class:`.SchemaItem` is associated with + """Called before a :class:`.SchemaItem` is associated with a parent :class:`.SchemaItem`. - + :param target: the target object :param parent: the parent to which the target is being attached. - + :func:`.event.listen` also accepts a modifier for this event: - + :param propagate=False: When True, the listener function will be established for any copies made of the target object, i.e. those copies that are generated when :meth:`.Table.tometadata` is used. - + """ def after_parent_attach(self, target, parent): - """Called after a :class:`.SchemaItem` is associated with + """Called after a :class:`.SchemaItem` is associated with a parent :class:`.SchemaItem`. :param target: the target object :param parent: the parent to which the target is being attached. - + :func:`.event.listen` also accepts a modifier for this event: - + :param propagate=False: When True, the listener function will be established for any copies made of the target object, i.e. those copies that are generated when :meth:`.Table.tometadata` is used. - + """ def column_reflect(self, table, column_info): """Called for each unit of 'column info' retrieved when - a :class:`.Table` is being reflected. - + a :class:`.Table` is being reflected. + The dictionary of column information as returned by the dialect is passed, and can be modified. The dictionary - is that returned in each element of the list returned - by :meth:`.reflection.Inspector.get_columns`. - + is that returned in each element of the list returned + by :meth:`.reflection.Inspector.get_columns`. + The event is called before any action is taken against this dictionary, and the contents can be modified. The :class:`.Column` specific arguments ``info``, ``key``, @@ -182,45 +182,45 @@ class DDLEvents(event.Events): will be passed to the constructor of :class:`.Column`. Note that this event is only meaningful if either - associated with the :class:`.Table` class across the + associated with the :class:`.Table` class across the board, e.g.:: - + from sqlalchemy.schema import Table from sqlalchemy import event def listen_for_reflect(table, column_info): "receive a column_reflect event" # ... - + event.listen( - Table, - 'column_reflect', + Table, + 'column_reflect', listen_for_reflect) - + ...or with a specific :class:`.Table` instance using the ``listeners`` argument:: - + def listen_for_reflect(table, column_info): "receive a column_reflect event" # ... - + t = Table( - 'sometable', + 'sometable', autoload=True, listeners=[ ('column_reflect', listen_for_reflect) ]) - + This because the reflection process initiated by ``autoload=True`` completes within the scope of the constructor for :class:`.Table`. - + """ class SchemaEventTarget(object): """Base class for elements that are the targets of :class:`.DDLEvents` events. - + This includes :class:`.SchemaItem` as well as :class:`.SchemaType`. 
- + """ dispatch = event.dispatcher(DDLEvents) @@ -230,9 +230,9 @@ class SchemaEventTarget(object): raise NotImplementedError() def _set_parent_with_dispatch(self, parent): - self.dispatch.before_parent_attach(self, parent) - self._set_parent(parent) - self.dispatch.after_parent_attach(self, parent) + self.dispatch.before_parent_attach(self, parent) + self._set_parent(parent) + self.dispatch.after_parent_attach(self, parent) class PoolEvents(event.Events): """Available events for :class:`.Pool`. @@ -350,10 +350,10 @@ class ConnectionEvents(event.Events): Some events allow modifiers to the listen() function. - :param retval=False: Applies to the :meth:`.before_execute` and + :param retval=False: Applies to the :meth:`.before_execute` and :meth:`.before_cursor_execute` events only. When True, the user-defined event function must have a return value, which - is a tuple of parameters that replace the given statement + is a tuple of parameters that replace the given statement and parameters. See those methods for a description of specific return arguments. @@ -372,9 +372,9 @@ class ConnectionEvents(event.Events): fn = wrap elif identifier == 'before_cursor_execute': orig_fn = fn - def wrap(conn, cursor, statement, + def wrap(conn, cursor, statement, parameters, context, executemany): - orig_fn(conn, cursor, statement, + orig_fn(conn, cursor, statement, parameters, context, executemany) return statement, parameters fn = wrap @@ -393,40 +393,40 @@ class ConnectionEvents(event.Events): def after_execute(self, conn, clauseelement, multiparams, params, result): """Intercept high level execute() events.""" - def before_cursor_execute(self, conn, cursor, statement, + def before_cursor_execute(self, conn, cursor, statement, parameters, context, executemany): """Intercept low-level cursor execute() events.""" - def after_cursor_execute(self, conn, cursor, statement, + def after_cursor_execute(self, conn, cursor, statement, parameters, context, executemany): """Intercept low-level cursor execute() events.""" - def dbapi_error(self, conn, cursor, statement, parameters, + def dbapi_error(self, conn, cursor, statement, parameters, context, exception): """Intercept a raw DBAPI error. - - This event is called with the DBAPI exception instance - received from the DBAPI itself, *before* SQLAlchemy wraps the + + This event is called with the DBAPI exception instance + received from the DBAPI itself, *before* SQLAlchemy wraps the exception with it's own exception wrappers, and before any other operations are performed on the DBAPI cursor; the existing transaction remains in effect as well as any state on the cursor. - + The use case here is to inject low-level exception handling into an :class:`.Engine`, typically for logging and debugging purposes. In general, user code should **not** modify any state or throw any exceptions here as this will interfere with SQLAlchemy's cleanup and error handling routines. - + Subsequent to this hook, SQLAlchemy may attempt any number of operations on the connection/cursor, including - closing the cursor, rolling back of the transaction in the + closing the cursor, rolling back of the transaction in the case of connectionless execution, and disposing of the entire connection pool if a "disconnect" was detected. The exception is then wrapped in a SQLAlchemy DBAPI exception wrapper and re-thrown. - + .. 
versionadded:: 0.7.7 """ diff --git a/lib/sqlalchemy/exc.py b/lib/sqlalchemy/exc.py index 91ffc2811b..9a7dcb7de8 100644 --- a/lib/sqlalchemy/exc.py +++ b/lib/sqlalchemy/exc.py @@ -28,21 +28,21 @@ class ArgumentError(SQLAlchemyError): class CircularDependencyError(SQLAlchemyError): """Raised by topological sorts when a circular dependency is detected. - + There are two scenarios where this error occurs: - + * In a Session flush operation, if two objects are mutually dependent - on each other, they can not be inserted or deleted via INSERT or + on each other, they can not be inserted or deleted via INSERT or DELETE statements alone; an UPDATE will be needed to post-associate or pre-deassociate one of the foreign key constrained values. - The ``post_update`` flag described at :ref:`post_update` can resolve + The ``post_update`` flag described at :ref:`post_update` can resolve this cycle. * In a :meth:`.MetaData.create_all`, :meth:`.MetaData.drop_all`, :attr:`.MetaData.sorted_tables` operation, two :class:`.ForeignKey` or :class:`.ForeignKeyConstraint` objects mutually refer to each other. Apply the ``use_alter=True`` flag to one or both, see :ref:`use_alter`. - + """ def __init__(self, message, cycles, edges, msg=None): if msg is None: @@ -54,7 +54,7 @@ class CircularDependencyError(SQLAlchemyError): self.edges = edges def __reduce__(self): - return self.__class__, (None, self.cycles, + return self.__class__, (None, self.cycles, self.edges, self.args[0]) class CompileError(SQLAlchemyError): @@ -70,9 +70,9 @@ class DisconnectionError(SQLAlchemyError): """A disconnect is detected on a raw DB-API connection. This error is raised and consumed internally by a connection pool. It can - be raised by the :meth:`.PoolEvents.checkout` event + be raised by the :meth:`.PoolEvents.checkout` event so that the host pool forces a retry; the exception will be caught - three times in a row before the pool gives up and raises + three times in a row before the pool gives up and raises :class:`~sqlalchemy.exc.InvalidRequestError` regarding the connection attempt. """ @@ -121,7 +121,7 @@ class NoReferencedColumnError(NoReferenceError): self.column_name = cname def __reduce__(self): - return self.__class__, (self.args[0], self.table_name, + return self.__class__, (self.args[0], self.table_name, self.column_name) class NoSuchTableError(InvalidRequestError): @@ -136,20 +136,20 @@ class DontWrapMixin(object): """A mixin class which, when applied to a user-defined Exception class, will not be wrapped inside of :class:`.StatementError` if the error is emitted within the process of executing a statement. - + E.g.:: from sqlalchemy.exc import DontWrapMixin - + class MyCustomException(Exception, DontWrapMixin): pass - + class MySpecialType(TypeDecorator): impl = String - + def process_bind_param(self, value, dialect): if value == 'invalid': raise MyCustomException("invalid!") - + """ import sys if sys.version_info < (2, 5): @@ -161,15 +161,15 @@ UnmappedColumnError = None class StatementError(SQLAlchemyError): """An error occurred during execution of a SQL statement. - + :class:`StatementError` wraps the exception raised during execution, and features :attr:`.statement` and :attr:`.params` attributes which supply context regarding the specifics of the statement which had an issue. - The wrapped exception object is available in + The wrapped exception object is available in the :attr:`.orig` attribute. 
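A minimal sketch of consuming these attributes (the failing statement is illustrative)::

    from sqlalchemy import create_engine
    from sqlalchemy.exc import StatementError

    engine = create_engine('sqlite://')
    try:
        engine.execute("SELECT * FROM no_such_table")
    except StatementError, err:
        print err.statement   # the SQL string that failed
        print err.params      # the bound parameters, if any
        print err.orig        # the wrapped DB-API exception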
- + """ statement = None @@ -188,7 +188,7 @@ class StatementError(SQLAlchemyError): self.orig = orig def __reduce__(self): - return self.__class__, (self.args[0], self.statement, + return self.__class__, (self.args[0], self.statement, self.params, self.orig) def __str__(self): @@ -211,7 +211,7 @@ class DBAPIError(StatementError): :class:`DBAPIError` features :attr:`~.StatementError.statement` and :attr:`~.StatementError.params` attributes which supply context regarding - the specifics of the statement which had an issue, for the + the specifics of the statement which had an issue, for the typical case when the error was raised within the context of emitting a SQL statement. @@ -221,8 +221,8 @@ class DBAPIError(StatementError): """ @classmethod - def instance(cls, statement, params, - orig, + def instance(cls, statement, params, + orig, dbapi_base_err, connection_invalidated=False): # Don't ever wrap these, just return them directly as if @@ -236,7 +236,7 @@ class DBAPIError(StatementError): if not isinstance(orig, dbapi_base_err) and statement: return StatementError( "%s (original cause: %s)" % ( - str(orig), + str(orig), traceback.format_exception_only(orig.__class__, orig)[-1].strip() ), statement, params, orig) @@ -247,7 +247,7 @@ class DBAPIError(StatementError): return cls(statement, params, orig, connection_invalidated) def __reduce__(self): - return self.__class__, (self.statement, self.params, + return self.__class__, (self.statement, self.params, self.orig, self.connection_invalidated) def __init__(self, statement, params, orig, connection_invalidated=False): @@ -258,7 +258,7 @@ class DBAPIError(StatementError): except Exception, e: text = 'Error in str() of DB-API-generated exception: ' + str(e) StatementError.__init__( - self, + self, '(%s) %s' % (orig.__class__.__name__, text), statement, params, diff --git a/lib/sqlalchemy/ext/associationproxy.py b/lib/sqlalchemy/ext/associationproxy.py index 7542f892ec..98ba5936cc 100644 --- a/lib/sqlalchemy/ext/associationproxy.py +++ b/lib/sqlalchemy/ext/associationproxy.py @@ -24,17 +24,17 @@ from sqlalchemy.sql import not_ def association_proxy(target_collection, attr, **kw): """Return a Python property implementing a view of a target - attribute which references an attribute on members of the + attribute which references an attribute on members of the target. - + The returned value is an instance of :class:`.AssociationProxy`. - + Implements a Python property representing a relationship as a collection of simpler values, or a scalar value. The proxied property will mimic the collection type of the target (list, dict or set), or, in the case of a one to one relationship, a simple scalar value. - :param target_collection: Name of the attribute we'll proxy to. + :param target_collection: Name of the attribute we'll proxy to. This attribute is typically mapped by :func:`~sqlalchemy.orm.relationship` to link to a target collection, but can also be a many-to-one or non-scalar relationship. @@ -80,15 +80,15 @@ class AssociationProxy(object): """A descriptor that presents a read/write view of an object attribute.""" def __init__(self, target_collection, attr, creator=None, - getset_factory=None, proxy_factory=None, + getset_factory=None, proxy_factory=None, proxy_bulk_set=None): """Construct a new :class:`.AssociationProxy`. - + The :func:`.association_proxy` function is provided as the usual entrypoint here, though :class:`.AssociationProxy` can be instantiated and/or subclassed directly. 
- :param target_collection: Name of the collection we'll proxy to, + :param target_collection: Name of the collection we'll proxy to, usually created with :func:`.relationship`. :param attr: Attribute on the collected instances we'll proxy for. For example, @@ -120,7 +120,7 @@ class AssociationProxy(object): collection implementation, you may supply a factory function to produce those collections. Only applicable to non-scalar relationships. - :param proxy_bulk_set: Optional, use with proxy_factory. See + :param proxy_bulk_set: Optional, use with proxy_factory. See the _set() method for details. """ @@ -140,11 +140,11 @@ class AssociationProxy(object): def remote_attr(self): """The 'remote' :class:`.MapperProperty` referenced by this :class:`.AssociationProxy`. - + .. versionadded:: 0.7.3 - + See also: - + :attr:`.AssociationProxy.attr` :attr:`.AssociationProxy.local_attr` @@ -158,9 +158,9 @@ class AssociationProxy(object): :class:`.AssociationProxy`. .. versionadded:: 0.7.3 - + See also: - + :attr:`.AssociationProxy.attr` :attr:`.AssociationProxy.remote_attr` @@ -171,20 +171,20 @@ class AssociationProxy(object): @property def attr(self): """Return a tuple of ``(local_attr, remote_attr)``. - - This attribute is convenient when specifying a join + + This attribute is convenient when specifying a join using :meth:`.Query.join` across two relationships:: - + sess.query(Parent).join(*Parent.proxied.attr) .. versionadded:: 0.7.3 - + See also: - + :attr:`.AssociationProxy.local_attr` :attr:`.AssociationProxy.remote_attr` - + """ return (self.local_attr, self.remote_attr) @@ -195,10 +195,10 @@ class AssociationProxy(object): @util.memoized_property def target_class(self): """The intermediary class handled by this :class:`.AssociationProxy`. - + Intercepted append/set/assignment events will result in the generation of new instances of this class. - + """ return self._get_property().mapper.class_ @@ -333,10 +333,10 @@ class AssociationProxy(object): def any(self, criterion=None, **kwargs): """Produce a proxied 'any' expression using EXISTS. - + This expression will be a composed product using the :meth:`.RelationshipProperty.Comparator.any` - and/or :meth:`.RelationshipProperty.Comparator.has` + and/or :meth:`.RelationshipProperty.Comparator.has` operators of the underlying proxied attributes. """ @@ -360,12 +360,12 @@ class AssociationProxy(object): def has(self, criterion=None, **kwargs): """Produce a proxied 'has' expression using EXISTS. - + This expression will be a composed product using the :meth:`.RelationshipProperty.Comparator.any` - and/or :meth:`.RelationshipProperty.Comparator.has` + and/or :meth:`.RelationshipProperty.Comparator.has` operators of the underlying proxied attributes. - + """ return self._comparator.has( @@ -375,7 +375,7 @@ class AssociationProxy(object): def contains(self, obj): """Produce a proxied 'contains' expression using EXISTS. - + This expression will be a composed product using the :meth:`.RelationshipProperty.Comparator.any` , :meth:`.RelationshipProperty.Comparator.has`, diff --git a/lib/sqlalchemy/ext/compiler.py b/lib/sqlalchemy/ext/compiler.py index 47221fa6a4..0379468cb2 100644 --- a/lib/sqlalchemy/ext/compiler.py +++ b/lib/sqlalchemy/ext/compiler.py @@ -91,9 +91,9 @@ Produces:: "INSERT INTO mytable (SELECT mytable.x, mytable.y, mytable.z FROM mytable WHERE mytable.x > :x_1)" -.. note:: +.. 
note:: - The above ``InsertFromSelect`` construct probably wants to have "autocommit" + The above ``InsertFromSelect`` construct probably wants to have "autocommit" enabled. See :ref:`enabling_compiled_autocommit` for this step. Cross Compiling between SQL and DDL compilers @@ -118,12 +118,12 @@ Enabling Autocommit on a Construct Recall from the section :ref:`autocommit` that the :class:`.Engine`, when asked to execute a construct in the absence of a user-defined transaction, detects if the given -construct represents DML or DDL, that is, a data modification or data definition statement, which +construct represents DML or DDL, that is, a data modification or data definition statement, which requires (or may require, in the case of DDL) that the transaction generated by the DBAPI be committed -(recall that DBAPI always has a transaction going on regardless of what SQLAlchemy does). Checking +(recall that DBAPI always has a transaction going on regardless of what SQLAlchemy does). Checking for this is actually accomplished by checking for the "autocommit" execution option on the construct. When building a construct like -an INSERT derivation, a new DDL type, or perhaps a stored procedure that alters data, the "autocommit" +an INSERT derivation, a new DDL type, or perhaps a stored procedure that alters data, the "autocommit" option needs to be set in order for the statement to function with "connectionless" execution (as described in :ref:`dbengine_implicit`). @@ -146,13 +146,13 @@ can be used, which already is a subclass of :class:`.Executable`, :class:`.Claus class MyInsertThing(UpdateBase): def __init__(self, ...): ... - - - + + + DDL elements that subclass :class:`.DDLElement` already have the "autocommit" flag turned on. - + Changing the default compilation of existing constructs @@ -163,7 +163,7 @@ the compilation of a built in SQL construct, the @compiles decorator is invoked the appropriate class (be sure to use the class, i.e. ``Insert`` or ``Select``, instead of the creation function such as ``insert()`` or ``select()``). Within the new compilation function, to get at the "original" compilation routine, -use the appropriate visit_XXX method - this because compiler.process() will call upon the +use the appropriate visit_XXX method - this because compiler.process() will call upon the overriding routine and cause an endless loop. Such as, to add "prefix" to all insert statements:: from sqlalchemy.sql.expression import Insert @@ -205,7 +205,7 @@ A synopsis is as follows: expression class. Any SQL expression can be derived from this base, and is probably the best choice for longer constructs such as specialized INSERT statements. - + * :class:`~sqlalchemy.sql.expression.ColumnElement` - The root of all "column-like" elements. Anything that you'd place in the "columns" clause of a SELECT statement (as well as order by and group by) can derive from this - @@ -218,7 +218,7 @@ A synopsis is as follows: class timestamp(ColumnElement): type = TIMESTAMP() - + * :class:`~sqlalchemy.sql.expression.FunctionElement` - This is a hybrid of a ``ColumnElement`` and a "from clause" like object, and represents a SQL function or stored procedure type of call. Since most databases support @@ -250,7 +250,7 @@ A synopsis is as follows: * :class:`~sqlalchemy.sql.expression.Executable` - This is a mixin which should be used with any expression class that represents a "standalone" SQL statement that - can be passed directly to an ``execute()`` method. 
It is already implicit + can be passed directly to an ``execute()`` method. It is already implicit within ``DDLElement`` and ``FunctionElement``. Further Examples @@ -263,15 +263,15 @@ A function that works like "CURRENT_TIMESTAMP" except applies the appropriate co so that the time is in UTC time. Timestamps are best stored in relational databases as UTC, without time zones. UTC so that your database doesn't think time has gone backwards in the hour when daylight savings ends, without timezones because timezones -are like character encodings - they're best applied only at the endpoints of an +are like character encodings - they're best applied only at the endpoints of an application (i.e. convert to UTC upon user input, re-apply desired timezone upon display). For Postgresql and Microsoft SQL Server:: - + from sqlalchemy.sql import expression from sqlalchemy.ext.compiler import compiles from sqlalchemy.types import DateTime - + class utcnow(expression.FunctionElement): type = DateTime() @@ -284,7 +284,7 @@ For Postgresql and Microsoft SQL Server:: return "GETUTCDATE()" Example usage:: - + from sqlalchemy import ( Table, Column, Integer, String, DateTime, MetaData ) @@ -299,8 +299,8 @@ Example usage:: ------------------- The "GREATEST" function is given any number of arguments and returns the one that is -of the highest value - it's equivalent to Python's ``max`` function. A SQL -standard version versus a CASE based version which only accommodates two +of the highest value - it's equivalent to Python's ``max`` function. A SQL +standard version versus a CASE based version which only accommodates two arguments:: from sqlalchemy.sql import expression @@ -332,7 +332,7 @@ Example usage:: Session.query(Account).\\ filter( greatest( - Account.checking_balance, + Account.checking_balance, Account.savings_balance) > 10000 ) @@ -340,10 +340,10 @@ Example usage:: ------------------ Render a "false" constant expression, rendering as "0" on platforms that don't have a "false" constant:: - + from sqlalchemy.sql import expression from sqlalchemy.ext.compiler import compiles - + class sql_false(expression.ColumnElement): pass @@ -358,14 +358,14 @@ Render a "false" constant expression, rendering as "0" on platforms that don't h return "0" Example usage:: - + from sqlalchemy import select, union_all exp = union_all( select([users.c.name, sql_false().label("enrolled")]), select([customers.c.name, customers.c.enrolled]) ) - + """ from sqlalchemy import exc diff --git a/lib/sqlalchemy/ext/declarative.py b/lib/sqlalchemy/ext/declarative.py index 6b804036c3..43bebc3314 100755 --- a/lib/sqlalchemy/ext/declarative.py +++ b/lib/sqlalchemy/ext/declarative.py @@ -51,7 +51,7 @@ automatically named with the name of the attribute to which they are assigned. To name columns explicitly with a name distinct from their mapped attribute, -just give the column a name. Below, column "some_table_id" is mapped to the +just give the column a name. Below, column "some_table_id" is mapped to the "id" attribute of `SomeClass`, but in SQL will be represented as "some_table_id":: class SomeClass(Base): @@ -68,7 +68,7 @@ added to the underlying :class:`.Table` and Classes which are constructed using declarative can interact freely with classes that are mapped explicitly with :func:`mapper`. 
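As a brief illustration of this interoperability (a sketch only; the
``Address`` class and ``address`` table here are hypothetical), a
declarative class may establish :func:`~sqlalchemy.orm.relationship`
directly to a classically-mapped class::

    from sqlalchemy import Table, Column, Integer, String, ForeignKey
    from sqlalchemy.orm import mapper, relationship
    from sqlalchemy.ext.declarative import declarative_base

    Base = declarative_base()

    # a classical mapping, sharing the declarative MetaData
    address = Table('address', Base.metadata,
        Column('id', Integer, primary_key=True),
        Column('user_id', Integer, ForeignKey('user.id')),
        Column('email', String(50)))

    class Address(object):
        pass

    mapper(Address, address)

    class User(Base):
        __tablename__ = 'user'
        id = Column(Integer, primary_key=True)
        addresses = relationship(Address)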
-It is recommended, though not required, that all tables +It is recommended, though not required, that all tables share the same underlying :class:`~sqlalchemy.schema.MetaData` object, so that string-configured :class:`~sqlalchemy.schema.ForeignKey` references can be resolved without issue. @@ -98,9 +98,9 @@ of construction, the ``bind`` argument is accepted:: :func:`declarative_base` can also receive a pre-existing :class:`.MetaData` object, which allows a -declarative setup to be associated with an already +declarative setup to be associated with an already existing traditional collection of :class:`~sqlalchemy.schema.Table` -objects:: +objects:: mymetadata = MetaData() Base = declarative_base(metadata=mymetadata) @@ -113,7 +113,7 @@ feature that the class specified to :func:`~sqlalchemy.orm.relationship` may be a string name. The "class registry" associated with ``Base`` is used at mapper compilation time to resolve the name into the actual class object, which is expected to have been defined once the mapper -configuration is used:: +configuration is used:: class User(Base): __tablename__ = 'users' @@ -131,7 +131,7 @@ configuration is used:: Column constructs, since they are just that, are immediately usable, as below where we define a primary join condition on the ``Address`` -class using them:: +class using them:: class Address(Base): __tablename__ = 'addresses' @@ -148,15 +148,15 @@ evaluated as Python expressions. The full namespace available within this evaluation includes all classes mapped for this declarative base, as well as the contents of the ``sqlalchemy`` package, including expression functions like :func:`~sqlalchemy.sql.expression.desc` and -:attr:`~sqlalchemy.sql.expression.func`:: +:attr:`~sqlalchemy.sql.expression.func`:: class User(Base): # .... addresses = relationship("Address", - order_by="desc(Address.email)", + order_by="desc(Address.email)", primaryjoin="Address.user_id==User.id") -As an alternative to string-based attributes, attributes may also be +As an alternative to string-based attributes, attributes may also be defined after all classes have been created. Just add them to the target class after the fact:: @@ -169,8 +169,8 @@ Configuring Many-to-Many Relationships Many-to-many relationships are also declared in the same way with declarative as with traditional mappings. The ``secondary`` argument to -:func:`.relationship` is as usual passed a -:class:`.Table` object, which is typically declared in the +:func:`.relationship` is as usual passed a +:class:`.Table` object, which is typically declared in the traditional way. The :class:`.Table` usually shares the :class:`.MetaData` object used by the declarative base:: @@ -185,7 +185,7 @@ the :class:`.MetaData` object used by the declarative base:: id = Column(Integer, primary_key=True) keywords = relationship("Keyword", secondary=keywords) -Like other :func:`.relationship` arguments, a string is accepted as well, +Like other :func:`.relationship` arguments, a string is accepted as well, passing the string name of the table as defined in the ``Base.metadata.tables`` collection:: @@ -194,7 +194,7 @@ collection:: id = Column(Integer, primary_key=True) keywords = relationship("Keyword", secondary="keywords") -As with traditional mapping, its generally not a good idea to use +As with traditional mapping, its generally not a good idea to use a :class:`.Table` as the "secondary" argument which is also mapped to a class, unless the :class:`.relationship` is declared with ``viewonly=True``. 
Otherwise, the unit-of-work system may attempt duplicate INSERT and @@ -219,7 +219,7 @@ This attribute accommodates both positional as well as keyword arguments that are normally sent to the :class:`~sqlalchemy.schema.Table` constructor. The attribute can be specified in one of two forms. One is as a -dictionary:: +dictionary:: class MyClass(Base): __tablename__ = 'sometable' @@ -235,7 +235,7 @@ The other, a tuple, where each argument is positional UniqueConstraint('foo'), ) -Keyword arguments can be specified with the above form by +Keyword arguments can be specified with the above form by specifying the last argument as a dictionary:: class MyClass(Base): @@ -253,7 +253,7 @@ As an alternative to ``__tablename__``, a direct :class:`~sqlalchemy.schema.Table` construct may be used. The :class:`~sqlalchemy.schema.Column` objects, which in this case require their names, will be added to the mapping just like a regular mapping -to a table:: +to a table:: class MyClass(Base): __table__ = Table('my_table', Base.metadata, @@ -277,9 +277,9 @@ and pass it to declarative classes:: class Address(Base): __table__ = metadata.tables['address'] -Some configuration schemes may find it more appropriate to use ``__table__``, -such as those which already take advantage of the data-driven nature of -:class:`.Table` to customize and/or automate schema definition. +Some configuration schemes may find it more appropriate to use ``__table__``, +such as those which already take advantage of the data-driven nature of +:class:`.Table` to customize and/or automate schema definition. Note that when the ``__table__`` approach is used, the object is immediately usable as a plain :class:`.Table` within the class declaration body itself, @@ -292,15 +292,15 @@ by using the ``id`` column in the ``primaryjoin`` condition of a :func:`.relatio Column('name', String(50)) ) - widgets = relationship(Widget, + widgets = relationship(Widget, primaryjoin=Widget.myclass_id==__table__.c.id) -Similarly, mapped attributes which refer to ``__table__`` can be placed inline, +Similarly, mapped attributes which refer to ``__table__`` can be placed inline, as below where we assign the ``name`` column to the attribute ``_name``, generating a synonym for ``name``:: from sqlalchemy.ext.declarative import synonym_for - + class MyClass(Base): __table__ = Table('my_table', Base.metadata, Column('id', Integer, primary_key=True), @@ -320,14 +320,14 @@ It's easy to set up a :class:`.Table` that uses ``autoload=True`` in conjunction with a mapped class:: class MyClass(Base): - __table__ = Table('mytable', Base.metadata, + __table__ = Table('mytable', Base.metadata, autoload=True, autoload_with=some_engine) -However, one improvement that can be made here is to not -require the :class:`.Engine` to be available when classes are +However, one improvement that can be made here is to not +require the :class:`.Engine` to be available when classes are being first declared. 
To achieve this, use the example -described at :ref:`examples_declarative_reflection` to build a -declarative base that sets up mappings only after a special +described at :ref:`examples_declarative_reflection` to build a +declarative base that sets up mappings only after a special ``prepare(engine)`` step is called:: Base = declarative_base(cls=DeclarativeReflectedBase) @@ -339,14 +339,14 @@ declarative base that sets up mappings only after a special class Bar(Base): __tablename__ = 'bar' - # illustrate overriding of "bar.foo_id" to have + # illustrate overriding of "bar.foo_id" to have # a foreign key constraint otherwise not # reflected, such as when using MySQL foo_id = Column(Integer, ForeignKey('foo.id')) Base.prepare(e) - + Mapper Configuration ==================== @@ -354,7 +354,7 @@ Declarative makes use of the :func:`~.orm.mapper` function internally when it creates the mapping to the declared table. The options for :func:`~.orm.mapper` are passed directly through via the ``__mapper_args__`` class attribute. As always, arguments which reference locally -mapped columns can reference them directly from within the +mapped columns can reference them directly from within the class declaration:: from datetime import datetime @@ -383,7 +383,7 @@ as declarative will determine this from the class itself. The various Joined Table Inheritance ~~~~~~~~~~~~~~~~~~~~~~~~ -Joined table inheritance is defined as a subclass that defines its own +Joined table inheritance is defined as a subclass that defines its own table:: class Person(Base): @@ -416,13 +416,13 @@ only the ``engineers.id`` column, give it a different attribute name:: .. versionchanged:: 0.7 joined table inheritance favors the subclass column over that of the superclass, such as querying above for ``Engineer.id``. Prior to 0.7 this was the reverse. - + Single Table Inheritance ~~~~~~~~~~~~~~~~~~~~~~~~ Single table inheritance is defined as a subclass that does not have its own table; you just leave out the ``__table__`` and ``__tablename__`` -attributes:: +attributes:: class Person(Base): __tablename__ = 'people' @@ -520,22 +520,22 @@ loader for the mapper after all subclasses have been declared. An abstract base can be declared using the :class:`.AbstractConcreteBase` class:: from sqlalchemy.ext.declarative import AbstractConcreteBase - + class Employee(AbstractConcreteBase, Base): pass To have a concrete ``employee`` table, use :class:`.ConcreteBase` instead:: from sqlalchemy.ext.declarative import ConcreteBase - + class Employee(ConcreteBase, Base): __tablename__ = 'employee' employee_id = Column(Integer, primary_key=True) name = Column(String(50)) __mapper_args__ = { - 'polymorphic_identity':'employee', + 'polymorphic_identity':'employee', 'concrete':True} - + Either ``Employee`` base can be used in the normal fashion:: @@ -545,7 +545,7 @@ Either ``Employee`` base can be used in the normal fashion:: name = Column(String(50)) manager_data = Column(String(40)) __mapper_args__ = { - 'polymorphic_identity':'manager', + 'polymorphic_identity':'manager', 'concrete':True} class Engineer(Employee): @@ -553,7 +553,7 @@ Either ``Employee`` base can be used in the normal fashion:: employee_id = Column(Integer, primary_key=True) name = Column(String(50)) engineer_info = Column(String(40)) - __mapper_args__ = {'polymorphic_identity':'engineer', + __mapper_args__ = {'polymorphic_identity':'engineer', 'concrete':True} @@ -576,7 +576,7 @@ mappings are declared. 
An example of some commonly mixed-in idioms is below:: from sqlalchemy.ext.declarative import declared_attr - + class MyMixin(object): @declared_attr @@ -593,29 +593,29 @@ idioms is below:: Where above, the class ``MyModel`` will contain an "id" column as the primary key, a ``__tablename__`` attribute that derives -from the name of the class itself, as well as ``__table_args__`` +from the name of the class itself, as well as ``__table_args__`` and ``__mapper_args__`` defined by the ``MyMixin`` mixin class. -There's no fixed convention over whether ``MyMixin`` precedes -``Base`` or not. Normal Python method resolution rules apply, and +There's no fixed convention over whether ``MyMixin`` precedes +``Base`` or not. Normal Python method resolution rules apply, and the above example would work just as well with:: class MyModel(Base, MyMixin): name = Column(String(1000)) -This works because ``Base`` here doesn't define any of the -variables that ``MyMixin`` defines, i.e. ``__tablename__``, -``__table_args__``, ``id``, etc. If the ``Base`` did define -an attribute of the same name, the class placed first in the -inherits list would determine which attribute is used on the +This works because ``Base`` here doesn't define any of the +variables that ``MyMixin`` defines, i.e. ``__tablename__``, +``__table_args__``, ``id``, etc. If the ``Base`` did define +an attribute of the same name, the class placed first in the +inherits list would determine which attribute is used on the newly defined class. Augmenting the Base ~~~~~~~~~~~~~~~~~~~ -In addition to using a pure mixin, most of the techniques in this +In addition to using a pure mixin, most of the techniques in this section can also be applied to the base class itself, for patterns that -should apply to all classes derived from a particular base. This +should apply to all classes derived from a particular base. This is achieved using the ``cls`` argument of the :func:`.declarative_base` function:: from sqlalchemy.ext.declarative import declared_attr @@ -624,26 +624,26 @@ is achieved using the ``cls`` argument of the :func:`.declarative_base` function @declared_attr def __tablename__(cls): return cls.__name__.lower() - + __table_args__ = {'mysql_engine': 'InnoDB'} id = Column(Integer, primary_key=True) from sqlalchemy.ext.declarative import declarative_base - + Base = declarative_base(cls=Base) class MyModel(Base): name = Column(String(1000)) -Where above, ``MyModel`` and all other classes that derive from ``Base`` will have -a table name derived from the class name, an ``id`` primary key column, as well as +Where above, ``MyModel`` and all other classes that derive from ``Base`` will have +a table name derived from the class name, an ``id`` primary key column, as well as the "InnoDB" engine for MySQL. Mixing in Columns ~~~~~~~~~~~~~~~~~ -The most basic way to specify a column on a mixin is by simple +The most basic way to specify a column on a mixin is by simple declaration:: class TimestampMixin(object): @@ -656,26 +656,26 @@ declaration:: name = Column(String(1000)) Where above, all declarative classes that include ``TimestampMixin`` -will also have a column ``created_at`` that applies a timestamp to +will also have a column ``created_at`` that applies a timestamp to all row insertions. -Those familiar with the SQLAlchemy expression language know that +Those familiar with the SQLAlchemy expression language know that the object identity of clause elements defines their role in a schema. 
-Two ``Table`` objects ``a`` and ``b`` may both have a column called -``id``, but the way these are differentiated is that ``a.c.id`` +Two ``Table`` objects ``a`` and ``b`` may both have a column called +``id``, but the way these are differentiated is that ``a.c.id`` and ``b.c.id`` are two distinct Python objects, referencing their parent tables ``a`` and ``b`` respectively. In the case of the mixin column, it seems that only one -:class:`.Column` object is explicitly created, yet the ultimate +:class:`.Column` object is explicitly created, yet the ultimate ``created_at`` column above must exist as a distinct Python object for each separate destination class. To accomplish this, the declarative -extension creates a **copy** of each :class:`.Column` object encountered on +extension creates a **copy** of each :class:`.Column` object encountered on a class that is detected as a mixin. This copy mechanism is limited to simple columns that have no foreign keys, as a :class:`.ForeignKey` itself contains references to columns -which can't be properly recreated at this level. For columns that +which can't be properly recreated at this level. For columns that have foreign keys, as well as for the variety of mapper-level constructs that require destination-explicit context, the :func:`~.declared_attr` decorator is provided so that @@ -692,7 +692,7 @@ patterns common to many classes can be defined as callables:: __tablename__ = 'user' id = Column(Integer, primary_key=True) -Where above, the ``address_id`` class-level callable is executed at the +Where above, the ``address_id`` class-level callable is executed at the point at which the ``User`` class is constructed, and the declarative extension can use the resulting :class:`.Column` object as returned by the method without the need to copy it. @@ -701,8 +701,8 @@ the method without the need to copy it. Rename 0.6.5 ``sqlalchemy.util.classproperty`` into :func:`~.declared_attr`. Columns generated by :func:`~.declared_attr` can also be -referenced by ``__mapper_args__`` to a limited degree, currently -by ``polymorphic_on`` and ``version_id_col``, by specifying the +referenced by ``__mapper_args__`` to a limited degree, currently +by ``polymorphic_on`` and ``version_id_col``, by specifying the classdecorator itself into the dictionary - the declarative extension will resolve them at class construction time:: @@ -750,7 +750,7 @@ reference a common target class via many-to-one:: id = Column(Integer, primary_key=True) :func:`~sqlalchemy.orm.relationship` definitions which require explicit -primaryjoin, order_by etc. expressions should use the string forms +primaryjoin, order_by etc. expressions should use the string forms for these arguments, so that they are evaluated as late as possible. To reference the mixin class in these expressions, use the given ``cls`` to get it's name:: @@ -772,8 +772,8 @@ Mixing in deferred(), column_property(), etc. Like :func:`~sqlalchemy.orm.relationship`, all :class:`~sqlalchemy.orm.interfaces.MapperProperty` subclasses such as :func:`~sqlalchemy.orm.deferred`, :func:`~sqlalchemy.orm.column_property`, -etc. ultimately involve references to columns, and therefore, when -used with declarative mixins, have the :func:`.declared_attr` +etc. 
ultimately involve references to columns, and therefore, when +used with declarative mixins, have the :func:`.declared_attr` requirement so that no reliance on copying is needed:: class SomethingMixin(object): @@ -790,7 +790,7 @@ Controlling table inheritance with mixins ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ The ``__tablename__`` attribute in conjunction with the hierarchy of -classes involved in a declarative mixin scenario controls what type of +classes involved in a declarative mixin scenario controls what type of table inheritance, if any, is configured by the declarative extension. @@ -825,7 +825,7 @@ return a ``__tablename__`` in the event that no table is already mapped in the inheritance hierarchy. To help with this, a :func:`~sqlalchemy.ext.declarative.has_inherited_table` helper function is provided that returns ``True`` if a parent class already -has a mapped table. +has a mapped table. As an example, here's a mixin that will only allow single table inheritance:: @@ -915,7 +915,7 @@ from multiple collections:: Creating Indexes with Mixins ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -To define a named, potentially multicolumn :class:`.Index` that applies to all +To define a named, potentially multicolumn :class:`.Index` that applies to all tables derived from a mixin, use the "inline" form of :class:`.Index` and establish it as part of ``__table_args__``:: @@ -937,7 +937,7 @@ Special Directives ``__declare_last__()`` ~~~~~~~~~~~~~~~~~~~~~~ -The ``__declare_last__()`` hook allows definition of +The ``__declare_last__()`` hook allows definition of a class level function that is automatically called by the :meth:`.MapperEvents.after_configured` event, which occurs after mappings are assumed to be completed and the 'configure' step has finished:: @@ -962,17 +962,17 @@ just from the special class:: class SomeAbstractBase(Base): __abstract__ = True - + def some_helpful_method(self): "" - + @declared_attr def __mapper_args__(cls): return {"helpful mapper arguments":True} class MyMappedClass(SomeAbstractBase): "" - + One possible use of ``__abstract__`` is to use a distinct :class:`.MetaData` for different bases:: @@ -986,8 +986,8 @@ bases:: __abstract__ = True metadata = MetaData() -Above, classes which inherit from ``DefaultBase`` will use one :class:`.MetaData` as the -registry of tables, and those which inherit from ``OtherBase`` will use a different one. +Above, classes which inherit from ``DefaultBase`` will use one :class:`.MetaData` as the +registry of tables, and those which inherit from ``OtherBase`` will use a different one. The tables themselves can then be created perhaps within distinct databases:: DefaultBase.metadata.create_all(some_engine) @@ -1019,7 +1019,7 @@ setup using :func:`~sqlalchemy.orm.scoped_session` might look like:: Base = declarative_base() Mapped instances then make usage of -:class:`~sqlalchemy.orm.session.Session` in the usual way. +:class:`~sqlalchemy.orm.session.Session` in the usual way. """ @@ -1041,7 +1041,7 @@ __all__ = 'declarative_base', 'synonym_for', \ def instrument_declarative(cls, registry, metadata): """Given a class, configure the class declaratively, using the given registry, which can be any dictionary, and - MetaData object. + MetaData object. 
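A minimal sketch of direct usage, with a hypothetical plain class and
a fresh registry dictionary::

    from sqlalchemy import Column, Integer, MetaData
    from sqlalchemy.ext.declarative import instrument_declarative

    class MyClass(object):
        __tablename__ = 'my_table'
        id = Column(Integer, primary_key=True)

    # configures MyClass declaratively against a new MetaData
    instrument_declarative(MyClass, {}, MetaData())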
""" if '_decl_class_registry' in cls.__dict__: @@ -1084,7 +1084,7 @@ def _as_declarative(cls, classname, dict_): def go(): cls.__declare_last__() if '__abstract__' in base.__dict__: - if (base is cls or + if (base is cls or (base in cls.__bases__ and not _is_declarative_inherits) ): return @@ -1096,19 +1096,19 @@ def _as_declarative(cls, classname, dict_): for name,obj in vars(base).items(): if name == '__mapper_args__': if not mapper_args and ( - not class_mapped or + not class_mapped or isinstance(obj, declarative_props) ): mapper_args = cls.__mapper_args__ elif name == '__tablename__': if not tablename and ( - not class_mapped or + not class_mapped or isinstance(obj, declarative_props) ): tablename = cls.__tablename__ elif name == '__table_args__': if not table_args and ( - not class_mapped or + not class_mapped or isinstance(obj, declarative_props) ): table_args = cls.__table_args__ @@ -1123,7 +1123,7 @@ def _as_declarative(cls, classname, dict_): util.warn("Regular (i.e. not __special__) " "attribute '%s.%s' uses @declared_attr, " "but owning class %s is mapped - " - "not applying to subclass %s." + "not applying to subclass %s." % (base.__name__, name, base, cls)) continue elif base is not cls: @@ -1135,7 +1135,7 @@ def _as_declarative(cls, classname, dict_): "must be declared as @declared_attr callables " "on declarative mixin classes. ") if name not in dict_ and not ( - '__table__' in dict_ and + '__table__' in dict_ and (obj.name or name) in dict_['__table__'].c ) and name not in potential_columns: potential_columns[name] = \ @@ -1164,7 +1164,7 @@ def _as_declarative(cls, classname, dict_): if inherited_table_args and not tablename: table_args = None - # make sure that column copies are used rather + # make sure that column copies are used rather # than the original columns from any mixins for k in ('version_id_col', 'polymorphic_on',): if k in mapper_args: @@ -1217,7 +1217,7 @@ def _as_declarative(cls, classname, dict_): elif isinstance(c, Column): _undefer_column_name(key, c) cols.add(c) - # if the column is the same name as the key, + # if the column is the same name as the key, # remove it from the explicit properties dict. # the normal rules for assigning column-based properties # will take over, including precedence of columns @@ -1304,7 +1304,7 @@ def _as_declarative(cls, classname, dict_): if c.name in inherited_table.c: raise exc.ArgumentError( "Column '%s' on class %s conflicts with " - "existing column '%s'" % + "existing column '%s'" % (c, cls, inherited_table.c[c.name]) ) inherited_table.append_column(c) @@ -1323,7 +1323,7 @@ def _as_declarative(cls, classname, dict_): if c not in inherited_mapper._columntoproperty]) exclude_properties.difference_update([c.key for c in cols]) - # look through columns in the current mapper that + # look through columns in the current mapper that # are keyed to a propname different than the colname # (if names were the same, we'd have popped it out above, # in which case the mapper makes this combination). 
@@ -1340,9 +1340,9 @@ def _as_declarative(cls, classname, dict_): our_stuff[k] = [col] + p.columns - cls.__mapper__ = mapper_cls(cls, - table, - properties=our_stuff, + cls.__mapper__ = mapper_cls(cls, + table, + properties=our_stuff, **mapper_args) class DeclarativeMeta(type): @@ -1367,7 +1367,7 @@ class DeclarativeMeta(type): cls.__mapper__.add_property(key, value) elif isinstance(value, MapperProperty): cls.__mapper__.add_property( - key, + key, _deferred_relationship(cls, value) ) else: @@ -1434,7 +1434,7 @@ def _deferred_relationship(cls, prop): "When initializing mapper %s, expression %r failed to " "locate a name (%r). If this is a class name, consider " "adding this relationship() to the %r class after " - "both dependent classes have been defined." % + "both dependent classes have been defined." % (prop.parent, arg, n.args[0], cls) ) return return_cls @@ -1505,13 +1505,13 @@ class declared_attr(property): a mapped property or special declarative member name. .. versionchanged:: 0.6.{2,3,4} - ``@declared_attr`` is available as + ``@declared_attr`` is available as ``sqlalchemy.util.classproperty`` for SQLAlchemy versions 0.6.2, 0.6.3, 0.6.4. @declared_attr turns the attribute into a scalar-like property that can be invoked from the uninstantiated class. - Declarative treats attributes specifically marked with + Declarative treats attributes specifically marked with @declared_attr as returning a construct that is specific to mapping or declarative table configuration. The name of the attribute is that of what the non-dynamic version @@ -1543,7 +1543,7 @@ class declared_attr(property): def __mapper_args__(cls): if cls.__name__ == 'Employee': return { - "polymorphic_on":cls.type, + "polymorphic_on":cls.type, "polymorphic_identity":"Employee" } else: @@ -1591,8 +1591,8 @@ def declarative_base(bind=None, metadata=None, mapper=None, cls=object, :param bind: An optional :class:`~sqlalchemy.engine.base.Connectable`, will be assigned - the ``bind`` attribute on the :class:`~sqlalchemy.MetaData` - instance. + the ``bind`` attribute on the :class:`~sqlalchemy.MetaData` + instance. :param metadata: An optional :class:`~sqlalchemy.MetaData` instance. All @@ -1623,13 +1623,13 @@ def declarative_base(bind=None, metadata=None, mapper=None, cls=object, no __init__ will be provided and construction will fall back to cls.__init__ by way of the normal Python semantics. - :param class_registry: optional dictionary that will serve as the + :param class_registry: optional dictionary that will serve as the registry of class names-> mapped classes when string names - are used to identify classes inside of :func:`.relationship` + are used to identify classes inside of :func:`.relationship` and others. Allows two or more declarative base classes - to share the same registry of class names for simplified + to share the same registry of class names for simplified inter-base relationships. - + :param metaclass: Defaults to :class:`.DeclarativeMeta`. A metaclass or __metaclass__ compatible callable to use as the meta type of the generated @@ -1662,7 +1662,7 @@ def _undefer_column_name(key, column): class ConcreteBase(object): """A helper class for 'concrete' declarative mappings. - + :class:`.ConcreteBase` will use the :func:`.polymorphic_union` function automatically, against all tables mapped as a subclass to this class. The function is called via the @@ -1672,7 +1672,7 @@ class ConcreteBase(object): :class:`.ConcreteBase` produces a mapped table for the class itself. 
Compare to :class:`.AbstractConcreteBase`, which does not. - + Example:: from sqlalchemy.ext.declarative import ConcreteBase @@ -1682,7 +1682,7 @@ class ConcreteBase(object): employee_id = Column(Integer, primary_key=True) name = Column(String(50)) __mapper_args__ = { - 'polymorphic_identity':'employee', + 'polymorphic_identity':'employee', 'concrete':True} class Manager(Employee): @@ -1691,7 +1691,7 @@ class ConcreteBase(object): name = Column(String(50)) manager_data = Column(String(40)) __mapper_args__ = { - 'polymorphic_identity':'manager', + 'polymorphic_identity':'manager', 'concrete':True} """ @@ -1716,17 +1716,17 @@ class ConcreteBase(object): class AbstractConcreteBase(ConcreteBase): """A helper class for 'concrete' declarative mappings. - + :class:`.AbstractConcreteBase` will use the :func:`.polymorphic_union` function automatically, against all tables mapped as a subclass to this class. The function is called via the ``__declare_last__()`` function, which is essentially a hook for the :func:`.MapperEvents.after_configured` event. - + :class:`.AbstractConcreteBase` does not produce a mapped table for the class itself. Compare to :class:`.ConcreteBase`, which does. - + Example:: from sqlalchemy.ext.declarative import ConcreteBase @@ -1740,7 +1740,7 @@ class AbstractConcreteBase(ConcreteBase): name = Column(String(50)) manager_data = Column(String(40)) __mapper_args__ = { - 'polymorphic_identity':'manager', + 'polymorphic_identity':'manager', 'concrete':True} """ diff --git a/lib/sqlalchemy/ext/horizontal_shard.py b/lib/sqlalchemy/ext/horizontal_shard.py index 47123f3d52..e7809f8d9f 100644 --- a/lib/sqlalchemy/ext/horizontal_shard.py +++ b/lib/sqlalchemy/ext/horizontal_shard.py @@ -9,7 +9,7 @@ Defines a rudimental 'horizontal sharding' system which allows a Session to distribute queries and persistence operations across multiple databases. -For a usage example, see the :ref:`examples_sharding` example included in +For a usage example, see the :ref:`examples_sharding` example included in the source distribution. """ @@ -31,7 +31,7 @@ class ShardedQuery(Query): def set_shard(self, shard_id): """return a new query, limited to a single shard ID. - all subsequent operations with the returned query will + all subsequent operations with the returned query will be against the single shard regardless of other state. """ @@ -45,7 +45,7 @@ class ShardedQuery(Query): result = self._connection_from_session( mapper=self._mapper_zero(), shard_id=shard_id).execute( - context.statement, + context.statement, self._params) return self.instances(result, context) @@ -56,7 +56,7 @@ class ShardedQuery(Query): for shard_id in self.query_chooser(self): partial.extend(iter_for_shard(shard_id)) - # if some kind of in memory 'sorting' + # if some kind of in memory 'sorting' # were done, this is where it would happen return iter(partial) @@ -73,7 +73,7 @@ class ShardedQuery(Query): return None class ShardedSession(Session): - def __init__(self, shard_chooser, id_chooser, query_chooser, shards=None, + def __init__(self, shard_chooser, id_chooser, query_chooser, shards=None, query_cls=ShardedQuery, **kwargs): """Construct a ShardedSession. 
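A minimal sketch of such a construction follows; the two engines and
the even/odd routing scheme are hypothetical, and the chooser callables
are only illustrative stubs::

    from sqlalchemy import create_engine
    from sqlalchemy.ext.horizontal_shard import ShardedSession

    shards = {
        'even': create_engine('sqlite:///even.db'),
        'odd': create_engine('sqlite:///odd.db'),
    }

    def shard_chooser(mapper, instance, clause=None):
        # route a flush to a shard based on a (hypothetical)
        # integer primary key on the instance
        return 'even' if instance.id % 2 == 0 else 'odd'

    def id_chooser(query, ident):
        # a given identity could live on either shard
        return ['even', 'odd']

    def query_chooser(query):
        # without deeper analysis of the query, search all shards
        return ['even', 'odd']

    session = ShardedSession(shard_chooser, id_chooser,
                             query_chooser, shards=shards)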
@@ -113,8 +113,8 @@ class ShardedSession(Session): if self.transaction is not None: return self.transaction.connection(mapper, shard_id=shard_id) else: - return self.get_bind(mapper, - shard_id=shard_id, + return self.get_bind(mapper, + shard_id=shard_id, instance=instance).contextual_connect(**kwargs) def get_bind(self, mapper, shard_id=None, instance=None, clause=None, **kw): diff --git a/lib/sqlalchemy/ext/hybrid.py b/lib/sqlalchemy/ext/hybrid.py index 8734181ea2..90809afd8e 100644 --- a/lib/sqlalchemy/ext/hybrid.py +++ b/lib/sqlalchemy/ext/hybrid.py @@ -10,8 +10,8 @@ class level and at the instance level. The :mod:`~sqlalchemy.ext.hybrid` extension provides a special form of method -decorator, is around 50 lines of code and has almost no dependencies on the rest -of SQLAlchemy. It can, in theory, work with any descriptor-based expression +decorator, is around 50 lines of code and has almost no dependencies on the rest +of SQLAlchemy. It can, in theory, work with any descriptor-based expression system. Consider a mapping ``Interval``, representing integer ``start`` and ``end`` @@ -25,9 +25,9 @@ as the class itself:: from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import Session, aliased from sqlalchemy.ext.hybrid import hybrid_property, hybrid_method - + Base = declarative_base() - + class Interval(Base): __tablename__ = 'interval' @@ -50,7 +50,7 @@ as the class itself:: @hybrid_method def intersects(self, other): return self.contains(other.start) | self.contains(other.end) - + Above, the ``length`` property returns the difference between the ``end`` and ``start`` attributes. With an instance of ``Interval``, this subtraction occurs in Python, using normal Python descriptor mechanics:: @@ -60,33 +60,33 @@ in Python, using normal Python descriptor mechanics:: 5 When dealing with the ``Interval`` class itself, the :class:`.hybrid_property` -descriptor evaluates the function body given the ``Interval`` class as +descriptor evaluates the function body given the ``Interval`` class as the argument, which when evaluated with SQLAlchemy expression mechanics returns a new SQL expression:: - + >>> print Interval.length interval."end" - interval.start - + >>> print Session().query(Interval).filter(Interval.length > 10) - SELECT interval.id AS interval_id, interval.start AS interval_start, - interval."end" AS interval_end - FROM interval + SELECT interval.id AS interval_id, interval.start AS interval_start, + interval."end" AS interval_end + FROM interval WHERE interval."end" - interval.start > :param_1 - -ORM methods such as :meth:`~.Query.filter_by` generally use ``getattr()`` to + +ORM methods such as :meth:`~.Query.filter_by` generally use ``getattr()`` to locate attributes, so can also be used with hybrid attributes:: >>> print Session().query(Interval).filter_by(length=5) - SELECT interval.id AS interval_id, interval.start AS interval_start, - interval."end" AS interval_end - FROM interval + SELECT interval.id AS interval_id, interval.start AS interval_start, + interval."end" AS interval_end + FROM interval WHERE interval."end" - interval.start = :param_1 The ``Interval`` class example also illustrates two methods, ``contains()`` and ``intersects()``, decorated with :class:`.hybrid_method`. This decorator applies the same idea to methods that :class:`.hybrid_property` applies -to attributes. The methods return boolean values, and take advantage -of the Python ``|`` and ``&`` bitwise operators to produce equivalent instance-level and +to attributes. 
The methods return boolean values, and take advantage +of the Python ``|`` and ``&`` bitwise operators to produce equivalent instance-level and SQL expression-level boolean behavior:: >>> i1.contains(6) @@ -97,24 +97,24 @@ SQL expression-level boolean behavior:: True >>> i1.intersects(Interval(25, 29)) False - + >>> print Session().query(Interval).filter(Interval.contains(15)) - SELECT interval.id AS interval_id, interval.start AS interval_start, - interval."end" AS interval_end - FROM interval + SELECT interval.id AS interval_id, interval.start AS interval_start, + interval."end" AS interval_end + FROM interval WHERE interval.start <= :start_1 AND interval."end" > :end_1 >>> ia = aliased(Interval) >>> print Session().query(Interval, ia).filter(Interval.intersects(ia)) - SELECT interval.id AS interval_id, interval.start AS interval_start, - interval."end" AS interval_end, interval_1.id AS interval_1_id, - interval_1.start AS interval_1_start, interval_1."end" AS interval_1_end - FROM interval, interval AS interval_1 - WHERE interval.start <= interval_1.start - AND interval."end" > interval_1.start - OR interval.start <= interval_1."end" + SELECT interval.id AS interval_id, interval.start AS interval_start, + interval."end" AS interval_end, interval_1.id AS interval_1_id, + interval_1.start AS interval_1_start, interval_1."end" AS interval_1_end + FROM interval, interval AS interval_1 + WHERE interval.start <= interval_1.start + AND interval."end" > interval_1.start + OR interval.start <= interval_1."end" AND interval."end" > interval_1."end" - + Defining Expression Behavior Distinct from Attribute Behavior -------------------------------------------------------------- @@ -122,18 +122,18 @@ Our usage of the ``&`` and ``|`` bitwise operators above was fortunate, consider our functions operated on two boolean values to return a new one. In many cases, the construction of an in-Python function and a SQLAlchemy SQL expression have enough differences that two separate Python expressions should be defined. The :mod:`~sqlalchemy.ext.hybrid` decorators -define the :meth:`.hybrid_property.expression` modifier for this purpose. As an example we'll +define the :meth:`.hybrid_property.expression` modifier for this purpose. As an example we'll define the radius of the interval, which requires the usage of the absolute value function:: from sqlalchemy import func - + class Interval(object): # ... - + @hybrid_property def radius(self): return abs(self.length) / 2 - + @radius.expression def radius(cls): return func.abs(cls.length) / 2 @@ -143,22 +143,22 @@ Above the Python function ``abs()`` is used for instance-level operations, the S >>> i1.radius 2 - + >>> print Session().query(Interval).filter(Interval.radius > 5) - SELECT interval.id AS interval_id, interval.start AS interval_start, - interval."end" AS interval_end - FROM interval + SELECT interval.id AS interval_id, interval.start AS interval_start, + interval."end" AS interval_end + FROM interval WHERE abs(interval."end" - interval.start) / :abs_1 > :param_1 Defining Setters ---------------- -Hybrid properties can also define setter methods. If we wanted ``length`` above, when +Hybrid properties can also define setter methods. If we wanted ``length`` above, when set, to modify the endpoint value:: class Interval(object): # ... 
- + @hybrid_property def length(self): return self.end - self.start @@ -179,7 +179,7 @@ The ``length(self, value)`` method is now called upon set:: Working with Relationships -------------------------- -There's no essential difference when creating hybrids that work with related objects as +There's no essential difference when creating hybrids that work with related objects as opposed to column-based data. The need for distinct expressions tends to be greater. Consider the following declarative mapping which relates a ``User`` to a ``SavingsAccount``:: @@ -187,9 +187,9 @@ Consider the following declarative mapping which relates a ``User`` to a ``Savin from sqlalchemy.orm import relationship from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.ext.hybrid import hybrid_property - + Base = declarative_base() - + class SavingsAccount(Base): __tablename__ = 'account' id = Column(Integer, primary_key=True) @@ -200,9 +200,9 @@ Consider the following declarative mapping which relates a ``User`` to a ``Savin __tablename__ = 'user' id = Column(Integer, primary_key=True) name = Column(String(100), nullable=False) - + accounts = relationship("SavingsAccount", backref="owner") - + @hybrid_property def balance(self): if self.accounts: @@ -222,17 +222,17 @@ Consider the following declarative mapping which relates a ``User`` to a ``Savin def balance(cls): return SavingsAccount.balance -The above hybrid property ``balance`` works with the first ``SavingsAccount`` entry in the list of +The above hybrid property ``balance`` works with the first ``SavingsAccount`` entry in the list of accounts for this user. The in-Python getter/setter methods can treat ``accounts`` as a Python -list available on ``self``. +list available on ``self``. -However, at the expression level, we can't travel along relationships to column attributes -directly since SQLAlchemy is explicit about joins. So here, it's expected that the ``User`` class will be +However, at the expression level, we can't travel along relationships to column attributes +directly since SQLAlchemy is explicit about joins. So here, it's expected that the ``User`` class will be used in an appropriate context such that an appropriate join to ``SavingsAccount`` will be present:: >>> print Session().query(User, User.balance).join(User.accounts).filter(User.balance > 5000) SELECT "user".id AS user_id, "user".name AS user_name, account.balance AS account_balance - FROM "user" JOIN account ON "user".id = account.user_id + FROM "user" JOIN account ON "user".id = account.user_id WHERE account.balance > :balance_1 Note however, that while the instance level accessors need to worry about whether ``self.accounts`` @@ -242,8 +242,8 @@ would use an outer join:: >>> from sqlalchemy import or_ >>> print (Session().query(User, User.balance).outerjoin(User.accounts). ... filter(or_(User.balance < 5000, User.balance == None))) - SELECT "user".id AS user_id, "user".name AS user_name, account.balance AS account_balance - FROM "user" LEFT OUTER JOIN account ON "user".id = account.user_id + SELECT "user".id AS user_id, "user".name AS user_name, account.balance AS account_balance + FROM "user" LEFT OUTER JOIN account ON "user".id = account.user_id WHERE account.balance < :balance_1 OR account.balance IS NULL .. _hybrid_custom_comparators: @@ -253,7 +253,7 @@ Building Custom Comparators The hybrid property also includes a helper that allows construction of custom comparators. 
A comparator object allows one to customize the behavior of each SQLAlchemy expression -operator individually. They are useful when creating custom types that have +operator individually. They are useful when creating custom types that have some highly idiosyncratic behavior on the SQL side. The example class below allows case-insensitive comparisons on the attribute @@ -263,9 +263,9 @@ named ``word_insensitive``:: from sqlalchemy import func, Column, Integer, String from sqlalchemy.orm import Session from sqlalchemy.ext.declarative import declarative_base - + Base = declarative_base() - + class CaseInsensitiveComparator(Comparator): def __eq__(self, other): return func.lower(self.__clause_element__()) == func.lower(other) @@ -274,27 +274,27 @@ named ``word_insensitive``:: __tablename__ = 'searchword' id = Column(Integer, primary_key=True) word = Column(String(255), nullable=False) - + @hybrid_property def word_insensitive(self): return self.word.lower() - + @word_insensitive.comparator def word_insensitive(cls): return CaseInsensitiveComparator(cls.word) -Above, SQL expressions against ``word_insensitive`` will apply the ``LOWER()`` +Above, SQL expressions against ``word_insensitive`` will apply the ``LOWER()`` SQL function to both sides:: >>> print Session().query(SearchWord).filter_by(word_insensitive="Trucks") - SELECT searchword.id AS searchword_id, searchword.word AS searchword_word - FROM searchword + SELECT searchword.id AS searchword_id, searchword.word AS searchword_word + FROM searchword WHERE lower(searchword.word) = lower(:lower_1) The ``CaseInsensitiveComparator`` above implements part of the :class:`.ColumnOperators` interface. A "coercion" operation like lowercasing can be applied to all comparison operations (i.e. ``eq``, ``lt``, ``gt``, etc.) using :meth:`.Operators.operate`:: - + class CaseInsensitiveComparator(Comparator): def operate(self, op, other): return op(func.lower(self.__clause_element__()), func.lower(other)) @@ -310,7 +310,7 @@ by ``@word_insensitive.comparator``, only applies to the SQL side. A more comprehensive form of the custom comparator is to construct a *Hybrid Value Object*. This technique applies the target value or expression to a value object which is then returned by the accessor in all cases. The value object allows control -of all operations upon the value as well as how compared values are treated, both +of all operations upon the value as well as how compared values are treated, both on the SQL expression side as well as the Python value side. Replacing the previous ``CaseInsensitiveComparator`` class with a new ``CaseInsensitiveWord`` class:: @@ -342,8 +342,8 @@ previous ``CaseInsensitiveComparator`` class with a new ``CaseInsensitiveWord`` Above, the ``CaseInsensitiveWord`` object represents ``self.word``, which may be a SQL function, or may be a Python native. By overriding ``operate()`` and ``__clause_element__()`` to work in terms of ``self.word``, all comparison operations will work against the -"converted" form of ``word``, whether it be SQL side or Python side. -Our ``SearchWord`` class can now deliver the ``CaseInsensitiveWord`` object unconditionally +"converted" form of ``word``, whether it be SQL side or Python side. 
+Our ``SearchWord`` class can now deliver the ``CaseInsensitiveWord`` object unconditionally from a single hybrid call:: class SearchWord(Base): @@ -356,12 +356,12 @@ from a single hybrid call:: return CaseInsensitiveWord(self.word) The ``word_insensitive`` attribute now has case-insensitive comparison behavior -universally, including SQL expression vs. Python expression (note the Python value is +universally, including SQL expression vs. Python expression (note the Python value is converted to lower case on the Python side here):: >>> print Session().query(SearchWord).filter_by(word_insensitive="Trucks") - SELECT searchword.id AS searchword_id, searchword.word AS searchword_word - FROM searchword + SELECT searchword.id AS searchword_id, searchword.word AS searchword_word + FROM searchword WHERE lower(searchword.word) = :lower_1 SQL expression versus SQL expression:: @@ -369,13 +369,13 @@ SQL expression versus SQL expression:: >>> sw1 = aliased(SearchWord) >>> sw2 = aliased(SearchWord) >>> print Session().query( - ... sw1.word_insensitive, + ... sw1.word_insensitive, ... sw2.word_insensitive).\\ ... filter( ... sw1.word_insensitive > sw2.word_insensitive ... ) - SELECT lower(searchword_1.word) AS lower_1, lower(searchword_2.word) AS lower_2 - FROM searchword AS searchword_1, searchword AS searchword_2 + SELECT lower(searchword_1.word) AS lower_1, lower(searchword_2.word) AS lower_2 + FROM searchword AS searchword_1, searchword AS searchword_2 WHERE lower(searchword_1.word) > lower(searchword_2.word) Python only expression:: @@ -403,7 +403,7 @@ Building Transformers ---------------------- A *transformer* is an object which can receive a :class:`.Query` object and return a -new one. The :class:`.Query` object includes a method :meth:`.with_transformation` +new one. The :class:`.Query` object includes a method :meth:`.with_transformation` that simply returns a new :class:`.Query` transformed by the given function. We can combine this with the :class:`.Comparator` class to produce one type @@ -412,18 +412,18 @@ filtering criterion. Consider a mapped class ``Node``, which assembles using adjacency list into a hierarchical tree pattern:: - + from sqlalchemy import Column, Integer, ForeignKey from sqlalchemy.orm import relationship from sqlalchemy.ext.declarative import declarative_base Base = declarative_base() - + class Node(Base): __tablename__ = 'node' id =Column(Integer, primary_key=True) parent_id = Column(Integer, ForeignKey('node.id')) parent = relationship("Node", remote_side=id) - + Suppose we wanted to add an accessor ``grandparent``. This would return the ``parent`` of ``Node.parent``. When we have an instance of ``Node``, this is simple:: @@ -431,7 +431,7 @@ Suppose we wanted to add an accessor ``grandparent``. This would return the ``p class Node(Base): # ... - + @hybrid_property def grandparent(self): return self.parent.parent @@ -460,7 +460,7 @@ attribute and filtered based on the given criterion:: id =Column(Integer, primary_key=True) parent_id = Column(Integer, ForeignKey('node.id')) parent = relationship("Node", remote_side=id) - + @hybrid_property def grandparent(self): return self.parent.parent @@ -486,8 +486,8 @@ using :attr:`.Operators.eq` against the left and right sides, passing into {sql}>>> session.query(Node).\\ ... with_transformation(Node.grandparent==Node(id=5)).\\ ... 
all() - SELECT node.id AS node_id, node.parent_id AS node_parent_id - FROM node JOIN node AS node_1 ON node_1.id = node.parent_id + SELECT node.id AS node_id, node.parent_id AS node_parent_id + FROM node JOIN node AS node_1 ON node_1.id = node.parent_id WHERE :param_1 = node_1.parent_id {stop} @@ -529,14 +529,14 @@ with each class:: {sql}>>> session.query(Node).\\ ... with_transformation(Node.grandparent.join).\\ ... filter(Node.grandparent==Node(id=5)) - SELECT node.id AS node_id, node.parent_id AS node_parent_id - FROM node JOIN node AS node_1 ON node_1.id = node.parent_id + SELECT node.id AS node_id, node.parent_id AS node_parent_id + FROM node JOIN node AS node_1 ON node_1.id = node.parent_id WHERE :param_1 = node_1.parent_id {stop} The "transformer" pattern is an experimental pattern that starts to make usage of some functional programming paradigms. -While it's only recommended for advanced and/or patient developers, +While it's only recommended for advanced and/or patient developers, there's probably a whole lot of amazing things it can be used for. """ @@ -546,26 +546,26 @@ from sqlalchemy.orm import attributes, interfaces class hybrid_method(object): """A decorator which allows definition of a Python object method with both instance-level and class-level behavior. - + """ def __init__(self, func, expr=None): """Create a new :class:`.hybrid_method`. - + Usage is typically via decorator:: - + from sqlalchemy.ext.hybrid import hybrid_method - + class SomeClass(object): @hybrid_method def value(self, x, y): return self._value + x + y - + @value.expression def value(self, x, y): return func.some_function(self._value, x, y) - + """ self.func = func self.expr = expr or func @@ -585,25 +585,25 @@ class hybrid_method(object): class hybrid_property(object): """A decorator which allows definition of a Python descriptor with both instance-level and class-level behavior. - + """ def __init__(self, fget, fset=None, fdel=None, expr=None): """Create a new :class:`.hybrid_property`. - + Usage is typically via decorator:: - + from sqlalchemy.ext.hybrid import hybrid_property - + class SomeClass(object): @hybrid_property def value(self): return self._value - + @value.setter def value(self, value): self._value = value - + """ self.fget = fget self.fset = fset @@ -647,10 +647,10 @@ class hybrid_property(object): def comparator(self, comparator): """Provide a modifying decorator that defines a custom comparator producing method. - + The return value of the decorated method should be an instance of :class:`~.hybrid.Comparator`. - + """ proxy_attr = attributes.\ diff --git a/lib/sqlalchemy/ext/mutable.py b/lib/sqlalchemy/ext/mutable.py index 2280e33f35..ab4aff806b 100644 --- a/lib/sqlalchemy/ext/mutable.py +++ b/lib/sqlalchemy/ext/mutable.py @@ -21,8 +21,8 @@ Establishing Mutability on Scalar Column Values =============================================== A typical example of a "mutable" structure is a Python dictionary. -Following the example introduced in :ref:`types_toplevel`, we -begin with a custom type that marshals Python dictionaries into +Following the example introduced in :ref:`types_toplevel`, we +begin with a custom type that marshals Python dictionaries into JSON strings before being persisted:: from sqlalchemy.types import TypeDecorator, VARCHAR @@ -43,7 +43,7 @@ JSON strings before being persisted:: value = json.loads(value) return value -The usage of ``json`` is only for the purposes of example. The :mod:`sqlalchemy.ext.mutable` +The usage of ``json`` is only for the purposes of example. 
The :mod:`sqlalchemy.ext.mutable` extension can be used with any type whose target Python type may be mutable, including :class:`.PickleType`, :class:`.postgresql.ARRAY`, etc. @@ -86,7 +86,7 @@ The above dictionary class takes the approach of subclassing the Python built-in ``dict`` to produce a dict subclass which routes all mutation events through ``__setitem__``. There are many variants on this approach, such as subclassing ``UserDict.UserDict``, -the newer ``collections.MutableMapping``, etc. The part that's important to this +the newer ``collections.MutableMapping``, etc. The part that's important to this example is that the :meth:`.Mutable.changed` method is called whenever an in-place change to the datastructure takes place. @@ -95,7 +95,7 @@ convert any values that are not instances of ``MutationDict``, such as the plain dictionaries returned by the ``json`` module, into the appropriate type. Defining this method is optional; we could just as well have created our ``JSONEncodedDict`` such that it always returns an instance of ``MutationDict``, -and additionally ensured that all calling code uses ``MutationDict`` +and additionally ensured that all calling code uses ``MutationDict`` explicitly. When :meth:`.Mutable.coerce` is not overridden, any values applied to a parent object which are not instances of the mutable type will raise a ``ValueError``. @@ -108,14 +108,14 @@ of this type, applying event listening instrumentation to the mapped attribute. For example, with classical table metadata:: from sqlalchemy import Table, Column, Integer - + my_data = Table('my_data', metadata, Column('id', Integer, primary_key=True), Column('data', MutationDict.as_mutable(JSONEncodedDict)) ) Above, :meth:`~.Mutable.as_mutable` returns an instance of ``JSONEncodedDict`` -(if the type object was not an instance already), which will intercept any +(if the type object was not an instance already), which will intercept any attributes which are mapped against this type. Below we establish a simple mapping against the ``my_data`` table:: @@ -157,7 +157,7 @@ will flag the attribute as "dirty" on the parent object:: The ``MutationDict`` can be associated with all future instances of ``JSONEncodedDict`` in one step, using :meth:`~.Mutable.associate_with`. This -is similar to :meth:`~.Mutable.as_mutable` except it will intercept +is similar to :meth:`~.Mutable.as_mutable` except it will intercept all occurrences of ``MutationDict`` in all mappings unconditionally, without the need to declare it individually:: @@ -167,8 +167,8 @@ the need to declare it individually:: __tablename__ = 'my_data' id = Column(Integer, primary_key=True) data = Column(JSONEncodedDict) - - + + Supporting Pickling -------------------- @@ -314,10 +314,10 @@ the minimal form of our ``Point`` class:: class Point(MutableComposite): # ... - + def __getstate__(self): return self.x, self.y - + def __setstate__(self, state): self.x, self.y = state @@ -338,11 +338,11 @@ class MutableBase(object): @memoized_property def _parents(self): """Dictionary of parent object->attribute name on the parent. - + This attribute is a so-called "memoized" property. It initializes itself with a new ``weakref.WeakKeyDictionary`` the first time it is accessed, returning the same object upon subsequent access.
- + """ return weakref.WeakKeyDictionary() @@ -359,7 +359,7 @@ class MutableBase(object): @classmethod def _listen_on_attribute(cls, attribute, coerce, parent_cls): - """Establish this type as a mutation listener for the given + """Establish this type as a mutation listener for the given mapped descriptor. """ @@ -373,7 +373,7 @@ class MutableBase(object): def load(state, *args): """Listen for objects loaded or refreshed. - Wrap the target data member's value with + Wrap the target data member's value with ``Mutable``. """ @@ -389,7 +389,7 @@ class MutableBase(object): data member. Establish a weak reference to the parent object - on the incoming value, remove it for the one + on the incoming value, remove it for the one outgoing. """ @@ -436,7 +436,7 @@ class Mutable(MutableBase): @classmethod def associate_with_attribute(cls, attribute): - """Establish this type as a mutation listener for the given + """Establish this type as a mutation listener for the given mapped descriptor. """ @@ -444,15 +444,15 @@ class Mutable(MutableBase): @classmethod def associate_with(cls, sqltype): - """Associate this wrapper with all future mapped columns + """Associate this wrapper with all future mapped columns of the given type. This is a convenience method that calls ``associate_with_attribute`` automatically. - .. warning:: - + .. warning:: + The listeners established by this method are *global* - to all mappers, and are *not* garbage collected. Only use + to all mappers, and are *not* garbage collected. Only use :meth:`.associate_with` for types that are permanent to an application, not with ad-hoc types else this will cause unbounded growth in memory usage. @@ -474,7 +474,7 @@ class Mutable(MutableBase): This establishes listeners that will detect ORM mappings against the given type, adding mutation event trackers to those mappings. - The type is returned, unconditionally as an instance, so that + The type is returned, unconditionally as an instance, so that :meth:`.as_mutable` can be used inline:: Table('mytable', metadata, @@ -486,15 +486,15 @@ class Mutable(MutableBase): is given, and that only columns which are declared specifically with that type instance receive additional instrumentation. - To associate a particular mutable type with all occurrences of a + To associate a particular mutable type with all occurrences of a particular type, use the :meth:`.Mutable.associate_with` classmethod of the particular :meth:`.Mutable` subclass to establish a global association. - .. warning:: - + .. warning:: + The listeners established by this method are *global* - to all mappers, and are *not* garbage collected. Only use + to all mappers, and are *not* garbage collected. Only use :meth:`.as_mutable` for types that are permanent to an application, not with ad-hoc types else this will cause unbounded growth in memory usage. @@ -521,13 +521,13 @@ class MutableComposite(MutableBase): """Mixin that defines transparent propagation of change events on a SQLAlchemy "composite" object to its owning parent or parents. - + See the example in :ref:`mutable_composites` for usage information. - - .. warning:: - + + .. warning:: + The listeners established by the :class:`.MutableComposite` - class are *global* to all mappers, and are *not* garbage collected. Only use + class are *global* to all mappers, and are *not* garbage collected. Only use :class:`.MutableComposite` for types that are permanent to an application, not with ad-hoc types else this will cause unbounded growth in memory usage. 
@@ -542,7 +542,7 @@ class MutableComposite(MutableBase): prop = object_mapper(parent).get_property(key) for value, attr_name in zip( - self.__composite_values__(), + self.__composite_values__(), prop._attribute_keys): setattr(parent, attr_name, value) diff --git a/lib/sqlalchemy/ext/orderinglist.py b/lib/sqlalchemy/ext/orderinglist.py index 0a27ee3093..7f675d8e6b 100644 --- a/lib/sqlalchemy/ext/orderinglist.py +++ b/lib/sqlalchemy/ext/orderinglist.py @@ -73,14 +73,14 @@ Use the ``ordering_list`` function to set up the ``collection_class`` on relatio (as in the mapper example above). This implementation depends on the list starting in the proper order, so be SURE to put an order_by on your relationship. -.. warning:: +.. warning:: ``ordering_list`` only provides limited functionality when a primary - key column or unique column is the target of the sort. Since changing the order of - entries often means that two rows must trade values, this is not possible when + key column or unique column is the target of the sort. Since changing the order of + entries often means that two rows must trade values, this is not possible when the value is constrained by a primary key or unique constraint, since one of the rows would temporarily have to point to a third available value so that the other row - could take its old value. ``ordering_list`` doesn't do any of this for you, + could take its old value. ``ordering_list`` doesn't do any of this for you, nor does SQLAlchemy itself. ``ordering_list`` takes the name of the related object's ordering attribute as @@ -184,7 +184,7 @@ class OrderingList(list): This implementation relies on the list starting in the proper order, so be **sure** to put an ``order_by`` on your relationship. - :param ordering_attr: + :param ordering_attr: Name of the attribute that stores the object's order in the relationship. @@ -201,7 +201,7 @@ class OrderingList(list): like stepped numbering, alphabetical and Fibonacci numbering, see the unit tests. - :param reorder_on_append: + :param reorder_on_append: Default False. When appending an object with an existing (non-None) ordering value, that value will be left untouched unless ``reorder_on_append`` is true. This is an optimization to avoid a diff --git a/lib/sqlalchemy/ext/serializer.py b/lib/sqlalchemy/ext/serializer.py index ed2dec6c9c..342ca01c12 100644 --- a/lib/sqlalchemy/ext/serializer.py +++ b/lib/sqlalchemy/ext/serializer.py @@ -4,7 +4,7 @@ # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php -"""Serializer/Deserializer objects for usage with SQLAlchemy query structures, +"""Serializer/Deserializer objects for usage with SQLAlchemy query structures, allowing "contextual" deserialization. Any SQLAlchemy query structure, either based on sqlalchemy.sql. @@ -31,19 +31,19 @@ Usage is nearly the same as that of the standard Python pickle module:: print query2.all() -Similar restrictions as when using raw pickle apply; mapped classes must +Similar restrictions as when using raw pickle apply; mapped classes must themselves be pickleable, meaning they are importable from a module-level namespace. The serializer module is only appropriate for query structures. It is not needed for: -* instances of user-defined classes. These contain no references to engines, +* instances of user-defined classes. These contain no references to engines, sessions or expression constructs in the typical case and can be serialized directly.
* Table metadata that is to be loaded entirely from the serialized structure (i.e. is - not already declared in the application). Regular pickle.loads()/dumps() can - be used to fully dump any ``MetaData`` object, typically one which was reflected + not already declared in the application). Regular pickle.loads()/dumps() can + be used to fully dump any ``MetaData`` object, typically one which was reflected from an existing database at some previous point in time. The serializer module is specifically for the opposite case, where the Table metadata is already present in memory. diff --git a/lib/sqlalchemy/ext/sqlsoup.py b/lib/sqlalchemy/ext/sqlsoup.py index 47589acc4c..1427e64e47 100644 --- a/lib/sqlalchemy/ext/sqlsoup.py +++ b/lib/sqlalchemy/ext/sqlsoup.py @@ -57,7 +57,7 @@ Loading objects is as easy as this:: >>> users [ MappedUsers(name=u'Joe Student',email=u'student@example.edu', - password=u'student',classname=None,admin=0), + password=u'student',classname=None,admin=0), MappedUsers(name=u'Bhargan Basepair',email=u'basepair@example.edu', password=u'basepair',classname=None,admin=1) ] @@ -67,7 +67,7 @@ Of course, letting the database do the sort is better:: >>> db.users.order_by(db.users.name).all() [ MappedUsers(name=u'Bhargan Basepair',email=u'basepair@example.edu', - password=u'basepair',classname=None,admin=1), + password=u'basepair',classname=None,admin=1), MappedUsers(name=u'Joe Student',email=u'student@example.edu', password=u'student',classname=None,admin=0) ] @@ -86,7 +86,7 @@ we're at it:: >>> db.users.filter(where).order_by(desc(db.users.name)).all() [ MappedUsers(name=u'Joe Student',email=u'student@example.edu', - password=u'student',classname=None,admin=0), + password=u'student',classname=None,admin=0), MappedUsers(name=u'Bhargan Basepair',email=u'basepair@example.edu', password=u'basepair',classname=None,admin=1) ] @@ -212,15 +212,15 @@ with `with_labels`, to disambiguate columns with their table name (.c is short for .columns):: >>> db.with_labels(join1).c.keys() - [u'users_name', u'users_email', u'users_password', - u'users_classname', u'users_admin', u'loans_book_id', + [u'users_name', u'users_email', u'users_password', + u'users_classname', u'users_admin', u'loans_book_id', u'loans_user_name', u'loans_loan_date'] You can also join directly to a labeled object:: >>> labeled_loans = db.with_labels(db.loans) >>> db.join(db.users, labeled_loans, isouter=True).c.keys() - [u'name', u'email', u'password', u'classname', + [u'name', u'email', u'password', u'classname', u'admin', u'loans_book_id', u'loans_user_name', u'loans_loan_date'] @@ -467,8 +467,8 @@ def _class_for_table(session, engine, selectable, base_cls, mapper_kwargs): selectable = expression._clause_element_as_expr(selectable) mapname = 'Mapped' + _selectable_name(selectable) # Py2K - if isinstance(mapname, unicode): - engine_encoding = engine.dialect.encoding + if isinstance(mapname, unicode): + engine_encoding = engine.dialect.encoding mapname = mapname.encode(engine_encoding) # end Py2K @@ -487,7 +487,7 @@ def _class_for_table(session, engine, selectable, base_cls, mapper_kwargs): raise TypeError('unable to compare with %s' % o.__class__) return t1, t2 - # python2/python3 compatible system of + # python2/python3 compatible system of # __cmp__ - __lt__ + __eq__ def __lt__(self, o): @@ -524,15 +524,15 @@ class SqlSoup(object): def __init__(self, engine_or_metadata, base=object, session=None): """Initialize a new :class:`.SqlSoup`. 
- :param engine_or_metadata: a string database URL, :class:`.Engine` + :param engine_or_metadata: a string database URL, :class:`.Engine` or :class:`.MetaData` object to associate with. If the argument is a :class:`.MetaData`, it should be *bound* to an :class:`.Engine`. - :param base: a class which will serve as the default class for + :param base: a class which will serve as the default class for returned mapped classes. Defaults to ``object``. :param session: a :class:`.ScopedSession` or :class:`.Session` with which to associate ORM operations for this :class:`.SqlSoup` instance. - If ``None``, a :class:`.ScopedSession` that's local to this + If ``None``, a :class:`.ScopedSession` that's local to this module is used. """ @@ -545,7 +545,7 @@ class SqlSoup(object): elif isinstance(engine_or_metadata, (basestring, Engine)): self._metadata = MetaData(engine_or_metadata) else: - raise ArgumentError("invalid engine or metadata argument %r" % + raise ArgumentError("invalid engine or metadata argument %r" % engine_or_metadata) self._cache = {} @@ -567,7 +567,7 @@ class SqlSoup(object): """Execute a SQL statement. The statement may be a plain SQL string, - an :func:`.expression.select` construct, or an :func:`.expression.text` + an :func:`.expression.select` construct, or an :func:`.expression.text` construct. """ @@ -630,11 +630,11 @@ class SqlSoup(object): """ self.session.expunge_all() - def map_to(self, attrname, tablename=None, selectable=None, + def map_to(self, attrname, tablename=None, selectable=None, schema=None, base=None, mapper_args=util.immutabledict()): """Configure a mapping to the given attrname. - This is the "master" method that can be used to create any + This is the "master" method that can be used to create any configuration. .. versionadded:: 0.6.6 @@ -677,10 +677,10 @@ class SqlSoup(object): raise ArgumentError("'tablename' and 'selectable' " "arguments are mutually exclusive") - selectable = Table(tablename, - self._metadata, - autoload=True, - autoload_with=self.bind, + selectable = Table(tablename, + self._metadata, + autoload=True, + autoload_with=self.bind, schema=schema or self.schema) elif schema: raise ArgumentError("'tablename' argument is required when " @@ -742,7 +742,7 @@ class SqlSoup(object): ) def with_labels(self, selectable, base=None, **mapper_args): - """Map a selectable directly, wrapping the + """Map a selectable directly, wrapping the selectable in a subquery with labels. .. versionchanged:: 0.6.6 @@ -766,7 +766,7 @@ class SqlSoup(object): select(use_labels=True). alias('foo'), base=base, **mapper_args) - def join(self, left, right, onclause=None, isouter=False, + def join(self, left, right, onclause=None, isouter=False, base=None, **mapper_args): """Create an :func:`.expression.join` and map to it. @@ -792,7 +792,7 @@ class SqlSoup(object): return self.map(j, base=base, **mapper_args) def entity(self, attr, schema=None): - """Return the named entity from this :class:`.SqlSoup`, or + """Return the named entity from this :class:`.SqlSoup`, or create if not present. For more generalized mapping, see :meth:`.map_to`. diff --git a/lib/sqlalchemy/interfaces.py b/lib/sqlalchemy/interfaces.py index ed02ed0a87..6ce9a6d754 100644 --- a/lib/sqlalchemy/interfaces.py +++ b/lib/sqlalchemy/interfaces.py @@ -17,8 +17,8 @@ from sqlalchemy import event, util class PoolListener(object): """Hooks into the lifecycle of connections in a :class:`.Pool`. - .. note:: - + .. note:: + :class:`.PoolListener` is deprecated. Please refer to :class:`.PoolEvents`.
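As a brief sketch of the event-based replacement the deprecation note above points to (the listener function name here is made up for illustration), the same "connect" hook is established via :func:`.event.listen` against the :class:`.Pool` class::

    from sqlalchemy import event
    from sqlalchemy.pool import Pool

    def my_on_connect(dbapi_con, con_record):
        '''perform connect operations'''
        # invoked once for each newly created DBAPI connection

    event.listen(Pool, 'connect', my_on_connect)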
@@ -27,7 +27,7 @@ class PoolListener(object): class MyListener(PoolListener): def connect(self, dbapi_con, con_record): '''perform connect operations''' - # etc. + # etc. # create a new pool with a listener p = QueuePool(..., listeners=[MyListener()]) @@ -151,8 +151,8 @@ class PoolListener(object): class ConnectionProxy(object): """Allows interception of statement execution by Connections. - .. note:: - + .. note:: + :class:`.ConnectionProxy` is deprecated. Please refer to :class:`.ConnectionEvents`. @@ -194,7 +194,7 @@ class ConnectionProxy(object): event.listen(self, 'before_execute', adapt_execute) - def adapt_cursor_execute(conn, cursor, statement, + def adapt_cursor_execute(conn, cursor, statement, parameters,context, executemany, ): def execute_wrapper( diff --git a/lib/sqlalchemy/log.py b/lib/sqlalchemy/log.py index e77730a98d..24608fdeaa 100644 --- a/lib/sqlalchemy/log.py +++ b/lib/sqlalchemy/log.py @@ -12,7 +12,7 @@ module. The regular dotted module namespace is used, starting at 'sqlalchemy'. For class-level logging, the class name is appended. The "echo" keyword parameter, available on SQLA :class:`.Engine` -and :class:`.Pool` objects, corresponds to a logger specific to that +and :class:`.Pool` objects, corresponds to a logger specific to that instance only. """ @@ -60,7 +60,7 @@ class InstanceLogger(object): """A logger adapter (wrapper) for :class:`.Identified` subclasses. This allows multiple instances (e.g. Engine or Pool instances) - to share a logger, but have its verbosity controlled on a + to share a logger, but have its verbosity controlled on a per-instance basis. The basic functionality is to return a logging level @@ -185,7 +185,7 @@ def instance_logger(instance, echoflag=None): logger = logging.getLogger(name) else: # if a specified echo flag, return an EchoLogger, - # which checks the flag, overrides normal log + # which checks the flag, overrides normal log # levels by calling logger._log() logger = InstanceLogger(echoflag, name) diff --git a/lib/sqlalchemy/orm/__init__.py b/lib/sqlalchemy/orm/__init__.py index 6b151e8b90..81f5ba992d 100644 --- a/lib/sqlalchemy/orm/__init__.py +++ b/lib/sqlalchemy/orm/__init__.py @@ -142,7 +142,7 @@ def scoped_session(session_factory, scopefunc=None): return ScopedSession(session_factory, scopefunc=scopefunc) def create_session(bind=None, **kwargs): - """Create a new :class:`.Session` + """Create a new :class:`.Session` with no automation enabled by default. This function is used primarily for testing. The usual @@ -192,57 +192,57 @@ def relationship(argument, secondary=None, **kwargs): 'children': relationship(Child) }) - Some arguments accepted by :func:`.relationship` optionally accept a + Some arguments accepted by :func:`.relationship` optionally accept a callable function, which when called produces the desired value. The callable is invoked by the parent :class:`.Mapper` at "mapper initialization" time, which happens only when mappers are first used, and is assumed to be after all mappings have been constructed. This can be used - to resolve order-of-declaration and other dependency issues, such as + to resolve order-of-declaration and other dependency issues, such as if ``Child`` is declared below ``Parent`` in the same file:: - + mapper(Parent, properties={ - "children":relationship(lambda: Child, + "children":relationship(lambda: Child, order_by=lambda: Child.id) }) - + When using the :ref:`declarative_toplevel` extension, the Declarative initializer allows string arguments to be passed to :func:`.relationship`. 
- These string arguments are converted into callables that evaluate + These string arguments are converted into callables that evaluate the string as Python code, using the Declarative class-registry as a namespace. This allows the lookup of related classes to be automatic via their string name, and removes the need to import related classes at all into the local module space:: - + from sqlalchemy.ext.declarative import declarative_base Base = declarative_base() - + class Parent(Base): __tablename__ = 'parent' id = Column(Integer, primary_key=True) children = relationship("Child", order_by="Child.id") - + A full array of examples and reference documentation regarding :func:`.relationship` is at :ref:`relationship_config_toplevel`. - + :param argument: a mapped class, or actual :class:`.Mapper` instance, representing the target of - the relationship. - + the relationship. + ``argument`` may also be passed as a callable function - which is evaluated at mapper initialization time, and may be passed as a + which is evaluated at mapper initialization time, and may be passed as a Python-evaluable string when using Declarative. :param secondary: for a many-to-many relationship, specifies the intermediary - table, and is an instance of :class:`.Table`. The ``secondary`` keyword + table, and is an instance of :class:`.Table`. The ``secondary`` keyword argument should generally only be used for a table that is not otherwise expressed in any class mapping, unless this relationship is declared as view only, otherwise - conflicting persistence operations can occur. - + conflicting persistence operations can occur. + ``secondary`` may - also be passed as a callable function which is evaluated at + also be passed as a callable function which is evaluated at mapper initialization time. :param active_history=False: @@ -258,16 +258,16 @@ def relationship(argument, secondary=None, **kwargs): :param backref: indicates the string name of a property to be placed on the related mapper's class that will handle this relationship in the other - direction. The other property will be created automatically + direction. The other property will be created automatically when the mappers are configured. Can also be passed as a :func:`backref` object to control the configuration of the new relationship. :param back_populates: - Takes a string name and has the same meaning as ``backref``, - except the complementing property is **not** created automatically, - and instead must be configured explicitly on the other mapper. The - complementing property should also indicate ``back_populates`` + Takes a string name and has the same meaning as ``backref``, + except the complementing property is **not** created automatically, + and instead must be configured explicitly on the other mapper. The + complementing property should also indicate ``back_populates`` to this relationship to ensure proper functioning. :param cascade: @@ -278,12 +278,12 @@ def relationship(argument, secondary=None, **kwargs): Available cascades are: - * ``save-update`` - cascade the :meth:`.Session.add` + * ``save-update`` - cascade the :meth:`.Session.add` operation. 
This cascade applies both to future and - past calls to :meth:`~sqlalchemy.orm.session.Session.add`, + past calls to :meth:`~sqlalchemy.orm.session.Session.add`, meaning new items added to a collection or scalar relationship - get placed into the same session as that of the parent, and - also applies to items which have been removed from this + get placed into the same session as that of the parent, and + also applies to items which have been removed from this relationship but are still part of unflushed history. * ``merge`` - cascade the :meth:`~sqlalchemy.orm.session.Session.merge` @@ -295,8 +295,8 @@ def relationship(argument, secondary=None, **kwargs): * ``delete`` - cascade the :meth:`.Session.delete` operation - * ``delete-orphan`` - if an item of the child's type is - detached from its parent, mark it for deletion. + * ``delete-orphan`` - if an item of the child's type is + detached from its parent, mark it for deletion. .. versionchanged:: 0.7 This option does not prevent @@ -305,7 +305,7 @@ def relationship(argument, secondary=None, **kwargs): that case, ensure the child's foreign key column(s) is configured as NOT NULL - * ``refresh-expire`` - cascade the :meth:`.Session.expire` + * ``refresh-expire`` - cascade the :meth:`.Session.expire` and :meth:`~sqlalchemy.orm.session.Session.refresh` operations * ``all`` - shorthand for "save-update,merge, refresh-expire, @@ -313,33 +313,33 @@ def relationship(argument, secondary=None, **kwargs): See the section :ref:`unitofwork_cascades` for more background on configuring cascades. - + :param cascade_backrefs=True: a boolean value indicating if the ``save-update`` cascade should - operate along an assignment event intercepted by a backref. + operate along an assignment event intercepted by a backref. When set to ``False``, the attribute managed by this relationship will not cascade an incoming transient object into the session of a persistent parent, if the event is received via backref. - + That is:: - + mapper(A, a_table, properties={ 'bs':relationship(B, backref="a", cascade_backrefs=False) }) - + If an ``A()`` is present in the session, assigning it to the "a" attribute on a transient ``B()`` will not place - the ``B()`` into the session. To set the flag in the other - direction, i.e. so that ``A().bs.append(B())`` won't add + the ``B()`` into the session. To set the flag in the other + direction, i.e. so that ``A().bs.append(B())`` won't add a transient ``A()`` into the session for a persistent ``B()``:: - + mapper(A, a_table, properties={ - 'bs':relationship(B, + 'bs':relationship(B, backref=backref("a", cascade_backrefs=False) ) }) - + See the section :ref:`unitofwork_cascades` for more background on configuring cascades. @@ -366,9 +366,9 @@ def relationship(argument, secondary=None, **kwargs): a list of columns which are to be used as "foreign key" columns. Normally, :func:`relationship` uses the :class:`.ForeignKey` and :class:`.ForeignKeyConstraint` objects present within the - mapped or secondary :class:`.Table` to determine the "foreign" side of + mapped or secondary :class:`.Table` to determine the "foreign" side of the join condition. This is used to construct SQL clauses in order - to load objects, as well as to "synchronize" values from + to load objects, as well as to "synchronize" values from primary key columns to referencing foreign key columns. 
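A minimal, purely illustrative sketch of supplying this parameter (``Parent``, ``Child`` and the table objects are hypothetical), anticipating the override behavior described next::

    from sqlalchemy.orm import mapper, relationship

    # child_table.c.parent_id is named explicitly as the
    # "foreign" column for the join condition
    mapper(Parent, parent_table, properties={
        'children': relationship(Child,
                        foreign_keys=[child_table.c.parent_id])
    })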
The ``foreign_keys`` parameter overrides the notion of what's "foreign" in the table metadata, allowing the specification @@ -384,9 +384,9 @@ def relationship(argument, secondary=None, **kwargs): should artificially not be considered as foreign. ``foreign_keys`` may also be passed as a callable function - which is evaluated at mapper initialization time, and may be passed as a + which is evaluated at mapper initialization time, and may be passed as a Python-evaluable string when using Declarative. - + :param innerjoin=False: when ``True``, joined eager loads will use an inner join to join against related tables instead of an outer join. The purpose @@ -401,16 +401,16 @@ def relationship(argument, secondary=None, **kwargs): :param join_depth: when non-``None``, an integer value indicating how many levels - deep "eager" loaders should join on a self-referring or cyclical - relationship. The number counts how many times the same Mapper - shall be present in the loading condition along a particular join + deep "eager" loaders should join on a self-referring or cyclical + relationship. The number counts how many times the same Mapper + shall be present in the loading condition along a particular join branch. When left at its default of ``None``, eager loaders - will stop chaining when they encounter the same target mapper + will stop chaining when they encounter the same target mapper which is already higher up in the chain. This option applies both to joined- and subquery- eager loaders. - :param lazy='select': specifies - how the related items should be loaded. Default value is + :param lazy='select': specifies + how the related items should be loaded. Default value is ``select``. Values include: * ``select`` - items should be loaded lazily when the property is first @@ -433,12 +433,12 @@ def relationship(argument, secondary=None, **kwargs): which issues a JOIN to a subquery of the original statement. - * ``noload`` - no loading should occur at any time. This is to + * ``noload`` - no loading should occur at any time. This is to support "write-only" attributes, or attributes which are populated in some manner specific to the application. * ``dynamic`` - the attribute will return a pre-configured - :class:`~sqlalchemy.orm.query.Query` object for all read + :class:`~sqlalchemy.orm.query.Query` object for all read operations, onto which further filtering operations can be applied before iterating the results. See the section :ref:`dynamic_relationship` for more details. @@ -464,8 +464,8 @@ def relationship(argument, secondary=None, **kwargs): Note that the load of related objects on a pending or transient object also does not trigger any attribute change events - no user-defined - events will be emitted for these attributes, and if and when the - object is ultimately flushed, only the user-specific foreign key + events will be emitted for these attributes, and if and when the + object is ultimately flushed, only the user-specific foreign key attributes will be part of the modified state. The load_on_pending flag does not improve behavior @@ -479,14 +479,14 @@ def relationship(argument, secondary=None, **kwargs): :param order_by: indicates the ordering that should be applied when loading these items. ``order_by`` is expected to refer to one of the :class:`.Column` - objects to which the target class is mapped, or + objects to which the target class is mapped, or the attribute itself bound to the target class which refers to the column.
``order_by`` may also be passed as a callable function - which is evaluated at mapper initialization time, and may be passed as a + which is evaluated at mapper initialization time, and may be passed as a Python-evaluable string when using Declarative. - + :param passive_deletes=False: Indicates loading behavior during delete operations. @@ -566,7 +566,7 @@ def relationship(argument, secondary=None, **kwargs): table). ``primaryjoin`` may also be passed as a callable function - which is evaluated at mapper initialization time, and may be passed as a + which is evaluated at mapper initialization time, and may be passed as a Python-evaluable string when using Declarative. :param remote_side: @@ -574,16 +574,16 @@ list of columns that form the "remote side" of the relationship. ``remote_side`` may also be passed as a callable function - which is evaluated at mapper initialization time, and may be passed as a + which is evaluated at mapper initialization time, and may be passed as a Python-evaluable string when using Declarative. :param query_class: a :class:`.Query` subclass that will be used as the base of the "appender query" returned by a "dynamic" relationship, that - is, a relationship that specifies ``lazy="dynamic"`` or was + is, a relationship that specifies ``lazy="dynamic"`` or was otherwise constructed using the :func:`.orm.dynamic_loader` function. - + :param secondaryjoin: a SQL expression that will be used as the join of an association table to the child object. By default, this value is @@ -591,7 +591,7 @@ child tables. ``secondaryjoin`` may also be passed as a callable function - which is evaluated at mapper initialization time, and may be passed as a + which is evaluated at mapper initialization time, and may be passed as a Python-evaluable string when using Declarative. :param single_parent=(True|False): @@ -599,7 +599,7 @@ from being associated with more than one parent at a time. This is used for many-to-one or many-to-many relationships that should be treated either as one-to-one or one-to-many. Its - usage is optional unless delete-orphan cascade is also + usage is optional unless delete-orphan cascade is also set on this relationship(), in which case it's required. :param uselist=(True|False): @@ -632,13 +632,13 @@ def relation(*arg, **kw): def dynamic_loader(argument, **kw): """Construct a dynamically-loading mapper property. - This is essentially the same as + This is essentially the same as using the ``lazy='dynamic'`` argument with :func:`relationship`:: dynamic_loader(SomeClass) - + # is the same as - + relationship(SomeClass, lazy="dynamic") See the section :ref:`dynamic_relationship` for more details @@ -692,19 +692,19 @@ def column_property(*cols, **kw): :param doc: optional string that will be applied as the doc on the class-bound descriptor. - + :param expire_on_flush=True: Disable expiry on flush. A column_property() which refers to a SQL expression (and not a single table-bound column) is considered to be a "read only" property; populating it has no effect on the state of data, and it can only return database state. For this reason a column_property()'s value - is expired whenever the parent object is involved in a + is expired whenever the parent object is involved in a flush, that is, has any kind of "dirty" state within a flush.
Setting this parameter to ``False`` will have the effect of leaving any existing value present after the flush proceeds. Note, however, that the :class:`.Session` with default expiration - settings still expires + settings still expires all attributes after a :meth:`.Session.commit` call. .. versionadded:: 0.7.3 @@ -714,7 +714,7 @@ def column_property(*cols, **kw): :class:`.AttributeExtension` instance, or list of extensions, which will be prepended to the list of attribute listeners for the resulting - descriptor placed on the class. + descriptor placed on the class. **Deprecated.** Please see :class:`.AttributeEvents`. @@ -775,7 +775,7 @@ def backref(name, **kwargs): Used with the ``backref`` keyword argument to :func:`relationship` in place of a string argument, e.g.:: - + 'items':relationship(SomeItem, backref=backref('parent', lazy='subquery')) """ @@ -789,7 +789,7 @@ def deferred(*columns, **kwargs): Used with the "properties" dictionary sent to :func:`mapper`. See also: - + :ref:`deferred` """ @@ -797,47 +797,47 @@ def mapper(class_, local_table=None, *args, **params): """Return a new :class:`~.Mapper` object. - + This function is typically used behind the scenes via the Declarative extension. When using Declarative, many of the usual :func:`.mapper` arguments are handled by the Declarative extension itself, including ``class_``, ``local_table``, ``properties``, and ``inherits``. - Other options are passed to :func:`.mapper` using + Other options are passed to :func:`.mapper` using the ``__mapper_args__`` class variable:: - + class MyClass(Base): __tablename__ = 'my_table' id = Column(Integer, primary_key=True) type = Column(String(50)) alt = Column("some_alt", Integer) - + __mapper_args__ = { 'polymorphic_on' : type } Explicit use of :func:`.mapper` - is often referred to as *classical mapping*. The above + is often referred to as *classical mapping*. The above declarative example is equivalent in classical form to:: - + my_table = Table("my_table", metadata, Column('id', Integer, primary_key=True), Column('type', String(50)), Column("some_alt", Integer) ) - + class MyClass(object): pass - - mapper(MyClass, my_table, - polymorphic_on=my_table.c.type, + + mapper(MyClass, my_table, + polymorphic_on=my_table.c.type, properties={ 'alt':my_table.c.some_alt }) - + See also: - + :ref:`classical_mapping` - discussion of direct usage of :func:`.mapper` @@ -845,10 +845,10 @@ def mapper(class_, local_table=None, *args, **params): this argument is automatically passed as the declared class itself. - :param local_table: The :class:`.Table` or other selectable - to which the class is mapped. May be ``None`` if + :param local_table: The :class:`.Table` or other selectable + to which the class is mapped. May be ``None`` if this mapper inherits from another mapper using single-table - inheritance. When using Declarative, this argument is + inheritance. When using Declarative, this argument is automatically passed by the extension, based on what is configured via the ``__table__`` argument or via the :class:`.Table` produced as a result of the ``__tablename__`` and :class:`.Column` @@ -872,30 +872,30 @@ def mapper(class_, local_table=None, *args, **params): particular primary key value. A "partial primary key" can occur if one has mapped to an OUTER JOIN, for example. - :param batch: Defaults to ``True``, indicating that save operations - of multiple entities can be batched together for efficiency.
+ :param batch: Defaults to ``True``, indicating that save operations + of multiple entities can be batched together for efficiency. Setting to False indicates that an instance will be fully saved before saving the next - instance. This is used in the extremely rare case that a - :class:`.MapperEvents` listener requires being called + instance. This is used in the extremely rare case that a + :class:`.MapperEvents` listener requires being called in between individual row persistence operations. - :param column_prefix: A string which will be prepended + :param column_prefix: A string which will be prepended to the mapped attribute name when :class:`.Column` objects are automatically assigned as attributes to the - mapped class. Does not affect explicitly specified - column-based properties. - + mapped class. Does not affect explicitly specified + column-based properties. + See the section :ref:`column_prefix` for an example. :param concrete: If True, indicates this mapper should use concrete table inheritance with its parent mapper. - + See the section :ref:`concrete_inheritance` for an example. - :param exclude_properties: A list or set of string column names to - be excluded from mapping. - + :param exclude_properties: A list or set of string column names to + be excluded from mapping. + See :ref:`include_exclude_cols` for an example. :param extension: A :class:`.MapperExtension` instance or @@ -904,47 +904,47 @@ def mapper(class_, local_table=None, *args, **params): :class:`.Mapper`. **Deprecated.** Please see :class:`.MapperEvents`. :param include_properties: An inclusive list or set of string column - names to map. - + names to map. + See :ref:`include_exclude_cols` for an example. - :param inherits: A mapped class or the corresponding :class:`.Mapper` + :param inherits: A mapped class or the corresponding :class:`.Mapper` of one indicating a superclass from which this :class:`.Mapper` should *inherit*. The mapped class here must be a subclass of the other mapper's class. When using Declarative, this argument is passed automatically as a result of the natural class - hierarchy of the declared classes. - + hierarchy of the declared classes. + See also: - + :ref:`inheritance_toplevel` - + :param inherit_condition: For joined table inheritance, a SQL expression which will define how the two tables are joined; defaults to a natural join between the two tables. :param inherit_foreign_keys: When ``inherit_condition`` is used and the - columns present are missing a :class:`.ForeignKey` configuration, - this parameter can be used to specify which columns are "foreign". + columns present are missing a :class:`.ForeignKey` configuration, + this parameter can be used to specify which columns are "foreign". In most cases this can be left as ``None``. :param non_primary: Specify that this :class:`.Mapper` is in addition to the "primary" mapper, that is, the one used for persistence. The :class:`.Mapper` created here may be used for ad-hoc mapping of the class to an alternate selectable, for loading - only. - + only. + The ``non_primary`` feature is rarely needed with modern usage. :param order_by: A single :class:`.Column` or list of :class:`.Column` objects which selection operations should use as the default - ordering for entities. By default mappers have no pre-defined + ordering for entities. By default mappers have no pre-defined ordering.
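A quick sketch of the ``order_by`` parameter just described (``User`` and ``user_table`` are hypothetical names, classical-mapping style)::

    from sqlalchemy.orm import mapper

    # all selection operations against User will default
    # to ordering by user_table.c.name
    mapper(User, user_table, order_by=user_table.c.name)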
:param passive_updates: Indicates UPDATE behavior of foreign key - columns when a primary key column changes on a joined-table inheritance + columns when a primary key column changes on a joined-table inheritance mapping. Defaults to ``True``. When True, it is assumed that ON UPDATE CASCADE is configured on @@ -957,41 +957,41 @@ def mapper(class_, local_table=None, *args, **params): operation for an update. The :class:`.Mapper` here will emit an UPDATE statement for the dependent columns during a primary key change. - + See also: - - :ref:`passive_updates` - description of a similar feature as + + :ref:`passive_updates` - description of a similar feature as used with :func:`.relationship` - :param polymorphic_on: Specifies the column, attribute, or - SQL expression used to determine the target class for an + :param polymorphic_on: Specifies the column, attribute, or + SQL expression used to determine the target class for an incoming row, when inheriting classes are present. - + This value is commonly a :class:`.Column` object that's present in the mapped :class:`.Table`:: - + class Employee(Base): __tablename__ = 'employee' - + id = Column(Integer, primary_key=True) discriminator = Column(String(50)) - + __mapper_args__ = { "polymorphic_on":discriminator, "polymorphic_identity":"employee" } - + It may also be specified - as a SQL expression, as in this example where we + as a SQL expression, as in this example where we use the :func:`.case` construct to provide a conditional approach:: class Employee(Base): __tablename__ = 'employee' - + id = Column(Integer, primary_key=True) discriminator = Column(String(50)) - + __mapper_args__ = { "polymorphic_on":case([ (discriminator == "EN", "engineer"), @@ -999,14 +999,14 @@ def mapper(class_, local_table=None, *args, **params): ], else_="employee"), "polymorphic_identity":"employee" } - - It may also refer to any attribute + + It may also refer to any attribute configured with :func:`.column_property`, or to the string name of one:: - + class Employee(Base): __tablename__ = 'employee' - + id = Column(Integer, primary_key=True) discriminator = Column(String(50)) employee_type = column_property( @@ -1015,7 +1015,7 @@ def mapper(class_, local_table=None, *args, **params): (discriminator == "MA", "manager"), ], else_="employee") ) - + __mapper_args__ = { "polymorphic_on":employee_type, "polymorphic_identity":"employee" @@ -1028,8 +1028,8 @@ def mapper(class_, local_table=None, *args, **params): When setting ``polymorphic_on`` to reference an attribute or expression that's not present in the - locally mapped :class:`.Table`, yet the value - of the discriminator should be persisted to the database, + locally mapped :class:`.Table`, yet the value + of the discriminator should be persisted to the database, the value of the discriminator is not automatically set on new instances; this must be handled by the user, @@ -1039,27 +1039,27 @@ def mapper(class_, local_table=None, *args, **params): from sqlalchemy import event from sqlalchemy.orm import object_mapper - + @event.listens_for(Employee, "init", propagate=True) def set_identity(instance, *arg, **kw): mapper = object_mapper(instance) instance.discriminator = mapper.polymorphic_identity - + Where above, we assign the value of ``polymorphic_identity`` for the mapped class to the ``discriminator`` attribute, thus persisting the value to the ``discriminator`` column in the database. 
- + See also: - + :ref:`inheritance_toplevel` - - :param polymorphic_identity: Specifies the value which + :param polymorphic_identity: Specifies the value which identifies this particular class as returned by the column expression referred to by the ``polymorphic_on`` setting. As rows are received, the value corresponding to the ``polymorphic_on`` column expression is compared - to this value, indicating which subclass should + to this value, indicating which subclass should be used for the newly reconstructed object. :param properties: A dictionary mapping the string names of object @@ -1077,11 +1077,11 @@ def mapper(class_, local_table=None, *args, **params): This is normally simply the primary key of the ``local_table``, but can be overridden here. - :param version_id_col: A :class:`.Column` + :param version_id_col: A :class:`.Column` that will be used to keep a running version id of mapped entities in the database. This is used during save operations to ensure that no other thread or process has updated the instance during the - lifetime of the entity, else a :class:`~sqlalchemy.orm.exc.StaleDataError` + lifetime of the entity, else a :class:`~sqlalchemy.orm.exc.StaleDataError` exception is thrown. By default the column must be of :class:`.Integer` type, unless ``version_id_generator`` specifies a new generation @@ -1098,13 +1098,13 @@ def mapper(class_, local_table=None, *args, **params): __tablename__ = 'mytable' id = Column(Integer, primary_key=True) version_uuid = Column(String(32)) - + __mapper_args__ = { 'version_id_col':version_uuid, 'version_id_generator':lambda version:uuid.uuid4().hex } - The callable receives the current version identifier as its + The callable receives the current version identifier as its single argument. :param with_polymorphic: A tuple in the form ``(<classes>, <selectable>)`` @@ -1115,20 +1115,20 @@ def mapper(class_, local_table=None, *args, **params): ``'*'`` may be used to indicate all descending classes should be loaded immediately. The second tuple argument indicates a selectable that will be used to query for multiple - classes. - + classes. + See also: - + :ref:`concrete_inheritance` - typically uses ``with_polymorphic`` to specify a UNION statement to select from. - - :ref:`with_polymorphic` - usage example of the related + + :ref:`with_polymorphic` - usage example of the related :meth:`.Query.with_polymorphic` method - + """ return Mapper(class_, local_table, *args, **params) -def synonym(name, map_column=False, descriptor=None, +def synonym(name, map_column=False, descriptor=None, comparator_factory=None, doc=None): """Denote an attribute name as a synonym to a mapped property. @@ -1150,7 +1150,7 @@ def synonym(name, map_column=False, descriptor=None, mapper(MyClass, sometable, properties={ "status":synonym("_status", map_column=True) }) - + Above, the ``status`` attribute of MyClass will produce expression behavior against the table column named ``status``, using the Python attribute ``_status`` on the mapped class @@ -1166,24 +1166,24 @@ def synonym(name, map_column=False, descriptor=None, column to map. """ - return SynonymProperty(name, map_column=map_column, - descriptor=descriptor, + return SynonymProperty(name, map_column=map_column, + descriptor=descriptor, comparator_factory=comparator_factory, doc=doc) def comparable_property(comparator_factory, descriptor=None): - """Provides a method of applying a :class:`.PropComparator` + """Provides a method of applying a :class:`.PropComparator` to any Python descriptor attribute. ..
versionchanged:: 0.7 :func:`.comparable_property` is superseded by - the :mod:`~sqlalchemy.ext.hybrid` extension. See the example + the :mod:`~sqlalchemy.ext.hybrid` extension. See the example at :ref:`hybrid_custom_comparators`. - Allows any Python descriptor to behave like a SQL-enabled + Allows any Python descriptor to behave like a SQL-enabled attribute when used at the class level in queries, allowing redefinition of expression operator behavior. - + In the example below we redefine :meth:`.PropComparator.operate` to wrap both sides of an expression in ``func.lower()`` to produce case-insensitive comparison:: @@ -1197,7 +1197,7 @@ def comparable_property(comparator_factory, descriptor=None): class CaseInsensitiveComparator(PropComparator): def __clause_element__(self): return self.prop - + def operate(self, op, other): return op( func.lower(self.__clause_element__()), @@ -1214,13 +1214,13 @@ def comparable_property(comparator_factory, descriptor=None): CaseInsensitiveComparator(mapper.c.word, mapper) ) - - A mapping like the above allows the ``word_insensitive`` attribute + + A mapping like the above allows the ``word_insensitive`` attribute to render an expression like:: - + >>> print SearchWord.word_insensitive == "Trucks" lower(search_word.word) = lower(:lower_1) - + :param comparator_factory: A PropComparator subclass or factory that defines operator behavior for this property. @@ -1246,7 +1246,7 @@ def clear_mappers(): """Remove all mappers from all classes. This function removes all instrumentation from classes and disposes - of their associated mappers. Once called, the classes are unmapped + of their associated mappers. Once called, the classes are unmapped and can be later re-mapped with new mappers. :func:`.clear_mappers` is *not* for normal use, as there is literally no @@ -1257,7 +1257,7 @@ def clear_mappers(): such, :func:`.clear_mappers` is only for usage in test suites that re-use the same classes with different mappings, which is itself an extremely rare use case - the only such use case is in fact SQLAlchemy's own test suite, - and possibly the test suites of other ORM extension libraries which + and possibly the test suites of other ORM extension libraries which intend to test various combinations of mapper construction upon a fixed set of classes. @@ -1292,7 +1292,7 @@ def joinedload(*keys, **kw): query(User).options(joinedload(User.orders)) # joined-load the "keywords" collection on each "Item", - # but not the "items" collection on "Order" - those + # but not the "items" collection on "Order" - those # remain lazily loaded. query(Order).options(joinedload(Order.items, Item.keywords)) @@ -1307,17 +1307,17 @@ def joinedload(*keys, **kw): query(Order).options(joinedload(Order.user, innerjoin=True)) - .. note:: - + .. note:: + The join created by :func:`joinedload` is anonymously aliased such that it **does not affect the query results**. An :meth:`.Query.order_by` or :meth:`.Query.filter` call **cannot** reference these aliased - tables - so-called "user space" joins are constructed using + tables - so-called "user space" joins are constructed using :meth:`.Query.join`. The rationale for this is that :func:`joinedload` is only applied in order to affect how related objects or collections are loaded as an optimizing detail - it can be added or removed with no impact - on actual results. See the section :ref:`zen_of_eager_loading` for - a detailed description of how this is used, including how to use a single + on actual results. 
See the section :ref:`zen_of_eager_loading` for + a detailed description of how this is used, including how to use a single explicit JOIN for filtering/ordering and eager loading simultaneously. See also: :func:`subqueryload`, :func:`lazyload` """ @@ -1326,7 +1326,7 @@ def joinedload(*keys, **kw): innerjoin = kw.pop('innerjoin', None) if innerjoin is not None: return ( - strategies.EagerLazyOption(keys, lazy='joined'), + strategies.EagerLazyOption(keys, lazy='joined'), strategies.EagerJoinOption(keys, innerjoin) ) else: @@ -1334,7 +1334,7 @@ def joinedload_all(*keys, **kw): """Return a ``MapperOption`` that will convert all properties along the - given dot-separated path or series of mapped attributes + given dot-separated path or series of mapped attributes into a joined eager load. .. versionchanged:: 0.6beta3 @@ -1366,7 +1366,7 @@ def joinedload_all(*keys, **kw): innerjoin = kw.pop('innerjoin', None) if innerjoin is not None: return ( - strategies.EagerLazyOption(keys, lazy='joined', chained=True), + strategies.EagerLazyOption(keys, lazy='joined', chained=True), strategies.EagerJoinOption(keys, innerjoin, chained=True) ) else: @@ -1382,8 +1382,8 @@ def eagerload_all(*args, **kwargs): return joinedload_all(*args, **kwargs) def subqueryload(*keys): - """Return a ``MapperOption`` that will convert the property - of the given name or series of mapped attributes + """Return a ``MapperOption`` that will convert the property + of the given name or series of mapped attributes into a subquery eager load. Used with :meth:`~sqlalchemy.orm.query.Query.options`. @@ -1394,7 +1394,7 @@ def subqueryload(*keys): query(User).options(subqueryload(User.orders)) # subquery-load the "keywords" collection on each "Item", - # but not the "items" collection on "Order" - those + # but not the "items" collection on "Order" - those # remain lazily loaded. query(Order).options(subqueryload(Order.items, Item.keywords)) @@ -1411,7 +1411,7 @@ def subqueryload_all(*keys): """Return a ``MapperOption`` that will convert all properties along the - given dot-separated path or series of mapped attributes + given dot-separated path or series of mapped attributes into a subquery eager load. Used with :meth:`~sqlalchemy.orm.query.Query.options`. @@ -1446,7 +1446,7 @@ def lazyload(*keys): def lazyload_all(*keys): """Return a ``MapperOption`` that will convert all the properties - along the given dot-separated path or series of mapped attributes + along the given dot-separated path or series of mapped attributes into a lazy load. Used with :meth:`~sqlalchemy.orm.query.Query.options`. @@ -1462,22 +1462,22 @@ def noload(*keys): Used with :meth:`~sqlalchemy.orm.query.Query.options`. - See also: :func:`lazyload`, :func:`eagerload`, + See also: :func:`lazyload`, :func:`eagerload`, :func:`subqueryload`, :func:`immediateload` """ return strategies.EagerLazyOption(keys, lazy=None) def immediateload(*keys): - """Return a ``MapperOption`` that will convert the property of the given + """Return a ``MapperOption`` that will convert the property of the given name or series of mapped attributes into an immediate load. - + The "immediate" load means the attribute will be fetched - with a separate SELECT statement per parent in the + with a separate SELECT statement per parent in the same way as lazy loading - except the loader is guaranteed to be called at load time before the parent object is returned in the result.
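An illustrative usage sketch (``Parent.children`` is a hypothetical relationship, and ``session`` an existing :class:`.Session`)::

    from sqlalchemy.orm import immediateload

    # each Parent's "children" collection is loaded by its own
    # SELECT, before the Parent objects are returned
    session.query(Parent).options(immediateload(Parent.children)).all()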
- + The normal behavior of lazy loading applies - if the relationship is a simple many-to-one, and the child object is already present in the :class:`.Session`, @@ -1497,7 +1497,7 @@ def contains_alias(alias): the main table has been aliased. This is used in the very rare case that :func:`.contains_eager` - is being used in conjunction with a user-defined SELECT + is being used in conjunction with a user-defined SELECT statement that aliases the parent table. E.g.:: # define an aliased UNION called 'ulist' @@ -1509,18 +1509,18 @@ def contains_alias(alias): statement = statement.outerjoin(addresses).\\ select().apply_labels() - # create query, indicating "ulist" will be an - # alias for the main table, "addresses" + # create query, indicating "ulist" will be an + # alias for the main table, "addresses" # property should be eager loaded query = session.query(User).options( - contains_alias('ulist'), + contains_alias('ulist'), contains_eager('addresses')) # then get results via the statement results = query.from_statement(statement).all() - :param alias: is the string name of an alias, or a - :class:`~.sql.expression.Alias` object representing + :param alias: is the string name of an alias, or a + :class:`~.sql.expression.Alias` object representing the alias. """ @@ -1533,7 +1533,7 @@ def contains_eager(*keys, **kwargs): Used with :meth:`~sqlalchemy.orm.query.Query.options`. - The option is used in conjunction with an explicit join that loads + The option is used in conjunction with an explicit join that loads the desired rows, i.e.:: sess.query(Order).\\ @@ -1554,7 +1554,7 @@ def contains_eager(*keys, **kwargs): join((user_alias, Order.user)).\\ options(contains_eager(Order.user, alias=user_alias)) - See also :func:`eagerload` for the "automatic" version of this + See also :func:`eagerload` for the "automatic" version of this functionality. For additional examples of :func:`contains_eager` see @@ -1574,36 +1574,36 @@ def defer(*key): of the given name into a deferred load. Used with :meth:`.Query.options`. - + e.g.:: - + from sqlalchemy.orm import defer - query(MyClass).options(defer("attribute_one"), + query(MyClass).options(defer("attribute_one"), defer("attribute_two")) - + A class bound descriptor is also accepted:: - + query(MyClass).options( - defer(MyClass.attribute_one), + defer(MyClass.attribute_one), defer(MyClass.attribute_two)) - + A "path" can be specified onto a related or collection object using a dotted name. The :func:`.orm.defer` option will be applied to that object when loaded:: - + query(MyClass).options( - defer("related.attribute_one"), + defer("related.attribute_one"), defer("related.attribute_two")) - + To specify a path via class, send multiple arguments:: query(MyClass).options( - defer(MyClass.related, MyOtherClass.attribute_one), + defer(MyClass.related, MyOtherClass.attribute_one), defer(MyClass.related, MyOtherClass.attribute_two)) - + See also: - + :ref:`deferred` :param \*key: A key representing an individual path. Multiple entries @@ -1618,41 +1618,41 @@ def undefer(*key): of the given name into a non-deferred (regular column) load. Used with :meth:`.Query.options`. 
- + e.g.:: - + from sqlalchemy.orm import undefer - query(MyClass).options(undefer("attribute_one"), + query(MyClass).options(undefer("attribute_one"), undefer("attribute_two")) - + A class bound descriptor is also accepted:: - + query(MyClass).options( - undefer(MyClass.attribute_one), + undefer(MyClass.attribute_one), undefer(MyClass.attribute_two)) - + A "path" can be specified onto a related or collection object using a dotted name. The :func:`.orm.undefer` option will be applied to that object when loaded:: - + query(MyClass).options( - undefer("related.attribute_one"), + undefer("related.attribute_one"), undefer("related.attribute_two")) - + To specify a path via class, send multiple arguments:: query(MyClass).options( - undefer(MyClass.related, MyOtherClass.attribute_one), + undefer(MyClass.related, MyOtherClass.attribute_one), undefer(MyClass.related, MyOtherClass.attribute_two)) - + See also: - + :func:`.orm.undefer_group` as a means to "undefer" a group of attributes at once. - + :ref:`deferred` - + :param \*key: A key representing an individual path. Multiple entries are accepted to allow a multiple-token path for a single target, not multiple targets. @@ -1665,17 +1665,17 @@ def undefer_group(name): column properties into a non-deferred (regular column) load. Used with :meth:`.Query.options`. - + e.g.:: - + query(MyClass).options(undefer("group_one")) See also: - + :ref:`deferred` - - :param name: String name of the deferred group. This name is - established using the "group" name to the :func:`.orm.deferred` + + :param name: String name of the deferred group. This name is + established using the "group" name to the :func:`.orm.deferred` configurational function. """ diff --git a/lib/sqlalchemy/orm/attributes.py b/lib/sqlalchemy/orm/attributes.py index a7e1824a65..6be981ac20 100644 --- a/lib/sqlalchemy/orm/attributes.py +++ b/lib/sqlalchemy/orm/attributes.py @@ -29,7 +29,7 @@ NO_VALUE = util.symbol('NO_VALUE') NEVER_SET = util.symbol('NEVER_SET') PASSIVE_RETURN_NEVER_SET = util.symbol('PASSIVE_RETURN_NEVER_SET', -"""Symbol indicating that loader callables can be +"""Symbol indicating that loader callables can be fired off, but if no callable is applicable and no value is present, the attribute should remain non-initialized. NEVER_SET is returned in this case. @@ -37,14 +37,14 @@ NEVER_SET is returned in this case. PASSIVE_NO_INITIALIZE = util.symbol('PASSIVE_NO_INITIALIZE', """Symbol indicating that loader callables should - not be fired off, and a non-initialized attribute + not be fired off, and a non-initialized attribute should remain that way. """) PASSIVE_NO_FETCH = util.symbol('PASSIVE_NO_FETCH', -"""Symbol indicating that loader callables should not emit SQL, +"""Symbol indicating that loader callables should not emit SQL, but a value can be fetched from the current session. - + Non-initialized attributes should be initialized to an empty value. """) @@ -53,9 +53,9 @@ PASSIVE_NO_FETCH_RELATED = util.symbol('PASSIVE_NO_FETCH_RELATED', """Symbol indicating that loader callables should not emit SQL for loading a related object, but can refresh the attributes of the local instance in order to locate a related object in the current session. - + Non-initialized attributes should be initialized to an empty value. - + The unit of work uses this mode to check if history is present on many-to-one attributes with minimal SQL emitted. 
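A minimal sketch tying together the deferred-column options described
above, assuming a hypothetical declarative ``Base`` and a mapped class
with two columns deferred under one group name (all names illustrative)::

    from sqlalchemy import Column, Integer, Text
    from sqlalchemy.orm import deferred, undefer_group

    class Book(Base):
        __tablename__ = 'book'
        id = Column(Integer, primary_key=True)
        summary = deferred(Column(Text), group='excerpts')
        excerpt = deferred(Column(Text), group='excerpts')

    # load every column in the 'excerpts' group up front
    session.query(Book).options(undefer_group('excerpts')).all()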
@@ -81,7 +81,7 @@ PASSIVE_OFF = util.symbol('PASSIVE_OFF', class QueryableAttribute(interfaces.PropComparator): """Base class for class-bound attributes. """ - def __init__(self, class_, key, impl=None, + def __init__(self, class_, key, impl=None, comparator=None, parententity=None): self.class_ = class_ self.key = key @@ -92,7 +92,7 @@ class QueryableAttribute(interfaces.PropComparator): manager = manager_of_class(class_) # manager is None in the case of AliasedClass if manager: - # propagate existing event listeners from + # propagate existing event listeners from # immediate superclass for base in manager._bases: if key in base: @@ -134,8 +134,8 @@ class QueryableAttribute(interfaces.PropComparator): except AttributeError: raise AttributeError( 'Neither %r object nor %r object has an attribute %r' % ( - type(self).__name__, - type(self.comparator).__name__, + type(self).__name__, + type(self.comparator).__name__, key) ) @@ -151,7 +151,7 @@ class InstrumentedAttribute(QueryableAttribute): """Class bound instrumented attribute which adds descriptor methods.""" def __set__(self, instance, value): - self.impl.set(instance_state(instance), + self.impl.set(instance_state(instance), instance_dict(instance), value, None) def __delete__(self, instance): @@ -179,12 +179,12 @@ def create_proxied_attribute(descriptor): class Proxy(QueryableAttribute): """Presents the :class:`.QueryableAttribute` interface as a - proxy on top of a Python descriptor / :class:`.PropComparator` + proxy on top of a Python descriptor / :class:`.PropComparator` combination. """ - def __init__(self, class_, key, descriptor, comparator, + def __init__(self, class_, key, descriptor, comparator, adapter=None, doc=None): self.class_ = class_ self.key = key @@ -233,8 +233,8 @@ def create_proxied_attribute(descriptor): except AttributeError: raise AttributeError( 'Neither %r object nor %r object has an attribute %r' % ( - type(descriptor).__name__, - type(self.comparator).__name__, + type(descriptor).__name__, + type(self.comparator).__name__, attribute) ) @@ -250,7 +250,7 @@ class AttributeImpl(object): def __init__(self, class_, key, callable_, dispatch, trackparent=False, extension=None, - compare_function=None, active_history=False, + compare_function=None, active_history=False, parent_token=None, expire_missing=True, **kwargs): """Construct an AttributeImpl. @@ -287,12 +287,12 @@ class AttributeImpl(object): parent_token Usually references the MapperProperty, used as a key for the hasparent() function to identify an "owning" attribute. - Allows multiple AttributeImpls to all match a single + Allows multiple AttributeImpls to all match a single owner attribute. expire_missing if False, don't add an "expiry" callable to this attribute - during state.expire_attributes(None), if no value is present + during state.expire_attributes(None), if no value is present for this key. """ @@ -331,7 +331,7 @@ class AttributeImpl(object): def hasparent(self, state, optimistic=False): - """Return the boolean value of a `hasparent` flag attached to + """Return the boolean value of a `hasparent` flag attached to the given state. The `optimistic` flag determines what the default return value @@ -375,8 +375,8 @@ class AttributeImpl(object): "state %s along attribute '%s', " "but the parent record " "has gone stale, can't be sure this " - "is the most recent parent." % - (mapperutil.state_str(state), + "is the most recent parent." 
% + (mapperutil.state_str(state), mapperutil.state_str(parent_state), self.key)) @@ -406,8 +406,8 @@ class AttributeImpl(object): raise NotImplementedError() def get_all_pending(self, state, dict_): - """Return a list of tuples of (state, obj) - for all objects in this attribute's current state + """Return a list of tuples of (state, obj) + for all objects in this attribute's current state + history. Only applies to object-based attributes. @@ -416,8 +416,8 @@ class AttributeImpl(object): which roughly corresponds to: get_state_history( - state, - key, + state, + key, passive=PASSIVE_NO_INITIALIZE).sum() """ @@ -478,14 +478,14 @@ class AttributeImpl(object): self.set(state, dict_, value, initiator, passive=passive) def remove(self, state, dict_, value, initiator, passive=PASSIVE_OFF): - self.set(state, dict_, None, initiator, + self.set(state, dict_, None, initiator, passive=passive, check_old=value) def pop(self, state, dict_, value, initiator, passive=PASSIVE_OFF): - self.set(state, dict_, None, initiator, + self.set(state, dict_, None, initiator, passive=passive, check_old=value, pop=True) - def set(self, state, dict_, value, initiator, + def set(self, state, dict_, value, initiator, passive=PASSIVE_OFF, check_old=None, pop=False): raise NotImplementedError() @@ -532,7 +532,7 @@ class ScalarAttributeImpl(AttributeImpl): return History.from_scalar_attribute( self, state, dict_.get(self.key, NO_VALUE)) - def set(self, state, dict_, value, initiator, + def set(self, state, dict_, value, initiator, passive=PASSIVE_OFF, check_old=None, pop=False): if initiator and initiator.parent_token is self.parent_token: return @@ -543,7 +543,7 @@ class ScalarAttributeImpl(AttributeImpl): old = dict_.get(self.key, NO_VALUE) if self.dispatch.set: - value = self.fire_replace_event(state, dict_, + value = self.fire_replace_event(state, dict_, value, old, initiator) state.modified_event(dict_, self, old) dict_[self.key] = value @@ -575,10 +575,10 @@ class MutableScalarAttributeImpl(ScalarAttributeImpl): class_manager, copy_function=None, compare_function=None, **kwargs): super(ScalarAttributeImpl, self).__init__( - class_, - key, + class_, + key, callable_, dispatch, - compare_function=compare_function, + compare_function=compare_function, **kwargs) class_manager.mutable_attributes.add(key) if copy_function is None: @@ -611,15 +611,15 @@ class MutableScalarAttributeImpl(ScalarAttributeImpl): ScalarAttributeImpl.delete(self, state, dict_) state.mutable_dict.pop(self.key) - def set(self, state, dict_, value, initiator, + def set(self, state, dict_, value, initiator, passive=PASSIVE_OFF, check_old=None, pop=False): - ScalarAttributeImpl.set(self, state, dict_, value, + ScalarAttributeImpl.set(self, state, dict_, value, initiator, passive, check_old=check_old, pop=pop) state.mutable_dict[self.key] = value class ScalarObjectAttributeImpl(ScalarAttributeImpl): - """represents a scalar-holding InstrumentedAttribute, + """represents a scalar-holding InstrumentedAttribute, where the target object is also instrumented. Adds events to delete/set operations. @@ -665,7 +665,7 @@ class ScalarObjectAttributeImpl(ScalarAttributeImpl): else: return [] - def set(self, state, dict_, value, initiator, + def set(self, state, dict_, value, initiator, passive=PASSIVE_OFF, check_old=None, pop=False): """Set a value on the given InstanceState. 
@@ -744,12 +744,12 @@ class CollectionAttributeImpl(AttributeImpl): typecallable=None, trackparent=False, extension=None, copy_function=None, compare_function=None, **kwargs): super(CollectionAttributeImpl, self).__init__( - class_, - key, + class_, + key, callable_, dispatch, trackparent=trackparent, extension=extension, - compare_function=compare_function, + compare_function=compare_function, **kwargs) if copy_function is None: @@ -777,11 +777,11 @@ class CollectionAttributeImpl(AttributeImpl): if self.key in state.committed_state: original = state.committed_state[self.key] if original is not NO_VALUE: - current_states = [((c is not None) and - instance_state(c) or None, c) + current_states = [((c is not None) and + instance_state(c) or None, c) for c in current] - original_states = [((c is not None) and - instance_state(c) or None, c) + original_states = [((c is not None) and + instance_state(c) or None, c) for c in original] current_set = dict(current_states) @@ -869,13 +869,13 @@ class CollectionAttributeImpl(AttributeImpl): def pop(self, state, dict_, value, initiator, passive=PASSIVE_OFF): try: # TODO: better solution here would be to add - # a "popper" role to collections.py to complement + # a "popper" role to collections.py to complement # "remover". self.remove(state, dict_, value, initiator, passive=passive) except (ValueError, KeyError, IndexError): pass - def set(self, state, dict_, value, initiator, + def set(self, state, dict_, value, initiator, passive=PASSIVE_OFF, pop=False): """Set a value on the given object. @@ -954,7 +954,7 @@ class CollectionAttributeImpl(AttributeImpl): return user_data - def get_collection(self, state, dict_, + def get_collection(self, state, dict_, user_data=None, passive=PASSIVE_OFF): """Retrieve the CollectionAdapter associated with the given state. 
@@ -983,19 +983,19 @@ def backref_listeners(attribute, key, uselist): old_state, old_dict = instance_state(oldchild),\ instance_dict(oldchild) impl = old_state.manager[key].impl - impl.pop(old_state, - old_dict, - state.obj(), + impl.pop(old_state, + old_dict, + state.obj(), initiator, passive=PASSIVE_NO_FETCH) if child is not None: child_state, child_dict = instance_state(child),\ instance_dict(child) child_state.manager[key].impl.append( - child_state, - child_dict, - state.obj(), - initiator, + child_state, + child_dict, + state.obj(), + initiator, passive=PASSIVE_NO_FETCH) return child @@ -1003,10 +1003,10 @@ def backref_listeners(attribute, key, uselist): child_state, child_dict = instance_state(child), \ instance_dict(child) child_state.manager[key].impl.append( - child_state, - child_dict, - state.obj(), - initiator, + child_state, + child_dict, + state.obj(), + initiator, passive=PASSIVE_NO_FETCH) return child @@ -1015,29 +1015,29 @@ def backref_listeners(attribute, key, uselist): child_state, child_dict = instance_state(child),\ instance_dict(child) child_state.manager[key].impl.pop( - child_state, - child_dict, - state.obj(), + child_state, + child_dict, + state.obj(), initiator, passive=PASSIVE_NO_FETCH) if uselist: - event.listen(attribute, "append", - emit_backref_from_collection_append_event, + event.listen(attribute, "append", + emit_backref_from_collection_append_event, retval=True, raw=True) else: - event.listen(attribute, "set", - emit_backref_from_scalar_set_event, + event.listen(attribute, "set", + emit_backref_from_scalar_set_event, retval=True, raw=True) # TODO: need coverage in test/orm/ of remove event - event.listen(attribute, "remove", - emit_backref_from_collection_remove_event, + event.listen(attribute, "remove", + emit_backref_from_collection_remove_event, retval=True, raw=True) _NO_HISTORY = util.symbol('NO_HISTORY') _NO_STATE_SYMBOLS = frozenset([ - id(PASSIVE_NO_RESULT), - id(NO_VALUE), + id(PASSIVE_NO_RESULT), + id(NO_VALUE), id(NEVER_SET)]) class History(tuple): """A 3-tuple of added, unchanged and deleted values, @@ -1078,7 +1078,7 @@ class History(tuple): return not bool( (self.added or self.deleted) or self.unchanged and self.unchanged != [None] - ) + ) def sum(self): """Return a collection of added + unchanged + deleted.""" @@ -1130,7 +1130,7 @@ class History(tuple): elif attribute.is_equal(current, original) is True: return cls((), [current], ()) else: - # current convention on native scalars is to not + # current convention on native scalars is to not # include information # about missing previous value in "deleted", but # we do include None, which helps in some primary @@ -1156,11 +1156,11 @@ class History(tuple): elif current is original: return cls((), [current], ()) else: - # current convention on related objects is to not + # current convention on related objects is to not # include information # about missing previous value in "deleted", and # to also not include None - the dependency.py rules - # ignore the None in any case. + # ignore the None in any case. 
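# A minimal sketch of how the History tuple computed here surfaces
# through the public API, assuming a mapped User instance (names
# illustrative, not from this module):
#
#     from sqlalchemy.orm.attributes import get_history
#
#     hist = get_history(some_user, 'name')
#     hist.added, hist.unchanged, hist.deleted  # the 3-tuple described above
#     hist.has_changes()                        # True if added or deleted
#     hist.sum()                                # added + unchanged + deleted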
if id(original) in _NO_STATE_SYMBOLS or original is None: deleted = () else: @@ -1181,11 +1181,11 @@ class History(tuple): return cls((), list(current), ()) else: - current_states = [((c is not None) and instance_state(c) or None, c) - for c in current + current_states = [((c is not None) and instance_state(c) or None, c) + for c in current ] - original_states = [((c is not None) and instance_state(c) or None, c) - for c in original + original_states = [((c is not None) and instance_state(c) or None, c) + for c in original ] current_set = dict(current_states) @@ -1200,7 +1200,7 @@ class History(tuple): HISTORY_BLANK = History(None, None, None) def get_history(obj, key, passive=PASSIVE_OFF): - """Return a :class:`.History` record for the given object + """Return a :class:`.History` record for the given object and attribute key. :param obj: an object whose class is instrumented by the @@ -1239,14 +1239,14 @@ def register_attribute(class_, key, **kw): comparator = kw.pop('comparator', None) parententity = kw.pop('parententity', None) doc = kw.pop('doc', None) - desc = register_descriptor(class_, key, + desc = register_descriptor(class_, key, comparator, parententity, doc=doc) register_attribute_impl(class_, key, **kw) return desc def register_attribute_impl(class_, key, - uselist=False, callable_=None, - useobject=False, mutable_scalars=False, + uselist=False, callable_=None, + useobject=False, mutable_scalars=False, impl_class=None, backref=None, **kw): manager = manager_of_class(class_) @@ -1281,7 +1281,7 @@ def register_attribute_impl(class_, key, manager.post_configure_attribute(key) return manager[key] -def register_descriptor(class_, key, comparator=None, +def register_descriptor(class_, key, comparator=None, parententity=None, doc=None): manager = manager_of_class(class_) @@ -1310,7 +1310,7 @@ def init_collection(obj, key): :func:`~sqlalchemy.orm.attributes.set_committed_value`. obj is an instrumented object instance. An InstanceState - is accepted directly for backwards compatibility but + is accepted directly for backwards compatibility but this usage is deprecated. """ @@ -1328,7 +1328,7 @@ def init_state_collection(state, dict_, key): def set_committed_value(instance, key, value): """Set the value of an attribute with no history events. - Cancels any previous history present. The value should be + Cancels any previous history present. The value should be a scalar value for scalar-holding attributes, or an iterable for any collection-holding attribute. @@ -1385,7 +1385,7 @@ def del_attribute(instance, key): def flag_modified(instance, key): """Mark an attribute on an instance as 'modified'. - This sets the 'modified' flag on the instance and + This sets the 'modified' flag on the instance and establishes an unconditional change event for the given attribute. """ diff --git a/lib/sqlalchemy/orm/collections.py b/lib/sqlalchemy/orm/collections.py index d51d7bcd21..e92e82c10f 100644 --- a/lib/sqlalchemy/orm/collections.py +++ b/lib/sqlalchemy/orm/collections.py @@ -166,7 +166,7 @@ class _SerializableColumnGetter(object): state = instance_state(value) m = _state_mapper(state) key = [m._get_state_attr_by_column( - state, state.dict, + state, state.dict, m.mapped_table.columns[k]) for k in self.colkeys] if self.composite: @@ -175,7 +175,7 @@ class _SerializableColumnGetter(object): return key[0] class _SerializableColumnGetterV2(_PlainColumnGetter): - """Updated serializable getter which deals with + """Updated serializable getter which deals with multi-table mapped classes. 
Two extremely unusual cases are not supported. @@ -709,8 +709,8 @@ class CollectionAdapter(object): """ if initiator is not False and item is not None: return self.attr.fire_append_event( - self.owner_state, - self.owner_state.dict, + self.owner_state, + self.owner_state.dict, item, initiator) else: return item @@ -725,8 +725,8 @@ class CollectionAdapter(object): """ if initiator is not False and item is not None: self.attr.fire_remove_event( - self.owner_state, - self.owner_state.dict, + self.owner_state, + self.owner_state.dict, item, initiator) def fire_pre_remove_event(self, initiator=None): @@ -737,8 +737,8 @@ class CollectionAdapter(object): """ self.attr.fire_pre_remove_event( - self.owner_state, - self.owner_state.dict, + self.owner_state, + self.owner_state.dict, initiator=initiator) def __getstate__(self): diff --git a/lib/sqlalchemy/orm/dependency.py b/lib/sqlalchemy/orm/dependency.py index b3789e7586..a8a0df8e9a 100644 --- a/lib/sqlalchemy/orm/dependency.py +++ b/lib/sqlalchemy/orm/dependency.py @@ -48,7 +48,7 @@ class DependencyProcessor(object): def hasparent(self, state): """return True if the given object instance has a parent, - according to the ``InstrumentedAttribute`` handled by this + according to the ``InstrumentedAttribute`` handled by this ``DependencyProcessor``. """ @@ -69,29 +69,29 @@ class DependencyProcessor(object): before_delete = unitofwork.ProcessAll(uow, self, True, True) parent_saves = unitofwork.SaveUpdateAll( - uow, + uow, self.parent.primary_base_mapper ) child_saves = unitofwork.SaveUpdateAll( - uow, + uow, self.mapper.primary_base_mapper ) parent_deletes = unitofwork.DeleteAll( - uow, + uow, self.parent.primary_base_mapper ) child_deletes = unitofwork.DeleteAll( - uow, + uow, self.mapper.primary_base_mapper ) - self.per_property_dependencies(uow, - parent_saves, - child_saves, - parent_deletes, - child_deletes, - after_save, + self.per_property_dependencies(uow, + parent_saves, + child_saves, + parent_deletes, + child_deletes, + after_save, before_delete ) @@ -99,7 +99,7 @@ class DependencyProcessor(object): def per_state_flush_actions(self, uow, states, isdelete): """establish actions and dependencies related to a flush. - These actions will operate on all relevant states + These actions will operate on all relevant states individually. This occurs only if there are cycles in the 'aggregated' version of events. 
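A minimal sketch of the user-visible effect of the dependencies this
processor establishes, assuming a hypothetical one-to-many Parent/Child
mapping (names illustrative)::

    parent = Parent()
    parent.children.append(Child())
    session.add(parent)

    # during flush, the parent INSERT is ordered before the child
    # INSERT so the child's foreign key to the parent can be populated
    session.flush()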
@@ -141,14 +141,14 @@ class DependencyProcessor(object): # check if the "parent" side is part of the cycle if not isdelete: parent_saves = unitofwork.SaveUpdateAll( - uow, + uow, self.parent.base_mapper) parent_deletes = before_delete = None if parent_saves in uow.cycles: parent_in_cycles = True else: parent_deletes = unitofwork.DeleteAll( - uow, + uow, self.parent.base_mapper) parent_saves = after_save = None if parent_deletes in uow.cycles: @@ -165,19 +165,19 @@ class DependencyProcessor(object): continue if isdelete: - before_delete = unitofwork.ProcessState(uow, + before_delete = unitofwork.ProcessState(uow, self, True, state) if parent_in_cycles: parent_deletes = unitofwork.DeleteState( - uow, - state, + uow, + state, parent_base_mapper) else: after_save = unitofwork.ProcessState(uow, self, False, state) if parent_in_cycles: parent_saves = unitofwork.SaveUpdateState( - uow, - state, + uow, + state, parent_base_mapper) if child_in_cycles: @@ -190,24 +190,24 @@ class DependencyProcessor(object): if deleted: child_action = ( unitofwork.DeleteState( - uow, child_state, - child_base_mapper), + uow, child_state, + child_base_mapper), True) else: child_action = ( unitofwork.SaveUpdateState( - uow, child_state, - child_base_mapper), + uow, child_state, + child_base_mapper), False) child_actions.append(child_action) # establish dependencies between our possibly per-state # parent action and our possibly per-state child action. for child_action, childisdelete in child_actions: - self.per_state_dependencies(uow, parent_saves, - parent_deletes, - child_action, - after_save, before_delete, + self.per_state_dependencies(uow, parent_saves, + parent_deletes, + child_action, + after_save, before_delete, isdelete, childisdelete) @@ -232,12 +232,12 @@ class DependencyProcessor(object): passive = attributes.PASSIVE_OFF for s in states: - # TODO: add a high speed method + # TODO: add a high speed method # to InstanceState which returns: attribute # has a non-None value, or had one history = uowcommit.get_attribute_history( - s, - self.key, + s, + self.key, passive) if history and not history.empty(): return True @@ -248,7 +248,7 @@ class DependencyProcessor(object): def _verify_canload(self, state): if state is not None and \ - not self.mapper._canload(state, + not self.mapper._canload(state, allow_subtypes=not self.enable_typechecks): if self.mapper._canload(state, allow_subtypes=True): raise exc.FlushError('Attempting to flush an item of type ' @@ -287,11 +287,11 @@ class DependencyProcessor(object): return None process_key = tuple(sorted( - [self.key] + + [self.key] + [p.key for p in self.prop._reverse_property] )) return uow.memo( - ('reverse_key', process_key), + ('reverse_key', process_key), set ) @@ -299,7 +299,7 @@ class DependencyProcessor(object): for x in related: if x is not None: uowcommit.issue_post_update( - state, + state, [r for l, r in self.prop.synchronize_pairs] ) break @@ -312,21 +312,21 @@ class DependencyProcessor(object): class OneToManyDP(DependencyProcessor): - def per_property_dependencies(self, uow, parent_saves, - child_saves, - parent_deletes, - child_deletes, - after_save, + def per_property_dependencies(self, uow, parent_saves, + child_saves, + parent_deletes, + child_deletes, + after_save, before_delete, ): if self.post_update: child_post_updates = unitofwork.IssuePostUpdate( - uow, - self.mapper.primary_base_mapper, + uow, + self.mapper.primary_base_mapper, False) child_pre_updates = unitofwork.IssuePostUpdate( - uow, - self.mapper.primary_base_mapper, + uow, + 
self.mapper.primary_base_mapper, True) uow.dependencies.update([ @@ -352,22 +352,22 @@ class OneToManyDP(DependencyProcessor): (before_delete, child_deletes), ]) - def per_state_dependencies(self, uow, - save_parent, - delete_parent, - child_action, - after_save, before_delete, + def per_state_dependencies(self, uow, + save_parent, + delete_parent, + child_action, + after_save, before_delete, isdelete, childisdelete): if self.post_update: child_post_updates = unitofwork.IssuePostUpdate( - uow, - self.mapper.primary_base_mapper, + uow, + self.mapper.primary_base_mapper, False) child_pre_updates = unitofwork.IssuePostUpdate( - uow, - self.mapper.primary_base_mapper, + uow, + self.mapper.primary_base_mapper, True) # TODO: this whole block is not covered @@ -393,7 +393,7 @@ class OneToManyDP(DependencyProcessor): else: uow.dependencies.update([ (before_delete, child_pre_updates), - (child_pre_updates, delete_parent), + (child_pre_updates, delete_parent), ]) elif not isdelete: uow.dependencies.update([ @@ -408,16 +408,16 @@ class OneToManyDP(DependencyProcessor): ]) def presort_deletes(self, uowcommit, states): - # head object is being deleted, and we manage its list of - # child objects the child objects have to have their + # head object is being deleted, and we manage its list of + # child objects the child objects have to have their # foreign key to the parent set to NULL should_null_fks = not self.cascade.delete and \ not self.passive_deletes == 'all' for state in states: history = uowcommit.get_attribute_history( - state, - self.key, + state, + self.key, self._passive_delete_flag) if history: for child in history.deleted: @@ -430,7 +430,7 @@ class OneToManyDP(DependencyProcessor): if should_null_fks: for child in history.unchanged: if child is not None: - uowcommit.register_object(child, + uowcommit.register_object(child, operation="delete", prop=self.prop) @@ -447,25 +447,25 @@ class OneToManyDP(DependencyProcessor): passive = attributes.PASSIVE_OFF history = uowcommit.get_attribute_history( - state, - self.key, + state, + self.key, passive) if history: for child in history.added: if child is not None: - uowcommit.register_object(child, cancel_delete=True, - operation="add", + uowcommit.register_object(child, cancel_delete=True, + operation="add", prop=self.prop) children_added.update(history.added) for child in history.deleted: if not self.cascade.delete_orphan: - uowcommit.register_object(child, isdelete=False, - operation='delete', + uowcommit.register_object(child, isdelete=False, + operation='delete', prop=self.prop) elif self.hasparent(child) is False: - uowcommit.register_object(child, isdelete=True, + uowcommit.register_object(child, isdelete=True, operation="delete", prop=self.prop) for c, m, st_, dct_ in self.mapper.cascade_iterator( 'delete', child): @@ -478,16 +478,16 @@ class OneToManyDP(DependencyProcessor): for child in history.unchanged: if child is not None: uowcommit.register_object( - child, - False, + child, + False, self.passive_updates, operation="pk change", prop=self.prop) def process_deletes(self, uowcommit, states): - # head object is being deleted, and we manage its list of - # child objects the child objects have to have their foreign - # key to the parent set to NULL this phase can be called + # head object is being deleted, and we manage its list of + # child objects the child objects have to have their foreign + # key to the parent set to NULL this phase can be called # safely for any cascade but is unnecessary if delete cascade # is on. 
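A minimal sketch of the two deletion behaviors this phase covers,
assuming a hypothetical Parent/Child one-to-many (names illustrative)::

    from sqlalchemy.orm import relationship

    # default: deleting a Parent UPDATEs each child's foreign key to NULL
    children = relationship(Child)

    # with delete cascade, the children are deleted outright and the
    # nulling UPDATE described above becomes unnecessary
    children = relationship(Child, cascade='all, delete')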
@@ -496,17 +496,17 @@ class OneToManyDP(DependencyProcessor): for state in states: history = uowcommit.get_attribute_history( - state, - self.key, + state, + self.key, self._passive_delete_flag) if history: for child in history.deleted: if child is not None and \ self.hasparent(child) is False: self._synchronize( - state, - child, - None, True, + state, + child, + None, True, uowcommit, False) if self.post_update and child: self._post_update(child, uowcommit, [state]) @@ -516,18 +516,18 @@ class OneToManyDP(DependencyProcessor): difference(children_added): if child is not None: self._synchronize( - state, - child, - None, True, + state, + child, + None, True, uowcommit, False) if self.post_update and child: - self._post_update(child, - uowcommit, + self._post_update(child, + uowcommit, [state]) # technically, we can even remove each child from the - # collection here too. but this would be a somewhat - # inconsistent behavior since it wouldn't happen + # collection here too. but this would be a somewhat + # inconsistent behavior since it wouldn't happen #if the old parent wasn't deleted but child was moved. def process_saves(self, uowcommit, states): @@ -538,7 +538,7 @@ class OneToManyDP(DependencyProcessor): attributes.PASSIVE_NO_INITIALIZE) if history: for child in history.added: - self._synchronize(state, child, None, + self._synchronize(state, child, None, False, uowcommit, False) if child is not None and self.post_update: self._post_update(child, uowcommit, [state]) @@ -546,15 +546,15 @@ class OneToManyDP(DependencyProcessor): for child in history.deleted: if not self.cascade.delete_orphan and \ not self.hasparent(child): - self._synchronize(state, child, None, True, + self._synchronize(state, child, None, True, uowcommit, False) if self._pks_changed(uowcommit, state): for child in history.unchanged: - self._synchronize(state, child, None, + self._synchronize(state, child, None, False, uowcommit, True) - def _synchronize(self, state, child, + def _synchronize(self, state, child, associationrow, clearkeys, uowcommit, pks_changed): source = state @@ -566,15 +566,15 @@ class OneToManyDP(DependencyProcessor): if clearkeys: sync.clear(dest, self.mapper, self.prop.synchronize_pairs) else: - sync.populate(source, self.parent, dest, self.mapper, + sync.populate(source, self.parent, dest, self.mapper, self.prop.synchronize_pairs, uowcommit, self.passive_updates and pks_changed) def _pks_changed(self, uowcommit, state): return sync.source_modified( - uowcommit, - state, - self.parent, + uowcommit, + state, + self.parent, self.prop.synchronize_pairs) class ManyToOneDP(DependencyProcessor): @@ -582,22 +582,22 @@ class ManyToOneDP(DependencyProcessor): DependencyProcessor.__init__(self, prop) self.mapper._dependency_processors.append(DetectKeySwitch(prop)) - def per_property_dependencies(self, uow, - parent_saves, - child_saves, - parent_deletes, - child_deletes, - after_save, + def per_property_dependencies(self, uow, + parent_saves, + child_saves, + parent_deletes, + child_deletes, + after_save, before_delete): if self.post_update: parent_post_updates = unitofwork.IssuePostUpdate( - uow, - self.parent.primary_base_mapper, + uow, + self.parent.primary_base_mapper, False) parent_pre_updates = unitofwork.IssuePostUpdate( - uow, - self.parent.primary_base_mapper, + uow, + self.parent.primary_base_mapper, True) uow.dependencies.update([ @@ -618,19 +618,19 @@ class ManyToOneDP(DependencyProcessor): (parent_deletes, child_deletes) ]) - def per_state_dependencies(self, uow, - save_parent, - 
delete_parent, - child_action, - after_save, before_delete, + def per_state_dependencies(self, uow, + save_parent, + delete_parent, + child_action, + after_save, before_delete, isdelete, childisdelete): if self.post_update: if not isdelete: parent_post_updates = unitofwork.IssuePostUpdate( - uow, - self.parent.primary_base_mapper, + uow, + self.parent.primary_base_mapper, False) if childisdelete: uow.dependencies.update([ @@ -646,8 +646,8 @@ class ManyToOneDP(DependencyProcessor): ]) else: parent_pre_updates = unitofwork.IssuePostUpdate( - uow, - self.parent.primary_base_mapper, + uow, + self.parent.primary_base_mapper, True) uow.dependencies.update([ @@ -677,8 +677,8 @@ class ManyToOneDP(DependencyProcessor): if self.cascade.delete or self.cascade.delete_orphan: for state in states: history = uowcommit.get_attribute_history( - state, - self.key, + state, + self.key, self._passive_delete_flag) if history: if self.cascade.delete_orphan: @@ -688,7 +688,7 @@ class ManyToOneDP(DependencyProcessor): for child in todelete: if child is None: continue - uowcommit.register_object(child, isdelete=True, + uowcommit.register_object(child, isdelete=True, operation="delete", prop=self.prop) for c, m, st_, dct_ in self.mapper.cascade_iterator( 'delete', child): @@ -700,14 +700,14 @@ class ManyToOneDP(DependencyProcessor): uowcommit.register_object(state, operation="add", prop=self.prop) if self.cascade.delete_orphan: history = uowcommit.get_attribute_history( - state, - self.key, + state, + self.key, self._passive_delete_flag) if history: ret = True for child in history.deleted: if self.hasparent(child) is False: - uowcommit.register_object(child, isdelete=True, + uowcommit.register_object(child, isdelete=True, operation="delete", prop=self.prop) for c, m, st_, dct_ in self.mapper.cascade_iterator( @@ -721,15 +721,15 @@ class ManyToOneDP(DependencyProcessor): not self.cascade.delete_orphan and \ not self.passive_deletes == 'all': - # post_update means we have to update our + # post_update means we have to update our # row to not reference the child object # before we can DELETE the row for state in states: self._synchronize(state, None, None, True, uowcommit) if state and self.post_update: history = uowcommit.get_attribute_history( - state, - self.key, + state, + self.key, self._passive_delete_flag) if history: self._post_update(state, uowcommit, history.sum()) @@ -737,12 +737,12 @@ class ManyToOneDP(DependencyProcessor): def process_saves(self, uowcommit, states): for state in states: history = uowcommit.get_attribute_history( - state, + state, self.key, attributes.PASSIVE_NO_INITIALIZE) if history: for child in history.added: - self._synchronize(state, child, None, False, + self._synchronize(state, child, None, False, uowcommit, "add") if self.post_update: @@ -759,7 +759,7 @@ class ManyToOneDP(DependencyProcessor): not uowcommit.session._contains_state(child): util.warn( "Object of type %s not in session, %s " - "operation along '%s' won't proceed" % + "operation along '%s' won't proceed" % (mapperutil.state_class_str(child), operation, self.prop)) return @@ -767,14 +767,14 @@ class ManyToOneDP(DependencyProcessor): sync.clear(state, self.parent, self.prop.synchronize_pairs) else: self._verify_canload(child) - sync.populate(child, self.mapper, state, - self.parent, - self.prop.synchronize_pairs, + sync.populate(child, self.mapper, state, + self.parent, + self.prop.synchronize_pairs, uowcommit, - False) + False) class DetectKeySwitch(DependencyProcessor): - """For many-to-one relationships with no 
one-to-many backref, + """For many-to-one relationships with no one-to-many backref, searches for parents through the unit of work when a primary key has changed and updates them. @@ -798,7 +798,7 @@ class DetectKeySwitch(DependencyProcessor): def per_property_flush_actions(self, uow): parent_saves = unitofwork.SaveUpdateAll( - uow, + uow, self.parent.base_mapper) after_save = unitofwork.ProcessAll(uow, self, False, False) uow.dependencies.update([ @@ -837,7 +837,7 @@ class DetectKeySwitch(DependencyProcessor): def _key_switchers(self, uow, states): switched, notswitched = uow.memo( - ('pk_switchers', self), + ('pk_switchers', self), lambda: (set(), set()) ) @@ -865,29 +865,29 @@ class DetectKeySwitch(DependencyProcessor): related is not None: related_state = attributes.instance_state(dict_[self.key]) if related_state in switchers: - uowcommit.register_object(state, - False, + uowcommit.register_object(state, + False, self.passive_updates) sync.populate( - related_state, - self.mapper, state, - self.parent, self.prop.synchronize_pairs, + related_state, + self.mapper, state, + self.parent, self.prop.synchronize_pairs, uowcommit, self.passive_updates) def _pks_changed(self, uowcommit, state): - return bool(state.key) and sync.source_modified(uowcommit, - state, - self.mapper, + return bool(state.key) and sync.source_modified(uowcommit, + state, + self.mapper, self.prop.synchronize_pairs) class ManyToManyDP(DependencyProcessor): - def per_property_dependencies(self, uow, parent_saves, - child_saves, - parent_deletes, - child_deletes, - after_save, + def per_property_dependencies(self, uow, parent_saves, + child_saves, + parent_deletes, + child_deletes, + after_save, before_delete ): @@ -896,9 +896,9 @@ class ManyToManyDP(DependencyProcessor): (child_saves, after_save), (after_save, child_deletes), - # a rowswitch on the parent from deleted to saved - # can make this one occur, as the "save" may remove - # an element from the + # a rowswitch on the parent from deleted to saved + # can make this one occur, as the "save" may remove + # an element from the # "deleted" list before we have a chance to # process its child rows (before_delete, parent_saves), @@ -908,11 +908,11 @@ class ManyToManyDP(DependencyProcessor): (before_delete, child_saves), ]) - def per_state_dependencies(self, uow, - save_parent, - delete_parent, - child_action, - after_save, before_delete, + def per_state_dependencies(self, uow, + save_parent, + delete_parent, + child_action, + after_save, before_delete, isdelete, childisdelete): if not isdelete: if childisdelete: @@ -933,25 +933,25 @@ class ManyToManyDP(DependencyProcessor): def presort_deletes(self, uowcommit, states): if not self.passive_deletes: - # if no passive deletes, load history on + # if no passive deletes, load history on # the collection, so that prop_has_changes() # returns True for state in states: history = uowcommit.get_attribute_history( - state, - self.key, + state, + self.key, self._passive_delete_flag) def presort_saves(self, uowcommit, states): if not self.passive_updates: - # if no passive updates, load history on + # if no passive updates, load history on # each collection where parent has changed PK, # so that prop_has_changes() returns True for state in states: if self._pks_changed(uowcommit, state): history = uowcommit.get_attribute_history( - state, - self.key, + state, + self.key, attributes.PASSIVE_OFF) if not self.cascade.delete_orphan: @@ -961,16 +961,16 @@ class ManyToManyDP(DependencyProcessor): # if delete_orphan check is turned on. 
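# A minimal sketch of the kind of configuration this processor
# maintains - rows in a "secondary" association table - assuming a
# hypothetical Order/Item mapping (names illustrative):
#
#     order_items = Table('order_items', metadata,
#         Column('order_id', Integer, ForeignKey('orders.id')),
#         Column('item_id', Integer, ForeignKey('items.id')))
#
#     class Order(Base):
#         items = relationship(Item, secondary=order_items)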
for state in states: history = uowcommit.get_attribute_history( - state, - self.key, + state, + self.key, attributes.PASSIVE_NO_INITIALIZE) if history: for child in history.deleted: if self.hasparent(child) is False: - uowcommit.register_object(child, isdelete=True, + uowcommit.register_object(child, isdelete=True, operation="delete", prop=self.prop) for c, m, st_, dct_ in self.mapper.cascade_iterator( - 'delete', + 'delete', child): uowcommit.register_object( st_, isdelete=True) @@ -983,23 +983,23 @@ class ManyToManyDP(DependencyProcessor): processed = self._get_reversed_processed_set(uowcommit) tmp = set() for state in states: - # this history should be cached already, as + # this history should be cached already, as # we loaded it in preprocess_deletes history = uowcommit.get_attribute_history( - state, - self.key, + state, + self.key, self._passive_delete_flag) if history: for child in history.non_added(): if child is None or \ - (processed is not None and + (processed is not None and (state, child) in processed): continue associationrow = {} if not self._synchronize( - state, - child, - associationrow, + state, + child, + associationrow, False, uowcommit, "delete"): continue secondary_delete.append(associationrow) @@ -1009,7 +1009,7 @@ class ManyToManyDP(DependencyProcessor): if processed is not None: processed.update(tmp) - self._run_crud(uowcommit, secondary_insert, + self._run_crud(uowcommit, secondary_insert, secondary_update, secondary_delete) def process_saves(self, uowcommit, states): @@ -1022,7 +1022,7 @@ class ManyToManyDP(DependencyProcessor): for state in states: need_cascade_pks = not self.passive_updates and \ - self._pks_changed(uowcommit, state) + self._pks_changed(uowcommit, state) if need_cascade_pks: passive = attributes.PASSIVE_OFF else: @@ -1032,45 +1032,45 @@ class ManyToManyDP(DependencyProcessor): if history: for child in history.added: if child is None or \ - (processed is not None and + (processed is not None and (state, child) in processed): continue associationrow = {} - if not self._synchronize(state, - child, - associationrow, + if not self._synchronize(state, + child, + associationrow, False, uowcommit, "add"): continue secondary_insert.append(associationrow) for child in history.deleted: if child is None or \ - (processed is not None and + (processed is not None and (state, child) in processed): continue associationrow = {} - if not self._synchronize(state, - child, - associationrow, + if not self._synchronize(state, + child, + associationrow, False, uowcommit, "delete"): continue secondary_delete.append(associationrow) - tmp.update((c, state) + tmp.update((c, state) for c in history.added + history.deleted) if need_cascade_pks: for child in history.unchanged: associationrow = {} - sync.update(state, - self.parent, - associationrow, - "old_", + sync.update(state, + self.parent, + associationrow, + "old_", self.prop.synchronize_pairs) - sync.update(child, - self.mapper, - associationrow, - "old_", + sync.update(child, + self.mapper, + associationrow, + "old_", self.prop.secondary_synchronize_pairs) secondary_update.append(associationrow) @@ -1078,18 +1078,18 @@ class ManyToManyDP(DependencyProcessor): if processed is not None: processed.update(tmp) - self._run_crud(uowcommit, secondary_insert, + self._run_crud(uowcommit, secondary_insert, secondary_update, secondary_delete) - def _run_crud(self, uowcommit, secondary_insert, + def _run_crud(self, uowcommit, secondary_insert, secondary_update, secondary_delete): connection = 
uowcommit.transaction.connection(self.mapper) if secondary_delete: associationrow = secondary_delete[0] statement = self.secondary.delete(sql.and_(*[ - c == sql.bindparam(c.key, type_=c.type) - for c in self.secondary.c + c == sql.bindparam(c.key, type_=c.type) + for c in self.secondary.c if c.key in associationrow ])) result = connection.execute(statement, secondary_delete) @@ -1098,7 +1098,7 @@ class ManyToManyDP(DependencyProcessor): result.rowcount != len(secondary_delete): raise exc.StaleDataError( "DELETE statement on table '%s' expected to delete %d row(s); " - "Only %d were matched." % + "Only %d were matched." % (self.secondary.description, len(secondary_delete), result.rowcount) ) @@ -1106,8 +1106,8 @@ class ManyToManyDP(DependencyProcessor): if secondary_update: associationrow = secondary_update[0] statement = self.secondary.update(sql.and_(*[ - c == sql.bindparam("old_" + c.key, type_=c.type) - for c in self.secondary.c + c == sql.bindparam("old_" + c.key, type_=c.type) + for c in self.secondary.c if c.key in associationrow ])) result = connection.execute(statement, secondary_update) @@ -1115,7 +1115,7 @@ class ManyToManyDP(DependencyProcessor): result.rowcount != len(secondary_update): raise exc.StaleDataError( "UPDATE statement on table '%s' expected to update %d row(s); " - "Only %d were matched." % + "Only %d were matched." % (self.secondary.description, len(secondary_update), result.rowcount) ) @@ -1124,7 +1124,7 @@ class ManyToManyDP(DependencyProcessor): statement = self.secondary.insert() connection.execute(statement, secondary_insert) - def _synchronize(self, state, child, associationrow, + def _synchronize(self, state, child, associationrow, clearkeys, uowcommit, operation): if associationrow is None: return @@ -1133,13 +1133,13 @@ class ManyToManyDP(DependencyProcessor): if not child.deleted: util.warn( "Object of type %s not in session, %s " - "operation along '%s' won't proceed" % + "operation along '%s' won't proceed" % (mapperutil.state_class_str(child), operation, self.prop)) return False self._verify_canload(child) - sync.populate_dict(state, self.parent, associationrow, + sync.populate_dict(state, self.parent, associationrow, self.prop.synchronize_pairs) sync.populate_dict(child, self.mapper, associationrow, self.prop.secondary_synchronize_pairs) @@ -1148,9 +1148,9 @@ class ManyToManyDP(DependencyProcessor): def _pks_changed(self, uowcommit, state): return sync.source_modified( - uowcommit, - state, - self.parent, + uowcommit, + state, + self.parent, self.prop.synchronize_pairs) _direction_to_processor = { diff --git a/lib/sqlalchemy/orm/deprecated_interfaces.py b/lib/sqlalchemy/orm/deprecated_interfaces.py index de9c5ef75a..4b3cfdfc18 100644 --- a/lib/sqlalchemy/orm/deprecated_interfaces.py +++ b/lib/sqlalchemy/orm/deprecated_interfaces.py @@ -11,10 +11,10 @@ from interfaces import EXT_CONTINUE class MapperExtension(object): """Base implementation for :class:`.Mapper` event hooks. - .. note:: - + .. note:: + :class:`.MapperExtension` is deprecated. Please - refer to :func:`.event.listen` as well as + refer to :func:`.event.listen` as well as :class:`.MapperEvents`. New extension classes subclass :class:`.MapperExtension` and are specified @@ -42,8 +42,8 @@ class MapperExtension(object): to the next ``MapperExtension`` for processing". For methods that return objects like translated rows or new object instances, EXT_CONTINUE means the result of the method - should be ignored. 
In some cases it's required for a - default mapper activity to be performed, such as adding a + should be ignored. In some cases it's required for a + default mapper activity to be performed, such as adding a new instance to a result list. The symbol EXT_STOP has significance within a chain @@ -91,29 +91,29 @@ class MapperExtension(object): def reconstruct(instance, ctx): ls_meth(self, instance) return reconstruct - event.listen(self.class_manager, 'load', + event.listen(self.class_manager, 'load', go(ls_meth), raw=False, propagate=True) elif meth == 'init_instance': def go(ls_meth): def init_instance(instance, args, kwargs): - ls_meth(self, self.class_, - self.class_manager.original_init, + ls_meth(self, self.class_, + self.class_manager.original_init, instance, args, kwargs) return init_instance - event.listen(self.class_manager, 'init', + event.listen(self.class_manager, 'init', go(ls_meth), raw=False, propagate=True) elif meth == 'init_failed': def go(ls_meth): def init_failed(instance, args, kwargs): - util.warn_exception(ls_meth, self, self.class_, - self.class_manager.original_init, + util.warn_exception(ls_meth, self, self.class_, + self.class_manager.original_init, instance, args, kwargs) return init_failed - event.listen(self.class_manager, 'init_failure', + event.listen(self.class_manager, 'init_failure', go(ls_meth), raw=False, propagate=True) else: - event.listen(self, "%s" % meth, ls_meth, + event.listen(self, "%s" % meth, ls_meth, raw=False, retval=True, propagate=True) @@ -121,7 +121,7 @@ class MapperExtension(object): """Receive a class when the mapper is first constructed, and has applied instrumentation to the mapped class. - The return value is only significant within the ``MapperExtension`` + The return value is only significant within the ``MapperExtension`` chain; the parent mapper's behavior isn't modified by this method. """ @@ -130,25 +130,25 @@ class MapperExtension(object): def init_instance(self, mapper, class_, oldinit, instance, args, kwargs): """Receive an instance when it's constructor is called. - This method is only called during a userland construction of + This method is only called during a userland construction of an object. It is not called when an object is loaded from the database. - The return value is only significant within the ``MapperExtension`` + The return value is only significant within the ``MapperExtension`` chain; the parent mapper's behavior isn't modified by this method. """ return EXT_CONTINUE def init_failed(self, mapper, class_, oldinit, instance, args, kwargs): - """Receive an instance when it's constructor has been called, + """Receive an instance when it's constructor has been called, and raised an exception. - This method is only called during a userland construction of + This method is only called during a userland construction of an object. It is not called when an object is loaded from the database. - The return value is only significant within the ``MapperExtension`` + The return value is only significant within the ``MapperExtension`` chain; the parent mapper's behavior isn't modified by this method. """ @@ -160,9 +160,9 @@ class MapperExtension(object): This is called when the mapper first receives a row, before the object identity or the instance itself has been derived - from that row. The given row may or may not be a + from that row. The given row may or may not be a ``RowProxy`` object - it will always be a dictionary-like - object which contains mapped columns as keys. 
The + object which contains mapped columns as keys. The returned object should also be a dictionary-like object which recognizes mapped columns as keys. @@ -197,7 +197,7 @@ class MapperExtension(object): """ return EXT_CONTINUE - def append_result(self, mapper, selectcontext, row, instance, + def append_result(self, mapper, selectcontext, row, instance, result, **flags): """Receive an object instance before that instance is appended to a result list. @@ -231,7 +231,7 @@ class MapperExtension(object): return EXT_CONTINUE - def populate_instance(self, mapper, selectcontext, row, + def populate_instance(self, mapper, selectcontext, row, instance, **flags): """Receive an instance before that instance has its attributes populated. @@ -266,11 +266,11 @@ class MapperExtension(object): instance's lifetime. Note that during a result-row load, this method is called upon - the first row received for this instance. Note that some - attributes and collections may or may not be loaded or even + the first row received for this instance. Note that some + attributes and collections may or may not be loaded or even initialized, depending on what's present in the result rows. - The return value is only significant within the ``MapperExtension`` + The return value is only significant within the ``MapperExtension`` chain; the parent mapper's behavior isn't modified by this method. """ @@ -285,12 +285,12 @@ class MapperExtension(object): Column-based attributes can be modified within this method which will result in the new value being inserted. However - *no* changes to the overall flush plan can be made, and + *no* changes to the overall flush plan can be made, and manipulation of the ``Session`` will not have the desired effect. - To manipulate the ``Session`` within an extension, use + To manipulate the ``Session`` within an extension, use ``SessionExtension``. - The return value is only significant within the ``MapperExtension`` + The return value is only significant within the ``MapperExtension`` chain; the parent mapper's behavior isn't modified by this method. """ @@ -300,7 +300,7 @@ class MapperExtension(object): def after_insert(self, mapper, connection, instance): """Receive an object instance after that instance is inserted. - The return value is only significant within the ``MapperExtension`` + The return value is only significant within the ``MapperExtension`` chain; the parent mapper's behavior isn't modified by this method. """ @@ -327,12 +327,12 @@ class MapperExtension(object): Column-based attributes can be modified within this method which will result in the new value being updated. However - *no* changes to the overall flush plan can be made, and + *no* changes to the overall flush plan can be made, and manipulation of the ``Session`` will not have the desired effect. - To manipulate the ``Session`` within an extension, use + To manipulate the ``Session`` within an extension, use ``SessionExtension``. - The return value is only significant within the ``MapperExtension`` + The return value is only significant within the ``MapperExtension`` chain; the parent mapper's behavior isn't modified by this method. """ @@ -342,7 +342,7 @@ class MapperExtension(object): def after_update(self, mapper, connection, instance): """Receive an object instance after that instance is updated. - The return value is only significant within the ``MapperExtension`` + The return value is only significant within the ``MapperExtension`` chain; the parent mapper's behavior isn't modified by this method. 
""" @@ -357,7 +357,7 @@ class MapperExtension(object): desired effect. To manipulate the ``Session`` within an extension, use ``SessionExtension``. - The return value is only significant within the ``MapperExtension`` + The return value is only significant within the ``MapperExtension`` chain; the parent mapper's behavior isn't modified by this method. """ @@ -378,10 +378,10 @@ class SessionExtension(object): """Base implementation for :class:`.Session` event hooks. - .. note:: - + .. note:: + :class:`.SessionExtension` is deprecated. Please - refer to :func:`.event.listen` as well as + refer to :func:`.event.listen` as well as :class:`.SessionEvents`. Subclasses may be installed into a :class:`.Session` (or @@ -498,10 +498,10 @@ class AttributeExtension(object): """Base implementation for :class:`.AttributeImpl` event hooks, events that fire upon attribute mutations in user code. - .. note:: - + .. note:: + :class:`.AttributeExtension` is deprecated. Please - refer to :func:`.event.listen` as well as + refer to :func:`.event.listen` as well as :class:`.AttributeEvents`. :class:`.AttributeExtension` is used to listen for set, @@ -555,10 +555,10 @@ class AttributeExtension(object): active_history=listener.active_history, raw=True, retval=True) event.listen(self, 'remove', listener.remove, - active_history=listener.active_history, + active_history=listener.active_history, raw=True, retval=True) event.listen(self, 'set', listener.set, - active_history=listener.active_history, + active_history=listener.active_history, raw=True, retval=True) def append(self, state, value, initiator): diff --git a/lib/sqlalchemy/orm/evaluator.py b/lib/sqlalchemy/orm/evaluator.py index e727c17bb2..111c95098a 100644 --- a/lib/sqlalchemy/orm/evaluator.py +++ b/lib/sqlalchemy/orm/evaluator.py @@ -13,10 +13,10 @@ class UnevaluatableError(Exception): pass _straight_ops = set(getattr(operators, op) - for op in ('add', 'mul', 'sub', + for op in ('add', 'mul', 'sub', # Py2K 'div', - # end Py2K + # end Py2K 'mod', 'truediv', 'lt', 'le', 'ne', 'gt', 'ge', 'eq')) diff --git a/lib/sqlalchemy/orm/events.py b/lib/sqlalchemy/orm/events.py index 3d1961583a..daed302276 100644 --- a/lib/sqlalchemy/orm/events.py +++ b/lib/sqlalchemy/orm/events.py @@ -91,11 +91,11 @@ class InstanceEvents(event.Events): When using :class:`.InstanceEvents`, several modifiers are available to the :func:`.event.listen` function. - :param propagate=False: When True, the event listener should - be applied to all inheriting mappers as well as the + :param propagate=False: When True, the event listener should + be applied to all inheriting mappers as well as the mapper which is the target of this listener. :param raw=False: When True, the "target" argument passed - to applicable event listener functions will be the + to applicable event listener functions will be the instance's :class:`.InstanceState` management object, rather than the mapped instance itself. @@ -142,17 +142,17 @@ class InstanceEvents(event.Events): def init(self, target, args, kwargs): """Receive an instance when it's constructor is called. - This method is only called during a userland construction of + This method is only called during a userland construction of an object. It is not called when an object is loaded from the database. """ def init_failure(self, target, args, kwargs): - """Receive an instance when it's constructor has been called, + """Receive an instance when it's constructor has been called, and raised an exception. 
- This method is only called during a userland construction of + This method is only called during a userland construction of an object. It is not called when an object is loaded from the database. @@ -168,12 +168,12 @@ class InstanceEvents(event.Events): instance's lifetime. Note that during a result-row load, this method is called upon - the first row received for this instance. Note that some - attributes and collections may or may not be loaded or even + the first row received for this instance. Note that some + attributes and collections may or may not be loaded or even initialized, depending on what's present in the result rows. - :param target: the mapped instance. If - the event is configured with ``raw=True``, this will + :param target: the mapped instance. If + the event is configured with ``raw=True``, this will instead be the :class:`.InstanceState` state-management object associated with the instance. :param context: the :class:`.QueryContext` corresponding to the @@ -184,16 +184,16 @@ class InstanceEvents(event.Events): """ def refresh(self, target, context, attrs): - """Receive an object instance after one or more attributes have + """Receive an object instance after one or more attributes have been refreshed from a query. - :param target: the mapped instance. If - the event is configured with ``raw=True``, this will + :param target: the mapped instance. If + the event is configured with ``raw=True``, this will instead be the :class:`.InstanceState` state-management object associated with the instance. :param context: the :class:`.QueryContext` corresponding to the current :class:`.Query` in progress. - :param attrs: iterable collection of attribute names which + :param attrs: iterable collection of attribute names which were populated, or None if all column-mapped, non-deferred attributes were populated. @@ -206,23 +206,23 @@ class InstanceEvents(event.Events): 'keys' is a list of attribute names. If None, the entire state was expired. - :param target: the mapped instance. If - the event is configured with ``raw=True``, this will + :param target: the mapped instance. If + the event is configured with ``raw=True``, this will instead be the :class:`.InstanceState` state-management object associated with the instance. :param attrs: iterable collection of attribute - names which were expired, or None if all attributes were + names which were expired, or None if all attributes were expired. """ def resurrect(self, target): - """Receive an object instance as it is 'resurrected' from + """Receive an object instance as it is 'resurrected' from garbage collection, which occurs when a "dirty" state falls out of scope. - :param target: the mapped instance. If - the event is configured with ``raw=True``, this will + :param target: the mapped instance. If + the event is configured with ``raw=True``, this will instead be the :class:`.InstanceState` state-management object associated with the instance. @@ -232,28 +232,28 @@ class InstanceEvents(event.Events): """Receive an object instance when its associated state is being pickled. - :param target: the mapped instance. If - the event is configured with ``raw=True``, this will + :param target: the mapped instance. If + the event is configured with ``raw=True``, this will instead be the :class:`.InstanceState` state-management object associated with the instance. - :param state_dict: the dictionary returned by + :param state_dict: the dictionary returned by :class:`.InstanceState.__getstate__`, containing the state to be pickled. 
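A minimal sketch of listening for one of these instance-level events,
assuming a hypothetical mapped class (names illustrative)::

    from sqlalchemy import event

    def on_load(target, context):
        # called once per newly loaded instance; some attributes may
        # not yet be populated at this point, as noted above
        target._local_cache = {}

    event.listen(SomeMappedClass, 'load', on_load)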
- + """ def unpickle(self, target, state_dict): """Receive an object instance after it's associated state has been unpickled. - :param target: the mapped instance. If - the event is configured with ``raw=True``, this will + :param target: the mapped instance. If + the event is configured with ``raw=True``, this will instead be the :class:`.InstanceState` state-management object associated with the instance. :param state_dict: the dictionary sent to :class:`.InstanceState.__setstate__`, containing the state dictionary which was pickled. - + """ class MapperEvents(event.Events): @@ -267,7 +267,7 @@ class MapperEvents(event.Events): # execute a stored procedure upon INSERT, # apply the value to the row to be inserted target.calculated_value = connection.scalar( - "select my_special_function(%d)" + "select my_special_function(%d)" % target.special_number) # associate the listener function with SomeMappedClass, @@ -304,16 +304,16 @@ class MapperEvents(event.Events): When using :class:`.MapperEvents`, several modifiers are available to the :func:`.event.listen` function. - :param propagate=False: When True, the event listener should - be applied to all inheriting mappers as well as the + :param propagate=False: When True, the event listener should + be applied to all inheriting mappers as well as the mapper which is the target of this listener. :param raw=False: When True, the "target" argument passed - to applicable event listener functions will be the + to applicable event listener functions will be the instance's :class:`.InstanceState` management object, rather than the mapped instance itself. :param retval=False: when True, the user-defined event function must have a return value, the purpose of which is either to - control subsequent event propagation, or to otherwise alter + control subsequent event propagation, or to otherwise alter the operation in progress by the mapper. Possible return values are: @@ -322,7 +322,7 @@ class MapperEvents(event.Events): * ``sqlalchemy.orm.interfaces.EXT_STOP`` - cancel all subsequent event handlers in the chain. * other values - the return value specified by specific listeners, - such as :meth:`~.MapperEvents.translate_row` or + such as :meth:`~.MapperEvents.translate_row` or :meth:`~.MapperEvents.create_instance`. """ @@ -340,7 +340,7 @@ class MapperEvents(event.Events): return target @classmethod - def _listen(cls, target, identifier, fn, + def _listen(cls, target, identifier, fn, raw=False, retval=False, propagate=False): if not raw or not retval: @@ -370,7 +370,7 @@ class MapperEvents(event.Events): event.Events._listen(target, identifier, fn) def instrument_class(self, mapper, class_): - """Receive a class when the mapper is first constructed, + """Receive a class when the mapper is first constructed, before instrumentation is applied to the mapped class. This event is the earliest phase of mapper construction. @@ -404,11 +404,11 @@ class MapperEvents(event.Events): This corresponds to the :func:`.orm.configure_mappers` call, which note is usually called automatically as mappings are first used. - + Theoretically this event is called once per application, but is actually called any time new mappers have been affected by a :func:`.orm.configure_mappers` call. If new mappings - are constructed after existing ones have already been used, + are constructed after existing ones have already been used, this event can be called again. """ @@ -420,9 +420,9 @@ class MapperEvents(event.Events): This listener is typically registered with ``retval=True``. 
It is called when the mapper first receives a row, before the object identity or the instance itself has been derived - from that row. The given row may or may not be a + from that row. The given row may or may not be a :class:`.RowProxy` object - it will always be a dictionary-like - object which contains mapped columns as keys. The + object which contains mapped columns as keys. The returned object should also be a dictionary-like object which recognizes mapped columns as keys. @@ -431,7 +431,7 @@ class MapperEvents(event.Events): :param context: the :class:`.QueryContext`, which includes a handle to the current :class:`.Query` in progress as well as additional state information. - :param row: the result row being handled. This may be + :param row: the result row being handled. This may be an actual :class:`.RowProxy` or may be a dictionary containing :class:`.Column` objects as keys. :return: When configured with ``retval=True``, the function @@ -454,18 +454,18 @@ class MapperEvents(event.Events): :param context: the :class:`.QueryContext`, which includes a handle to the current :class:`.Query` in progress as well as additional state information. - :param row: the result row being handled. This may be + :param row: the result row being handled. This may be an actual :class:`.RowProxy` or may be a dictionary containing :class:`.Column` objects as keys. :param class\_: the mapped class. :return: When configured with ``retval=True``, the return value - should be a newly created instance of the mapped class, + should be a newly created instance of the mapped class, or ``EXT_CONTINUE`` indicating that default object construction should take place. """ - def append_result(self, mapper, context, row, target, + def append_result(self, mapper, context, row, target, result, **flags): """Receive an object instance before that instance is appended to a result list. @@ -478,27 +478,27 @@ class MapperEvents(event.Events): :param context: the :class:`.QueryContext`, which includes a handle to the current :class:`.Query` in progress as well as additional state information. - :param row: the result row being handled. This may be + :param row: the result row being handled. This may be an actual :class:`.RowProxy` or may be a dictionary containing :class:`.Column` objects as keys. - :param target: the mapped instance being populated. If - the event is configured with ``raw=True``, this will + :param target: the mapped instance being populated. If + the event is configured with ``raw=True``, this will instead be the :class:`.InstanceState` state-management object associated with the instance. :param result: a list-like object where results are being appended. - :param \**flags: Additional state information about the + :param \**flags: Additional state information about the current handling of the row. :return: If this method is registered with ``retval=True``, a return value of ``EXT_STOP`` will prevent the instance - from being appended to the given result list, whereas a + from being appended to the given result list, whereas a return value of ``EXT_CONTINUE`` will result in the default behavior of appending the value to the result list. """ - def populate_instance(self, mapper, context, row, + def populate_instance(self, mapper, context, row, target, **flags): """Receive an instance before that instance has its attributes populated. 
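As a short illustration of the ``translate_row`` hook described above, the following is a minimal sketch only; it reuses the hypothetical ``SomeMappedClass`` from the ``MapperEvents`` example earlier, and the listener simply passes each row through unchanged, which is where row-rewriting logic would go::

    from sqlalchemy import event
    from sqlalchemy.orm.interfaces import EXT_CONTINUE

    def my_translate_row(mapper, context, row):
        # 'row' may be a RowProxy or a dictionary keyed on Column
        # objects; return a dictionary-like replacement, or
        # EXT_CONTINUE to accept the row as delivered.
        return EXT_CONTINUE

    # retval=True indicates that the listener's return value is used
    event.listen(SomeMappedClass, 'translate_row',
                 my_translate_row, retval=True)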
@@ -518,11 +518,11 @@ class MapperEvents(event.Events): :param context: the :class:`.QueryContext`, which includes a handle to the current :class:`.Query` in progress as well as additional state information. - :param row: the result row being handled. This may be + :param row: the result row being handled. This may be an actual :class:`.RowProxy` or may be a dictionary containing :class:`.Column` objects as keys. - :param target: the mapped instance. If - the event is configured with ``raw=True``, this will + :param target: the mapped instance. If + the event is configured with ``raw=True``, this will instead be the :class:`.InstanceState` state-management object associated with the instance. :return: When configured with ``retval=True``, a return @@ -536,9 +536,9 @@ class MapperEvents(event.Events): """Receive an object instance before an INSERT statement is emitted corresponding to that instance. - This event is used to modify local, non-object related + This event is used to modify local, non-object related attributes on the instance before an INSERT occurs, as well - as to emit additional SQL statements on the given + as to emit additional SQL statements on the given connection. The event is often called for a batch of objects of the @@ -552,23 +552,23 @@ class MapperEvents(event.Events): .. warning:: Mapper-level flush events are designed to operate **on attributes - local to the immediate object being handled + local to the immediate object being handled and via SQL operations with the given** :class:`.Connection` **only.** - Handlers here should **not** make alterations to the state of + Handlers here should **not** make alterations to the state of the :class:`.Session` overall, and in general should not - affect any :func:`.relationship` -mapped attributes, as + affect any :func:`.relationship` -mapped attributes, as session cascade rules will not function properly, nor is it - always known if the related class has already been handled. + always known if the related class has already been handled. Operations that **are not supported in mapper events** include: - + * :meth:`.Session.add` * :meth:`.Session.delete` * Mapped collection append, add, remove, delete, discard, etc. * Mapped relationship attribute set/del events, i.e. ``someobject.related = someotherobject`` - + Operations which manipulate the state of the object relative to other objects are better handled: - + * In the ``__init__()`` method of the mapped object itself, or another method designed to establish some particular state. * In a ``@validates`` handler, see :ref:`simple_validators` @@ -576,12 +576,12 @@ class MapperEvents(event.Events): :param mapper: the :class:`.Mapper` which is the target of this event. - :param connection: the :class:`.Connection` being used to + :param connection: the :class:`.Connection` being used to emit INSERT statements for this instance. This - provides a handle into the current transaction on the + provides a handle into the current transaction on the target database specific to this instance. - :param target: the mapped instance being persisted. If - the event is configured with ``raw=True``, this will + :param target: the mapped instance being persisted. If + the event is configured with ``raw=True``, this will instead be the :class:`.InstanceState` state-management object associated with the instance. :return: No return value is supported by this event. 
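As a brief sketch of the above constraints in practice, assuming a hypothetical ``SomeMappedClass`` that has a simple ``created_at`` column, a ``before_insert`` listener can stamp local, column-based state without touching the :class:`.Session`::

    import datetime
    from sqlalchemy import event

    def stamp_created(mapper, connection, target):
        # only local, column-mapped state is modified here; no
        # Session operations, no relationship-mapped attributes.
        target.created_at = datetime.datetime.utcnow()

    event.listen(SomeMappedClass, 'before_insert', stamp_created)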
@@ -594,7 +594,7 @@ class MapperEvents(event.Events): This event is used to modify in-Python-only state on the instance after an INSERT occurs, as well - as to emit additional SQL statements on the given + as to emit additional SQL statements on the given connection. The event is often called for a batch of objects of the @@ -608,23 +608,23 @@ class MapperEvents(event.Events): .. warning:: Mapper-level flush events are designed to operate **on attributes - local to the immediate object being handled + local to the immediate object being handled and via SQL operations with the given** :class:`.Connection` **only.** - Handlers here should **not** make alterations to the state of + Handlers here should **not** make alterations to the state of the :class:`.Session` overall, and in general should not - affect any :func:`.relationship` -mapped attributes, as + affect any :func:`.relationship` -mapped attributes, as session cascade rules will not function properly, nor is it - always known if the related class has already been handled. + always known if the related class has already been handled. Operations that **are not supported in mapper events** include: - + * :meth:`.Session.add` * :meth:`.Session.delete` * Mapped collection append, add, remove, delete, discard, etc. * Mapped relationship attribute set/del events, i.e. ``someobject.related = someotherobject`` - + Operations which manipulate the state of the object relative to other objects are better handled: - + * In the ``__init__()`` method of the mapped object itself, or another method designed to establish some particular state. * In a ``@validates`` handler, see :ref:`simple_validators` @@ -632,12 +632,12 @@ class MapperEvents(event.Events): :param mapper: the :class:`.Mapper` which is the target of this event. - :param connection: the :class:`.Connection` being used to + :param connection: the :class:`.Connection` being used to emit INSERT statements for this instance. This - provides a handle into the current transaction on the + provides a handle into the current transaction on the target database specific to this instance. - :param target: the mapped instance being persisted. If - the event is configured with ``raw=True``, this will + :param target: the mapped instance being persisted. If + the event is configured with ``raw=True``, this will instead be the :class:`.InstanceState` state-management object associated with the instance. :return: No return value is supported by this event. @@ -648,9 +648,9 @@ class MapperEvents(event.Events): """Receive an object instance before an UPDATE statement is emitted corresponding to that instance. - This event is used to modify local, non-object related + This event is used to modify local, non-object related attributes on the instance before an UPDATE occurs, as well - as to emit additional SQL statements on the given + as to emit additional SQL statements on the given connection. This method is called for all instances that are @@ -683,23 +683,23 @@ class MapperEvents(event.Events): .. 
warning:: Mapper-level flush events are designed to operate **on attributes - local to the immediate object being handled + local to the immediate object being handled and via SQL operations with the given** :class:`.Connection` **only.** - Handlers here should **not** make alterations to the state of + Handlers here should **not** make alterations to the state of the :class:`.Session` overall, and in general should not - affect any :func:`.relationship` -mapped attributes, as + affect any :func:`.relationship` -mapped attributes, as session cascade rules will not function properly, nor is it - always known if the related class has already been handled. + always known if the related class has already been handled. Operations that **are not supported in mapper events** include: - + * :meth:`.Session.add` * :meth:`.Session.delete` * Mapped collection append, add, remove, delete, discard, etc. * Mapped relationship attribute set/del events, i.e. ``someobject.related = someotherobject`` - + Operations which manipulate the state of the object relative to other objects are better handled: - + * In the ``__init__()`` method of the mapped object itself, or another method designed to establish some particular state. * In a ``@validates`` handler, see :ref:`simple_validators` @@ -707,12 +707,12 @@ class MapperEvents(event.Events): :param mapper: the :class:`.Mapper` which is the target of this event. - :param connection: the :class:`.Connection` being used to + :param connection: the :class:`.Connection` being used to emit UPDATE statements for this instance. This - provides a handle into the current transaction on the + provides a handle into the current transaction on the target database specific to this instance. - :param target: the mapped instance being persisted. If - the event is configured with ``raw=True``, this will + :param target: the mapped instance being persisted. If + the event is configured with ``raw=True``, this will instead be the :class:`.InstanceState` state-management object associated with the instance. :return: No return value is supported by this event. @@ -724,12 +724,12 @@ class MapperEvents(event.Events): This event is used to modify in-Python-only state on the instance after an UPDATE occurs, as well - as to emit additional SQL statements on the given + as to emit additional SQL statements on the given connection. This method is called for all instances that are marked as "dirty", *even those which have no net changes - to their column-based attributes*, and for which + to their column-based attributes*, and for which no UPDATE statement has proceeded. An object is marked as dirty when any of its column-based attributes have a "set attribute" operation called or when any of its @@ -756,23 +756,23 @@ class MapperEvents(event.Events): .. warning:: Mapper-level flush events are designed to operate **on attributes - local to the immediate object being handled + local to the immediate object being handled and via SQL operations with the given** :class:`.Connection` **only.** - Handlers here should **not** make alterations to the state of + Handlers here should **not** make alterations to the state of the :class:`.Session` overall, and in general should not - affect any :func:`.relationship` -mapped attributes, as + affect any :func:`.relationship` -mapped attributes, as session cascade rules will not function properly, nor is it - always known if the related class has already been handled. + always known if the related class has already been handled. 
Operations that **are not supported in mapper events** include: - + * :meth:`.Session.add` * :meth:`.Session.delete` * Mapped collection append, add, remove, delete, discard, etc. * Mapped relationship attribute set/del events, i.e. ``someobject.related = someotherobject`` - + Operations which manipulate the state of the object relative to other objects are better handled: - + * In the ``__init__()`` method of the mapped object itself, or another method designed to establish some particular state. * In a ``@validates`` handler, see :ref:`simple_validators` @@ -780,12 +780,12 @@ class MapperEvents(event.Events): :param mapper: the :class:`.Mapper` which is the target of this event. - :param connection: the :class:`.Connection` being used to + :param connection: the :class:`.Connection` being used to emit UPDATE statements for this instance. This - provides a handle into the current transaction on the + provides a handle into the current transaction on the target database specific to this instance. - :param target: the mapped instance being persisted. If - the event is configured with ``raw=True``, this will + :param target: the mapped instance being persisted. If + the event is configured with ``raw=True``, this will instead be the :class:`.InstanceState` state-management object associated with the instance. :return: No return value is supported by this event. @@ -796,33 +796,33 @@ class MapperEvents(event.Events): """Receive an object instance before a DELETE statement is emitted corresponding to that instance. - This event is used to emit additional SQL statements on + This event is used to emit additional SQL statements on the given connection as well as to perform application specific bookkeeping related to a deletion event. The event is often called for a batch of objects of the same class before their DELETE statements are emitted at - once in a later step. + once in a later step. .. warning:: Mapper-level flush events are designed to operate **on attributes - local to the immediate object being handled + local to the immediate object being handled and via SQL operations with the given** :class:`.Connection` **only.** - Handlers here should **not** make alterations to the state of + Handlers here should **not** make alterations to the state of the :class:`.Session` overall, and in general should not - affect any :func:`.relationship` -mapped attributes, as + affect any :func:`.relationship` -mapped attributes, as session cascade rules will not function properly, nor is it - always known if the related class has already been handled. + always known if the related class has already been handled. Operations that **are not supported in mapper events** include: - + * :meth:`.Session.add` * :meth:`.Session.delete` * Mapped collection append, add, remove, delete, discard, etc. * Mapped relationship attribute set/del events, i.e. ``someobject.related = someotherobject`` - + Operations which manipulate the state of the object relative to other objects are better handled: - + * In the ``__init__()`` method of the mapped object itself, or another method designed to establish some particular state. * In a ``@validates`` handler, see :ref:`simple_validators` @@ -830,12 +830,12 @@ class MapperEvents(event.Events): :param mapper: the :class:`.Mapper` which is the target of this event. - :param connection: the :class:`.Connection` being used to + :param connection: the :class:`.Connection` being used to emit DELETE statements for this instance. 
This - provides a handle into the current transaction on the + provides a handle into the current transaction on the target database specific to this instance. - :param target: the mapped instance being deleted. If - the event is configured with ``raw=True``, this will + :param target: the mapped instance being deleted. If + the event is configured with ``raw=True``, this will instead be the :class:`.InstanceState` state-management object associated with the instance. :return: No return value is supported by this event. @@ -846,33 +846,33 @@ class MapperEvents(event.Events): """Receive an object instance after a DELETE statement has been emitted corresponding to that instance. - This event is used to emit additional SQL statements on + This event is used to emit additional SQL statements on the given connection as well as to perform application specific bookkeeping related to a deletion event. The event is often called for a batch of objects of the same class after their DELETE statements have been emitted at - once in a previous step. + once in a previous step. .. warning:: Mapper-level flush events are designed to operate **on attributes - local to the immediate object being handled + local to the immediate object being handled and via SQL operations with the given** :class:`.Connection` **only.** - Handlers here should **not** make alterations to the state of + Handlers here should **not** make alterations to the state of the :class:`.Session` overall, and in general should not - affect any :func:`.relationship` -mapped attributes, as + affect any :func:`.relationship` -mapped attributes, as session cascade rules will not function properly, nor is it - always known if the related class has already been handled. + always known if the related class has already been handled. Operations that **are not supported in mapper events** include: - + * :meth:`.Session.add` * :meth:`.Session.delete` * Mapped collection append, add, remove, delete, discard, etc. * Mapped relationship attribute set/del events, i.e. ``someobject.related = someotherobject`` - + Operations which manipulate the state of the object relative to other objects are better handled: - + * In the ``__init__()`` method of the mapped object itself, or another method designed to establish some particular state. * In a ``@validates`` handler, see :ref:`simple_validators` @@ -880,12 +880,12 @@ class MapperEvents(event.Events): :param mapper: the :class:`.Mapper` which is the target of this event. - :param connection: the :class:`.Connection` being used to + :param connection: the :class:`.Connection` being used to emit DELETE statements for this instance. This - provides a handle into the current transaction on the + provides a handle into the current transaction on the target database specific to this instance. - :param target: the mapped instance being deleted. If - the event is configured with ``raw=True``, this will + :param target: the mapped instance being deleted. If + the event is configured with ``raw=True``, this will instead be the :class:`.InstanceState` state-management object associated with the instance. :return: No return value is supported by this event. @@ -952,7 +952,7 @@ class SessionEvents(event.Events): transaction is ongoing. :param session: The target :class:`.Session`. - + """ def after_commit(self, session): @@ -960,19 +960,19 @@ class SessionEvents(event.Events): Note that this may not be per-flush if a longer running transaction is ongoing. - + :param session: The target :class:`.Session`. 
- + """ def after_rollback(self, session): """Execute after a real DBAPI rollback has occurred. - + Note that this event only fires when the *actual* rollback against - the database occurs - it does *not* fire each time the - :meth:`.Session.rollback` method is called, if the underlying + the database occurs - it does *not* fire each time the + :meth:`.Session.rollback` method is called, if the underlying DBAPI transaction has already been rolled back. In many - cases, the :class:`.Session` will not be in + cases, the :class:`.Session` will not be in an "active" state during this event, as the current transaction is not valid. To acquire a :class:`.Session` which is active after the outermost rollback has proceeded, @@ -984,23 +984,23 @@ class SessionEvents(event.Events): """ def after_soft_rollback(self, session, previous_transaction): - """Execute after any rollback has occurred, including "soft" + """Execute after any rollback has occurred, including "soft" rollbacks that don't actually emit at the DBAPI level. - + This corresponds to both nested and outer rollbacks, i.e. - the innermost rollback that calls the DBAPI's - rollback() method, as well as the enclosing rollback + the innermost rollback that calls the DBAPI's + rollback() method, as well as the enclosing rollback calls that only pop themselves from the transaction stack. - - The given :class:`.Session` can be used to invoke SQL and - :meth:`.Session.query` operations after an outermost rollback + + The given :class:`.Session` can be used to invoke SQL and + :meth:`.Session.query` operations after an outermost rollback by first checking the :attr:`.Session.is_active` flag:: @event.listens_for(Session, "after_soft_rollback") def do_something(session, previous_transaction): if session.is_active: session.execute("select * from some_table") - + :param session: The target :class:`.Session`. :param previous_transaction: The :class:`.SessionTransaction` transactional marker object which was just closed. The current :class:`.SessionTransaction` @@ -1030,7 +1030,7 @@ class SessionEvents(event.Events): Note that the session's state is still in pre-flush, i.e. 'new', 'dirty', and 'deleted' lists still show pre-flush state as well as the history settings on instance attributes. - + :param session: The target :class:`.Session`. :param flush_context: Internal :class:`.UOWTransaction` object which handles the details of the flush. @@ -1044,8 +1044,8 @@ class SessionEvents(event.Events): This will be when the 'new', 'dirty', and 'deleted' lists are in their final state. An actual commit() may or may not have occurred, depending on whether or not the flush started its own - transaction or participated in a larger transaction. - + transaction or participated in a larger transaction. + :param session: The target :class:`.Session`. :param flush_context: Internal :class:`.UOWTransaction` object which handles the details of the flush. @@ -1056,9 +1056,9 @@ class SessionEvents(event.Events): :param session: The target :class:`.Session`. :param transaction: The :class:`.SessionTransaction`. - :param connection: The :class:`~.engine.base.Connection` object + :param connection: The :class:`~.engine.base.Connection` object which will be used for SQL statements. - + """ def after_attach(self, session, instance): @@ -1072,7 +1072,7 @@ class SessionEvents(event.Events): This is called as a result of the :meth:`.Query.update` method. :param query: the :class:`.Query` object that this update operation was - called upon. + called upon. 
:param query_context: The :class:`.QueryContext` object, corresponding to the invocation of an ORM query. :param result: the :class:`.ResultProxy` returned as a result of the @@ -1086,7 +1086,7 @@ class SessionEvents(event.Events): This is called as a result of the :meth:`.Query.delete` method. :param query: the :class:`.Query` object that this update operation was - called upon. + called upon. :param query_context: The :class:`.QueryContext` object, corresponding to the invocation of an ORM query. :param result: the :class:`.ResultProxy` returned as a result of the @@ -1137,15 +1137,15 @@ class AttributeEvents(event.Events): :param propagate=False: When True, the listener function will be established not just for the class attribute given, but - for attributes of the same name on all current subclasses - of that class, as well as all future subclasses of that - class, using an additional listener that listens for + for attributes of the same name on all current subclasses + of that class, as well as all future subclasses of that + class, using an additional listener that listens for instrumentation events. :param raw=False: When True, the "target" argument to the event will be the :class:`.InstanceState` management object, rather than the mapped instance itself. - :param retval=False: when True, the user-defined event - listening must return the "value" argument from the + :param retval=False: when True, the user-defined event + listening must return the "value" argument from the function. This gives the listening function the opportunity to change the value that is ultimately used for a "set" or "append" event. @@ -1161,7 +1161,7 @@ class AttributeEvents(event.Events): return target @classmethod - def _listen(cls, target, identifier, fn, active_history=False, + def _listen(cls, target, identifier, fn, active_history=False, raw=False, retval=False, propagate=False): if active_history: @@ -1202,9 +1202,9 @@ class AttributeEvents(event.Events): be the :class:`.InstanceState` object. :param value: the value being appended. If this listener is registered with ``retval=True``, the listener - function must return this value, or a new value which + function must return this value, or a new value which replaces it. - :param initiator: the attribute implementation object + :param initiator: the attribute implementation object which initiated this event. :return: if the event was registered with ``retval=True``, the given value, or a new effective value, should be returned. @@ -1218,7 +1218,7 @@ class AttributeEvents(event.Events): If the listener is registered with ``raw=True``, this will be the :class:`.InstanceState` object. :param value: the value being removed. - :param initiator: the attribute implementation object + :param initiator: the attribute implementation object which initiated this event. :return: No return value is defined for this event. """ @@ -1231,15 +1231,15 @@ class AttributeEvents(event.Events): be the :class:`.InstanceState` object. :param value: the value being set. If this listener is registered with ``retval=True``, the listener - function must return this value, or a new value which + function must return this value, or a new value which replaces it. :param oldvalue: the previous value being replaced. This may also be the symbol ``NEVER_SET`` or ``NO_VALUE``. 
If the listener is registered with ``active_history=True``, the previous value of the attribute will be loaded from - the database if the existing value is currently unloaded + the database if the existing value is currently unloaded or expired. - :param initiator: the attribute implementation object + :param initiator: the attribute implementation object which initiated this event. :return: if the event was registered with ``retval=True``, the given value, or a new effective value, should be returned. diff --git a/lib/sqlalchemy/orm/exc.py b/lib/sqlalchemy/orm/exc.py index 9b3a78c435..c57aa49efc 100644 --- a/lib/sqlalchemy/orm/exc.py +++ b/lib/sqlalchemy/orm/exc.py @@ -18,13 +18,13 @@ class StaleDataError(sa.exc.SQLAlchemyError): Conditions which cause this to happen include: * A flush may have attempted to update or delete rows - and an unexpected number of rows were matched during - the UPDATE or DELETE statement. Note that when + and an unexpected number of rows were matched during + the UPDATE or DELETE statement. Note that when version_id_col is used, rows in UPDATE or DELETE statements are also matched against the current known version identifier. - * A mapped object with version_id_col was refreshed, + * A mapped object with version_id_col was refreshed, and the version number coming back from the database does not match that of the object itself. @@ -52,7 +52,7 @@ class ObjectDereferencedError(sa.exc.SQLAlchemyError): """An operation cannot complete due to an object being garbage collected.""" class DetachedInstanceError(sa.exc.SQLAlchemyError): - """An attempt to access unloaded attributes on a + """An attempt to access unloaded attributes on a mapped instance that is detached.""" class UnmappedInstanceError(UnmappedError): @@ -91,21 +91,21 @@ class UnmappedClassError(UnmappedError): class ObjectDeletedError(sa.exc.InvalidRequestError): """A refresh operation failed to retrieve the database row corresponding to an object's known primary key identity. - - A refresh operation proceeds when an expired attribute is + + A refresh operation proceeds when an expired attribute is accessed on an object, or when :meth:`.Query.get` is used to retrieve an object which is, upon retrieval, detected as expired. A SELECT is emitted for the target row based on primary key; if no row is returned, this exception is raised. - - The true meaning of this exception is simply that + + The true meaning of this exception is simply that no row exists for the primary key identifier associated - with a persistent object. The row may have been + with a persistent object. The row may have been deleted, or in some cases the primary key updated to a new value, outside of the ORM's management of the target - object. - + object. 
+ """ def __init__(self, state, msg=None): if not msg: diff --git a/lib/sqlalchemy/orm/identity.py b/lib/sqlalchemy/orm/identity.py index 59d121de98..b318fb03f4 100644 --- a/lib/sqlalchemy/orm/identity.py +++ b/lib/sqlalchemy/orm/identity.py @@ -128,7 +128,7 @@ class WeakInstanceDict(IdentityMap): o = existing_state._is_really_none() if o is not None: raise AssertionError("A conflicting state is already " - "present in the identity map for key %r" + "present in the identity map for key %r" % (key, )) else: return diff --git a/lib/sqlalchemy/orm/instrumentation.py b/lib/sqlalchemy/orm/instrumentation.py index af9ef7841a..9998046b2a 100644 --- a/lib/sqlalchemy/orm/instrumentation.py +++ b/lib/sqlalchemy/orm/instrumentation.py @@ -91,7 +91,7 @@ class ClassManager(dict): self.originals = {} self._bases = [mgr for mgr in [ - manager_of_class(base) + manager_of_class(base) for base in self.class_.__bases__ if isinstance(base, type) ] if mgr is not None] @@ -139,7 +139,7 @@ class ClassManager(dict): def _instrument_init(self): # TODO: self.class_.__init__ is often the already-instrumented - # __init__ from an instrumented superclass. We still need to make + # __init__ from an instrumented superclass. We still need to make # our own wrapper, but it would # be nice to wrap the original __init__ and not our existing wrapper # of such, since this adds method overhead. @@ -212,7 +212,7 @@ class ClassManager(dict): if key in self.mutable_attributes: self.mutable_attributes.remove(key) for cls in self.class_.__subclasses__(): - manager = manager_of_class(cls) + manager = manager_of_class(cls) if manager: manager.uninstrument_attribute(key, True) @@ -277,12 +277,12 @@ class ClassManager(dict): def new_instance(self, state=None): instance = self.class_.__new__(self.class_) - setattr(instance, self.STATE_ATTR, + setattr(instance, self.STATE_ATTR, state or self._state_constructor(instance, self)) return instance def setup_instance(self, instance, state=None): - setattr(instance, self.STATE_ATTR, + setattr(instance, self.STATE_ATTR, state or self._state_constructor(instance, self)) def teardown_instance(self, instance): @@ -387,7 +387,7 @@ class _ClassInstrumentationAdapter(ClassManager): if delegate: return delegate(key, state, factory) else: - return ClassManager.initialize_collection(self, key, + return ClassManager.initialize_collection(self, key, state, factory) def new_instance(self, state=None): @@ -463,7 +463,7 @@ def is_instrumented(instance, key): class InstrumentationRegistry(object): """Private instrumentation registration singleton. - All classes are routed through this registry + All classes are routed through this registry when first instrumented, however the InstrumentationRegistry is not actually needed unless custom ClassManagers are in use. @@ -501,7 +501,7 @@ class InstrumentationRegistry(object): if factory != ClassManager and not self._extended: # somebody invoked a custom ClassManager. - # reinstall global "getter" functions with the more + # reinstall global "getter" functions with the more # expensive ones. 
self._extended = True _install_lookup_strategy(self) @@ -543,7 +543,7 @@ class InstrumentationRegistry(object): return factories def manager_of_class(self, cls): - # this is only called when alternate instrumentation + # this is only called when alternate instrumentation # has been established if cls is None: return None @@ -555,7 +555,7 @@ class InstrumentationRegistry(object): return finder(cls) def state_of(self, instance): - # this is only called when alternate instrumentation + # this is only called when alternate instrumentation # has been established if instance is None: raise AttributeError("None has no persistent state.") @@ -566,7 +566,7 @@ class InstrumentationRegistry(object): instance.__class__) def dict_of(self, instance): - # this is only called when alternate instrumentation + # this is only called when alternate instrumentation # has been established if instance is None: raise AttributeError("None has no persistent state.") @@ -632,7 +632,7 @@ instrumentation_finders.append(find_native_user_instrumentation_hook) def _generate_init(class_, class_manager): """Build an __init__ decorator that triggers ClassManager events.""" - # TODO: we should use the ClassManager's notion of the + # TODO: we should use the ClassManager's notion of the # original '__init__' method, once ClassManager is fixed # to always reference that. original__init__ = class_.__init__ diff --git a/lib/sqlalchemy/orm/interfaces.py b/lib/sqlalchemy/orm/interfaces.py index bda48cbb13..fcb6f7401e 100644 --- a/lib/sqlalchemy/orm/interfaces.py +++ b/lib/sqlalchemy/orm/interfaces.py @@ -61,13 +61,13 @@ class MapperProperty(object): attribute, as well as that attribute as it appears on individual instances of the class, including attribute instrumentation, attribute access, loading behavior, and dependency calculations. - + The most common occurrences of :class:`.MapperProperty` are the - mapped :class:`.Column`, which is represented in a mapping as + mapped :class:`.Column`, which is represented in a mapping as an instance of :class:`.ColumnProperty`, and a reference to another class produced by :func:`.relationship`, represented in the mapping as an instance of :class:`.RelationshipProperty`. - + """ cascade = () @@ -87,7 +87,7 @@ class MapperProperty(object): pass - def create_row_processor(self, context, path, reduced_path, + def create_row_processor(self, context, path, reduced_path, mapper, row, adapter): """Return a 3-tuple consisting of three row processing functions. @@ -263,13 +263,13 @@ class PropComparator(operators.ColumnOperators): """Return true if this collection contains any member that meets the given criterion. - The usual implementation of ``any()`` is + The usual implementation of ``any()`` is :meth:`.RelationshipProperty.Comparator.any`. - :param criterion: an optional ClauseElement formulated against the + :param criterion: an optional ClauseElement formulated against the member class' table or attributes. - :param \**kwargs: key/value pairs corresponding to member class attribute + :param \**kwargs: key/value pairs corresponding to member class attribute names which will be compared via equality to the corresponding values. @@ -281,13 +281,13 @@ class PropComparator(operators.ColumnOperators): """Return true if this element references a member which meets the given criterion. - The usual implementation of ``has()`` is + The usual implementation of ``has()`` is :meth:`.RelationshipProperty.Comparator.has`. 
- :param criterion: an optional ClauseElement formulated against the + :param criterion: an optional ClauseElement formulated against the member class' table or attributes. - :param \**kwargs: key/value pairs corresponding to member class attribute + :param \**kwargs: key/value pairs corresponding to member class attribute names which will be compared via equality to the corresponding values. @@ -337,12 +337,12 @@ class StrategizedProperty(MapperProperty): def setup(self, context, entity, path, reduced_path, adapter, **kwargs): self._get_context_strategy(context, reduced_path + (self.key,)).\ - setup_query(context, entity, path, + setup_query(context, entity, path, reduced_path, adapter, **kwargs) def create_row_processor(self, context, path, reduced_path, mapper, row, adapter): return self._get_context_strategy(context, reduced_path + (self.key,)).\ - create_row_processor(context, path, + create_row_processor(context, path, reduced_path, mapper, row, adapter) def do_init(self): @@ -365,7 +365,7 @@ def serialize_path(path): return None return zip( - [m.class_ for m in [path[i] for i in range(0, len(path), 2)]], + [m.class_ for m in [path[i] for i in range(0, len(path), 2)]], [path[i] for i in range(1, len(path), 2)] + [None] ) @@ -382,7 +382,7 @@ class MapperOption(object): """Describe a modification to a Query.""" propagate_to_loaders = False - """if True, indicate this option should be carried along + """if True, indicate this option should be carried along Query object generated by scalar or object lazy loaders. """ @@ -464,9 +464,9 @@ class PropertyOption(MapperOption): else: raise sa_exc.ArgumentError( "Can't find property '%s' on any entity " - "specified in this Query. Note the full path " - "from root (%s) to target entity must be specified." - % (token, ",".join(str(x) for + "specified in this Query. Note the full path " + "from root (%s) to target entity must be specified." + % (token, ",".join(str(x) for x in query._mapper_entities)) ) else: @@ -494,7 +494,7 @@ class PropertyOption(MapperOption): l = [] mappers = [] - # _current_path implies we're in a + # _current_path implies we're in a # secondary load with an existing path current_path = list(query._current_path) @@ -520,8 +520,8 @@ class PropertyOption(MapperOption): if not entity: entity = self._find_entity_basestring( - query, - token, + query, + token, raiseerr) if entity is None: return [], [] @@ -555,8 +555,8 @@ class PropertyOption(MapperOption): if not entity: entity = self._find_entity_prop_comparator( query, - prop.key, - token.parententity, + prop.key, + token.parententity, raiseerr) if not entity: return [], [] @@ -587,7 +587,7 @@ class PropertyOption(MapperOption): ) if current_path: - # ran out of tokens before + # ran out of tokens before # current_path was exhausted. assert not tokens return [], [] @@ -630,9 +630,9 @@ def _reduce_path(path): of the mapper referenced by Mapper.prop1. 
""" - return tuple([i % 2 != 0 and - element or - getattr(element, 'base_mapper', element) + return tuple([i % 2 != 0 and + element or + getattr(element, 'base_mapper', element) for i, element in enumerate(path)]) class LoaderStrategy(object): @@ -678,7 +678,7 @@ class LoaderStrategy(object): def setup_query(self, context, entity, path, reduced_path, adapter, **kwargs): pass - def create_row_processor(self, context, path, reduced_path, mapper, + def create_row_processor(self, context, path, reduced_path, mapper, row, adapter): """Return row processing functions which fulfill the contract specified by MapperProperty.create_row_processor. diff --git a/lib/sqlalchemy/orm/persistence.py b/lib/sqlalchemy/orm/persistence.py index e73e97d3c6..b2740f2b2b 100644 --- a/lib/sqlalchemy/orm/persistence.py +++ b/lib/sqlalchemy/orm/persistence.py @@ -23,7 +23,7 @@ from sqlalchemy.orm import attributes, sync, \ from sqlalchemy.orm.util import _state_mapper, state_str def save_obj(base_mapper, states, uowtransaction, single=False): - """Issue ``INSERT`` and/or ``UPDATE`` statements for a list + """Issue ``INSERT`` and/or ``UPDATE`` statements for a list of objects. This is called within the context of a UOWTransaction during a @@ -40,30 +40,30 @@ def save_obj(base_mapper, states, uowtransaction, single=False): return states_to_insert, states_to_update = _organize_states_for_save( - base_mapper, - states, + base_mapper, + states, uowtransaction) cached_connections = _cached_connection_dict(base_mapper) for table, mapper in base_mapper._sorted_tables.iteritems(): - insert = _collect_insert_commands(base_mapper, uowtransaction, + insert = _collect_insert_commands(base_mapper, uowtransaction, table, states_to_insert) - update = _collect_update_commands(base_mapper, uowtransaction, + update = _collect_update_commands(base_mapper, uowtransaction, table, states_to_update) if update: - _emit_update_statements(base_mapper, uowtransaction, - cached_connections, + _emit_update_statements(base_mapper, uowtransaction, + cached_connections, mapper, table, update) if insert: - _emit_insert_statements(base_mapper, uowtransaction, - cached_connections, + _emit_insert_statements(base_mapper, uowtransaction, + cached_connections, table, insert) - _finalize_insert_update_commands(base_mapper, uowtransaction, + _finalize_insert_update_commands(base_mapper, uowtransaction, states_to_insert, states_to_update) def post_update(base_mapper, states, uowtransaction, post_update_cols): @@ -74,18 +74,18 @@ def post_update(base_mapper, states, uowtransaction, post_update_cols): cached_connections = _cached_connection_dict(base_mapper) states_to_update = _organize_states_for_post_update( - base_mapper, + base_mapper, states, uowtransaction) for table, mapper in base_mapper._sorted_tables.iteritems(): - update = _collect_post_update_commands(base_mapper, uowtransaction, - table, states_to_update, + update = _collect_post_update_commands(base_mapper, uowtransaction, + table, states_to_update, post_update_cols) if update: - _emit_post_update_statements(base_mapper, uowtransaction, - cached_connections, + _emit_post_update_statements(base_mapper, uowtransaction, + cached_connections, mapper, table, update) def delete_obj(base_mapper, states, uowtransaction): @@ -99,19 +99,19 @@ def delete_obj(base_mapper, states, uowtransaction): cached_connections = _cached_connection_dict(base_mapper) states_to_delete = _organize_states_for_delete( - base_mapper, + base_mapper, states, uowtransaction) table_to_mapper = base_mapper._sorted_tables for table 
in reversed(table_to_mapper.keys()): - delete = _collect_delete_commands(base_mapper, uowtransaction, + delete = _collect_delete_commands(base_mapper, uowtransaction, table, states_to_delete) mapper = table_to_mapper[table] - _emit_delete_statements(base_mapper, uowtransaction, + _emit_delete_statements(base_mapper, uowtransaction, cached_connections, mapper, table, delete) for state, state_dict, mapper, has_identity, connection \ @@ -121,20 +121,20 @@ def delete_obj(base_mapper, states, uowtransaction): def _organize_states_for_save(base_mapper, states, uowtransaction): """Make an initial pass across a set of states for INSERT or UPDATE. - + This includes splitting out into distinct lists for each, calling before_insert/before_update, obtaining key information for each state including its dictionary, mapper, the connection to use for the execution per state, and the identity flag. - + """ states_to_insert = [] states_to_update = [] for state, dict_, mapper, connection in _connections_for_states( - base_mapper, uowtransaction, + base_mapper, uowtransaction, states): has_identity = bool(state.key) @@ -148,9 +148,9 @@ def _organize_states_for_save(base_mapper, states, uowtransaction): else: mapper.dispatch.before_update(mapper, connection, state) - # detect if we have a "pending" instance (i.e. has - # no instance_key attached to it), and another instance - # with the same identity key already exists as persistent. + # detect if we have a "pending" instance (i.e. has + # no instance_key attached to it), and another instance + # with the same identity key already exists as persistent. # convert to an UPDATE if so. if not has_identity and \ instance_key in uowtransaction.session.identity_map: @@ -160,14 +160,14 @@ def _organize_states_for_save(base_mapper, states, uowtransaction): if not uowtransaction.is_deleted(existing): raise orm_exc.FlushError( "New instance %s with identity key %s conflicts " - "with persistent instance %s" % + "with persistent instance %s" % (state_str(state), instance_key, state_str(existing))) base_mapper._log_debug( "detected row switch for identity %s. " "will update %s, remove %s from " - "transaction", instance_key, + "transaction", instance_key, state_str(state), state_str(existing)) # remove the "delete" flag from the existing element @@ -176,55 +176,55 @@ def _organize_states_for_save(base_mapper, states, uowtransaction): if not has_identity and not row_switch: states_to_insert.append( - (state, dict_, mapper, connection, + (state, dict_, mapper, connection, has_identity, instance_key, row_switch) ) else: states_to_update.append( - (state, dict_, mapper, connection, + (state, dict_, mapper, connection, has_identity, instance_key, row_switch) ) return states_to_insert, states_to_update -def _organize_states_for_post_update(base_mapper, states, +def _organize_states_for_post_update(base_mapper, states, uowtransaction): """Make an initial pass across a set of states for UPDATE corresponding to post_update. - - This includes obtaining key information for each state - including its dictionary, mapper, the connection to use for + + This includes obtaining key information for each state + including its dictionary, mapper, the connection to use for the execution per state. - + """ - return list(_connections_for_states(base_mapper, uowtransaction, + return list(_connections_for_states(base_mapper, uowtransaction, states)) def _organize_states_for_delete(base_mapper, states, uowtransaction): """Make an initial pass across a set of states for DELETE. 
- + This includes calling out before_delete and obtaining key information for each state including its dictionary, mapper, the connection to use for the execution per state. - + """ states_to_delete = [] for state, dict_, mapper, connection in _connections_for_states( - base_mapper, uowtransaction, + base_mapper, uowtransaction, states): mapper.dispatch.before_delete(mapper, connection, state) - states_to_delete.append((state, dict_, mapper, + states_to_delete.append((state, dict_, mapper, bool(state.key), connection)) return states_to_delete -def _collect_insert_commands(base_mapper, uowtransaction, table, +def _collect_insert_commands(base_mapper, uowtransaction, table, states_to_insert): """Identify sets of values to use in INSERT statements for a list of states. - + """ insert = [] for state, state_dict, mapper, connection, has_identity, \ @@ -242,7 +242,7 @@ def _collect_insert_commands(base_mapper, uowtransaction, table, if col is mapper.version_id_col: params[col.key] = mapper.version_id_generator(None) else: - # pull straight from the dict for + # pull straight from the dict for # pending objects prop = mapper._columntoproperty[col] value = state_dict.get(prop.key, None) @@ -259,15 +259,15 @@ def _collect_insert_commands(base_mapper, uowtransaction, table, else: params[col.key] = value - insert.append((state, state_dict, params, mapper, + insert.append((state, state_dict, params, mapper, connection, value_params, has_all_pks)) return insert -def _collect_update_commands(base_mapper, uowtransaction, +def _collect_update_commands(base_mapper, uowtransaction, table, states_to_update): """Identify sets of values to use in UPDATE statements for a list of states. - + This function works intricately with the history system to determine exactly what values should be updated as well as how the row should be matched within an UPDATE @@ -292,14 +292,14 @@ def _collect_update_commands(base_mapper, uowtransaction, if col is mapper.version_id_col: params[col._label] = \ mapper._get_committed_state_attr_by_column( - row_switch or state, - row_switch and row_switch.dict + row_switch or state, + row_switch and row_switch.dict or state_dict, col) prop = mapper._columntoproperty[col] history = attributes.get_state_history( - state, prop.key, + state, prop.key, attributes.PASSIVE_NO_INITIALIZE ) if history.added: @@ -309,20 +309,20 @@ def _collect_update_commands(base_mapper, uowtransaction, params[col.key] = mapper.version_id_generator( params[col._label]) - # HACK: check for history, in case the + # HACK: check for history, in case the # history is only - # in a different table than the one + # in a different table than the one # where the version_id_col is. 
for prop in mapper._columntoproperty.itervalues(): history = attributes.get_state_history( - state, prop.key, + state, prop.key, attributes.PASSIVE_NO_INITIALIZE) if history.added: hasdata = True else: prop = mapper._columntoproperty[col] history = attributes.get_state_history( - state, prop.key, + state, prop.key, attributes.PASSIVE_NO_INITIALIZE) if history.added: if isinstance(history.added[0], @@ -344,7 +344,7 @@ def _collect_update_commands(base_mapper, uowtransaction, value = history.added[0] params[col._label] = value else: - # use the old value to + # use the old value to # locate the row value = history.deleted[0] params[col._label] = value @@ -374,12 +374,12 @@ def _collect_update_commands(base_mapper, uowtransaction, "Can't update table " "using NULL for primary " "key value") - update.append((state, state_dict, params, mapper, + update.append((state, state_dict, params, mapper, connection, value_params)) return update -def _collect_post_update_commands(base_mapper, uowtransaction, table, +def _collect_post_update_commands(base_mapper, uowtransaction, table, states_to_update, post_update_cols): """Identify sets of values to use in UPDATE statements for a list of states within a post_update operation. @@ -403,20 +403,20 @@ def _collect_post_update_commands(base_mapper, uowtransaction, table, elif col in post_update_cols: prop = mapper._columntoproperty[col] history = attributes.get_state_history( - state, prop.key, + state, prop.key, attributes.PASSIVE_NO_INITIALIZE) if history.added: value = history.added[0] params[col.key] = value hasdata = True if hasdata: - update.append((state, state_dict, params, mapper, + update.append((state, state_dict, params, mapper, connection)) return update -def _collect_delete_commands(base_mapper, uowtransaction, table, +def _collect_delete_commands(base_mapper, uowtransaction, table, states_to_delete): - """Identify values to use in DELETE statements for a list of + """Identify values to use in DELETE statements for a list of states to be deleted.""" delete = util.defaultdict(list) @@ -448,7 +448,7 @@ def _collect_delete_commands(base_mapper, uowtransaction, table, return delete -def _emit_update_statements(base_mapper, uowtransaction, +def _emit_update_statements(base_mapper, uowtransaction, cached_connections, mapper, table, update): """Emit UPDATE statements corresponding to value lists collected by _collect_update_commands().""" @@ -486,13 +486,13 @@ def _emit_update_statements(base_mapper, uowtransaction, _postfetch( mapper, - uowtransaction, - table, - state, - state_dict, - c.context.prefetch_cols, + uowtransaction, + table, + state, + state_dict, + c.context.prefetch_cols, c.context.postfetch_cols, - c.context.compiled_parameters[0], + c.context.compiled_parameters[0], value_params) rows += c.rowcount @@ -505,11 +505,11 @@ def _emit_update_statements(base_mapper, uowtransaction, elif needs_version_id: util.warn("Dialect %s does not support updated rowcount " - "- versioning cannot be verified." % + "- versioning cannot be verified." 
% c.dialect.dialect_description, stacklevel=12) -def _emit_insert_statements(base_mapper, uowtransaction, +def _emit_insert_statements(base_mapper, uowtransaction, cached_connections, table, insert): """Emit INSERT statements corresponding to value lists collected by _collect_insert_commands().""" @@ -517,10 +517,10 @@ def _emit_insert_statements(base_mapper, uowtransaction, statement = base_mapper._memo(('insert', table), table.insert) for (connection, pkeys, hasvalue, has_all_pks), \ - records in groupby(insert, - lambda rec: (rec[4], - rec[2].keys(), - bool(rec[5]), + records in groupby(insert, + lambda rec: (rec[4], + rec[2].keys(), + bool(rec[5]), rec[6]) ): if has_all_pks and not hasvalue: @@ -529,19 +529,19 @@ def _emit_insert_statements(base_mapper, uowtransaction, c = cached_connections[connection].\ execute(statement, multiparams) - for (state, state_dict, params, mapper, + for (state, state_dict, params, mapper, conn, value_params, has_all_pks), \ last_inserted_params in \ zip(records, c.context.compiled_parameters): _postfetch( mapper, - uowtransaction, + uowtransaction, table, - state, + state, state_dict, c.context.prefetch_cols, c.context.postfetch_cols, - last_inserted_params, + last_inserted_params, value_params) else: @@ -561,31 +561,31 @@ def _emit_insert_statements(base_mapper, uowtransaction, if primary_key is not None: # set primary key attributes - for pk, col in zip(primary_key, + for pk, col in zip(primary_key, mapper._pks_by_table[table]): prop = mapper._columntoproperty[col] if state_dict.get(prop.key) is None: # TODO: would rather say: #state_dict[prop.key] = pk mapper._set_state_attr_by_column( - state, - state_dict, + state, + state_dict, col, pk) _postfetch( mapper, - uowtransaction, - table, - state, + uowtransaction, + table, + state, state_dict, - result.context.prefetch_cols, + result.context.prefetch_cols, result.context.postfetch_cols, - result.context.compiled_parameters[0], + result.context.compiled_parameters[0], value_params) -def _emit_post_update_statements(base_mapper, uowtransaction, +def _emit_post_update_statements(base_mapper, uowtransaction, cached_connections, mapper, table, update): """Emit UPDATE statements corresponding to value lists collected by _collect_post_update_commands().""" @@ -603,19 +603,19 @@ def _emit_post_update_statements(base_mapper, uowtransaction, # execute each UPDATE in the order according to the original # list of states to guarantee row access order, but - # also group them into common (connection, cols) sets + # also group them into common (connection, cols) sets # to support executemany(). 
for key, grouper in groupby( update, lambda rec: (rec[4], rec[2].keys()) ): connection = key[0] - multiparams = [params for state, state_dict, + multiparams = [params for state, state_dict, params, mapper, conn in grouper] cached_connections[connection].\ execute(statement, multiparams) -def _emit_delete_statements(base_mapper, uowtransaction, cached_connections, +def _emit_delete_statements(base_mapper, uowtransaction, cached_connections, mapper, table, delete): """Emit DELETE statements corresponding to value lists collected by _collect_delete_commands().""" @@ -631,9 +631,9 @@ def _emit_delete_statements(base_mapper, uowtransaction, cached_connections, if need_version_id: clause.clauses.append( - mapper.version_id_col == + mapper.version_id_col == sql.bindparam( - mapper.version_id_col.key, + mapper.version_id_col.key, type_=mapper.version_id_col.type ) ) @@ -657,13 +657,13 @@ def _emit_delete_statements(base_mapper, uowtransaction, cached_connections, if rows != len(del_objects): raise orm_exc.StaleDataError( "DELETE statement on table '%s' expected to " - "delete %d row(s); %d were matched." % + "delete %d row(s); %d were matched." % (table.description, len(del_objects), c.rowcount) ) else: util.warn( "Dialect %s does not support deleted rowcount " - "- versioning cannot be verified." % + "- versioning cannot be verified." % connection.dialect.dialect_description, stacklevel=12) connection.execute(statement, del_objects) @@ -671,11 +671,11 @@ def _emit_delete_statements(base_mapper, uowtransaction, cached_connections, connection.execute(statement, del_objects) -def _finalize_insert_update_commands(base_mapper, uowtransaction, +def _finalize_insert_update_commands(base_mapper, uowtransaction, states_to_insert, states_to_update): """finalize state on states that have been inserted or updated, including calling after_insert/after_update events. - + """ for state, state_dict, mapper, connection, has_identity, \ instance_key, row_switch in states_to_insert + \ @@ -683,7 +683,7 @@ def _finalize_insert_update_commands(base_mapper, uowtransaction, if mapper._readonly_props: readonly = state.unmodified_intersection( - [p.key for p in mapper._readonly_props + [p.key for p in mapper._readonly_props if p.expire_on_flush or p.key not in state.dict] ) if readonly: @@ -703,7 +703,7 @@ def _finalize_insert_update_commands(base_mapper, uowtransaction, else: mapper.dispatch.after_update(mapper, connection, state) -def _postfetch(mapper, uowtransaction, table, +def _postfetch(mapper, uowtransaction, table, state, dict_, prefetch_cols, postfetch_cols, params, value_params): """Expire attributes in need of newly persisted database state, @@ -718,9 +718,9 @@ def _postfetch(mapper, uowtransaction, table, mapper._set_state_attr_by_column(state, dict_, c, params[c.key]) if postfetch_cols: - state.expire_attributes(state.dict, - [mapper._columntoproperty[c].key - for c in postfetch_cols if c in + state.expire_attributes(state.dict, + [mapper._columntoproperty[c].key + for c in postfetch_cols if c in mapper._columntoproperty] ) @@ -728,21 +728,21 @@ def _postfetch(mapper, uowtransaction, table, # TODO: this still goes a little too often. 
would be nice to # have definitive list of "columns that changed" here for m, equated_pairs in mapper._table_to_equated[table]: - sync.populate(state, m, state, m, - equated_pairs, + sync.populate(state, m, state, m, + equated_pairs, uowtransaction, mapper.passive_updates) def _connections_for_states(base_mapper, uowtransaction, states): """Return an iterator of (state, state.dict, mapper, connection). - + The states are sorted according to _sort_states, then paired with the connection they should be using for the given unit of work transaction. - + """ # if session has a connection callable, - # organize individual states with the connection + # organize individual states with the connection # to use for update if uowtransaction.session.connection_callable: connection_callable = \ diff --git a/lib/sqlalchemy/orm/properties.py b/lib/sqlalchemy/orm/properties.py index f2186df511..f71ecf71ae 100644 --- a/lib/sqlalchemy/orm/properties.py +++ b/lib/sqlalchemy/orm/properties.py @@ -33,9 +33,9 @@ from descriptor_props import CompositeProperty, SynonymProperty, \ class ColumnProperty(StrategizedProperty): """Describes an object attribute that corresponds to a table column. - + Public constructor is the :func:`.orm.column_property` function. - + """ def __init__(self, *columns, **kwargs): @@ -62,7 +62,7 @@ class ColumnProperty(StrategizedProperty): """ self._orig_columns = [expression._labeled(c) for c in columns] - self.columns = [expression._labeled(_orm_deannotate(c)) + self.columns = [expression._labeled(_orm_deannotate(c)) for c in columns] self.group = kwargs.pop('group', None) self.deferred = kwargs.pop('deferred', False) @@ -88,7 +88,7 @@ class ColumnProperty(StrategizedProperty): if kwargs: raise TypeError( "%s received unexpected keyword argument(s): %s" % ( - self.__class__.__name__, + self.__class__.__name__, ', '.join(sorted(kwargs.keys())))) util.set_creation_order(self) @@ -104,9 +104,9 @@ class ColumnProperty(StrategizedProperty): return attributes.register_descriptor( - mapper.class_, - self.key, - comparator=self.comparator_factory(self, mapper), + mapper.class_, + self.key, + comparator=self.comparator_factory(self, mapper), parententity=mapper, doc=self.doc ) @@ -124,17 +124,17 @@ class ColumnProperty(StrategizedProperty): def copy(self): return ColumnProperty( - deferred=self.deferred, - group=self.group, + deferred=self.deferred, + group=self.group, active_history=self.active_history, *self.columns) - def _getcommitted(self, state, dict_, column, + def _getcommitted(self, state, dict_, column, passive=attributes.PASSIVE_OFF): return state.get_impl(self.key).\ get_committed_value(state, dict_, passive=passive) - def merge(self, session, source_state, source_dict, dest_state, + def merge(self, session, source_state, source_dict, dest_state, dest_dict, load, _recursive): if not self.instrument: return @@ -177,20 +177,20 @@ log.class_logger(ColumnProperty) class RelationshipProperty(StrategizedProperty): """Describes an object property that holds a single item or list of items that correspond to a related database table. - + Public constructor is the :func:`.orm.relationship` function. - + Of note here is the :class:`.RelationshipProperty.Comparator` class, which implements comparison operations for scalar- and collection-referencing mapped attributes. 
- 
+
        """

    strategy_wildcard_key = 'relationship:*'

    def __init__(self, argument,
        secondary=None, primaryjoin=None,
-        secondaryjoin=None, 
+        secondaryjoin=None,
        foreign_keys=None,
        uselist=None,
        order_by=False,
@@ -208,7 +208,7 @@ class RelationshipProperty(StrategizedProperty):
        active_history=False,
        cascade_backrefs=True,
        load_on_pending=False,
-        strategy_class=None, _local_remote_pairs=None, 
+        strategy_class=None, _local_remote_pairs=None,
        query_class=None):

        self.uselist = uselist
@@ -257,7 +257,7 @@ class RelationshipProperty(StrategizedProperty):
            self.cascade = CascadeOptions("save-update, merge")

        if self.passive_deletes == 'all' and \
-                    ("delete" in self.cascade or 
+                    ("delete" in self.cascade or
                    "delete-orphan" in self.cascade):
            raise sa_exc.ArgumentError(
                    "Can't set passive_deletes='all' in conjunction "
@@ -279,9 +279,9 @@ class RelationshipProperty(StrategizedProperty):

    def instrument_class(self, mapper):
        attributes.register_descriptor(
-            mapper.class_, 
-            self.key, 
-            comparator=self.comparator_factory(self, mapper), 
+            mapper.class_,
+            self.key,
+            comparator=self.comparator_factory(self, mapper),
            parententity=mapper,
            doc=self.doc,
            )
@@ -293,7 +293,7 @@ class RelationshipProperty(StrategizedProperty):
        def __init__(self, prop, mapper, of_type=None, adapter=None):
            """Construction of :class:`.RelationshipProperty.Comparator`
            is internal to the ORM's attribute mechanics.
- 
+
            """
            self.prop = prop
            self.mapper = mapper
@@ -326,20 +326,20 @@ class RelationshipProperty(StrategizedProperty):
        def of_type(self, cls):
            """Produce a construct that represents a particular 'subtype'
            of attribute for the parent class.
- 
+
            Currently this is usable in conjunction with
            :meth:`.Query.join` and :meth:`.Query.outerjoin`.
- 
+
            """
            return RelationshipProperty.Comparator(
-                self.property, 
-                self.mapper, 
+                self.property,
+                self.mapper,
                cls, adapter=self.adapter)

        def in_(self, other):
-            """Produce an IN clause - this is not implemented 
+            """Produce an IN clause - this is not implemented
            for :func:`~.orm.relationship`-based attributes at this time.
- 
+
            """
            raise NotImplementedError('in_() not yet supported for '
                    'relationships. For a simple many-to-one, use '
@@ -356,20 +356,20 @@ class RelationshipProperty(StrategizedProperty):

            this will typically produce a clause such as::
- 
+
              mytable.related_id == <some id>
- 
-            Where ``<some id>`` is the primary key of the given 
+
+            Where ``<some id>`` is the primary key of the given
            object.
- 
+
            The ``==`` operator provides partial functionality for non-
            many-to-one comparisons:
- 
+
            * Comparisons against collections are not supported.
              Use :meth:`~.RelationshipProperty.Comparator.contains`.
-            * Compared to a scalar one-to-many, will produce a 
+            * Compared to a scalar one-to-many, will produce a
              clause that compares the target columns in the parent to
-              the given target. 
+              the given target.
            * Compared to a scalar many-to-many, an alias of the
              association table will be rendered as
              well, forming a natural join that is part of the
@@ -443,9 +443,9 @@ class RelationshipProperty(StrategizedProperty):
                # limit this adapter to annotated only?
                criterion = target_adapter.traverse(criterion)

-            # only have the "joined left side" of what we 
+            # only have the "joined left side" of what we
            # return be subject to Query adaption. The right
-            # side of it is used for an exists() subquery and 
+            # side of it is used for an exists() subquery and
            # should not correlate or otherwise reach out
            # to anything in the enclosing query.
                if criterion is not None:
@@ -459,42 +459,42 @@ class RelationshipProperty(StrategizedProperty):
        def any(self, criterion=None, **kwargs):
            """Produce an expression that tests a collection against
            particular criterion, using EXISTS.
- 
+
            An expression like::
- 
+
                session.query(MyClass).filter(
                    MyClass.somereference.any(SomeRelated.x==2)
                )
- 
- 
+
+
            Will produce a query like::
- 
+
                SELECT * FROM my_table WHERE
-                EXISTS (SELECT 1 FROM related WHERE related.my_id=my_table.id 
+                EXISTS (SELECT 1 FROM related WHERE related.my_id=my_table.id
                AND related.x=2)
- 
+
            Because :meth:`~.RelationshipProperty.Comparator.any` uses
            a correlated subquery, its performance is
            not nearly as good when compared against large target tables as
            that of using a join.
- 
+
            :meth:`~.RelationshipProperty.Comparator.any` is particularly
            useful for testing for empty collections::
- 
+
                session.query(MyClass).filter(
                    ~MyClass.somereference.any()
                )
- 
+
            will produce::
- 
+
                SELECT * FROM my_table WHERE
                NOT EXISTS (SELECT 1 FROM related WHERE related.my_id=my_table.id)
- 
+
            :meth:`~.RelationshipProperty.Comparator.any` is only
            valid for collections, i.e. a :func:`.relationship`
            that has ``uselist=True``. For scalar references,
            use :meth:`~.RelationshipProperty.Comparator.has`.
- 
+
            """
            if not self.property.uselist:
                raise sa_exc.InvalidRequestError(
@@ -509,14 +509,14 @@ class RelationshipProperty(StrategizedProperty):
            particular criterion, using EXISTS.

            An expression like::
- 
+
                session.query(MyClass).filter(
                    MyClass.somereference.has(SomeRelated.x==2)
                )
- 
- 
+
+
            Will produce a query like::
- 
+
                SELECT * FROM my_table WHERE
                EXISTS (SELECT 1 FROM related WHERE related.id=my_table.related_id
                AND related.x=2)
@@ -525,12 +525,12 @@ class RelationshipProperty(StrategizedProperty):
            a correlated subquery, its performance is
            not nearly as good when compared against large target tables as
            that of using a join.
- 
+
            :meth:`~.RelationshipProperty.Comparator.has` is only
            valid for scalar references, i.e. a :func:`.relationship`
            that has ``uselist=False``. For collection references,
            use :meth:`~.RelationshipProperty.Comparator.any`.
- 
+
            """
            if self.property.uselist:
                raise sa_exc.InvalidRequestError(
@@ -539,46 +539,46 @@ class RelationshipProperty(StrategizedProperty):
            return self._criterion_exists(criterion, **kwargs)

        def contains(self, other, **kwargs):
-            """Return a simple expression that tests a collection for 
+            """Return a simple expression that tests a collection for
            containment of a particular item.
- 
+
            :meth:`~.RelationshipProperty.Comparator.contains` is
            only valid for a collection, i.e. a
            :func:`~.orm.relationship` that implements
            one-to-many or many-to-many with ``uselist=True``.
- 
-            When used in a simple one-to-many context, an 
+
+            When used in a simple one-to-many context, an
            expression like::
- 
+
                MyClass.contains(other)
- 
+
            Produces a clause like::
- 
+
                mytable.id == <some id>
- 
+
            Where ``<some id>`` is the value of the foreign key attribute on
            ``other`` which refers to the primary key of its parent
            object. From this it follows that
            :meth:`~.RelationshipProperty.Comparator.contains` is
            very useful when used with simple one-to-many operations.
- 
+
            For many-to-many operations, the behavior of
            :meth:`~.RelationshipProperty.Comparator.contains`
            has more caveats.
            The association table will be rendered in the statement, producing
            an "implicit" join, that is, includes multiple tables in the FROM
            clause which are equated in the WHERE clause::
- 
+
                query(MyClass).filter(MyClass.contains(other))
- 
+
            Produces a query like::
- 
+
                SELECT * FROM my_table, my_association_table AS
                my_association_table_1 WHERE
                my_table.id = my_association_table_1.parent_id
                AND my_association_table_1.child_id = <some id>
- 
+
            Where ``<some id>`` would be the primary key of ``other``.
            From the above, it is clear that
            :meth:`~.RelationshipProperty.Comparator.contains`
@@ -592,7 +592,7 @@ class RelationshipProperty(StrategizedProperty):
            a less-performant alternative using EXISTS, or refer
            to :meth:`.Query.outerjoin` as well as :ref:`ormtutorial_joins`
            for more details on constructing outer joins.
- 
+
            """
            if not self.property.uselist:
                raise sa_exc.InvalidRequestError(
@@ -630,7 +630,7 @@ class RelationshipProperty(StrategizedProperty):
                            adapt(x) == None)
                            for (x, y) in self.property.local_remote_pairs])

-            criterion = sql.and_(*[x==y for (x, y) in 
+            criterion = sql.and_(*[x==y for (x, y) in
                                zip(
                                    self.property.mapper.primary_key,
                                    self.property.\
@@ -643,26 +643,26 @@ class RelationshipProperty(StrategizedProperty):
            """Implement the ``!=`` operator.

            In a many-to-one context, such as::
- 
+
              MyClass.some_prop != <some object>
- 
+
            This will typically produce a clause such as::
- 
+
              mytable.related_id != <some id>
- 
+
            Where ``<some id>`` is the primary key of the
            given object.
- 
+
            The ``!=`` operator provides partial functionality for non-
            many-to-one comparisons:
- 
+
            * Comparisons against collections are not supported.
              Use
              :meth:`~.RelationshipProperty.Comparator.contains`
              in conjunction with :func:`~.expression.not_`.
-            * Compared to a scalar one-to-many, will produce a 
+            * Compared to a scalar one-to-many, will produce a
              clause that compares the target columns in the parent to
-              the given target. 
+              the given target.
            * Compared to a scalar many-to-many, an alias
              of the association table will be rendered as
              well, forming a natural join that is part of the
@@ -676,7 +676,7 @@ class RelationshipProperty(StrategizedProperty):
              membership tests.
            * Comparisons against ``None`` given in a one-to-many
              or many-to-many context produce an EXISTS clause.
- 
+
            """
            if isinstance(other, (NoneType, expression._Null)):
                if self.property.direction == MANYTOONE:
@@ -697,26 +697,26 @@ class RelationshipProperty(StrategizedProperty):
                configure_mappers()
            return self.prop

-        def compare(self, op, value, 
-                            value_is_parent=False, 
+        def compare(self, op, value,
+                            value_is_parent=False,
                            alias_secondary=True):
            if op == operators.eq:
                if value is None:
                    if self.uselist:
                        return ~sql.exists([1], self.primaryjoin)
                    else:
-                        return self._optimized_compare(None, 
+                        return self._optimized_compare(None,
                                        value_is_parent=value_is_parent,
                                        alias_secondary=alias_secondary)
                else:
-                    return self._optimized_compare(value, 
+                    return self._optimized_compare(value,
                                        value_is_parent=value_is_parent,
                                        alias_secondary=alias_secondary)
            else:
                return op(self.comparator, value)

-        def _optimized_compare(self, value, value_is_parent=False, 
-                                        adapt_source=None, 
+        def _optimized_compare(self, value, value_is_parent=False,
+                                        adapt_source=None,
                                        alias_secondary=True):
            if value is not None:
                value = attributes.instance_state(value)
@@ -728,12 +728,12 @@ class RelationshipProperty(StrategizedProperty):
        def __str__(self):
            return str(self.parent.class_.__name__) + "."
+ self.key - def merge(self, + def merge(self, session, source_state, source_dict, dest_state, - dest_dict, + dest_dict, load, _recursive): if load: @@ -843,8 +843,8 @@ class RelationshipProperty(StrategizedProperty): raise AssertionError("Attribute '%s' on class '%s' " "doesn't handle objects " "of type '%s'" % ( - self.key, - self.parent.class_, + self.key, + self.parent.class_, c.__class__ )) @@ -872,11 +872,11 @@ class RelationshipProperty(StrategizedProperty): @util.memoized_property def mapper(self): - """Return the targeted :class:`.Mapper` for this + """Return the targeted :class:`.Mapper` for this :class:`.RelationshipProperty`. - + This is a lazy-initializing static attribute. - + """ if isinstance(self.argument, type): mapper_ = mapper.class_mapper(self.argument, @@ -900,8 +900,8 @@ class RelationshipProperty(StrategizedProperty): @util.memoized_property @util.deprecated("0.7", "Use .target") def table(self): - """Return the selectable linked to this - :class:`.RelationshipProperty` object's target + """Return the selectable linked to this + :class:`.RelationshipProperty` object's target :class:`.Mapper`.""" return self.target @@ -917,7 +917,7 @@ class RelationshipProperty(StrategizedProperty): super(RelationshipProperty, self).do_init() def _check_conflicts(self): - """Test that this relationship is legal, warn about + """Test that this relationship is legal, warn about inheritance conflicts.""" if not self.is_primary() \ @@ -944,11 +944,11 @@ class RelationshipProperty(StrategizedProperty): % (self.key, self.parent, inheriting)) def _process_dependent_arguments(self): - """Convert incoming configuration arguments to their + """Convert incoming configuration arguments to their proper form. - + Callables are resolved, ORM annotations removed. - + """ # accept callables for other attributes which may require # deferred initialization. This technique is used @@ -978,20 +978,20 @@ class RelationshipProperty(StrategizedProperty): # remote_side are all columns, not strings. if self.order_by is not False and self.order_by is not None: self.order_by = [ - expression._only_column_elements(x, "order_by") + expression._only_column_elements(x, "order_by") for x in util.to_list(self.order_by)] self._user_defined_foreign_keys = \ util.column_set( - expression._only_column_elements(x, "foreign_keys") + expression._only_column_elements(x, "foreign_keys") for x in util.to_column_set( self._user_defined_foreign_keys )) self.remote_side = \ util.column_set( - expression._only_column_elements(x, "remote_side") + expression._only_column_elements(x, "remote_side") for x in util.to_column_set(self.remote_side)) @@ -1005,10 +1005,10 @@ class RelationshipProperty(StrategizedProperty): def _determine_joins(self): """Determine the 'primaryjoin' and 'secondaryjoin' attributes, if not passed to the constructor already. - + This is based on analysis of the foreign key relationships between the parent and target mapped selectables. - + """ if self.secondaryjoin is not None and self.secondary is None: raise sa_exc.ArgumentError("Property '" + self.key @@ -1024,7 +1024,7 @@ class RelationshipProperty(StrategizedProperty): # for more specificity, then if not found will try the more # general mapped table, which in the case of inheritance is # a join. 
-            return join_condition(mapper.mapped_table, table, 
+            return join_condition(mapper.mapped_table, table,
                                        a_subset=mapper.local_table)

        try:
@@ -1048,9 +1048,9 @@ class RelationshipProperty(StrategizedProperty):
                % self)

    def _columns_are_mapped(self, *cols):
-        """Return True if all columns in the given collection are 
+        """Return True if all columns in the given collection are
        mapped by the tables referenced by this :class:`.RelationshipProperty`.
- 
+
        """
        for c in cols:
            if self.secondary is not None \
@@ -1065,11 +1065,11 @@ class RelationshipProperty(StrategizedProperty):
        """Determine a list of "source"/"destination" column pairs
        based on the given join condition, as well as the
        foreign keys argument.
- 
+
        "source" would be a column referenced by a foreign key,
        and "destination" would be the column that has a foreign key
        reference to "source".
- 
+
        """
        fks = self._user_defined_foreign_keys
@@ -1078,7 +1078,7 @@ class RelationshipProperty(StrategizedProperty):
                consider_as_foreign_keys=fks,
                any_operator=self.viewonly)

-        # couldn't find any fks, but we have 
+        # couldn't find any fks, but we have
        # "secondary" - assume the "secondary" columns
        # are the fks
        if not eq_pairs and \
@@ -1103,19 +1103,19 @@ class RelationshipProperty(StrategizedProperty):
        # Filter out just to columns that are mapped.
        # If viewonly, allow pairs where the FK col
        # was part of "foreign keys" - the column it references
-        # may be in an un-mapped table - see 
+        # may be in an un-mapped table - see
        # test.orm.test_relationships.ViewOnlyComplexJoin.test_basic
        # for an example of this.
        eq_pairs = [(l, r) for (l, r) in eq_pairs
                    if self._columns_are_mapped(l, r)
-                    or self.viewonly and 
+                    or self.viewonly and
                    r in fks]

        if eq_pairs:
            return eq_pairs

        # from here below is just determining the best error message
-        # to report. Check for a join condition using any operator 
+        # to report. Check for a join condition using any operator
        # (not just ==), perhaps they need to turn on "viewonly=True".
        if not self.viewonly and criterion_as_pairs(join_condition,
                consider_as_foreign_keys=self._user_defined_foreign_keys,
@@ -1125,8 +1125,8 @@ class RelationshipProperty(StrategizedProperty):
                "foreign-key-equated, locally mapped column "\
                "pairs for %s "\
                "condition '%s' on relationship %s." % (
-                    primary and 'primaryjoin' or 'secondaryjoin', 
-                    join_condition, 
+                    primary and 'primaryjoin' or 'secondaryjoin',
+                    join_condition,
                    self
                )
@@ -1155,10 +1155,10 @@ class RelationshipProperty(StrategizedProperty):
                    "have adequate ForeignKey and/or "
                    "ForeignKeyConstraint objects established "
                    "(in which case 'foreign_keys' is usually "
-                    "unnecessary)?" 
+                    "unnecessary)?"
                    % (
                        primary and 'primaryjoin' or 'secondaryjoin',
-                        join_condition, 
+                        join_condition,
                        self,
                        primary and 'mapped' or 'secondary'
                    ))
@@ -1169,18 +1169,18 @@ class RelationshipProperty(StrategizedProperty):
                    "referencing Column objects have a "
                    "ForeignKey present, or are otherwise part "
                    "of a ForeignKeyConstraint on their parent "
-                    "Table, or specify the foreign_keys parameter " 
+                    "Table, or specify the foreign_keys parameter "
                    "to this relationship." % (
-                        primary and 'primaryjoin' or 'secondaryjoin', 
-                        join_condition, 
+                        primary and 'primaryjoin' or 'secondaryjoin',
+                        join_condition,
                        self
                    ))

    def _determine_synchronize_pairs(self):
        """Resolve 'primary'/'foreign' column pairs from the
        primaryjoin and secondaryjoin arguments.
- + """ if self.local_remote_pairs: if not self._user_defined_foreign_keys: @@ -1195,7 +1195,7 @@ class RelationshipProperty(StrategizedProperty): self.synchronize_pairs.append((r, l)) else: self.synchronize_pairs = self._sync_pairs_from_join( - self.primaryjoin, + self.primaryjoin, True) self._calculated_foreign_keys = util.column_set( @@ -1204,7 +1204,7 @@ class RelationshipProperty(StrategizedProperty): if self.secondaryjoin is not None: self.secondary_synchronize_pairs = self._sync_pairs_from_join( - self.secondaryjoin, + self.secondaryjoin, False) self._calculated_foreign_keys.update( r for (l, r) in @@ -1213,12 +1213,12 @@ class RelationshipProperty(StrategizedProperty): self.secondary_synchronize_pairs = None def _determine_direction(self): - """Determine if this relationship is one to many, many to one, + """Determine if this relationship is one to many, many to one, many to many. - + This is derived from the primaryjoin, presence of "secondary", and in the case of self-referential the "remote side". - + """ if self.secondaryjoin is not None: self.direction = MANYTOMANY @@ -1295,19 +1295,19 @@ class RelationshipProperty(StrategizedProperty): % self) def _determine_local_remote_pairs(self): - """Determine pairs of columns representing "local" to + """Determine pairs of columns representing "local" to "remote", where "local" columns are on the parent mapper, "remote" are on the target mapper. - + These pairs are used on the load side only to generate lazy loading clauses. """ if not self.local_remote_pairs and not self.remote_side: - # the most common, trivial case. Derive + # the most common, trivial case. Derive # local/remote pairs from the synchronize pairs. eq_pairs = util.unique_list( - self.synchronize_pairs + + self.synchronize_pairs + (self.secondary_synchronize_pairs or [])) if self.direction is MANYTOONE: self.local_remote_pairs = [(r, l) for l, r in eq_pairs] @@ -1469,8 +1469,8 @@ class RelationshipProperty(StrategizedProperty): if not self.viewonly and self._dependency_processor: self._dependency_processor.per_property_preprocessors(uow) - def _create_joins(self, source_polymorphic=False, - source_selectable=None, dest_polymorphic=False, + def _create_joins(self, source_polymorphic=False, + source_selectable=None, dest_polymorphic=False, dest_selectable=None, of_type=None): if source_selectable is None: if source_polymorphic and self.parent.with_polymorphic: @@ -1492,10 +1492,10 @@ class RelationshipProperty(StrategizedProperty): # place a barrier on the destination such that # replacement traversals won't ever dig into it. - # its internal structure remains fixed + # its internal structure remains fixed # regardless of context. dest_selectable = _shallow_annotate( - dest_selectable, + dest_selectable, {'no_replacement_traverse':True}) aliased = aliased or (source_selectable is not None) diff --git a/lib/sqlalchemy/orm/query.py b/lib/sqlalchemy/orm/query.py index f82fd3edac..168fc0c3da 100644 --- a/lib/sqlalchemy/orm/query.py +++ b/lib/sqlalchemy/orm/query.py @@ -6,14 +6,14 @@ """The Query class and support. -Defines the :class:`.Query` class, the central +Defines the :class:`.Query` class, the central construct used by the ORM to construct database queries. The :class:`.Query` class should not be confused with the -:class:`.Select` class, which defines database -SELECT operations at the SQL (non-ORM) level. 
``Query`` differs from -``Select`` in that it returns ORM-mapped objects and interacts with an -ORM session, whereas the ``Select`` construct interacts directly with the +:class:`.Select` class, which defines database +SELECT operations at the SQL (non-ORM) level. ``Query`` differs from +``Select`` in that it returns ORM-mapped objects and interacts with an +ORM session, whereas the ``Select`` construct interacts directly with the database to return iterable result sets. """ @@ -55,14 +55,14 @@ class Query(object): """ORM-level SQL construction object. :class:`.Query` is the source of all SELECT statements generated by the - ORM, both those formulated by end-user query operations as well as by - high level internal operations such as related collection loading. It + ORM, both those formulated by end-user query operations as well as by + high level internal operations such as related collection loading. It features a generative interface whereby successive calls return a new - :class:`.Query` object, a copy of the former with additional + :class:`.Query` object, a copy of the former with additional criteria and options associated with it. - :class:`.Query` objects are normally initially generated using the - :meth:`~.Session.query` method of :class:`.Session`. For a full walkthrough + :class:`.Query` objects are normally initially generated using the + :meth:`~.Session.query` method of :class:`.Session`. For a full walkthrough of :class:`.Query` usage, see the :ref:`ormtutorial_toplevel`. """ @@ -134,20 +134,20 @@ class Query(object): with_polymorphic = mapper._with_polymorphic_mappers if mapper.mapped_table not in \ self._polymorphic_adapters: - self._mapper_loads_polymorphically_with(mapper, + self._mapper_loads_polymorphically_with(mapper, sql_util.ColumnAdapter( - selectable, + selectable, mapper._equivalent_columns)) adapter = None elif is_aliased_class: adapter = sql_util.ColumnAdapter( - selectable, + selectable, mapper._equivalent_columns) with_polymorphic = None else: with_polymorphic = adapter = None - d[entity] = (mapper, adapter, selectable, + d[entity] = (mapper, adapter, selectable, is_aliased_class, with_polymorphic) ent.setup_entity(entity, *d[entity]) @@ -199,8 +199,8 @@ class Query(object): def _adapt_col_list(self, cols): return [ self._adapt_clause( - expression._literal_as_text(o), - True, True) + expression._literal_as_text(o), + True, True) for o in cols ] @@ -209,7 +209,7 @@ class Query(object): self._orm_only_adapt = False def _adapt_clause(self, clause, as_filter, orm_only): - """Adapt incoming clauses to transformations which have been applied + """Adapt incoming clauses to transformations which have been applied within this query.""" adapters = [] @@ -228,12 +228,12 @@ class Query(object): if self._from_obj_alias: # for the "from obj" alias, apply extra rule to the - # 'ORM only' check, if this query were generated from a + # 'ORM only' check, if this query were generated from a # subquery of itself, i.e. _from_selectable(), apply adaption # to all SQL constructs. 
adapters.append( ( - getattr(self, '_orm_only_from_obj_alias', orm_only), + getattr(self, '_orm_only_from_obj_alias', orm_only), self._from_obj_alias.replace ) ) @@ -261,8 +261,8 @@ class Query(object): return e return visitors.replacement_traverse( - clause, - {}, + clause, + {}, replace ) @@ -297,7 +297,7 @@ class Query(object): def _only_mapper_zero(self, rationale=None): if len(self._entities) > 1: raise sa_exc.InvalidRequestError( - rationale or + rationale or "This operation requires a Query against a single mapper." ) return self._mapper_zero() @@ -317,7 +317,7 @@ class Query(object): def _only_entity_zero(self, rationale=None): if len(self._entities) > 1: raise sa_exc.InvalidRequestError( - rationale or + rationale or "This operation requires a Query against a single mapper." ) return self._entity_zero() @@ -389,13 +389,13 @@ class Query(object): ): if getattr(self, attr) is not notset: raise sa_exc.InvalidRequestError( - "Can't call Query.%s() when %s has been called" % + "Can't call Query.%s() when %s has been called" % (meth, methname) ) - def _get_options(self, populate_existing=None, - version_check=None, - only_load_props=None, + def _get_options(self, populate_existing=None, + version_check=None, + only_load_props=None, refresh_state=None): if populate_existing: self._populate_existing = populate_existing @@ -432,17 +432,17 @@ class Query(object): return stmt._annotate({'no_replacement_traverse': True}) def subquery(self, name=None): - """return the full SELECT statement represented by this :class:`.Query`, + """return the full SELECT statement represented by this :class:`.Query`, embedded within an :class:`.Alias`. Eager JOIN generation within the query is disabled. The statement will not have disambiguating labels - applied to the list of selected columns unless the + applied to the list of selected columns unless the :meth:`.Query.with_labels` method is used to generate a new :class:`.Query` with the option enabled. - :param name: string name to be assigned as the alias; + :param name: string name to be assigned as the alias; this is passed through to :meth:`.FromClause.alias`. If ``None``, a name will be deterministically generated at compile time. @@ -456,12 +456,12 @@ class Query(object): represented as a common table expression (CTE). .. versionadded:: 0.7.6 - - Parameters and usage are the same as those of the - :meth:`._SelectBase.cte` method; see that method for + + Parameters and usage are the same as those of the + :meth:`._SelectBase.cte` method; see that method for further details. - - Here is the `Postgresql WITH + + Here is the `Postgresql WITH RECURSIVE example `_. 
Note that, in this example, the ``included_parts`` cte and the ``incl_alias`` alias of it are Core selectables, which @@ -478,8 +478,8 @@ class Query(object): quantity = Column(Integer) included_parts = session.query( - Part.sub_part, - Part.part, + Part.sub_part, + Part.part, Part.quantity).\\ filter(Part.part=="our part").\\ cte(name="included_parts", recursive=True) @@ -488,8 +488,8 @@ class Query(object): parts_alias = aliased(Part, name="p") included_parts = included_parts.union_all( session.query( - parts_alias.part, - parts_alias.sub_part, + parts_alias.part, + parts_alias.sub_part, parts_alias.quantity).\\ filter(parts_alias.part==incl_alias.c.sub_part) ) @@ -501,14 +501,14 @@ class Query(object): group_by(included_parts.c.sub_part) See also: - + :meth:`._SelectBase.cte` """ return self.enable_eagerloads(False).statement.cte(name=name, recursive=recursive) def label(self, name): - """Return the full SELECT statement represented by this :class:`.Query`, converted + """Return the full SELECT statement represented by this :class:`.Query`, converted to a scalar subquery with a label of the given name. Analogous to :meth:`sqlalchemy.sql._SelectBaseMixin.label`. @@ -521,7 +521,7 @@ class Query(object): def as_scalar(self): - """Return the full SELECT statement represented by this :class:`.Query`, converted + """Return the full SELECT statement represented by this :class:`.Query`, converted to a scalar subquery. Analogous to :meth:`sqlalchemy.sql._SelectBaseMixin.as_scalar`. @@ -538,7 +538,7 @@ class Query(object): @_generative() def enable_eagerloads(self, value): - """Control whether or not eager joins and subqueries are + """Control whether or not eager joins and subqueries are rendered. When set to False, the returned Query will not render @@ -574,17 +574,17 @@ class Query(object): def enable_assertions(self, value): """Control whether assertions are generated. - When set to False, the returned Query will - not assert its state before certain operations, + When set to False, the returned Query will + not assert its state before certain operations, including that LIMIT/OFFSET has not been applied when filter() is called, no criterion exists when get() is called, and no "from_statement()" exists when filter()/order_by()/group_by() etc. - is called. This more permissive mode is used by - custom Query subclasses to specify criterion or + is called. This more permissive mode is used by + custom Query subclasses to specify criterion or other modifiers outside of the usual usage patterns. - Care should be taken to ensure that the usage + Care should be taken to ensure that the usage pattern is even possible. A statement applied by from_statement() will override any criterion set by filter() or order_by(), for example. @@ -596,7 +596,7 @@ class Query(object): def whereclause(self): """A readonly attribute which returns the current WHERE criterion for this Query. - This returned value is a SQL expression construct, or ``None`` if no + This returned value is a SQL expression construct, or ``None`` if no criterion has been established. """ @@ -604,19 +604,19 @@ class Query(object): @_generative() def _with_current_path(self, path): - """indicate that this query applies to objects loaded + """indicate that this query applies to objects loaded within a certain path. 
- Used by deferred loaders (see strategies.py) which transfer - query options from an originating query to a newly generated + Used by deferred loaders (see strategies.py) which transfer + query options from an originating query to a newly generated query intended for the deferred load. """ self._current_path = path @_generative(_no_clauseelement_condition) - def with_polymorphic(self, - cls_or_mappers, + def with_polymorphic(self, + cls_or_mappers, selectable=None, discriminator=None): """Load columns for descendant mappers of this Query's mapper. @@ -654,8 +654,8 @@ class Query(object): "No primary mapper set up for this Query.") entity = self._entities[0]._clone() self._entities = [entity] + self._entities[1:] - entity.set_with_polymorphic(self, - cls_or_mappers, + entity.set_with_polymorphic(self, + cls_or_mappers, selectable=selectable, discriminator=discriminator) @@ -668,15 +668,15 @@ class Query(object): overwritten. In particular, it's usually impossible to use this setting with - eagerly loaded collections (i.e. any lazy='joined' or 'subquery') - since those collections will be cleared for a new load when + eagerly loaded collections (i.e. any lazy='joined' or 'subquery') + since those collections will be cleared for a new load when encountered in a subsequent result batch. In the case of 'subquery' loading, the full result for all rows is fetched which generally defeats the purpose of :meth:`~sqlalchemy.orm.query.Query.yield_per`. Also note that many DBAPIs do not "stream" results, pre-buffering - all rows before making them available, including mysql-python and - psycopg2. :meth:`~sqlalchemy.orm.query.Query.yield_per` will also + all rows before making them available, including mysql-python and + psycopg2. :meth:`~sqlalchemy.orm.query.Query.yield_per` will also set the ``stream_results`` execution option to ``True``, which currently is only understood by psycopg2 and causes server side cursors to be used. @@ -687,33 +687,33 @@ class Query(object): self._execution_options['stream_results'] = True def get(self, ident): - """Return an instance based on the given primary key identifier, + """Return an instance based on the given primary key identifier, or ``None`` if not found. - + E.g.:: - + my_user = session.query(User).get(5) - + some_object = session.query(VersionedFoo).get((5, 10)) - - :meth:`~.Query.get` is special in that it provides direct + + :meth:`~.Query.get` is special in that it provides direct access to the identity map of the owning :class:`.Session`. If the given primary key identifier is present in the local identity map, the object is returned - directly from this collection and no SQL is emitted, + directly from this collection and no SQL is emitted, unless the object has been marked fully expired. If not present, a SELECT is performed in order to locate the object. - - :meth:`~.Query.get` also will perform a check if - the object is present in the identity map and - marked as expired - a SELECT + + :meth:`~.Query.get` also will perform a check if + the object is present in the identity map and + marked as expired - a SELECT is emitted to refresh the object as well as to ensure that the row is still present. If not, :class:`~sqlalchemy.orm.exc.ObjectDeletedError` is raised. - + :meth:`~.Query.get` is only used to return a single - mapped instance, not multiple instances or + mapped instance, not multiple instances or individual column constructs, and strictly on a single primary key value. 
The originating :class:`.Query` must be constructed in this way, @@ -722,26 +722,26 @@ class Query(object): options via :meth:`~.Query.options` may be applied however, and will be used if the object is not yet locally present. - + A lazy-loading, many-to-one attribute configured by :func:`.relationship`, using a simple - foreign-key-to-primary-key criterion, will also use an + foreign-key-to-primary-key criterion, will also use an operation equivalent to :meth:`~.Query.get` in order to retrieve the target value from the local identity map before querying the database. See :ref:`loading_toplevel` for further details on relationship loading. - + :param ident: A scalar or tuple value representing the primary key. For a composite primary key, the order of identifiers corresponds in most cases - to that of the mapped :class:`.Table` object's + to that of the mapped :class:`.Table` object's primary key columns. For a :func:`.mapper` that was given the ``primary key`` argument during - construction, the order of identifiers corresponds + construction, the order of identifiers corresponds to the elements present in this collection. :return: The object instance, or ``None``. - + """ # convert composite types to individual args @@ -789,14 +789,14 @@ class Query(object): :meth:`.Select.correlate` after coercion to expression constructs. The correlation arguments take effect in such cases - as when :meth:`.Query.from_self` is used, or when - a subquery as returned by :meth:`.Query.subquery` is + as when :meth:`.Query.from_self` is used, or when + a subquery as returned by :meth:`.Query.subquery` is embedded in another :func:`~.expression.select` construct. """ self._correlate = self._correlate.union( - _orm_selectable(s) + _orm_selectable(s) for s in args) @_generative() @@ -813,11 +813,11 @@ class Query(object): @_generative() def populate_existing(self): - """Return a :class:`.Query` that will expire and refresh all instances + """Return a :class:`.Query` that will expire and refresh all instances as they are loaded, or reused from the current :class:`.Session`. - :meth:`.populate_existing` does not improve behavior when - the ORM is used normally - the :class:`.Session` object's usual + :meth:`.populate_existing` does not improve behavior when + the ORM is used normally - the :class:`.Session` object's usual behavior of maintaining a transaction and expiring all attributes after rollback or commit handles object state automatically. This method is not intended for general use. @@ -830,7 +830,7 @@ class Query(object): """Set the 'invoke all eagers' flag which causes joined- and subquery loaders to traverse into already-loaded related objects and collections. - + Default is that of :attr:`.Query._invoke_all_eagers`. """ @@ -838,7 +838,7 @@ class Query(object): def with_parent(self, instance, property=None): """Add filtering criterion that relates the given instance - to a child object or collection, using its attribute state + to a child object or collection, using its attribute state as well as an established :func:`.relationship()` configuration. 
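As a rough sketch of the :meth:`~.Query.get` behavior described above, assuming
the hypothetical ``User`` and ``VersionedFoo`` mappings used in the docstring
examples::

    # may emit a SELECT the first time, if the identity is not
    # already present in the Session
    user = session.query(User).get(5)

    # same identity, same Session: returned directly from the
    # identity map, with no SQL emitted unless the object has
    # been marked fully expired
    assert session.query(User).get(5) is user

    # a composite primary key is passed as a tuple, ordered as
    # the mapped Table object's primary key columns
    some_object = session.query(VersionedFoo).get((5, 10))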
@@ -863,7 +863,7 @@ class Query(object): else: raise sa_exc.InvalidRequestError( "Could not locate a property which relates instances " - "of class '%s' to instances of class '%s'" % + "of class '%s' to instances of class '%s'" % ( self._mapper_zero().class_.__name__, instance.__class__.__name__) @@ -873,7 +873,7 @@ class Query(object): @_generative() def add_entity(self, entity, alias=None): - """add a mapped entity to the list of result columns + """add a mapped entity to the list of result columns to be returned.""" if alias is not None: @@ -892,7 +892,7 @@ class Query(object): self.session = session def from_self(self, *entities): - """return a Query that selects from this Query's + """return a Query that selects from this Query's SELECT statement. \*entities - optional list of entities which will replace @@ -914,11 +914,11 @@ class Query(object): @_generative() def _from_selectable(self, fromclause): for attr in ( - '_statement', '_criterion', + '_statement', '_criterion', '_order_by', '_group_by', - '_limit', '_offset', - '_joinpath', '_joinpoint', - '_distinct', '_having', + '_limit', '_offset', + '_joinpath', '_joinpoint', + '_distinct', '_having', '_prefixes', ): self.__dict__.pop(attr, None) @@ -934,7 +934,7 @@ class Query(object): e.adapt_to_selectable(self, self._from_obj[0]) def values(self, *columns): - """Return an iterator yielding result tuples corresponding + """Return an iterator yielding result tuples corresponding to the given list of columns""" if not columns: @@ -947,7 +947,7 @@ class Query(object): _values = values def value(self, column): - """Return a scalar result corresponding to the given + """Return a scalar result corresponding to the given column expression.""" try: # Py3K @@ -972,7 +972,7 @@ class Query(object): filter(User.name.like('%ed%')).\\ order_by(Address.email) - # given *only* User.id==5, Address.email, and 'q', what + # given *only* User.id==5, Address.email, and 'q', what # would the *next* User in the result be ? subq = q.with_entities(Address.email).\\ order_by(None).\\ @@ -989,7 +989,7 @@ class Query(object): @_generative() def add_columns(self, *column): - """Add one or more column expressions to the list + """Add one or more column expressions to the list of result columns to be returned.""" self._entities = list(self._entities) @@ -1000,13 +1000,13 @@ class Query(object): # given arg is a FROM clause self._setup_aliasizers(self._entities[l:]) - @util.pending_deprecation("0.7", - ":meth:`.add_column` is superseded by :meth:`.add_columns`", + @util.pending_deprecation("0.7", + ":meth:`.add_column` is superseded by :meth:`.add_columns`", False) def add_column(self, column): """Add a column expression to the list of result columns to be returned. - Pending deprecation: :meth:`.add_column` will be superseded by + Pending deprecation: :meth:`.add_column` will be superseded by :meth:`.add_columns`. """ @@ -1045,16 +1045,16 @@ class Query(object): def with_transformation(self, fn): """Return a new :class:`.Query` object transformed by the given function. - + E.g.:: - + def filter_something(criterion): def transform(q): return q.filter(criterion) return transform - + q = q.with_transformation(filter_something(x==5)) - + This allows ad-hoc recipes to be created for :class:`.Query` objects. See the example at :ref:`hybrid_transformers`. 
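A brief usage sketch of the :meth:`~.Query.with_transformation` hook documented
above, assuming a hypothetical ``User`` mapping (the helper names here are
illustrative only)::

    def filter_by_name(name):
        def transform(q):
            return q.filter(User.name == name)
        return transform

    def only_ids(q):
        return q.with_entities(User.id)

    # transformations compose generatively, like any other
    # Query method
    q = session.query(User).\
            with_transformation(filter_by_name('ed')).\
            with_transformation(only_ids)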
@@ -1065,13 +1065,13 @@ class Query(object): @_generative() def with_hint(self, selectable, text, dialect_name='*'): - """Add an indexing hint for the given entity or selectable to + """Add an indexing hint for the given entity or selectable to this :class:`.Query`. - Functionality is passed straight through to - :meth:`~sqlalchemy.sql.expression.Select.with_hint`, - with the addition that ``selectable`` can be a - :class:`.Table`, :class:`.Alias`, or ORM entity / mapped class + Functionality is passed straight through to + :meth:`~sqlalchemy.sql.expression.Select.with_hint`, + with the addition that ``selectable`` can be a + :class:`.Table`, :class:`.Alias`, or ORM entity / mapped class /etc. """ mapper, selectable, is_aliased_class = _entity_info(selectable) @@ -1082,7 +1082,7 @@ class Query(object): def execution_options(self, **kwargs): """ Set non-SQL options which take effect during execution. - The options are the same as those accepted by + The options are the same as those accepted by :meth:`.Connection.execution_options`. Note that the ``stream_results`` execution option is enabled @@ -1105,16 +1105,16 @@ class Query(object): ``FOR UPDATE`` (standard SQL, supported by most dialects) ``'update_nowait'`` - passes ``for_update='nowait'``, which - translates to ``FOR UPDATE NOWAIT`` (supported by Oracle, + translates to ``FOR UPDATE NOWAIT`` (supported by Oracle, PostgreSQL 8.1 upwards) ``'read'`` - passes ``for_update='read'``, which translates to - ``LOCK IN SHARE MODE`` (for MySQL), and ``FOR SHARE`` (for + ``LOCK IN SHARE MODE`` (for MySQL), and ``FOR SHARE`` (for PostgreSQL) - ``'read_nowait'`` - passes ``for_update='read_nowait'``, which + ``'read_nowait'`` - passes ``for_update='read_nowait'``, which translates to ``FOR SHARE NOWAIT`` (supported by PostgreSQL). - + .. versionadded:: 0.7.7 ``FOR SHARE`` and ``FOR SHARE NOWAIT`` (PostgreSQL). """ @@ -1123,7 +1123,7 @@ class Query(object): @_generative() def params(self, *args, **kwargs): - """add values for bind parameters which may have been + """add values for bind parameters which may have been specified in filter(). parameters may be specified using \**kwargs, or optionally a single @@ -1147,15 +1147,15 @@ class Query(object): of this :class:`.Query`, using SQL expressions. e.g.:: - + session.query(MyClass).filter(MyClass.name == 'some name') - + Multiple criteria are joined together by AND:: - + session.query(MyClass).\\ filter(MyClass.name == 'some name', MyClass.id > 5) - - The criterion is any SQL expression object applicable to the + + The criterion is any SQL expression object applicable to the WHERE clause of a select. String expressions are coerced into SQL expression constructs via the :func:`.text` construct. @@ -1163,7 +1163,7 @@ class Query(object): Multiple criteria joined by AND. See also: - + :meth:`.Query.filter_by` - filter on keyword expressions. """ @@ -1187,24 +1187,24 @@ class Query(object): def filter_by(self, **kwargs): """apply the given filtering criterion to a copy of this :class:`.Query`, using keyword expressions. - + e.g.:: - + session.query(MyClass).filter_by(name = 'some name') - + Multiple criteria are joined together by AND:: - + session.query(MyClass).\\ filter_by(name = 'some name', id = 5) - - The keyword expressions are extracted from the primary - entity of the query, or the last entity that was the + + The keyword expressions are extracted from the primary + entity of the query, or the last entity that was the target of a call to :meth:`.Query.join`. 
- + See also: - + :meth:`.Query.filter` - filter on SQL expressions. - + """ clauses = [_entity_descriptor(self._joinpoint_zero(), key) == value @@ -1213,15 +1213,15 @@ class Query(object): @_generative(_no_statement_condition, _no_limit_offset) def order_by(self, *criterion): - """apply one or more ORDER BY criterion to the query and return + """apply one or more ORDER BY criterion to the query and return the newly resulting ``Query`` - All existing ORDER BY settings can be suppressed by + All existing ORDER BY settings can be suppressed by passing ``None`` - this will suppress any ORDER BY configured on mappers as well. Alternatively, an existing ORDER BY setting on the Query - object can be entirely cancelled by passing ``False`` + object can be entirely cancelled by passing ``False`` as the value - use this before calling methods where an ORDER BY is invalid. @@ -1245,7 +1245,7 @@ class Query(object): @_generative(_no_statement_condition, _no_limit_offset) def group_by(self, *criterion): - """apply one or more GROUP BY criterion to the query and return + """apply one or more GROUP BY criterion to the query and return the newly resulting :class:`.Query`""" criterion = list(chain(*[_orm_columns(c) for c in criterion])) @@ -1261,17 +1261,17 @@ class Query(object): def having(self, criterion): """apply a HAVING criterion to the query and return the newly resulting :class:`.Query`. - + :meth:`having` is used in conjunction with :meth:`group_by`. - + HAVING criterion makes it possible to use filters on aggregate functions like COUNT, SUM, AVG, MAX, and MIN, eg.:: - + q = session.query(User.id).\\ join(User.addresses).\\ group_by(User.id).\\ having(func.count(Address.id) > 2) - + """ if isinstance(criterion, basestring): @@ -1307,7 +1307,7 @@ class Query(object): will nest on each ``union()``, and produces:: - SELECT * FROM (SELECT * FROM (SELECT * FROM X UNION + SELECT * FROM (SELECT * FROM (SELECT * FROM X UNION SELECT * FROM y) UNION SELECT * FROM Z) Whereas:: @@ -1316,14 +1316,14 @@ class Query(object): produces:: - SELECT * FROM (SELECT * FROM X UNION SELECT * FROM y UNION + SELECT * FROM (SELECT * FROM X UNION SELECT * FROM y UNION SELECT * FROM Z) Note that many database backends do not allow ORDER BY to be rendered on a query called within UNION, EXCEPT, etc. To disable all ORDER BY clauses including those configured on mappers, issue ``query.order_by(None)`` - the resulting - :class:`.Query` object will not render ORDER BY within + :class:`.Query` object will not render ORDER BY within its SELECT statement. """ @@ -1390,103 +1390,103 @@ class Query(object): def join(self, *props, **kwargs): """Create a SQL JOIN against this :class:`.Query` object's criterion and apply generatively, returning the newly resulting :class:`.Query`. - + **Simple Relationship Joins** - + Consider a mapping between two classes ``User`` and ``Address``, - with a relationship ``User.addresses`` representing a collection - of ``Address`` objects associated with each ``User``. The most common + with a relationship ``User.addresses`` representing a collection + of ``Address`` objects associated with each ``User``. 
The most common usage of :meth:`~.Query.join` is to create a JOIN along this relationship, using the ``User.addresses`` attribute as an indicator for how this should occur:: - + q = session.query(User).join(User.addresses) - - Where above, the call to :meth:`~.Query.join` along ``User.addresses`` + + Where above, the call to :meth:`~.Query.join` along ``User.addresses`` will result in SQL equivalent to:: - + SELECT user.* FROM user JOIN address ON user.id = address.user_id - + In the above example we refer to ``User.addresses`` as passed to :meth:`~.Query.join` as the *on clause*, that is, it indicates - how the "ON" portion of the JOIN should be constructed. For a + how the "ON" portion of the JOIN should be constructed. For a single-entity query such as the one above (i.e. we start by selecting only from - ``User`` and nothing else), the relationship can also be specified by its + ``User`` and nothing else), the relationship can also be specified by its string name:: - + q = session.query(User).join("addresses") - - :meth:`~.Query.join` can also accommodate multiple + + :meth:`~.Query.join` can also accommodate multiple "on clause" arguments to produce a chain of joins, such as below where a join across four related entities is constructed:: - + q = session.query(User).join("orders", "items", "keywords") - + The above would be shorthand for three separate calls to :meth:`~.Query.join`, each using an explicit attribute to indicate the source entity:: - + q = session.query(User).\\ join(User.orders).\\ join(Order.items).\\ join(Item.keywords) - + **Joins to a Target Entity or Selectable** - + A second form of :meth:`~.Query.join` allows any mapped entity - or core selectable construct as a target. In this usage, + or core selectable construct as a target. In this usage, :meth:`~.Query.join` will attempt to create a JOIN along the natural foreign key relationship between two entities:: - + q = session.query(User).join(Address) - - The above calling form of :meth:`.join` will raise an error if - either there are no foreign keys between the two entities, or if + + The above calling form of :meth:`.join` will raise an error if + either there are no foreign keys between the two entities, or if there are multiple foreign key linkages between them. In the - above calling form, :meth:`~.Query.join` is called upon to + above calling form, :meth:`~.Query.join` is called upon to create the "on clause" automatically for us. The target can be any mapped entity or selectable, such as a :class:`.Table`:: - + q = session.query(User).join(addresses_table) - + **Joins to a Target with an ON Clause** - + The third calling form allows both the target entity as well - as the ON clause to be passed explicitly. Suppose for + as the ON clause to be passed explicitly. Suppose for example we wanted to join to ``Address`` twice, using - an alias the second time. We use :func:`~sqlalchemy.orm.aliased` + an alias the second time. 
We use :func:`~sqlalchemy.orm.aliased` to create a distinct alias of ``Address``, and join - to it using the ``target, onclause`` form, so that the + to it using the ``target, onclause`` form, so that the alias can be specified explicitly as the target along with the relationship to instruct how the ON clause should proceed:: - + a_alias = aliased(Address) - + q = session.query(User).\\ join(User.addresses).\\ join(a_alias, User.addresses).\\ filter(Address.email_address=='ed@foo.com').\\ filter(a_alias.email_address=='ed@bar.com') - + Where above, the generated SQL would be similar to:: - - SELECT user.* FROM user + + SELECT user.* FROM user JOIN address ON user.id = address.user_id JOIN address AS address_1 ON user.id=address_1.user_id WHERE address.email_address = :email_address_1 AND address_1.email_address = :email_address_2 - - The two-argument calling form of :meth:`~.Query.join` + + The two-argument calling form of :meth:`~.Query.join` also allows us to construct arbitrary joins with SQL-oriented "on clause" expressions, not relying upon configured relationships at all. Any SQL expression can be passed as the ON clause when using the two-argument form, which should refer to the target entity in some way as well as an applicable source entity:: - + q = session.query(User).join(Address, User.id==Address.user_id) - + .. versionchanged:: 0.7 - In SQLAlchemy 0.6 and earlier, the two argument form of + In SQLAlchemy 0.6 and earlier, the two argument form of :meth:`~.Query.join` requires the usage of a tuple: ``query(User).join((Address, User.id==Address.user_id))``\ . This calling form is accepted in 0.7 and further, though @@ -1494,12 +1494,12 @@ class Query(object): a single :meth:`~.Query.join` call, which itself is also not generally necessary as it is now equivalent to multiple calls (this wasn't always the case). - + **Advanced Join Targeting and Adaption** - There is a lot of flexibility in what the "target" can be when using - :meth:`~.Query.join`. As noted previously, it also accepts - :class:`.Table` constructs and other selectables such as :func:`.alias` + There is a lot of flexibility in what the "target" can be when using + :meth:`~.Query.join`. As noted previously, it also accepts + :class:`.Table` constructs and other selectables such as :func:`.alias` and :func:`.select` constructs, with either the one or two-argument forms:: addresses_q = select([Address.user_id]).\\ @@ -1508,133 +1508,133 @@ class Query(object): q = session.query(User).\\ join(addresses_q, addresses_q.c.user_id==User.id) - - :meth:`~.Query.join` also features the ability to *adapt* a + + :meth:`~.Query.join` also features the ability to *adapt* a :meth:`~sqlalchemy.orm.relationship` -driven ON clause to the target selectable. 
Below we construct a JOIN from ``User`` to a subquery against ``Address``, allowing the relationship denoted by ``User.addresses`` to *adapt* itself to the altered target:: - + address_subq = session.query(Address).\\ filter(Address.email_address == 'ed@foo.com').\\ subquery() q = session.query(User).join(address_subq, User.addresses) - + Producing SQL similar to:: - - SELECT user.* FROM user + + SELECT user.* FROM user JOIN ( - SELECT address.id AS id, - address.user_id AS user_id, - address.email_address AS email_address - FROM address + SELECT address.id AS id, + address.user_id AS user_id, + address.email_address AS email_address + FROM address WHERE address.email_address = :email_address_1 ) AS anon_1 ON user.id = anon_1.user_id - + The above form allows one to fall back onto an explicit ON clause at any time:: - + q = session.query(User).\\ join(address_subq, User.id==address_subq.c.user_id) - + **Controlling what to Join From** - + While :meth:`~.Query.join` exclusively deals with the "right" side of the JOIN, we can also control the "left" side, in those cases where it's needed, using :meth:`~.Query.select_from`. Below we construct a query against ``Address`` but can still make usage of ``User.addresses`` as our ON clause by instructing - the :class:`.Query` to select first from the ``User`` + the :class:`.Query` to select first from the ``User`` entity:: - + q = session.query(Address).select_from(User).\\ join(User.addresses).\\ filter(User.name == 'ed') - + Which will produce SQL similar to:: - - SELECT address.* FROM user - JOIN address ON user.id=address.user_id + + SELECT address.* FROM user + JOIN address ON user.id=address.user_id WHERE user.name = :name_1 - + **Constructing Aliases Anonymously** - + :meth:`~.Query.join` can construct anonymous aliases using the ``aliased=True`` flag. This feature is useful when a query is being joined algorithmically, such as when querying self-referentially to an arbitrary depth:: - + q = session.query(Node).\\ join("children", "children", aliased=True) - + When ``aliased=True`` is used, the actual "alias" construct is not explicitly available. To work with it, methods such as - :meth:`.Query.filter` will adapt the incoming entity to + :meth:`.Query.filter` will adapt the incoming entity to the last join point:: - + q = session.query(Node).\\ join("children", "children", aliased=True).\\ filter(Node.name == 'grandchild 1') - + When using automatic aliasing, the ``from_joinpoint=True`` argument can allow a multi-node join to be broken into multiple calls to :meth:`~.Query.join`, so that each path along the way can be further filtered:: - + q = session.query(Node).\\ join("children", aliased=True).\\ filter(Node.name == 'child 1').\\ join("children", aliased=True, from_joinpoint=True).\\ filter(Node.name == 'grandchild 1') - + The filtering aliases above can then be reset back to the original ``Node`` entity using :meth:`~.Query.reset_joinpoint`:: - + q = session.query(Node).\\ join("children", "children", aliased=True).\\ filter(Node.name == 'grandchild 1').\\ reset_joinpoint().\\ filter(Node.name == 'parent 1') - - For an example of ``aliased=True``, see the distribution example :ref:`examples_xmlpersistence` which illustrates an XPath-like query system using algorithmic joins.
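(The self-referential ``Node`` examples above assume an adjacency-list mapping
along these lines; a hypothetical sketch added for illustration, reusing the
``Base`` from the earlier setup)::

    from sqlalchemy import Column, ForeignKey, Integer, String
    from sqlalchemy.orm import backref, relationship

    class Node(Base):
        __tablename__ = 'node'
        id = Column(Integer, primary_key=True)
        parent_id = Column(Integer, ForeignKey('node.id'))
        name = Column(String(50))
        # "children" is the relationship name traversed by
        # join("children", aliased=True) in the examples above
        children = relationship("Node",
                        backref=backref("parent", remote_side=[id]))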
- - :param *props: A collection of one or more join conditions, - each consisting of a relationship-bound attribute or string - relationship name representing an "on clause", or a single + + :param *props: A collection of one or more join conditions, + each consisting of a relationship-bound attribute or string + relationship name representing an "on clause", or a single target entity, or a tuple in the form of ``(target, onclause)``. A special two-argument calling form of the form ``target, onclause`` is also accepted. - :param aliased=False: If True, indicate that the JOIN target should be + :param aliased=False: If True, indicate that the JOIN target should be anonymously aliased. Subsequent calls to :class:`~.Query.filter` - and similar will adapt the incoming criterion to the target + and similar will adapt the incoming criterion to the target alias, until :meth:`~.Query.reset_joinpoint` is called. :param from_joinpoint=False: When using ``aliased=True``, a setting of True here will cause the join to be from the most recent - joined target, rather than starting back from the original + joined target, rather than starting back from the original FROM clauses of the query. - + See also: - + :ref:`ormtutorial_joins` in the ORM tutorial. :ref:`inheritance_toplevel` for details on how :meth:`~.Query.join` is used for inheritance relationships. - + :func:`.orm.join` - a standalone ORM-level join function, - used internally by :meth:`.Query.join`, which in previous + used internally by :meth:`.Query.join`, which in previous SQLAlchemy versions was the primary ORM-level joining interface. - + """ aliased, from_joinpoint = kwargs.pop('aliased', False),\ kwargs.pop('from_joinpoint', False) if kwargs: raise TypeError("unknown arguments: %s" % ','.join(kwargs.iterkeys())) - return self._join(props, - outerjoin=False, create_aliases=aliased, + return self._join(props, + outerjoin=False, create_aliases=aliased, from_joinpoint=from_joinpoint) def outerjoin(self, *props, **kwargs): @@ -1649,8 +1649,8 @@ class Query(object): if kwargs: raise TypeError("unknown arguments: %s" % ','.join(kwargs.iterkeys())) - return self._join(props, - outerjoin=True, create_aliases=aliased, + return self._join(props, + outerjoin=True, create_aliases=aliased, from_joinpoint=from_joinpoint) def _update_joinpoint(self, jp): @@ -1676,9 +1676,9 @@ class Query(object): self._reset_joinpoint() if len(keys) == 2 and \ - isinstance(keys[0], (expression.FromClause, + isinstance(keys[0], (expression.FromClause, type, AliasedClass)) and \ - isinstance(keys[1], (basestring, expression.ClauseElement, + isinstance(keys[1], (basestring, expression.ClauseElement, interfaces.PropComparator)): # detect 2-arg form of join and # convert to a tuple. @@ -1688,7 +1688,7 @@ class Query(object): if isinstance(arg1, tuple): # "tuple" form of join, multiple # tuples are accepted as well. The simpler - # "2-arg" form is preferred. May deprecate + # "2-arg" form is preferred. May deprecate # the "tuple" usage. 
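                # illustrative note (not from the original source): the
                # following two spellings both arrive here and are
                # normalized to the same (target, onclause) pair --
                #   query(User).join(Address, User.id == Address.user_id)
                #   query(User).join((Address, User.id == Address.user_id))
                # the tuple is unpacked just below.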
arg1, arg2 = arg1 else: @@ -1762,11 +1762,11 @@ class Query(object): raise NotImplementedError("query.join(a==b) not supported.") self._join_left_to_right( - left_entity, - right_entity, onclause, + left_entity, + right_entity, onclause, outerjoin, create_aliases, prop) - def _join_left_to_right(self, left, right, + def _join_left_to_right(self, left, right, onclause, outerjoin, create_aliases, prop): """append a JOIN to the query's from clause.""" @@ -1782,12 +1782,12 @@ class Query(object): not create_aliases: raise sa_exc.InvalidRequestError( "Can't construct a join from %s to %s, they " - "are the same entity" % + "are the same entity" % (left, right)) right, right_is_aliased, onclause = self._prepare_right_side( right, onclause, - outerjoin, create_aliases, + outerjoin, create_aliases, prop) # if joining on a MapperProperty path, @@ -1802,11 +1802,11 @@ class Query(object): '_joinpoint_entity':right } - self._join_to_left(left, right, - right_is_aliased, + self._join_to_left(left, right, + right_is_aliased, onclause, outerjoin) - def _prepare_right_side(self, right, onclause, outerjoin, + def _prepare_right_side(self, right, onclause, outerjoin, create_aliases, prop): right_mapper, right_selectable, right_is_aliased = _entity_info(right) @@ -1857,11 +1857,11 @@ class Query(object): # until reset_joinpoint() is called. if need_adapter: self._filter_aliases = ORMAdapter(right, - equivalents=right_mapper and + equivalents=right_mapper and right_mapper._equivalent_columns or {}, chain_to=self._filter_aliases) - # if the onclause is a ClauseElement, adapt it with any + # if the onclause is a ClauseElement, adapt it with any # adapters that are in place right now if isinstance(onclause, expression.ClauseElement): onclause = self._adapt_clause(onclause, True, True) @@ -1874,7 +1874,7 @@ class Query(object): self._mapper_loads_polymorphically_with( right_mapper, ORMAdapter( - right, + right, equivalents=right_mapper._equivalent_columns ) ) @@ -1884,19 +1884,19 @@ class Query(object): def _join_to_left(self, left, right, right_is_aliased, onclause, outerjoin): left_mapper, left_selectable, left_is_aliased = _entity_info(left) - # this is an overly broad assumption here, but there's a + # this is an overly broad assumption here, but there's a # very wide variety of situations where we rely upon orm.join's # adaption to glue clauses together, with joined-table inheritance's # wide array of variables taking up most of the space. # Setting the flag here is still a guess, so it is a bug - # that we don't have definitive criterion to determine when - # adaption should be enabled (or perhaps that we're even doing the + # that we don't have definitive criterion to determine when + # adaption should be enabled (or perhaps that we're even doing the # whole thing the way we are here). join_to_left = not right_is_aliased and not left_is_aliased if self._from_obj and left_selectable is not None: replace_clause_index, clause = sql_util.find_join_source( - self._from_obj, + self._from_obj, left_selectable) if clause is not None: # the entire query's FROM clause is an alias of itself (i.e. 
@@ -1912,9 +1912,9 @@ class Query(object): join_to_left = False try: - clause = orm_join(clause, - right, - onclause, isouter=outerjoin, + clause = orm_join(clause, + right, + onclause, isouter=outerjoin, join_to_left=join_to_left) except sa_exc.ArgumentError, ae: raise sa_exc.InvalidRequestError( @@ -1944,7 +1944,7 @@ class Query(object): "Could not find a FROM clause to join from") try: - clause = orm_join(clause, right, onclause, + clause = orm_join(clause, right, onclause, isouter=outerjoin, join_to_left=join_to_left) except sa_exc.ArgumentError, ae: raise sa_exc.InvalidRequestError( @@ -1961,10 +1961,10 @@ class Query(object): def reset_joinpoint(self): """Return a new :class:`.Query`, where the "join point" has been reset back to the base FROM entities of the query. - + This method is usually used in conjunction with the ``aliased=True`` feature of the :meth:`~.Query.join` - method. See the example in :meth:`~.Query.join` for how + method. See the example in :meth:`~.Query.join` for how this is used. """ @@ -1975,15 +1975,15 @@ class Query(object): """Set the FROM clause of this :class:`.Query` explicitly. Sending a mapped class or entity here effectively replaces the - "left edge" of any calls to :meth:`~.Query.join`, when no + "left edge" of any calls to :meth:`~.Query.join`, when no joinpoint is otherwise established - usually, the default "join point" is the leftmost entity in the :class:`~.Query` object's list of entities to be selected. Mapped entities or plain :class:`~.Table` or other selectables can be sent here which will form the default FROM clause. - - See the example in :meth:`~.Query.join` for a typical + + See the example in :meth:`~.Query.join` for a typical usage of :meth:`~.Query.select_from`. """ @@ -2070,21 +2070,21 @@ class Query(object): construct. """ - if not criterion: - self._distinct = True - else: + if not criterion: + self._distinct = True + else: criterion = self._adapt_col_list(criterion) if isinstance(self._distinct, list): self._distinct += criterion - else: - self._distinct = criterion + else: + self._distinct = criterion @_generative() def prefix_with(self, *prefixes): """Apply the prefixes to the query and return the newly resulting ``Query``. - :param \*prefixes: optional prefixes, typically strings, + :param \*prefixes: optional prefixes, typically strings, not using any commas. In particular is useful for MySQL keywords. e.g.:: @@ -2095,9 +2095,9 @@ class Query(object): Would render:: - SELECT HIGH_PRIORITY SQL_SMALL_RESULT ALL users.name AS users_name + SELECT HIGH_PRIORITY SQL_SMALL_RESULT ALL users.name AS users_name FROM users - + .. versionadded:: 0.7.7 """ @@ -2129,7 +2129,7 @@ class Query(object): if isinstance(statement, basestring): statement = sql.text(statement) - if not isinstance(statement, + if not isinstance(statement, (expression._TextClause, expression._SelectBase)): raise sa_exc.ArgumentError( @@ -2139,12 +2139,12 @@ class Query(object): self._statement = statement def first(self): - """Return the first result of this ``Query`` or + """Return the first result of this ``Query`` or None if the result doesn't contain any row. first() applies a limit of one within the generated SQL, so that - only one primary entity row is generated on the server side - (note this may consist of multiple result rows if join-loaded + only one primary entity row is generated on the server side + (note this may consist of multiple result rows if join-loaded collections are present). Calling ``first()`` results in an execution of the underlying query. 
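        A hypothetical usage sketch, assuming the ``User`` mapping from the
        earlier examples::

            user = session.query(User).filter(User.name == 'ed').first()
            if user is None:
                # no matching row; unlike one(), first() simply
                # returns None rather than raising NoResultFound
                pass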
@@ -2162,22 +2162,22 @@ class Query(object): def one(self): """Return exactly one result or raise an exception. - Raises ``sqlalchemy.orm.exc.NoResultFound`` if the query selects - no rows. Raises ``sqlalchemy.orm.exc.MultipleResultsFound`` + Raises ``sqlalchemy.orm.exc.NoResultFound`` if the query selects + no rows. Raises ``sqlalchemy.orm.exc.MultipleResultsFound`` if multiple object identities are returned, or if multiple rows are returned for a query that does not return object identities. Note that an entity query, that is, one which selects one or more mapped classes as opposed to individual column attributes, - may ultimately represent many rows but only one row of + may ultimately represent many rows but only one row of unique entity or entities - this is a successful result for one(). Calling ``one()`` results in an execution of the underlying query. .. versionchanged:: 0.6 - ``one()`` fully fetches all results instead of applying - any kind of limit, so that the "unique"-ing of entities does not + ``one()`` fully fetches all results instead of applying + any kind of limit, so that the "unique"-ing of entities does not conceal multiple object identities. """ @@ -2244,7 +2244,7 @@ class Query(object): @property def column_descriptions(self): - """Return metadata about the columns which would be + """Return metadata about the columns which would be returned by this :class:`.Query`. Format is a list of dictionaries:: @@ -2385,7 +2385,7 @@ class Query(object): this :class:`.Query` - if these do not correspond, unchecked errors will occur. The 'load' argument is the same as that of :meth:`.Session.merge`. - + For an example of how :meth:`~.Query.merge_result` is used, see the source code for the example :ref:`examples_caching`, where :meth:`~.Query.merge_result` is used to efficiently restore state @@ -2405,22 +2405,22 @@ class Query(object): if single_entity: if isinstance(self._entities[0], _MapperEntity): result = [session._merge( - attributes.instance_state(instance), - attributes.instance_dict(instance), + attributes.instance_state(instance), + attributes.instance_dict(instance), load=load, _recursive={}) for instance in iterator] else: result = list(iterator) else: - mapped_entities = [i for i, e in enumerate(self._entities) + mapped_entities = [i for i, e in enumerate(self._entities) if isinstance(e, _MapperEntity)] result = [] for row in iterator: newrow = list(row) for i in mapped_entities: newrow[i] = session._merge( - attributes.instance_state(newrow[i]), - attributes.instance_dict(newrow[i]), + attributes.instance_state(newrow[i]), + attributes.instance_dict(newrow[i]), load=load, _recursive={}) result.append(util.NamedTuple(newrow, row._labels)) @@ -2430,7 +2430,7 @@ class Query(object): @classmethod def _get_from_identity(cls, session, key, passive): - """Look up the given key in the given session's identity map, + """Look up the given key in the given session's identity map, check the object for expired state if found. """ @@ -2533,39 +2533,39 @@ class Query(object): def count(self): """Return a count of rows this Query would return. - + This generates the SQL for this Query as follows:: - + SELECT count(1) AS count_1 FROM ( SELECT ) AS anon_1 .. versionchanged:: 0.7 The above scheme is newly refined as of 0.7b3. 
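        As a concrete illustration, assuming the ``User`` mapping sketched
        earlier (the rendered SQL is dialect-dependent and shown only as an
        approximation)::

            session.query(User).filter(User.name == 'ed').count()

        would emit SQL along the lines of::

            SELECT count(1) AS count_1 FROM (
                SELECT user.id AS user_id, user.name AS user_name
                FROM user
                WHERE user.name = ?
            ) AS anon_1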
- - For fine grained control over specific columns + + For fine grained control over specific columns to count, to skip the usage of a subquery or otherwise control of the FROM clause, or to use other aggregate functions, use :attr:`~sqlalchemy.sql.expression.func` expressions in conjunction with :meth:`~.Session.query`, i.e.:: - + from sqlalchemy import func - + # count User records, without # using a subquery. session.query(func.count(User.id)) - + # return count of user "id" grouped # by "name" session.query(func.count(User.id)).\\ group_by(User.name) from sqlalchemy import distinct - + # count distinct "name" values session.query(func.count(distinct(User.name))) - + """ col = sql.func.count(sql.literal_column('*')) return self.from_self(col).scalar() @@ -2593,7 +2593,7 @@ class Query(object): ``'evaluate'`` - Evaluate the query's criteria in Python straight on the objects in the session. If evaluation of the criteria isn't - implemented, an error is raised. In that case you probably + implemented, an error is raised. In that case you probably want to use the 'fetch' strategy as a fallback. The expression evaluator currently doesn't account for differing @@ -2608,13 +2608,13 @@ class Query(object): state of dependent objects subject to delete or delete-orphan cascade to be correctly represented. - Note that the :meth:`.MapperEvents.before_delete` and + Note that the :meth:`.MapperEvents.before_delete` and :meth:`.MapperEvents.after_delete` events are **not** invoked from this method. It instead invokes :meth:`.SessionEvents.after_bulk_delete`. """ - #TODO: lots of duplication and ifs - probably needs to be + #TODO: lots of duplication and ifs - probably needs to be # refactored to strategies #TODO: cascades need handling. @@ -2734,7 +2734,7 @@ class Query(object): or call expire_all()) in order for the state of dependent objects subject foreign key cascade to be correctly represented. - Note that the :meth:`.MapperEvents.before_update` and + Note that the :meth:`.MapperEvents.before_update` and :meth:`.MapperEvents.after_update` events are **not** invoked from this method. It instead invokes :meth:`.SessionEvents.after_bulk_update`. @@ -2743,7 +2743,7 @@ class Query(object): #TODO: value keys need to be mapped to corresponding sql cols and # instr.attr.s to string keys - #TODO: updates of manytoone relationships need to be converted to + #TODO: updates of manytoone relationships need to be converted to # fk assignments #TODO: cascades need handling. @@ -2828,7 +2828,7 @@ class Query(object): state.commit(dict_, list(to_evaluate)) - # expire attributes with pending changes + # expire attributes with pending changes # (there was no autoflush, so they are overwritten) state.expire_attributes(dict_, set(evaluated_keys). @@ -2842,7 +2842,7 @@ class Query(object): list(primary_key)) if identity_key in session.identity_map: session.expire( - session.identity_map[identity_key], + session.identity_map[identity_key], [_attr_as_key(k) for k in values] ) @@ -2879,11 +2879,11 @@ class Query(object): eager_joins = context.eager_joins.values() if context.from_clause: - # "load from explicit FROMs" mode, + # "load from explicit FROMs" mode, # i.e. when select_from() or join() is used froms = list(context.from_clause) else: - # "load from discrete FROMs" mode, + # "load from discrete FROMs" mode, # i.e. 
when each _MappedEntity has its own FROM froms = context.froms @@ -2902,7 +2902,7 @@ class Query(object): "SELECT from.") if context.multi_row_eager_loaders and self._should_nest_selectable: - # for eager joins present and LIMIT/OFFSET/DISTINCT, + # for eager joins present and LIMIT/OFFSET/DISTINCT, # wrap the query inside a select, # then append eager joins onto that @@ -2923,7 +2923,7 @@ class Query(object): from_obj=froms, use_labels=labels, correlate=False, - # TODO: this order_by is only needed if + # TODO: this order_by is only needed if # LIMIT/OFFSET is present in self._select_args, # else the application on the outside is enough order_by=context.order_by, @@ -2943,17 +2943,17 @@ class Query(object): context.adapter = sql_util.ColumnAdapter(inner, equivs) statement = sql.select( - [inner] + context.secondary_columns, - for_update=for_update, + [inner] + context.secondary_columns, + for_update=for_update, use_labels=labels) from_clause = inner for eager_join in eager_joins: # EagerLoader places a 'stop_on' attribute on the join, - # giving us a marker as to where the "splice point" of + # giving us a marker as to where the "splice point" of # the join should be from_clause = sql_util.splice_joins( - from_clause, + from_clause, eager_join, eager_join.stop_on) statement.append_from(from_clause) @@ -2973,7 +2973,7 @@ class Query(object): if self._distinct and context.order_by: order_by_col_expr = list( chain(*[ - sql_util.unwrap_order_by(o) + sql_util.unwrap_order_by(o) for o in context.order_by ]) ) @@ -3059,7 +3059,7 @@ class _MapperEntity(_QueryEntity): self.entities = [entity] self.entity_zero = self.expr = entity - def setup_entity(self, entity, mapper, adapter, + def setup_entity(self, entity, mapper, adapter, from_obj, is_aliased_class, with_polymorphic): self.mapper = mapper self.adapter = adapter @@ -3080,7 +3080,7 @@ class _MapperEntity(_QueryEntity): self._label_name = self.mapper.class_.__name__ - def set_with_polymorphic(self, query, cls_or_mappers, + def set_with_polymorphic(self, query, cls_or_mappers, selectable, discriminator): if cls_or_mappers is None: query._reset_polymorphic_adapter(self.mapper) @@ -3091,12 +3091,12 @@ class _MapperEntity(_QueryEntity): self._with_polymorphic = mappers self._polymorphic_discriminator = discriminator - # TODO: do the wrapped thing here too so that + # TODO: do the wrapped thing here too so that # with_polymorphic() can be applied to aliases if not self.is_aliased_class: self.selectable = from_obj - query._mapper_loads_polymorphically_with(self.mapper, - sql_util.ColumnAdapter(from_obj, + query._mapper_loads_polymorphically_with(self.mapper, + sql_util.ColumnAdapter(from_obj, self.mapper._equivalent_columns)) filter_fn = id @@ -3145,7 +3145,7 @@ class _MapperEntity(_QueryEntity): elif not adapter: adapter = context.adapter - # polymorphic mappers which have concrete tables in + # polymorphic mappers which have concrete tables in # their hierarchy usually # require row aliasing unconditionally. 
if not adapter and self.mapper._requires_row_aliasing: @@ -3155,7 +3155,7 @@ class _MapperEntity(_QueryEntity): if self.primary_entity: _instance = self.mapper._instance_processor( - context, + context, self._path, self._reduced_path, adapter, @@ -3166,7 +3166,7 @@ class _MapperEntity(_QueryEntity): ) else: _instance = self.mapper._instance_processor( - context, + context, self._path, self._reduced_path, adapter, @@ -3309,13 +3309,13 @@ class _ColumnEntity(_QueryEntity): def adapt_to_selectable(self, query, sel): c = _ColumnEntity(query, sel.corresponding_column(self.column)) - c._label_name = self._label_name + c._label_name = self._label_name c.entity_zero = self.entity_zero c.entities = self.entities def setup_entity(self, entity, mapper, adapter, from_obj, is_aliased_class, with_polymorphic): - if 'selectable' not in self.__dict__: + if 'selectable' not in self.__dict__: self.selectable = from_obj self.froms.add(from_obj) diff --git a/lib/sqlalchemy/orm/scoping.py b/lib/sqlalchemy/orm/scoping.py index 3c1cd7f26d..6e1d0d1d60 100644 --- a/lib/sqlalchemy/orm/scoping.py +++ b/lib/sqlalchemy/orm/scoping.py @@ -16,10 +16,10 @@ __all__ = ['ScopedSession'] class ScopedSession(object): """Provides thread-local management of Sessions. - + Typical invocation is via the :func:`.scoped_session` function:: - + Session = scoped_session(sessionmaker()) The internal registry is accessible, @@ -71,7 +71,7 @@ class ScopedSession(object): self.session_factory.configure(**kwargs) def query_property(self, query_cls=None): - """return a class property which produces a `Query` object + """return a class property which produces a `Query` object against the class when called. e.g.:: @@ -122,7 +122,7 @@ def makeprop(name): def get(self): return getattr(self.registry(), name) return property(get, set) -for prop in ('bind', 'dirty', 'deleted', 'new', 'identity_map', +for prop in ('bind', 'dirty', 'deleted', 'new', 'identity_map', 'is_active', 'autoflush', 'no_autoflush'): setattr(ScopedSession, prop, makeprop(prop)) diff --git a/lib/sqlalchemy/orm/state.py b/lib/sqlalchemy/orm/state.py index 4803ecdc3d..12a4d0c72d 100644 --- a/lib/sqlalchemy/orm/state.py +++ b/lib/sqlalchemy/orm/state.py @@ -124,7 +124,7 @@ class InstanceState(object): return [] elif hasattr(impl, 'get_collection'): return [ - (attributes.instance_state(o), o) for o in + (attributes.instance_state(o), o) for o in impl.get_collection(self, dict_, x, passive=passive) ] else: @@ -134,10 +134,10 @@ class InstanceState(object): d = {'instance':self.obj()} d.update( (k, self.__dict__[k]) for k in ( - 'committed_state', 'pending', 'modified', 'expired', + 'committed_state', 'pending', 'modified', 'expired', 'callables', 'key', 'parents', 'load_options', 'mutable_dict', 'class_', - ) if k in self.__dict__ + ) if k in self.__dict__ ) if self.load_path: d['load_path'] = interfaces.serialize_path(self.load_path) @@ -181,26 +181,26 @@ class InstanceState(object): self.__dict__.update([ (k, state[k]) for k in ( 'key', 'load_options', 'mutable_dict' - ) if k in state + ) if k in state ]) if 'load_path' in state: self.load_path = interfaces.deserialize_path(state['load_path']) - # setup _sa_instance_state ahead of time so that + # setup _sa_instance_state ahead of time so that # unpickle events can access the object normally. 
# see [ticket:2362] manager.setup_instance(inst, self) manager.dispatch.unpickle(self, state) def initialize(self, key): - """Set this attribute to an empty value or collection, + """Set this attribute to an empty value or collection, based on the AttributeImpl in use.""" self.manager.get_impl(key).initialize(self, self.dict) def reset(self, dict_, key): - """Remove the given attribute and any + """Remove the given attribute and any callables associated with it.""" dict_.pop(key, None) @@ -284,7 +284,7 @@ class InstanceState(object): self.manager.deferred_scalar_loader(self, toload) - # if the loader failed, or this + # if the loader failed, or this # instance state didn't have an identity, # the attributes still might be in the callables # dict. ensure they are removed. @@ -321,7 +321,7 @@ class InstanceState(object): @property def expired_attributes(self): """Return the set of keys which are 'expired' to be loaded by - the manager's deferred scalar loader, assuming no pending + the manager's deferred scalar loader, assuming no pending changes. see also the ``unmodified`` collection which is intersected @@ -348,7 +348,7 @@ class InstanceState(object): self.committed_state[attr.key] = previous - # the "or not self.modified" is defensive at + # the "or not self.modified" is defensive at # this point. The assertion below is expected # to be True: # assert self._strong_obj is None or self.modified @@ -363,9 +363,9 @@ class InstanceState(object): raise orm_exc.ObjectDereferencedError( "Can't emit change event for attribute '%s' - " "parent object of type %s has been garbage " - "collected." + "collected." % ( - self.manager[attr.key], + self.manager[attr.key], orm_util.state_class_str(self) )) self.modified = True @@ -433,7 +433,7 @@ class InstanceState(object): self._strong_obj = None class MutableAttrInstanceState(InstanceState): - """InstanceState implementation for objects that reference 'mutable' + """InstanceState implementation for objects that reference 'mutable' attributes. Has a more involved "cleanup" handler that checks mutable attributes @@ -491,7 +491,7 @@ class MutableAttrInstanceState(InstanceState): This would be called in the extremely rare race condition that the weakref returned None but - the cleanup handler had not yet established the + the cleanup handler had not yet established the __resurrect callable as its replacement. 
""" diff --git a/lib/sqlalchemy/orm/strategies.py b/lib/sqlalchemy/orm/strategies.py index d7d3d57ebd..c552f11ee1 100644 --- a/lib/sqlalchemy/orm/strategies.py +++ b/lib/sqlalchemy/orm/strategies.py @@ -4,7 +4,7 @@ # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php -"""sqlalchemy.orm.interfaces.LoaderStrategy +"""sqlalchemy.orm.interfaces.LoaderStrategy implementations, and related MapperOptions.""" from sqlalchemy import exc as sa_exc @@ -23,15 +23,15 @@ from sqlalchemy.orm.query import Query import itertools def _register_attribute(strategy, mapper, useobject, - compare_function=None, + compare_function=None, typecallable=None, - copy_function=None, - mutable_scalars=False, + copy_function=None, + mutable_scalars=False, uselist=False, - callable_=None, - proxy_property=None, + callable_=None, + proxy_property=None, active_history=False, - impl_class=None, + impl_class=None, **kw ): @@ -47,7 +47,7 @@ def _register_attribute(strategy, mapper, useobject, if prop.key in prop.parent.validators: fn, include_removes = prop.parent.validators[prop.key] listen_hooks.append( - lambda desc, prop: mapperutil._validator_events(desc, + lambda desc, prop: mapperutil._validator_events(desc, prop.key, fn, include_removes) ) @@ -59,8 +59,8 @@ def _register_attribute(strategy, mapper, useobject, backref = kw.pop('backref', None) if backref: listen_hooks.append( - lambda desc, prop: attributes.backref_listeners(desc, - backref, + lambda desc, prop: attributes.backref_listeners(desc, + backref, uselist) ) @@ -68,18 +68,18 @@ def _register_attribute(strategy, mapper, useobject, if prop is m._props.get(prop.key): desc = attributes.register_attribute_impl( - m.class_, - prop.key, + m.class_, + prop.key, parent_token=prop, mutable_scalars=mutable_scalars, - uselist=uselist, - copy_function=copy_function, - compare_function=compare_function, + uselist=uselist, + copy_function=copy_function, + compare_function=compare_function, useobject=useobject, - extension=attribute_ext, - trackparent=useobject and (prop.single_parent or prop.direction is interfaces.ONETOMANY), + extension=attribute_ext, + trackparent=useobject and (prop.single_parent or prop.direction is interfaces.ONETOMANY), typecallable=typecallable, - callable_=callable_, + callable_=callable_, active_history=active_history, impl_class=impl_class, doc=prop.doc, @@ -99,7 +99,7 @@ class UninstrumentedColumnLoader(LoaderStrategy): def init(self): self.columns = self.parent_property.columns - def setup_query(self, context, entity, path, reduced_path, adapter, + def setup_query(self, context, entity, path, reduced_path, adapter, column_collection=None, **kwargs): for c in self.columns: if adapter: @@ -116,7 +116,7 @@ class ColumnLoader(LoaderStrategy): self.columns = self.parent_property.columns self.is_composite = hasattr(self.parent_property, 'composite_class') - def setup_query(self, context, entity, path, reduced_path, + def setup_query(self, context, entity, path, reduced_path, adapter, column_collection, **kwargs): for c in self.columns: if adapter: @@ -137,7 +137,7 @@ class ColumnLoader(LoaderStrategy): active_history = active_history ) - def create_row_processor(self, context, path, reduced_path, + def create_row_processor(self, context, path, reduced_path, mapper, row, adapter): key = self.key # look through list of columns represented here @@ -199,10 +199,10 @@ class DeferredColumnLoader(LoaderStrategy): expire_missing=False ) - def setup_query(self, context, entity, path, 
reduced_path, adapter, + def setup_query(self, context, entity, path, reduced_path, adapter, only_load_props=None, **kwargs): if ( - self.group is not None and + self.group is not None and context.attributes.get(('undefer', self.group), False) ) or (only_load_props and self.key in only_load_props): self.parent_property._get_strategy(ColumnLoader).\ @@ -220,10 +220,10 @@ class DeferredColumnLoader(LoaderStrategy): if self.group: toload = [ - p.key for p in - localparent.iterate_properties - if isinstance(p, StrategizedProperty) and - isinstance(p.strategy, DeferredColumnLoader) and + p.key for p in + localparent.iterate_properties + if isinstance(p, StrategizedProperty) and + isinstance(p.strategy, DeferredColumnLoader) and p.group==self.group ] else: @@ -236,12 +236,12 @@ class DeferredColumnLoader(LoaderStrategy): if session is None: raise orm_exc.DetachedInstanceError( "Parent instance %s is not bound to a Session; " - "deferred load operation of attribute '%s' cannot proceed" % + "deferred load operation of attribute '%s' cannot proceed" % (mapperutil.state_str(state), self.key) ) query = session.query(localparent) - if query._load_on_ident(state.key, + if query._load_on_ident(state.key, only_load_props=group, refresh_state=state) is None: raise orm_exc.ObjectDeletedError(state) @@ -297,14 +297,14 @@ class AbstractRelationshipLoader(LoaderStrategy): class NoLoader(AbstractRelationshipLoader): """Provide loading behavior for a :class:`.RelationshipProperty` with "lazy=None". - + """ def init_class_attribute(self, mapper): self.is_class_level = True _register_attribute(self, mapper, - useobject=True, + useobject=True, uselist=self.parent_property.uselist, typecallable = self.parent_property.collection_class, ) @@ -319,7 +319,7 @@ log.class_logger(NoLoader) class LazyLoader(AbstractRelationshipLoader): """Provide loading behavior for a :class:`.RelationshipProperty` with "lazy=True", that is loads when first accessed. - + """ def init(self): @@ -331,7 +331,7 @@ class LazyLoader(AbstractRelationshipLoader): self._rev_lazywhere, \ self._rev_bind_to_col, \ self._rev_equated_columns = self._create_lazy_clause( - self.parent_property, + self.parent_property, reverse_direction=True) self.logger.info("%s lazy loading clause %s", self, self._lazywhere) @@ -341,8 +341,8 @@ class LazyLoader(AbstractRelationshipLoader): #from sqlalchemy.orm import query self.use_get = not self.uselist and \ self.mapper._get_clause[0].compare( - self._lazywhere, - use_proxies=True, + self._lazywhere, + use_proxies=True, equivalents=self.mapper._equivalent_columns ) @@ -358,13 +358,13 @@ class LazyLoader(AbstractRelationshipLoader): def init_class_attribute(self, mapper): self.is_class_level = True - # MANYTOONE currently only needs the + # MANYTOONE currently only needs the # "old" value for delete-orphan - # cascades. the required _SingleParentValidator + # cascades. the required _SingleParentValidator # will enable active_history - # in that case. otherwise we don't need the + # in that case. otherwise we don't need the # "old" value during backref operations. 
- _register_attribute(self, + _register_attribute(self, mapper, useobject=True, callable_=self._load_for_state, @@ -378,12 +378,12 @@ class LazyLoader(AbstractRelationshipLoader): not self.use_get, ) - def lazy_clause(self, state, reverse_direction=False, - alias_secondary=False, + def lazy_clause(self, state, reverse_direction=False, + alias_secondary=False, adapt_source=None): if state is None: return self._lazy_none_clause( - reverse_direction, + reverse_direction, adapt_source=adapt_source) if not reverse_direction: @@ -414,14 +414,14 @@ class LazyLoader(AbstractRelationshipLoader): if bindparam._identifying_key in bind_to_col: bindparam.callable = \ lambda: mapper._get_committed_state_attr_by_column( - state, dict_, + state, dict_, bind_to_col[bindparam._identifying_key]) else: def visit_bindparam(bindparam): if bindparam._identifying_key in bind_to_col: bindparam.callable = \ lambda: mapper._get_state_attr_by_column( - state, dict_, + state, dict_, bind_to_col[bindparam._identifying_key]) @@ -465,10 +465,10 @@ class LazyLoader(AbstractRelationshipLoader): if ( (passive is attributes.PASSIVE_NO_FETCH or \ - passive is attributes.PASSIVE_NO_FETCH_RELATED) and + passive is attributes.PASSIVE_NO_FETCH_RELATED) and not self.use_get ) or ( - passive is attributes.PASSIVE_ONLY_PERSISTENT and + passive is attributes.PASSIVE_ONLY_PERSISTENT and pending ): return attributes.PASSIVE_NO_RESULT @@ -477,11 +477,11 @@ class LazyLoader(AbstractRelationshipLoader): if not session: raise orm_exc.DetachedInstanceError( "Parent instance %s is not bound to a Session; " - "lazy load operation of attribute '%s' cannot proceed" % + "lazy load operation of attribute '%s' cannot proceed" % (mapperutil.state_str(state), self.key) ) - # if we have a simple primary key load, check the + # if we have a simple primary key load, check the # identity map without generating a Query at all if self.use_get: ident = self._get_ident_for_use_get( @@ -555,7 +555,7 @@ class LazyLoader(AbstractRelationshipLoader): q = q.order_by(*util.to_list(self.parent_property.order_by)) for rev in self.parent_property._reverse_property: - # reverse props that are MANYTOONE are loading *this* + # reverse props that are MANYTOONE are loading *this* # object from get(), so don't need to eager out to those. if rev.direction is interfaces.MANYTOONE and \ rev._use_get and \ @@ -580,7 +580,7 @@ class LazyLoader(AbstractRelationshipLoader): if l > 1: util.warn( "Multiple rows returned with " - "uselist=False for lazily-loaded attribute '%s' " + "uselist=False for lazily-loaded attribute '%s' " % self.parent_property) return result[0] @@ -588,30 +588,30 @@ class LazyLoader(AbstractRelationshipLoader): return None - def create_row_processor(self, context, path, reduced_path, + def create_row_processor(self, context, path, reduced_path, mapper, row, adapter): key = self.key if not self.is_class_level: def set_lazy_callable(state, dict_, row): - # we are not the primary manager for this attribute + # we are not the primary manager for this attribute # on this class - set up a - # per-instance lazyloader, which will override the + # per-instance lazyloader, which will override the # class-level behavior. - # this currently only happens when using a + # this currently only happens when using a # "lazyload" option on a "no load" - # attribute - "eager" attributes always have a + # attribute - "eager" attributes always have a # class-level lazyloader installed. 
state.set_callable(dict_, key, LoadLazyAttribute(state, key)) return set_lazy_callable, None, None else: def reset_for_lazy_callable(state, dict_, row): - # we are the primary manager for this attribute on + # we are the primary manager for this attribute on # this class - reset its - # per-instance attribute state, so that the class-level + # per-instance attribute state, so that the class-level # lazy loader is # executed when next referenced on this instance. # this is needed in - # populate_existing() types of scenarios to reset + # populate_existing() types of scenarios to reset # any existing state. state.reset(dict_, key) @@ -648,7 +648,7 @@ class LazyLoader(AbstractRelationshipLoader): if prop.secondaryjoin is None or not reverse_direction: lazywhere = visitors.replacement_traverse( - lazywhere, {}, col_to_bind) + lazywhere, {}, col_to_bind) if prop.secondaryjoin is not None: secondaryjoin = prop.secondaryjoin @@ -685,12 +685,12 @@ class ImmediateLoader(AbstractRelationshipLoader): _get_strategy(LazyLoader).\ init_class_attribute(mapper) - def setup_query(self, context, entity, + def setup_query(self, context, entity, path, reduced_path, adapter, column_collection=None, parentmapper=None, **kwargs): pass - def create_row_processor(self, context, path, reduced_path, + def create_row_processor(self, context, path, reduced_path, mapper, row, adapter): def load_immediate(state, dict_, row): state.get_impl(self.key).get(state, dict_) @@ -707,8 +707,8 @@ class SubqueryLoader(AbstractRelationshipLoader): _get_strategy(LazyLoader).\ init_class_attribute(mapper) - def setup_query(self, context, entity, - path, reduced_path, adapter, + def setup_query(self, context, entity, + path, reduced_path, adapter, column_collection=None, parentmapper=None, **kwargs): @@ -738,7 +738,7 @@ class SubqueryLoader(AbstractRelationshipLoader): self._get_leftmost(subq_path) orig_query = context.attributes.get( - ("orig_query", SubqueryLoader), + ("orig_query", SubqueryLoader), context.query) # generate a new Query from the original, then @@ -748,7 +748,7 @@ class SubqueryLoader(AbstractRelationshipLoader): leftmost_attr, subq_path ) - # generate another Query that will join the + # generate another Query that will join the # left alias to the target relationships. # basically doing a longhand # "from_self()". (from_self() itself not quite industrial @@ -770,7 +770,7 @@ class SubqueryLoader(AbstractRelationshipLoader): q = self._setup_options(q, subq_path, orig_query) q = self._setup_outermost_orderby(q) - # add new query to attributes to be picked up + # add new query to attributes to be picked up # by create_row_processor context.attributes[('subquery', reduced_path)] = q @@ -801,7 +801,7 @@ class SubqueryLoader(AbstractRelationshipLoader): # to look only for significant columns q = orig_query._clone() - # TODO: why does polymporphic etc. require hardcoding + # TODO: why does polymporphic etc. require hardcoding # into _adapt_col_list ? Does query.add_columns(...) work # with polymorphic loading ? q._set_entities(q._adapt_col_list(leftmost_attr)) @@ -823,7 +823,7 @@ class SubqueryLoader(AbstractRelationshipLoader): def _prep_for_joins(self, left_alias, subq_path): # figure out what's being joined. a.k.a. 
the fun part to_join = [ - (subq_path[i], subq_path[i+1]) + (subq_path[i], subq_path[i+1]) for i in xrange(0, len(subq_path), 2) ] @@ -836,13 +836,13 @@ class SubqueryLoader(AbstractRelationshipLoader): parent_alias = left_alias elif subq_path[-2].isa(self.parent): # In the case of multiple levels, retrieve - # it from subq_path[-2]. This is the same as self.parent - # in the vast majority of cases, and [ticket:2014] + # it from subq_path[-2]. This is the same as self.parent + # in the vast majority of cases, and [ticket:2014] # illustrates a case where sub_path[-2] is a subclass # of self.parent parent_alias = mapperutil.AliasedClass(subq_path[-2]) else: - # if of_type() were used leading to this relationship, + # if of_type() were used leading to this relationship, # self.parent is more specific than subq_path[-2] parent_alias = mapperutil.AliasedClass(self.parent) @@ -860,10 +860,10 @@ class SubqueryLoader(AbstractRelationshipLoader): for i, (mapper, key) in enumerate(to_join): # we need to use query.join() as opposed to - # orm.join() here because of the - # rich behavior it brings when dealing with + # orm.join() here because of the + # rich behavior it brings when dealing with # "with_polymorphic" mappers. "aliased" - # and "from_joinpoint" take care of most of + # and "from_joinpoint" take care of most of # the chaining and aliasing for us. first = i == 0 @@ -897,14 +897,14 @@ class SubqueryLoader(AbstractRelationshipLoader): # these will fire relative to subq_path. q = q._with_current_path(subq_path) q = q._conditional_options(*orig_query._with_options) - if orig_query._populate_existing: + if orig_query._populate_existing: q._populate_existing = orig_query._populate_existing return q def _setup_outermost_orderby(self, q): if self.parent_property.order_by: # if there's an ORDER BY, alias it the same - # way joinedloader does, but we have to pull out + # way joinedloader does, but we have to pull out # the "eagerjoin" from the query. # this really only picks up the "secondary" table # right now. @@ -919,12 +919,12 @@ class SubqueryLoader(AbstractRelationshipLoader): q = q.order_by(*eager_order_by) return q - def create_row_processor(self, context, path, reduced_path, + def create_row_processor(self, context, path, reduced_path, mapper, row, adapter): if not self.parent.class_manager[self.key].impl.supports_population: raise sa_exc.InvalidRequestError( "'%s' does not support object " - "population - eager loading cannot be applied." % + "population - eager loading cannot be applied." 
% self) reduced_path = reduced_path + (self.key,) @@ -943,9 +943,9 @@ class SubqueryLoader(AbstractRelationshipLoader): collections = context.attributes[('collections', reduced_path)] else: collections = context.attributes[('collections', reduced_path)] = dict( - (k, [v[0] for v in v]) + (k, [v[0] for v in v]) for k, v in itertools.groupby( - q, + q, lambda x:x[1:] )) @@ -960,7 +960,7 @@ class SubqueryLoader(AbstractRelationshipLoader): def _create_collection_loader(self, collections, local_cols): def load_collection_from_subq(state, dict_, row): collection = collections.get( - tuple([row[col] for col in local_cols]), + tuple([row[col] for col in local_cols]), () ) state.get_impl(self.key).\ @@ -971,7 +971,7 @@ class SubqueryLoader(AbstractRelationshipLoader): def _create_scalar_loader(self, collections, local_cols): def load_scalar_from_subq(state, dict_, row): collection = collections.get( - tuple([row[col] for col in local_cols]), + tuple([row[col] for col in local_cols]), (None,) ) if len(collection) > 1: @@ -991,7 +991,7 @@ log.class_logger(SubqueryLoader) class JoinedLoader(AbstractRelationshipLoader): """Provide loading behavior for a :class:`.RelationshipProperty` using joined eager loading. - + """ def init(self): super(JoinedLoader, self).init() @@ -1022,7 +1022,7 @@ class JoinedLoader(AbstractRelationshipLoader): ) else: # check for join_depth or basic recursion, - # if the current path was not explicitly stated as + # if the current path was not explicitly stated as # a desired "loaderstrategy" (i.e. via query.options()) if ("loaderstrategy", reduced_path) not in context.attributes: if self.join_depth: @@ -1043,16 +1043,16 @@ class JoinedLoader(AbstractRelationshipLoader): for value in self.mapper._polymorphic_properties: value.setup( - context, - entity, - path, + context, + entity, + path, reduced_path, - clauses, - parentmapper=self.mapper, + clauses, + parentmapper=self.mapper, column_collection=add_to_collection, allow_innerjoin=allow_innerjoin) - def _get_user_defined_adapter(self, context, entity, + def _get_user_defined_adapter(self, context, entity, reduced_path, adapter): clauses = context.attributes[ ("user_defined_eager_row_processor", @@ -1071,12 +1071,12 @@ class JoinedLoader(AbstractRelationshipLoader): add_to_collection = context.primary_columns return clauses, adapter, add_to_collection - def _generate_row_adapter(self, + def _generate_row_adapter(self, context, entity, path, reduced_path, adapter, column_collection, parentmapper, allow_innerjoin ): clauses = mapperutil.ORMAdapter( - mapperutil.AliasedClass(self.mapper), + mapperutil.AliasedClass(self.mapper), equivalents=self.mapper._equivalent_columns, adapt_required=True) @@ -1084,7 +1084,7 @@ class JoinedLoader(AbstractRelationshipLoader): context.multi_row_eager_loaders = True innerjoin = allow_innerjoin and context.attributes.get( - ("eager_join_type", path), + ("eager_join_type", path), self.parent_property.innerjoin) if not innerjoin: # if this is an outer join, all eager joins from @@ -1092,8 +1092,8 @@ class JoinedLoader(AbstractRelationshipLoader): allow_innerjoin = False context.create_eager_joins.append( - (self._create_eager_join, context, - entity, path, adapter, + (self._create_eager_join, context, + entity, path, adapter, parentmapper, clauses, innerjoin) ) @@ -1103,8 +1103,8 @@ class JoinedLoader(AbstractRelationshipLoader): ] = clauses return clauses, adapter, add_to_collection, allow_innerjoin - def _create_eager_join(self, context, entity, - path, adapter, parentmapper, + def 
_create_eager_join(self, context, entity, + path, adapter, parentmapper, clauses, innerjoin): if parentmapper is None: @@ -1113,7 +1113,7 @@ class JoinedLoader(AbstractRelationshipLoader): localparent = parentmapper # whether or not the Query will wrap the selectable in a subquery, - # and then attach eager load joins to that (i.e., in the case of + # and then attach eager load joins to that (i.e., in the case of # LIMIT/OFFSET etc.) should_nest_selectable = context.multi_row_eager_loaders and \ context.query._should_nest_selectable @@ -1128,7 +1128,7 @@ class JoinedLoader(AbstractRelationshipLoader): if clause is not None: # join to an existing FROM clause on the query. # key it to its list index in the eager_joins dict. - # Query._compile_context will adapt as needed and + # Query._compile_context will adapt as needed and # append to the FROM clause of the select(). entity_key, default_towrap = index, clause @@ -1146,14 +1146,14 @@ class JoinedLoader(AbstractRelationshipLoader): else: onclause = getattr( mapperutil.AliasedClass( - self.parent, + self.parent, adapter.selectable - ), + ), self.key, self.parent_property ) if onclause is self.parent_property: - # TODO: this is a temporary hack to + # TODO: this is a temporary hack to # account for polymorphic eager loads where # the eagerload is referencing via of_type(). join_to_left = True @@ -1162,10 +1162,10 @@ class JoinedLoader(AbstractRelationshipLoader): context.eager_joins[entity_key] = eagerjoin = \ mapperutil.join( - towrap, - clauses.aliased_class, - onclause, - join_to_left=join_to_left, + towrap, + clauses.aliased_class, + onclause, + join_to_left=join_to_left, isouter=not innerjoin ) @@ -1175,11 +1175,11 @@ class JoinedLoader(AbstractRelationshipLoader): if self.parent_property.secondary is None and \ not parentmapper: # for parentclause that is the non-eager end of the join, - # ensure all the parent cols in the primaryjoin are actually + # ensure all the parent cols in the primaryjoin are actually # in the - # columns clause (i.e. are not deferred), so that aliasing applied + # columns clause (i.e. are not deferred), so that aliasing applied # by the Query propagates those columns outward. - # This has the effect + # This has the effect # of "undefering" those columns. for col in sql_util.find_columns( self.parent_property.primaryjoin): @@ -1204,7 +1204,7 @@ class JoinedLoader(AbstractRelationshipLoader): decorator = context.attributes[ ("user_defined_eager_row_processor", reduced_path)] - # user defined eagerloads are part of the "primary" + # user defined eagerloads are part of the "primary" # portion of the load. # the adapters applied to the Query should be honored. if context.adapter and decorator: @@ -1221,7 +1221,7 @@ class JoinedLoader(AbstractRelationshipLoader): self.mapper.identity_key_from_row(row, decorator) return decorator except KeyError: - # no identity key - dont return a row + # no identity key - dont return a row # processor, will cause a degrade to lazy return False @@ -1229,23 +1229,23 @@ class JoinedLoader(AbstractRelationshipLoader): if not self.parent.class_manager[self.key].impl.supports_population: raise sa_exc.InvalidRequestError( "'%s' does not support object " - "population - eager loading cannot be applied." % + "population - eager loading cannot be applied." 
% self) our_path = path + (self.key,) our_reduced_path = reduced_path + (self.key,) eager_adapter = self._create_eager_adapter( - context, - row, + context, + row, adapter, our_path, our_reduced_path) if eager_adapter is not False: key = self.key _instance = self.mapper._instance_processor( - context, - our_path + (self.mapper,), + context, + our_path + (self.mapper,), our_reduced_path + (self.mapper.base_mapper,), eager_adapter) @@ -1257,7 +1257,7 @@ class JoinedLoader(AbstractRelationshipLoader): return self.parent_property.\ _get_strategy(LazyLoader).\ create_row_processor( - context, path, + context, path, reduced_path, mapper, row, adapter) @@ -1388,7 +1388,7 @@ class LoadEagerFromAliasOption(PropertyOption): prop = root_mapper._props[propname] adapter = query._polymorphic_adapters.get(prop.mapper, None) query._attributes.setdefault( - ("user_defined_eager_row_processor", + ("user_defined_eager_row_processor", interfaces._reduce_path(path)), adapter) if self.alias is not None: @@ -1397,7 +1397,7 @@ class LoadEagerFromAliasOption(PropertyOption): prop = root_mapper._props[propname] self.alias = prop.target.alias(self.alias) query._attributes[ - ("user_defined_eager_row_processor", + ("user_defined_eager_row_processor", interfaces._reduce_path(paths[-1])) ] = sql_util.ColumnAdapter(self.alias) else: @@ -1405,18 +1405,18 @@ class LoadEagerFromAliasOption(PropertyOption): prop = root_mapper._props[propname] adapter = query._polymorphic_adapters.get(prop.mapper, None) query._attributes[ - ("user_defined_eager_row_processor", + ("user_defined_eager_row_processor", interfaces._reduce_path(paths[-1]))] = adapter def single_parent_validator(desc, prop): def _do_check(state, value, oldvalue, initiator): if value is not None and initiator.key == prop.key: hasparent = initiator.hasparent(attributes.instance_state(value)) - if hasparent and oldvalue is not value: + if hasparent and oldvalue is not value: raise sa_exc.InvalidRequestError( "Instance %s is already associated with an instance " "of %s via its %s attribute, and is only allowed a " - "single parent." % + "single parent." % (mapperutil.instance_str(value), state.class_, prop) ) return value diff --git a/lib/sqlalchemy/orm/sync.py b/lib/sqlalchemy/orm/sync.py index a20e871e4f..8fa04cc14e 100644 --- a/lib/sqlalchemy/orm/sync.py +++ b/lib/sqlalchemy/orm/sync.py @@ -4,14 +4,14 @@ # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php -"""private module containing functions used for copying data +"""private module containing functions used for copying data between instances based on join conditions. 
""" from sqlalchemy.orm import exc, util as mapperutil, attributes -def populate(source, source_mapper, dest, dest_mapper, +def populate(source, source_mapper, dest, dest_mapper, synchronize_pairs, uowcommit, flag_cascaded_pks): source_dict = source.dict dest_dict = dest.dict @@ -20,7 +20,7 @@ def populate(source, source_mapper, dest, dest_mapper, try: # inline of source_mapper._get_state_attr_by_column prop = source_mapper._columntoproperty[l] - value = source.manager[prop.key].impl.get(source, source_dict, + value = source.manager[prop.key].impl.get(source, source_dict, attributes.PASSIVE_OFF) except exc.UnmappedColumnError: _raise_col_to_prop(False, source_mapper, l, dest_mapper, r) @@ -47,7 +47,7 @@ def clear(dest, dest_mapper, synchronize_pairs): if r.primary_key: raise AssertionError( "Dependency rule tried to blank-out primary key " - "column '%s' on instance '%s'" % + "column '%s' on instance '%s'" % (r, mapperutil.state_str(dest)) ) try: @@ -75,7 +75,7 @@ def populate_dict(source, source_mapper, dict_, synchronize_pairs): dict_[r.key] = value def source_modified(uowcommit, source, source_mapper, synchronize_pairs): - """return true if the source object has changes from an old to a + """return true if the source object has changes from an old to a new value on the given synchronize pairs """ @@ -84,7 +84,7 @@ def source_modified(uowcommit, source, source_mapper, synchronize_pairs): prop = source_mapper._columntoproperty[l] except exc.UnmappedColumnError: _raise_col_to_prop(False, source_mapper, l, None, r) - history = uowcommit.get_attribute_history(source, prop.key, + history = uowcommit.get_attribute_history(source, prop.key, attributes.PASSIVE_NO_INITIALIZE) return bool(history.deleted) else: @@ -103,6 +103,6 @@ def _raise_col_to_prop(isdest, source_mapper, source_column, dest_mapper, dest_c "Can't execute sync rule for source column '%s'; mapper '%s' " "does not map this column. Try using an explicit `foreign_keys`" " collection which does not include destination column '%s' (or " - "use a viewonly=True relation)." % + "use a viewonly=True relation)." % (source_column, source_mapper, dest_column) ) diff --git a/lib/sqlalchemy/orm/unitofwork.py b/lib/sqlalchemy/orm/unitofwork.py index 8fc5f139d3..8761842a2a 100644 --- a/lib/sqlalchemy/orm/unitofwork.py +++ b/lib/sqlalchemy/orm/unitofwork.py @@ -26,7 +26,7 @@ def track_cascade_events(descriptor, prop): key = prop.key def append(state, item, initiator): - # process "save_update" cascade rules for when + # process "save_update" cascade rules for when # an instance is appended to the list of another instance sess = session._state_session(state) @@ -51,7 +51,7 @@ def track_cascade_events(descriptor, prop): sess.expunge(item) def set_(state, newvalue, oldvalue, initiator): - # process "save_update" cascade rules for when an instance + # process "save_update" cascade rules for when an instance # is attached to another instance if oldvalue is newvalue: return newvalue @@ -86,12 +86,12 @@ class UOWTransaction(object): def __init__(self, session): self.session = session - # dictionary used by external actors to + # dictionary used by external actors to # store arbitrary state information. self.attributes = {} - # dictionary of mappers to sets of - # DependencyProcessors, which are also + # dictionary of mappers to sets of + # DependencyProcessors, which are also # set to be part of the sorted flush actions, # which have that mapper as a parent. 
self.deps = util.defaultdict(set) @@ -106,7 +106,7 @@ class UOWTransaction(object): # and determine if a flush action is needed self.presort_actions = {} - # dictionary of PostSortRec objects, each + # dictionary of PostSortRec objects, each # one issues work during the flush within # a certain ordering. self.postsort_actions = {} @@ -124,7 +124,7 @@ class UOWTransaction(object): # tracks InstanceStates which will be receiving # a "post update" call. Keys are mappers, - # values are a set of states and a set of the + # values are a set of states and a set of the # columns which should be included in the update. self.post_update_states = util.defaultdict(lambda: (set(), set())) @@ -133,7 +133,7 @@ class UOWTransaction(object): return bool(self.states) def is_deleted(self, state): - """return true if the given state is marked as deleted + """return true if the given state is marked as deleted within this uowtransaction.""" return state in self.states and self.states[state][0] @@ -152,7 +152,7 @@ class UOWTransaction(object): self.states[state] = (isdelete, True) - def get_attribute_history(self, state, key, + def get_attribute_history(self, state, key, passive=attributes.PASSIVE_NO_INITIALIZE): """facade to attributes.get_state_history(), including caching of results.""" @@ -164,12 +164,12 @@ class UOWTransaction(object): if hashkey in self.attributes: history, state_history, cached_passive = self.attributes[hashkey] - # if the cached lookup was "passive" and now + # if the cached lookup was "passive" and now # we want non-passive, do a non-passive lookup and re-cache if cached_passive is not attributes.PASSIVE_OFF \ and passive is attributes.PASSIVE_OFF: impl = state.manager[key].impl - history = impl.get_history(state, state.dict, + history = impl.get_history(state, state.dict, attributes.PASSIVE_OFF) if history and impl.uses_objects: state_history = history.as_state() @@ -197,13 +197,13 @@ class UOWTransaction(object): if key not in self.presort_actions: self.presort_actions[key] = Preprocess(processor, fromparent) - def register_object(self, state, isdelete=False, + def register_object(self, state, isdelete=False, listonly=False, cancel_delete=False, operation=None, prop=None): if not self.session._contains_state(state): if not state.deleted and operation is not None: util.warn("Object of type %s not in session, %s operation " - "along '%s' will not proceed" % + "along '%s' will not proceed" % (mapperutil.state_class_str(state), operation, prop)) return False @@ -228,8 +228,8 @@ class UOWTransaction(object): @util.memoized_property def _mapper_for_dep(self): - """return a dynamic mapping of (Mapper, DependencyProcessor) to - True or False, indicating if the DependencyProcessor operates + """return a dynamic mapping of (Mapper, DependencyProcessor) to + True or False, indicating if the DependencyProcessor operates on objects of that Mapper. The result is stored in the dictionary persistently once @@ -241,7 +241,7 @@ class UOWTransaction(object): ) def filter_states_for_dep(self, dep, states): - """Filter the given list of InstanceStates to those relevant to the + """Filter the given list of InstanceStates to those relevant to the given DependencyProcessor. """ @@ -273,7 +273,7 @@ class UOWTransaction(object): # see if the graph of mapper dependencies has cycles. 
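``UOWTransaction.get_attribute_history()`` above is a caching facade over the attribute history system; the same ``History`` tuple of ``(added, unchanged, deleted)`` is available through the public ``attributes.get_history()`` function. Continuing the ``Parent`` sketch above, with illustrative attribute values::

    from sqlalchemy.orm.attributes import get_history

    p.name = 'p2'
    history = get_history(p, 'name')

    # History is a named tuple of (added, unchanged, deleted)
    assert 'p2' in history.added
    assert 'p1' in history.deleted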
self.cycles = cycles = topological.find_cycles( - self.dependencies, + self.dependencies, self.postsort_actions.values()) if cycles: @@ -319,14 +319,14 @@ class UOWTransaction(object): # execute if self.cycles: for set_ in topological.sort_as_subsets( - self.dependencies, + self.dependencies, postsort_actions): while set_: n = set_.pop() n.execute_aggregate(self, set_) else: for rec in topological.sort( - self.dependencies, + self.dependencies, postsort_actions): rec.execute(self) @@ -470,7 +470,7 @@ class SaveUpdateAll(PostSortRec): assert mapper is mapper.base_mapper def execute(self, uow): - persistence.save_obj(self.mapper, + persistence.save_obj(self.mapper, uow.states_for_mapper_hierarchy(self.mapper, False, False), uow ) @@ -478,8 +478,8 @@ class SaveUpdateAll(PostSortRec): def per_state_flush_actions(self, uow): states = list(uow.states_for_mapper_hierarchy(self.mapper, False, False)) for rec in self.mapper._per_state_flush_actions( - uow, - states, + uow, + states, False): yield rec @@ -501,8 +501,8 @@ class DeleteAll(PostSortRec): def per_state_flush_actions(self, uow): states = list(uow.states_for_mapper_hierarchy(self.mapper, True, False)) for rec in self.mapper._per_state_flush_actions( - uow, - states, + uow, + states, True): yield rec @@ -520,8 +520,8 @@ class ProcessState(PostSortRec): cls_ = self.__class__ dependency_processor = self.dependency_processor delete = self.delete - our_recs = [r for r in recs - if r.__class__ is cls_ and + our_recs = [r for r in recs + if r.__class__ is cls_ and r.dependency_processor is dependency_processor and r.delete is delete] recs.difference_update(our_recs) @@ -547,13 +547,13 @@ class SaveUpdateState(PostSortRec): def execute_aggregate(self, uow, recs): cls_ = self.__class__ mapper = self.mapper - our_recs = [r for r in recs - if r.__class__ is cls_ and + our_recs = [r for r in recs + if r.__class__ is cls_ and r.mapper is mapper] recs.difference_update(our_recs) persistence.save_obj(mapper, - [self.state] + - [r.state for r in our_recs], + [self.state] + + [r.state for r in our_recs], uow) def __repr__(self): @@ -570,13 +570,13 @@ class DeleteState(PostSortRec): def execute_aggregate(self, uow, recs): cls_ = self.__class__ mapper = self.mapper - our_recs = [r for r in recs - if r.__class__ is cls_ and + our_recs = [r for r in recs + if r.__class__ is cls_ and r.mapper is mapper] recs.difference_update(our_recs) states = [self.state] + [r.state for r in our_recs] persistence.delete_obj(mapper, - [s for s in states if uow.states[s][0]], + [s for s in states if uow.states[s][0]], uow) def __repr__(self): diff --git a/lib/sqlalchemy/orm/util.py b/lib/sqlalchemy/orm/util.py index 9e9c909c3c..fa6651f6cc 100644 --- a/lib/sqlalchemy/orm/util.py +++ b/lib/sqlalchemy/orm/util.py @@ -30,15 +30,15 @@ class CascadeOptions(frozenset): def __new__(cls, arg): values = set([ - c for c + c for c in re.split('\s*,\s*', arg or "") if c ]) if values.difference(cls._allowed_cascades): raise sa_exc.ArgumentError( - "Invalid cascade option(s): %s" % - ", ".join([repr(x) for x in + "Invalid cascade option(s): %s" % + ", ".join([repr(x) for x in sorted( values.difference(cls._allowed_cascades) )]) @@ -98,12 +98,12 @@ def polymorphic_union(table_map, typecolname, aliasname='p_union', cast_nulls=Tr See :ref:`concrete_inheritance` for an example of how this is used. - :param table_map: mapping of polymorphic identities to + :param table_map: mapping of polymorphic identities to :class:`.Table` objects. 
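``CascadeOptions.__new__`` above splits the comma-separated cascade string into a frozenset and rejects unknown names with ``ArgumentError``. A short sketch; the import path reflects where the class lives in this tree, and the misspelled cascade name is deliberate::

    from sqlalchemy import exc
    from sqlalchemy.orm.util import CascadeOptions

    opts = CascadeOptions("save-update, merge, delete")
    assert "delete" in opts

    try:
        CascadeOptions("save-update, delete-orphans")
    except exc.ArgumentError:
        # "Invalid cascade option(s): 'delete-orphans'"
        pass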
- :param typecolname: string name of a "discriminator" column, which will be + :param typecolname: string name of a "discriminator" column, which will be derived from the query, producing the polymorphic identity for each row. If ``None``, no polymorphic discriminator is generated. - :param aliasname: name of the :func:`~sqlalchemy.sql.expression.alias()` + :param aliasname: name of the :func:`~sqlalchemy.sql.expression.alias()` construct generated. :param cast_nulls: if True, non-existent columns, which are represented as labeled NULLs, will be passed into CAST. This is a legacy behavior that is problematic @@ -117,7 +117,7 @@ def polymorphic_union(table_map, typecolname, aliasname='p_union', cast_nulls=Tr for key in table_map.keys(): table = table_map[key] - # mysql doesnt like selecting from a select; + # mysql doesnt like selecting from a select; # make it an alias of the select if isinstance(table, sql.Select): table = table.alias() @@ -215,14 +215,14 @@ class ORMAdapter(sql_util.ColumnAdapter): and the AliasedClass if any is referenced. """ - def __init__(self, entity, equivalents=None, + def __init__(self, entity, equivalents=None, chain_to=None, adapt_required=False): self.mapper, selectable, is_aliased_class = _entity_info(entity) if is_aliased_class: self.aliased_class = entity else: self.aliased_class = None - sql_util.ColumnAdapter.__init__(self, selectable, + sql_util.ColumnAdapter.__init__(self, selectable, equivalents, chain_to, adapt_required=adapt_required) @@ -252,7 +252,7 @@ class AliasedClass(object): The resulting object is an instance of :class:`.AliasedClass`, however it implements a ``__getattribute__()`` scheme which will proxy attribute access to that of the ORM class being aliased. All classmethods - on the mapped entity should also be available here, including + on the mapped entity should also be available here, including hybrids created with the :ref:`hybrids_toplevel` extension, which will receive the :class:`.AliasedClass` as the "class" argument when classmethods are called. @@ -260,15 +260,15 @@ class AliasedClass(object): :param cls: ORM mapped entity which will be "wrapped" around an alias. :param alias: a selectable, such as an :func:`.alias` or :func:`.select` construct, which will be rendered in place of the mapped table of the - ORM entity. If left as ``None``, an ordinary :class:`.Alias` of the + ORM entity. If left as ``None``, an ordinary :class:`.Alias` of the ORM entity's mapped table will be generated. :param name: A name which will be applied both to the :class:`.Alias` if one is generated, as well as the name present in the "named tuple" returned by the :class:`.Query` object when results are returned. :param adapt_on_names: if True, more liberal "matching" will be used when - mapping the mapped columns of the ORM entity to those of the given selectable - - a name-based match will be performed if the given selectable doesn't - otherwise have a column that corresponds to one on the entity. The + mapping the mapped columns of the ORM entity to those of the given selectable - + a name-based match will be performed if the given selectable doesn't + otherwise have a column that corresponds to one on the entity. 
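A compact use of :func:`.polymorphic_union` with the parameters documented above; the two concrete tables are illustrative::

    from sqlalchemy import MetaData, Table, Column, Integer, String
    from sqlalchemy.orm import polymorphic_union

    metadata = MetaData()
    engineers = Table('engineers', metadata,
        Column('id', Integer, primary_key=True),
        Column('name', String(50)),
        Column('engineer_info', String(50)))
    managers = Table('managers', metadata,
        Column('id', Integer, primary_key=True),
        Column('name', String(50)),
        Column('manager_data', String(50)))

    # each sub-select in the UNION gets a literal 'type' column,
    # plus labeled NULLs for columns missing from that table
    pjoin = polymorphic_union(
        {'engineer': engineers, 'manager': managers},
        'type', aliasname='pjoin')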
The use case for this is when associating an entity with some derived selectable such as one that uses aggregate functions:: @@ -311,8 +311,8 @@ class AliasedClass(object): def __getstate__(self): return { - 'mapper':self.__mapper, - 'alias':self.__alias, + 'mapper':self.__mapper, + 'alias':self.__alias, 'name':self._sa_label_name, 'adapt_on_names':self.__adapt_on_names, } @@ -333,7 +333,7 @@ class AliasedClass(object): def __adapt_element(self, elem): return self.__adapter.traverse(elem).\ _annotate({ - 'parententity': self, + 'parententity': self, 'parentmapper':self.__mapper} ) @@ -400,7 +400,7 @@ class _ORMJoin(expression.Join): __visit_name__ = expression.Join.__visit_name__ - def __init__(self, left, right, onclause=None, + def __init__(self, left, right, onclause=None, isouter=False, join_to_left=True): adapt_from = None @@ -477,8 +477,8 @@ def join(left, right, onclause=None, isouter=False, join_to_left=True): as its functionality is encapsulated within that of the :meth:`.Query.join` method, which features a significant amount of automation beyond :func:`.orm.join` - by itself. Explicit usage of :func:`.orm.join` - with :class:`.Query` involves usage of the + by itself. Explicit usage of :func:`.orm.join` + with :class:`.Query` involves usage of the :meth:`.Query.select_from` method, as in:: from sqlalchemy.orm import join @@ -486,7 +486,7 @@ def join(left, right, onclause=None, isouter=False, join_to_left=True): select_from(join(User, Address, User.addresses)).\\ filter(Address.email_address=='foo@bar.com') - In modern SQLAlchemy the above join can be written more + In modern SQLAlchemy the above join can be written more succinctly as:: session.query(User).\\ @@ -516,12 +516,12 @@ def with_parent(instance, prop): The SQL rendered is the same as that rendered when a lazy loader would fire off from the given parent on that attribute, meaning - that the appropriate state is taken from the parent object in + that the appropriate state is taken from the parent object in Python without the need to render joins to the parent table in the rendered statement. .. versionchanged:: 0.6.4 - This method accepts parent instances in all + This method accepts parent instances in all persistence states, including transient, persistent, and detached. Only the requisite primary key/foreign key attributes need to be populated. Previous versions didn't work with transient @@ -532,8 +532,8 @@ def with_parent(instance, prop): :param property: String property name, or class-bound attribute, which indicates - what relationship from the instance should be used to reconcile the - parent/child relationship. + what relationship from the instance should be used to reconcile the + parent/child relationship. """ if isinstance(prop, basestring): @@ -542,8 +542,8 @@ def with_parent(instance, prop): elif isinstance(prop, attributes.QueryableAttribute): prop = prop.property - return prop.compare(operators.eq, - instance, + return prop.compare(operators.eq, + instance, value_is_parent=True) @@ -597,7 +597,7 @@ def _entity_descriptor(entity, key): return getattr(entity, key) except AttributeError: raise sa_exc.InvalidRequestError( - "Entity '%s' has no property '%s'" % + "Entity '%s' has no property '%s'" % (description, key) ) @@ -639,7 +639,7 @@ def object_mapper(instance): raise exc.UnmappedInstanceError(instance) def class_mapper(class_, compile=True): - """Given a class, return the primary :class:`.Mapper` associated + """Given a class, return the primary :class:`.Mapper` associated with the key. 
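The :func:`.with_parent` criterion described further above renders the lazy-load condition for a given parent instance without joining to the parent table. A sketch, assuming illustrative ``User``/``Address`` models::

    from sqlalchemy import create_engine, Column, Integer, String, ForeignKey
    from sqlalchemy.orm import relationship, Session, with_parent
    from sqlalchemy.ext.declarative import declarative_base

    Base = declarative_base()

    class User(Base):
        __tablename__ = 'user'
        id = Column(Integer, primary_key=True)
        addresses = relationship("Address")

    class Address(Base):
        __tablename__ = 'address'
        id = Column(Integer, primary_key=True)
        user_id = Column(Integer, ForeignKey('user.id'))
        email = Column(String(50))

    engine = create_engine('sqlite://')
    Base.metadata.create_all(engine)
    session = Session(bind=engine)

    someuser = User(addresses=[Address(email='foo@bar.com')])
    session.add(someuser)
    session.commit()

    # renders "address.user_id = :param" from the parent's primary
    # key; no join to the user table is emitted
    addresses = session.query(Address).\
        filter(with_parent(someuser, User.addresses)).all()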
Raises :class:`.UnmappedClassError` if no mapping is configured @@ -653,8 +653,8 @@ def class_mapper(class_, compile=True): mapper = class_manager.mapper except exc.NO_STATE: - if not isinstance(class_, type): - raise sa_exc.ArgumentError("Class object expected, got '%r'." % class_) + if not isinstance(class_, type): + raise sa_exc.ArgumentError("Class object expected, got '%r'." % class_) raise exc.UnmappedClassError(class_) if compile and mapperlib.module._new_mappers: @@ -685,7 +685,7 @@ def has_identity(object): return state.has_identity def _is_mapped_class(cls): - """Return True if the given object is a mapped class, + """Return True if the given object is a mapped class, :class:`.Mapper`, or :class:`.AliasedClass`.""" if isinstance(cls, (AliasedClass, mapperlib.Mapper)): @@ -698,7 +698,7 @@ def _is_mapped_class(cls): return False def _mapper_or_none(cls): - """Return the :class:`.Mapper` for the given class or None if the + """Return the :class:`.Mapper` for the given class or None if the class is not mapped.""" manager = attributes.manager_of_class(cls) diff --git a/lib/sqlalchemy/processors.py b/lib/sqlalchemy/processors.py index c4bac28347..bc5c3909a4 100644 --- a/lib/sqlalchemy/processors.py +++ b/lib/sqlalchemy/processors.py @@ -5,7 +5,7 @@ # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php -"""defines generic type conversion functions, as used in bind and result +"""defines generic type conversion functions, as used in bind and result processors. They all share one common characteristic: None is passed through unchanged. @@ -110,9 +110,9 @@ try: def to_decimal_processor_factory(target_class, scale=10): # Note that the scale argument is not taken into account for integer - # values in the C implementation while it is in the Python one. - # For example, the Python implementation might return - # Decimal('5.00000') whereas the C implementation will + # values in the C implementation while it is in the Python one. + # For example, the Python implementation might return + # Decimal('5.00000') whereas the C implementation will # return Decimal('5'). These are equivalent of course. return DecimalResultProcessor(target_class, "%%.%df" % scale).process diff --git a/lib/sqlalchemy/schema.py b/lib/sqlalchemy/schema.py index df55f61255..0817614ee0 100644 --- a/lib/sqlalchemy/schema.py +++ b/lib/sqlalchemy/schema.py @@ -84,7 +84,7 @@ def _validate_dialect_kwargs(kwargs, name): # validate remaining kwargs that they all specify DB prefixes if len([k for k in kwargs if not re.match( - r'^(?:%s)_' % + r'^(?:%s)_' % '|'.join(dialects.__all__), k ) ]): @@ -97,7 +97,7 @@ class Table(SchemaItem, expression.TableClause): e.g.:: - mytable = Table("mytable", metadata, + mytable = Table("mytable", metadata, Column('mytable_id', Integer, primary_key=True), Column('value', String(50)) ) @@ -105,7 +105,7 @@ class Table(SchemaItem, expression.TableClause): The :class:`.Table` object constructs a unique instance of itself based on its name and optional schema name within the given :class:`.MetaData` object. Calling the :class:`.Table` - constructor with the same name and same :class:`.MetaData` argument + constructor with the same name and same :class:`.MetaData` argument a second time will return the *same* :class:`.Table` object - in this way the :class:`.Table` constructor acts as a registry function. 
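The registry behavior described above is easy to observe: constructing a :class:`.Table` a second time with the same name and :class:`.MetaData`, and no new arguments, hands back the original object::

    from sqlalchemy import MetaData, Table, Column, Integer

    metadata = MetaData()
    t1 = Table('mytable', metadata,
               Column('mytable_id', Integer, primary_key=True))

    # same name, same MetaData: the registry returns the same object
    t2 = Table('mytable', metadata)
    assert t1 is t2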
@@ -115,7 +115,7 @@ class Table(SchemaItem, expression.TableClause): Constructor arguments are as follows: - :param name: The name of this table as represented in the database. + :param name: The name of this table as represented in the database. This property, along with the *schema*, indicates the *singleton identity* of this table in relation to its parent :class:`.MetaData`. @@ -126,13 +126,13 @@ class Table(SchemaItem, expression.TableClause): will be treated as case insensitive names, and will not be quoted unless they are a reserved word. Names with any number of upper case characters will be quoted and sent exactly. Note that this - behavior applies even for databases which standardize upper + behavior applies even for databases which standardize upper case names as case insensitive such as Oracle. - :param metadata: a :class:`.MetaData` object which will contain this + :param metadata: a :class:`.MetaData` object which will contain this table. The metadata is used as a point of association of this table with other tables which are referenced via foreign key. It also - may be used to associate this table with a particular + may be used to associate this table with a particular :class:`~sqlalchemy.engine.base.Connectable`. :param \*args: Additional positional arguments are used primarily @@ -141,14 +141,14 @@ class Table(SchemaItem, expression.TableClause): :class:`.SchemaItem` constructs may be added here, including :class:`.PrimaryKeyConstraint`, and :class:`.ForeignKeyConstraint`. - :param autoload: Defaults to False: the Columns for this table should + :param autoload: Defaults to False: the Columns for this table should be reflected from the database. Usually there will be no Column objects in the constructor if this property is set. - :param autoload_replace: If ``True``, when using ``autoload=True`` + :param autoload_replace: If ``True``, when using ``autoload=True`` and ``extend_existing=True``, replace ``Column`` objects already present in the ``Table`` that's - in the ``MetaData`` registry with + in the ``MetaData`` registry with what's reflected. Otherwise, all existing columns will be excluded from the reflection process. Note that this does not impact ``Column`` objects specified in the same call to ``Table`` @@ -157,7 +157,7 @@ class Table(SchemaItem, expression.TableClause): .. versionadded:: 0.7.5 - :param autoload_with: If autoload==True, this is an optional Engine + :param autoload_with: If autoload==True, this is an optional Engine or Connection instance to be used for the table reflection. If ``None``, the underlying MetaData's bound connectable will be used. @@ -166,7 +166,7 @@ class Table(SchemaItem, expression.TableClause): the constructor to the existing :class:`.Table`. If ``extend_existing`` or ``keep_existing`` are not set, an error is - raised if additional table modifiers are specified when + raised if additional table modifiers are specified when the given :class:`.Table` is already present in the :class:`.MetaData`. .. versionchanged:: 0.7.4 @@ -188,17 +188,17 @@ class Table(SchemaItem, expression.TableClause): autoload_with=engine ) - The above will overwrite all columns within ``mytable`` which + The above will overwrite all columns within ``mytable`` which are present in the database, except for ``y`` which will be used as is from the above definition. If the ``autoload_replace`` flag is set to False, no existing columns will be replaced. 
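Reflection via the ``autoload`` family of parameters above; the database URL and table name are illustrative::

    from sqlalchemy import create_engine, MetaData, Table, Column, String

    engine = create_engine('sqlite:///existing.db')
    metadata = MetaData()

    # column definitions are reflected from the live database
    users = Table('users', metadata, autoload=True, autoload_with=engine)

    # re-declare with one overridden column while reflecting the rest;
    # autoload_replace leaves the explicitly passed 'name' intact
    users = Table('users', metadata,
                  Column('name', String(100)),
                  extend_existing=True,
                  autoload=True, autoload_with=engine)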
- :param implicit_returning: True by default - indicates that - RETURNING can be used by default to fetch newly inserted primary key - values, for backends which support this. Note that + :param implicit_returning: True by default - indicates that + RETURNING can be used by default to fetch newly inserted primary key + values, for backends which support this. Note that create_engine() also provides an implicit_returning flag. - :param include_columns: A list of strings indicating a subset of + :param include_columns: A list of strings indicating a subset of columns to be loaded via the ``autoload`` operation; table columns who aren't present in this list will not be represented on the resulting ``Table`` object. Defaults to ``None`` which indicates all columns @@ -207,7 +207,7 @@ class Table(SchemaItem, expression.TableClause): :param info: A dictionary which defaults to ``{}``. A space to store application specific data. This must be a dictionary. - :param keep_existing: When ``True``, indicates that if this Table + :param keep_existing: When ``True``, indicates that if this Table is already present in the given :class:`.MetaData`, ignore further arguments within the constructor to the existing :class:`.Table`, and return the :class:`.Table` object as @@ -218,13 +218,13 @@ class Table(SchemaItem, expression.TableClause): being applied a second time. Also see extend_existing. If extend_existing or keep_existing are not set, an error is - raised if additional table modifiers are specified when + raised if additional table modifiers are specified when the given :class:`.Table` is already present in the :class:`.MetaData`. :param listeners: A list of tuples of the form ``(, )`` - which will be passed to :func:`.event.listen` upon construction. + which will be passed to :func:`.event.listen` upon construction. This alternate hook to :func:`.event.listen` allows the establishment - of a listener function specific to this :class:`.Table` before + of a listener function specific to this :class:`.Table` before the "autoload" process begins. Particularly useful for the :meth:`.events.column_reflect` event:: @@ -233,13 +233,13 @@ class Table(SchemaItem, expression.TableClause): # ... t = Table( - 'sometable', + 'sometable', autoload=True, listeners=[ ('column_reflect', listen_for_reflect) ]) - :param mustexist: When ``True``, indicates that this Table must already + :param mustexist: When ``True``, indicates that this Table must already be present in the given :class:`.MetaData`` collection, else an exception is raised. @@ -250,14 +250,14 @@ class Table(SchemaItem, expression.TableClause): :param quote: Force quoting of this table's name on or off, corresponding to ``True`` or ``False``. When left at its default of ``None``, the column identifier will be quoted according to whether the name is - case sensitive (identifiers with at least one upper case character are - treated as case sensitive), or if it's a reserved word. This flag + case sensitive (identifiers with at least one upper case character are + treated as case sensitive), or if it's a reserved word. This flag is only needed to force quoting of a reserved word which is not known by the SQLAlchemy dialect. :param quote_schema: same as 'quote' but applies to the schema identifier. - :param schema: The *schema name* for this table, which is required if + :param schema: The *schema name* for this table, which is required if the table resides in a schema other than the default selected schema for the engine's database connection. 
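``include_columns``, described below, limits which reflected columns end up on the :class:`.Table`; a sketch with illustrative names::

    from sqlalchemy import create_engine, MetaData, Table

    engine = create_engine('sqlite:///existing.db')
    metadata = MetaData()

    # only 'id' and 'name' are reflected; other columns present in
    # the database table are omitted from the Table object
    users = Table('users', metadata, autoload=True, autoload_with=engine,
                  include_columns=['id', 'name'])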
Defaults to ``None``. @@ -330,7 +330,7 @@ class Table(SchemaItem, expression.TableClause): for constructor arguments. """ - # __init__ is overridden to prevent __new__ from + # __init__ is overridden to prevent __new__ from # calling the superclass constructor. def _init(self, name, metadata, *args, **kwargs): @@ -346,7 +346,7 @@ class Table(SchemaItem, expression.TableClause): self.indexes = set() self.constraints = set() self._columns = expression.ColumnCollection() - PrimaryKeyConstraint()._set_parent_with_dispatch(self) + PrimaryKeyConstraint()._set_parent_with_dispatch(self) self.foreign_keys = set() self._extra_dependencies = set() self.kwargs = {} @@ -387,7 +387,7 @@ class Table(SchemaItem, expression.TableClause): def _autoload(self, metadata, autoload_with, include_columns, exclude_columns=()): if self.primary_key.columns: PrimaryKeyConstraint(*[ - c for c in self.primary_key.columns + c for c in self.primary_key.columns if c.key in exclude_columns ])._set_parent_with_dispatch(self) @@ -397,7 +397,7 @@ class Table(SchemaItem, expression.TableClause): self, include_columns, exclude_columns ) else: - bind = _bind_or_error(metadata, + bind = _bind_or_error(metadata, msg="No engine is bound to this Table's MetaData. " "Pass an engine to the Table via " "autoload_with=, " @@ -493,8 +493,8 @@ class Table(SchemaItem, expression.TableClause): This is another Table object which must be created first before this one can, or dropped after this one. - Usually, dependencies between tables are determined via - ForeignKey objects. However, for other situations that + Usually, dependencies between tables are determined via + ForeignKey objects. However, for other situations that create dependencies outside of foreign keys (rules, inheriting), this method can manually establish such a link. @@ -511,11 +511,11 @@ class Table(SchemaItem, expression.TableClause): UPDATE, etc. statements generated from this :class:`~.schema.Table` construct. - Note that this does **not** change the definition of the table + Note that this does **not** change the definition of the table as it exists within any underlying database, assuming that - table has already been created in the database. Relational - databases support the addition of columns to existing tables - using the SQL ALTER command, which would need to be + table has already been created in the database. Relational + databases support the addition of columns to existing tables + using the SQL ALTER command, which would need to be emitted for an already-existing table that doesn't contain the newly added column. @@ -527,11 +527,11 @@ class Table(SchemaItem, expression.TableClause): """Append a :class:`~.schema.Constraint` to this :class:`~.schema.Table`. This has the effect of the constraint being included in any - future CREATE TABLE statement, assuming specific DDL creation - events have not been associated with the given :class:`~.schema.Constraint` + future CREATE TABLE statement, assuming specific DDL creation + events have not been associated with the given :class:`~.schema.Constraint` object. - Note that this does **not** produce the constraint within the + Note that this does **not** produce the constraint within the relational database automatically, for a table that already exists in the database. 
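As the docstrings above stress, ``append_column()`` and ``append_constraint()`` affect only statements generated from the construct going forward; an already-created database table requires an ALTER, which is left to the application. A sketch with illustrative names::

    from sqlalchemy import (MetaData, Table, Column, Integer,
                            String, UniqueConstraint)

    metadata = MetaData()
    accounts = Table('accounts', metadata,
                     Column('id', Integer, primary_key=True))

    # included in future CREATE TABLE / SELECT / INSERT statements;
    # an existing database table is untouched until ALTER is emitted
    accounts.append_column(Column('email', String(100)))
    accounts.append_constraint(UniqueConstraint('email'))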
To add a constraint to an existing relational database table, the SQL ALTER command must @@ -558,7 +558,7 @@ class Table(SchemaItem, expression.TableClause): metadata._add_table(self.name, self.schema, self) self.metadata = metadata - def get_children(self, column_collections=True, + def get_children(self, column_collections=True, schema_visitor=False, **kw): if not schema_visitor: return expression.TableClause.get_children( @@ -575,11 +575,11 @@ class Table(SchemaItem, expression.TableClause): if bind is None: bind = _bind_or_error(self) - return bind.run_callable(bind.dialect.has_table, + return bind.run_callable(bind.dialect.has_table, self.name, schema=self.schema) def create(self, bind=None, checkfirst=False): - """Issue a ``CREATE`` statement for this + """Issue a ``CREATE`` statement for this :class:`.Table`, using the given :class:`.Connectable` for connectivity. @@ -589,13 +589,13 @@ class Table(SchemaItem, expression.TableClause): if bind is None: bind = _bind_or_error(self) - bind._run_visitor(ddl.SchemaGenerator, - self, + bind._run_visitor(ddl.SchemaGenerator, + self, checkfirst=checkfirst) def drop(self, bind=None, checkfirst=False): - """Issue a ``DROP`` statement for this + """Issue a ``DROP`` statement for this :class:`.Table`, using the given :class:`.Connectable` for connectivity. @@ -604,8 +604,8 @@ class Table(SchemaItem, expression.TableClause): """ if bind is None: bind = _bind_or_error(self) - bind._run_visitor(ddl.SchemaDropper, - self, + bind._run_visitor(ddl.SchemaDropper, + self, checkfirst=checkfirst) @@ -668,7 +668,7 @@ class Column(SchemaItem, expression.ColumnClause): """ Construct a new ``Column`` object. - :param name: The name of this column as represented in the database. + :param name: The name of this column as represented in the database. This argument may be the first positional argument, or specified via keyword. @@ -676,15 +676,15 @@ class Column(SchemaItem, expression.ColumnClause): will be treated as case insensitive names, and will not be quoted unless they are a reserved word. Names with any number of upper case characters will be quoted and sent exactly. Note that this - behavior applies even for databases which standardize upper + behavior applies even for databases which standardize upper case names as case insensitive such as Oracle. The name field may be omitted at construction time and applied - later, at any time before the Column is associated with a + later, at any time before the Column is associated with a :class:`.Table`. This is to support convenient usage within the :mod:`~sqlalchemy.ext.declarative` extension. - :param type\_: The column's type, indicated using an instance which + :param type\_: The column's type, indicated using an instance which subclasses :class:`~sqlalchemy.types.TypeEngine`. If no arguments are required for the type, the class of the type can be sent as well, e.g.:: @@ -698,30 +698,30 @@ class Column(SchemaItem, expression.ColumnClause): The ``type`` argument may be the second positional argument or specified by keyword. - There is partial support for automatic detection of the - type based on that of a :class:`.ForeignKey` associated - with this column, if the type is specified as ``None``. - However, this feature is not fully implemented and + There is partial support for automatic detection of the + type based on that of a :class:`.ForeignKey` associated + with this column, if the type is specified as ``None``. + However, this feature is not fully implemented and may not function in all cases. 
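``Table.create()`` and ``Table.drop()`` above accept a bind and a ``checkfirst`` flag::

    from sqlalchemy import create_engine, MetaData, Table, Column, Integer

    engine = create_engine('sqlite://')
    metadata = MetaData()
    t = Table('t', metadata, Column('id', Integer, primary_key=True))

    t.create(bind=engine, checkfirst=True)   # CREATE TABLE, if not present
    assert t.exists(bind=engine)
    t.drop(bind=engine, checkfirst=True)     # DROP TABLE, if present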
- :param \*args: Additional positional arguments include various - :class:`.SchemaItem` derived constructs which will be applied - as options to the column. These include instances of - :class:`.Constraint`, :class:`.ForeignKey`, :class:`.ColumnDefault`, - and :class:`.Sequence`. In some cases an equivalent keyword + :param \*args: Additional positional arguments include various + :class:`.SchemaItem` derived constructs which will be applied + as options to the column. These include instances of + :class:`.Constraint`, :class:`.ForeignKey`, :class:`.ColumnDefault`, + and :class:`.Sequence`. In some cases an equivalent keyword argument is available such as ``server_default``, ``default`` and ``unique``. - :param autoincrement: This flag may be set to ``False`` to + :param autoincrement: This flag may be set to ``False`` to indicate an integer primary key column that should not be considered to be the "autoincrement" column, that is - the integer primary key column which generates values + the integer primary key column which generates values implicitly upon INSERT and whose value is usually returned via the DBAPI cursor.lastrowid attribute. It defaults to ``True`` to satisfy the common use case of a table with a single integer primary key column. If the table has a composite primary key consisting of more than one - integer column, set this flag to True only on the + integer column, set this flag to True only on the column that should be considered "autoincrement". The setting *only* has an effect for columns which are: @@ -744,18 +744,18 @@ class Column(SchemaItem, expression.ColumnClause): * DDL issued for the column will include database-specific keywords intended to signify this column as an "autoincrement" column, such as AUTO INCREMENT on MySQL, - SERIAL on Postgresql, and IDENTITY on MS-SQL. It does + SERIAL on Postgresql, and IDENTITY on MS-SQL. It does *not* issue AUTOINCREMENT for SQLite since this is a special SQLite flag that is not required for autoincrementing behavior. See the SQLite dialect documentation for information on SQLite's AUTOINCREMENT. - * The column will be considered to be available as + * The column will be considered to be available as cursor.lastrowid or equivalent, for those dialects which "post fetch" newly inserted identifiers after a row has - been inserted (SQLite, MySQL, MS-SQL). It does not have - any effect in this regard for databases that use sequences - to generate primary key identifiers (i.e. Firebird, Postgresql, + been inserted (SQLite, MySQL, MS-SQL). It does not have + any effect in this regard for databases that use sequences + to generate primary key identifiers (i.e. Firebird, Postgresql, Oracle). .. versionchanged:: 0.7.4 @@ -772,7 +772,7 @@ class Column(SchemaItem, expression.ColumnClause): the insert. This is a shortcut to using :class:`.ColumnDefault` as a positional argument. - Contrast this argument to ``server_default`` which creates a + Contrast this argument to ``server_default`` which creates a default generator on the database side. :param doc: optional String that can be used by the ORM or similar @@ -794,7 +794,7 @@ class Column(SchemaItem, expression.ColumnClause): :param info: A dictionary which defaults to ``{}``. A space to store application specific data. This must be a dictionary. - :param nullable: If set to the default of ``True``, indicates the + :param nullable: If set to the default of ``True``, indicates the column will be rendered as allowing NULL, else it's rendered as NOT NULL. 
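Applying the ``autoincrement`` rules above to a composite integer primary key; exactly one column keeps the flag (table name illustrative)::

    from sqlalchemy import MetaData, Table, Column, Integer

    metadata = MetaData()
    versioned = Table('versioned_thing', metadata,
        # the "autoincrement" column; receives the implicitly
        # generated value on INSERT
        Column('id', Integer, primary_key=True, autoincrement=True),
        # part of the composite primary key, but not autoincrementing
        Column('version_id', Integer, primary_key=True,
               autoincrement=False))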
This parameter is only used when issuing CREATE TABLE statements. @@ -996,7 +996,7 @@ class Column(SchemaItem, expression.ColumnClause): [repr(self.name)] + [repr(self.type)] + [repr(x) for x in self.foreign_keys if x is not None] + [repr(x) for x in self.constraints] + - [(self.table is not None and "table=<%s>" % + [(self.table is not None and "table=<%s>" % self.table.description or "table=None")] + ["%s=%s" % (k, repr(getattr(self, k))) for k in kwarg]) @@ -1011,7 +1011,7 @@ class Column(SchemaItem, expression.ColumnClause): existing = getattr(self, 'table', None) if existing is not None and existing is not table: raise exc.ArgumentError( - "Column object already assigned to Table '%s'" % + "Column object already assigned to Table '%s'" % existing.description) if self.key in table._columns: @@ -1071,15 +1071,15 @@ class Column(SchemaItem, expression.ColumnClause): [c.copy(**kw) for c in self.foreign_keys if not c.constraint] c = self._constructor( - name=self.name, - type_=self.type, - key = self.key, - primary_key = self.primary_key, - nullable = self.nullable, - unique = self.unique, - quote=self.quote, - index=self.index, - autoincrement=self.autoincrement, + name=self.name, + type_=self.type, + key = self.key, + primary_key = self.primary_key, + nullable = self.nullable, + unique = self.unique, + quote=self.quote, + index=self.index, + autoincrement=self.autoincrement, default=self.default, server_default=self.server_default, onupdate=self.onupdate, @@ -1107,11 +1107,11 @@ class Column(SchemaItem, expression.ColumnClause): "been assigned.") try: c = self._constructor( - expression._as_truncated(name or self.name), - self.type, - key = name or self.key, - primary_key = self.primary_key, - nullable = self.nullable, + expression._as_truncated(name or self.name), + self.type, + key = name or self.key, + primary_key = self.primary_key, + nullable = self.nullable, quote=self.quote, _proxies=[self], *fk) except TypeError, e: # Py3K @@ -1142,7 +1142,7 @@ class Column(SchemaItem, expression.ColumnClause): def get_children(self, schema_visitor=False, **kwargs): if schema_visitor: - return [x for x in (self.default, self.onupdate) + return [x for x in (self.default, self.onupdate) if x is not None] + \ list(self.foreign_keys) + list(self.constraints) else: @@ -1155,7 +1155,7 @@ class ForeignKey(SchemaItem): ``ForeignKey`` is specified as an argument to a :class:`.Column` object, e.g.:: - t = Table("remote_table", metadata, + t = Table("remote_table", metadata, Column("remote_id", ForeignKey("main_table.id")) ) @@ -1163,7 +1163,7 @@ class ForeignKey(SchemaItem): a dependency between two columns. The actual constraint is in all cases represented by the :class:`.ForeignKeyConstraint` object. This object will be generated automatically when - a ``ForeignKey`` is associated with a :class:`.Column` which + a ``ForeignKey`` is associated with a :class:`.Column` which in turn is associated with a :class:`.Table`. Conversely, when :class:`.ForeignKeyConstraint` is applied to a :class:`.Table`, ``ForeignKey`` markers are automatically generated to be @@ -1177,7 +1177,7 @@ class ForeignKey(SchemaItem): to the :class:`.Table`. The associated ``ForeignKey`` objects are created automatically. - The ``ForeignKey`` objects associated with an individual + The ``ForeignKey`` objects associated with an individual :class:`.Column` object are available in the `foreign_keys` collection of that column. @@ -1243,7 +1243,7 @@ class ForeignKey(SchemaItem): # the linked ForeignKeyConstraint. 
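Contrasting ``default`` and ``server_default`` per the descriptions above; the table and column names are illustrative::

    import datetime
    from sqlalchemy import MetaData, Table, Column, Integer, DateTime, text

    metadata = MetaData()
    events = Table('events', metadata,
        Column('id', Integer, primary_key=True),
        # client side: invoked by SQLAlchemy when the INSERT executes
        Column('created', DateTime, default=datetime.datetime.utcnow),
        # server side: rendered into the CREATE TABLE DDL as DEFAULT
        Column('updated', DateTime,
               server_default=text('CURRENT_TIMESTAMP')))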
# ForeignKey will create this when parent Column # is attached to a Table, *or* ForeignKeyConstraint - # object passes itself in when creating ForeignKey + # object passes itself in when creating ForeignKey # markers. self.constraint = _constraint @@ -1315,7 +1315,7 @@ class ForeignKey(SchemaItem): return table.corresponding_column(self.column) is not None def get_referent(self, table): - """Return the :class:`.Column` in the given :class:`.Table` + """Return the :class:`.Column` in the given :class:`.Table` referenced by this :class:`.ForeignKey`. Returns None if this :class:`.ForeignKey` does not reference the given @@ -1335,7 +1335,7 @@ class ForeignKey(SchemaItem): process to locate the referenced remote :class:`.Column`. The resolution process traverses to the parent :class:`.Column`, :class:`.Table`, and - :class:`.MetaData` to proceed - if any of these aren't + :class:`.MetaData` to proceed - if any of these aren't yet present, an error is raised. """ @@ -1414,7 +1414,7 @@ class ForeignKey(SchemaItem): raise exc.NoReferencedColumnError( "Could not create ForeignKey '%s' on table '%s': " "table '%s' has no column named '%s'" % ( - self._colspec, parenttable.name, table.name, key), + self._colspec, parenttable.name, table.name, key), table.name, key) elif hasattr(self._colspec, '__clause_element__'): @@ -1495,7 +1495,7 @@ class DefaultGenerator(_NotAColumnExpr, SchemaItem): class ColumnDefault(DefaultGenerator): """A plain default value on a column. - This could correspond to a constant, a callable function, + This could correspond to a constant, a callable function, or a SQL clause. :class:`.ColumnDefault` is generated automatically @@ -1609,7 +1609,7 @@ class Sequence(DefaultGenerator): is_sequence = True def __init__(self, name, start=None, increment=None, schema=None, - optional=False, quote=None, metadata=None, + optional=False, quote=None, metadata=None, quote_schema=None, for_update=False): """Construct a :class:`.Sequence` object. @@ -1617,10 +1617,10 @@ class Sequence(DefaultGenerator): :param name: The name of the sequence. :param start: the starting index of the sequence. This value is used when the CREATE SEQUENCE command is emitted to the database - as the value of the "START WITH" clause. If ``None``, the + as the value of the "START WITH" clause. If ``None``, the clause is omitted, which on most platforms indicates a starting value of 1. - :param increment: the increment value of the sequence. This + :param increment: the increment value of the sequence. This value is used when the CREATE SEQUENCE command is emitted to the database as the value of the "INCREMENT BY" clause. If ``None``, the clause is omitted, which on most platforms indicates an @@ -1637,9 +1637,9 @@ class Sequence(DefaultGenerator): forces quoting of the schema name on or off. When left at its default of ``None``, normal quoting rules based on casing and reserved words take place. - :param metadata: optional :class:`.MetaData` object which will be + :param metadata: optional :class:`.MetaData` object which will be associated with this :class:`.Sequence`. A :class:`.Sequence` - that is associated with a :class:`.MetaData` gains access to the + that is associated with a :class:`.MetaData` gains access to the ``bind`` of that :class:`.MetaData`, meaning the :meth:`.Sequence.create` and :meth:`.Sequence.drop` methods will make usage of that engine automatically. @@ -1647,14 +1647,14 @@ class Sequence(DefaultGenerator): .. 
versionchanged:: 0.7 Additionally, the appropriate CREATE SEQUENCE/ DROP SEQUENCE DDL commands will be emitted corresponding to this - :class:`.Sequence` when :meth:`.MetaData.create_all` and + :class:`.Sequence` when :meth:`.MetaData.create_all` and :meth:`.MetaData.drop_all` are invoked. - Note that when a :class:`.Sequence` is applied to a :class:`.Column`, - the :class:`.Sequence` is automatically associated with the - :class:`.MetaData` object of that column's parent :class:`.Table`, + Note that when a :class:`.Sequence` is applied to a :class:`.Column`, + the :class:`.Sequence` is automatically associated with the + :class:`.MetaData` object of that column's parent :class:`.Table`, when that association is made. The :class:`.Sequence` will then - be subject to automatic CREATE SEQUENCE/DROP SEQUENCE corresponding + be subject to automatic CREATE SEQUENCE/DROP SEQUENCE corresponding to when the :class:`.Table` object itself is created or dropped, rather than that of the :class:`.MetaData` object overall. :param for_update: Indicates this :class:`.Sequence`, when associated @@ -1719,8 +1719,8 @@ class Sequence(DefaultGenerator): if bind is None: bind = _bind_or_error(self) - bind._run_visitor(ddl.SchemaGenerator, - self, + bind._run_visitor(ddl.SchemaGenerator, + self, checkfirst=checkfirst) def drop(self, bind=None, checkfirst=True): @@ -1728,8 +1728,8 @@ class Sequence(DefaultGenerator): if bind is None: bind = _bind_or_error(self) - bind._run_visitor(ddl.SchemaDropper, - self, + bind._run_visitor(ddl.SchemaDropper, + self, checkfirst=checkfirst) def _not_a_column_expr(self): @@ -1737,7 +1737,7 @@ class Sequence(DefaultGenerator): "This %s cannot be used directly " "as a column expression. Use func.next_value(sequence) " "to produce a 'next value' function that's usable " - "as a column element." + "as a column element." % self.__class__.__name__) @@ -1813,10 +1813,10 @@ class PassiveDefault(DefaultClause): """A DDL-specified DEFAULT column value. .. deprecated:: 0.6 - :class:`.PassiveDefault` is deprecated. + :class:`.PassiveDefault` is deprecated. Use :class:`.DefaultClause`. """ - @util.deprecated("0.6", + @util.deprecated("0.6", ":class:`.PassiveDefault` is deprecated. " "Use :class:`.DefaultClause`.", False) @@ -1828,8 +1828,8 @@ class Constraint(SchemaItem): __visit_name__ = 'constraint' - def __init__(self, name=None, deferrable=None, initially=None, - _create_rule=None, + def __init__(self, name=None, deferrable=None, initially=None, + _create_rule=None, **kw): """Create a SQL constraint. @@ -1860,8 +1860,8 @@ class Constraint(SchemaItem): _create_rule is used by some types to create constraints. Currently, its call signature is subject to change at any time. - - :param \**kwargs: + + :param \**kwargs: Dialect-specific keyword parameters, see the documentation for various dialects and constraints regarding options here. 
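Per the 0.7 change described above, a :class:`.Sequence` associated with a :class:`.MetaData` participates in ``create_all()`` / ``drop_all()``; the Postgresql URL is illustrative::

    from sqlalchemy import create_engine, MetaData, Sequence

    engine = create_engine('postgresql://scott:tiger@localhost/test')
    metadata = MetaData()

    # CREATE SEQUENCE / DROP SEQUENCE are emitted along with the
    # tables when create_all() / drop_all() run against this MetaData
    order_id_seq = Sequence('order_id_seq', start=1, increment=1,
                            metadata=metadata)
    metadata.create_all(engine)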
@@ -1896,7 +1896,7 @@ class Constraint(SchemaItem): class ColumnCollectionMixin(object): def __init__(self, *columns): self.columns = expression.ColumnCollection() - self._pending_colargs = [_to_schema_column_or_string(c) + self._pending_colargs = [_to_schema_column_or_string(c) for c in columns] if self._pending_colargs and \ isinstance(self._pending_colargs[0], Column) and \ @@ -1949,7 +1949,7 @@ class ColumnCollectionConstraint(ColumnCollectionMixin, Constraint): return self.columns.contains_column(col) def __iter__(self): - # inlining of + # inlining of # return iter(self.columns) # ColumnCollection->OrderedProperties->OrderedDict ordered_dict = self.columns._data @@ -1965,7 +1965,7 @@ class CheckConstraint(Constraint): Can be included in the definition of a Table or Column. """ - def __init__(self, sqltext, name=None, deferrable=None, + def __init__(self, sqltext, name=None, deferrable=None, initially=None, table=None, _create_rule=None): """Construct a CHECK constraint. @@ -2000,7 +2000,7 @@ class CheckConstraint(Constraint): __visit_name__ = property(__visit_name__) def copy(self, **kw): - c = CheckConstraint(self.sqltext, + c = CheckConstraint(self.sqltext, name=self.name, initially=self.initially, deferrable=self.deferrable, @@ -2079,16 +2079,16 @@ class ForeignKeyConstraint(Constraint): # standalone ForeignKeyConstraint - create # associated ForeignKey objects which will be applied to hosted - # Column objects (in col.foreign_keys), either now or when attached + # Column objects (in col.foreign_keys), either now or when attached # to the Table for string-specified names for col, refcol in zip(columns, refcolumns): self._elements[col] = ForeignKey( - refcol, - _constraint=self, - name=self.name, - onupdate=self.onupdate, - ondelete=self.ondelete, - use_alter=self.use_alter, + refcol, + _constraint=self, + name=self.name, + onupdate=self.onupdate, + ondelete=self.ondelete, + use_alter=self.use_alter, link_to_name=self.link_to_name ) @@ -2132,11 +2132,11 @@ class ForeignKeyConstraint(Constraint): def copy(self, **kw): fkc = ForeignKeyConstraint( - [x.parent.name for x in self._elements.values()], - [x._get_colspec(**kw) for x in self._elements.values()], - name=self.name, - onupdate=self.onupdate, - ondelete=self.ondelete, + [x.parent.name for x in self._elements.values()], + [x._get_colspec(**kw) for x in self._elements.values()], + name=self.name, + onupdate=self.onupdate, + ondelete=self.ondelete, use_alter=self.use_alter, deferrable=self.deferrable, initially=self.initially, @@ -2252,7 +2252,7 @@ class Index(ColumnCollectionMixin, SchemaItem): return self.table.bind def create(self, bind=None): - """Issue a ``CREATE`` statement for this + """Issue a ``CREATE`` statement for this :class:`.Index`, using the given :class:`.Connectable` for connectivity. @@ -2265,7 +2265,7 @@ class Index(ColumnCollectionMixin, SchemaItem): return self def drop(self, bind=None): - """Issue a ``DROP`` statement for this + """Issue a ``DROP`` statement for this :class:`.Index`, using the given :class:`.Connectable` for connectivity. @@ -2279,7 +2279,7 @@ class Index(ColumnCollectionMixin, SchemaItem): def __repr__(self): return 'Index(%s)' % ( ", ".join( - [repr(self.name)] + + [repr(self.name)] + [repr(c) for c in self.columns] + (self.unique and ["unique=True"] or []) )) @@ -2287,7 +2287,7 @@ class Index(ColumnCollectionMixin, SchemaItem): class MetaData(SchemaItem): """A collection of :class:`.Table` objects and their associated schema constructs. 
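The ``columns``/``refcolumns`` form of :class:`.ForeignKeyConstraint` above, declaring a composite foreign key; matching :class:`.ForeignKey` markers are generated on the constrained columns automatically (names illustrative)::

    from sqlalchemy import (MetaData, Table, Column, Integer,
                            ForeignKeyConstraint)

    metadata = MetaData()
    invoice = Table('invoice', metadata,
        Column('invoice_id', Integer, primary_key=True),
        Column('ref_num', Integer, primary_key=True))

    invoice_item = Table('invoice_item', metadata,
        Column('item_id', Integer, primary_key=True),
        Column('invoice_id', Integer),
        Column('ref_num', Integer),
        ForeignKeyConstraint(
            ['invoice_id', 'ref_num'],
            ['invoice.invoice_id', 'invoice.ref_num']))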
- Holds a collection of :class:`.Table` objects as well as + Holds a collection of :class:`.Table` objects as well as an optional binding to an :class:`.Engine` or :class:`.Connection`. If bound, the :class:`.Table` objects in the collection and their columns may participate in implicit SQL @@ -2380,14 +2380,14 @@ class MetaData(SchemaItem): key = _get_table_key(name, schema) dict.pop(self.tables, key, None) if self._schemas: - self._schemas = set([t.schema - for t in self.tables.values() + self._schemas = set([t.schema + for t in self.tables.values() if t.schema is not None]) def __getstate__(self): return {'tables': self.tables, 'schema':self.schema, 'quote_schema':self.quote_schema, - 'schemas':self._schemas, + 'schemas':self._schemas, 'sequences':self._sequences} def __setstate__(self, state): @@ -2513,7 +2513,7 @@ class MetaData(SchemaItem): s = schema and (" schema '%s'" % schema) or '' raise exc.InvalidRequestError( 'Could not reflect: requested table(s) not available ' - 'in %s%s: (%s)' % + 'in %s%s: (%s)' % (bind.engine.url, s, ', '.join(missing))) load = [name for name in only if name not in current] @@ -2558,8 +2558,8 @@ class MetaData(SchemaItem): """ if bind is None: bind = _bind_or_error(self) - bind._run_visitor(ddl.SchemaGenerator, - self, + bind._run_visitor(ddl.SchemaGenerator, + self, checkfirst=checkfirst, tables=tables) @@ -2585,8 +2585,8 @@ class MetaData(SchemaItem): """ if bind is None: bind = _bind_or_error(self) - bind._run_visitor(ddl.SchemaDropper, - self, + bind._run_visitor(ddl.SchemaDropper, + self, checkfirst=checkfirst, tables=tables) @@ -2713,8 +2713,8 @@ class DDLElement(expression.Executable, expression.ClauseElement): ``.bind`` property. :param target: - Optional, defaults to None. The target SchemaItem for the - execute call. Will be passed to the ``on`` callable if any, + Optional, defaults to None. The target SchemaItem for the + execute call. Will be passed to the ``on`` callable if any, and may also provide string expansion data for the statement. See ``execute_at`` for more information. @@ -2748,7 +2748,7 @@ class DDLElement(expression.Executable, expression.ClauseElement): The Table or MetaData instance for which this DDLElement will be associated with. - A DDLElement instance can be linked to any number of schema items. + A DDLElement instance can be linked to any number of schema items. ``execute_at`` builds on the ``append_ddl_listener`` interface of :class:`.MetaData` and :class:`.Table` objects. @@ -2760,7 +2760,7 @@ class DDLElement(expression.Executable, expression.ClauseElement): """ def call_event(target, connection, **kw): - if self._should_execute_deprecated(event_name, + if self._should_execute_deprecated(event_name, target, connection, **kw): return connection.execute(self.against(target)) @@ -2774,14 +2774,14 @@ class DDLElement(expression.Executable, expression.ClauseElement): @expression._generative def execute_if(self, dialect=None, callable_=None, state=None): - """Return a callable that will execute this + """Return a callable that will execute this DDLElement conditionally. 
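``MetaData.reflect()`` above with the ``only`` parameter; requested names not present in the database raise the ``InvalidRequestError`` shown (URL and table names illustrative)::

    from sqlalchemy import create_engine, MetaData

    engine = create_engine('sqlite:///existing.db')
    metadata = MetaData()

    # reflect just the named tables; a missing name raises
    # "Could not reflect: requested table(s) not available ..."
    metadata.reflect(bind=engine, only=['users', 'addresses'])
    users = metadata.tables['users']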
Used to provide a wrapper for event listening:: event.listen( metadata, - 'before_create', + 'before_create', DDL("my_ddl").execute_if(dialect='postgresql') ) @@ -2795,15 +2795,15 @@ class DDLElement(expression.Executable, expression.ClauseElement): DDL('something').execute_if(dialect=('postgresql', 'mysql')) - :param callable_: A callable, which will be invoked with - four positional arguments as well as optional keyword + :param callable_: A callable, which will be invoked with + four positional arguments as well as optional keyword arguments: :ddl: This DDL element. :target: - The :class:`.Table` or :class:`.MetaData` object which is the target of + The :class:`.Table` or :class:`.MetaData` object which is the target of this event. May be None if the DDL is executed explicitly. :bind: @@ -2820,13 +2820,13 @@ class DDLElement(expression.Executable, expression.ClauseElement): :checkfirst: Keyword argument, will be True if the 'checkfirst' flag was - set during the call to ``create()``, ``create_all()``, + set during the call to ``create()``, ``create_all()``, ``drop()``, ``drop_all()``. If the callable returns a true value, the DDL statement will be executed. - :param state: any value which will be passed to the callable_ + :param state: any value which will be passed to the callable_ as the ``state`` keyword argument. See also: @@ -2875,7 +2875,7 @@ class DDLElement(expression.Executable, expression.ClauseElement): def _check_ddl_on(self, on): if (on is not None and - (not isinstance(on, (basestring, tuple, list, set)) and + (not isinstance(on, (basestring, tuple, list, set)) and not util.callable(on))): raise exc.ArgumentError( "Expected the name of a database dialect, a tuple " @@ -2903,10 +2903,10 @@ class DDLElement(expression.Executable, expression.ClauseElement): class DDL(DDLElement): """A literal DDL statement. - Specifies literal SQL DDL to be executed by the database. DDL objects + Specifies literal SQL DDL to be executed by the database. DDL objects function as DDL event listeners, and can be subscribed to those events listed in :class:`.DDLEvents`, using either :class:`.Table` or :class:`.MetaData` - objects as targets. Basic templating support allows a single DDL instance + objects as targets. Basic templating support allows a single DDL instance to handle repetitive tasks for multiple tables. Examples:: @@ -2973,7 +2973,7 @@ class DDL(DDLElement): 'after-create' Will be None if the DDL is executed explicitly. :target: - The ``Table`` or ``MetaData`` object which is the target of + The ``Table`` or ``MetaData`` object which is the target of this event. May be None if the DDL is executed explicitly. :connection: @@ -3058,7 +3058,7 @@ class _CreateDropBase(DDLElement): def _create_rule_disable(self, compiler): """Allow disable of _create_rule using a callable. - Pass to _create_rule using + Pass to _create_rule using util.portable_instancemethod(self._create_rule_disable) to retain serializability. diff --git a/lib/sqlalchemy/sql/expression.py b/lib/sqlalchemy/sql/expression.py index f0549cc799..495ea0c8dd 100644 --- a/lib/sqlalchemy/sql/expression.py +++ b/lib/sqlalchemy/sql/expression.py @@ -114,20 +114,20 @@ def outerjoin(left, right, onclause=None): The returned object is an instance of :class:`.Join`. - Similar functionality is also available via the - :meth:`~.FromClause.outerjoin()` method on any + Similar functionality is also available via the + :meth:`~.FromClause.outerjoin()` method on any :class:`.FromClause`. :param left: The left side of the join. 
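Alongside the ``dialect`` form shown above, ``execute_if()`` accepts a ``callable_``; a sketch, with the argument list written defensively against the positional/keyword mix the docstring describes, and an illustrative DDL statement::

    from sqlalchemy import event, MetaData
    from sqlalchemy.schema import DDL

    metadata = MetaData()

    def only_postgresql(ddl, target, bind, *args, **kw):
        # receives this DDL element, the Table/MetaData target and
        # the bind, plus keywords such as tables/state/checkfirst
        return bind.dialect.name == 'postgresql'

    event.listen(
        metadata, 'before_create',
        DDL("CREATE EXTENSION hstore").execute_if(
            callable_=only_postgresql))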
:param right: The right side of the join. - :param onclause: Optional criterion for the ``ON`` clause, is - derived from foreign key relationships established between + :param onclause: Optional criterion for the ``ON`` clause, is + derived from foreign key relationships established between left and right otherwise. - To chain joins together, use the :meth:`.FromClause.join` or - :meth:`.FromClause.outerjoin` methods on the resulting + To chain joins together, use the :meth:`.FromClause.join` or + :meth:`.FromClause.outerjoin` methods on the resulting :class:`.Join` object. """ @@ -138,20 +138,20 @@ def join(left, right, onclause=None, isouter=False): The returned object is an instance of :class:`.Join`. - Similar functionality is also available via the - :meth:`~.FromClause.join()` method on any + Similar functionality is also available via the + :meth:`~.FromClause.join()` method on any :class:`.FromClause`. :param left: The left side of the join. :param right: The right side of the join. - :param onclause: Optional criterion for the ``ON`` clause, is - derived from foreign key relationships established between + :param onclause: Optional criterion for the ``ON`` clause, is + derived from foreign key relationships established between left and right otherwise. - To chain joins together, use the :meth:`.FromClause.join` or - :meth:`.FromClause.outerjoin` methods on the resulting + To chain joins together, use the :meth:`.FromClause.join` or + :meth:`.FromClause.outerjoin` methods on the resulting :class:`.Join` object. @@ -208,7 +208,7 @@ def select(columns=None, whereclause=None, from_obj=[], **kwargs): to set the autocommit option. :param bind=None: - an :class:`~.base.Engine` or :class:`~.base.Connection` instance + an :class:`~.base.Engine` or :class:`~.base.Connection` instance to which the resulting :class:`.Select` object will be bound. The :class:`.Select` object will otherwise automatically bind to whatever @@ -236,7 +236,7 @@ def select(columns=None, whereclause=None, from_obj=[], **kwargs): ``distinct`` is also available via the :meth:`~.Select.distinct` generative method. - .. note:: + .. note:: The ``distinct`` keyword's acceptance of a string argument for usage with MySQL is deprecated. Use @@ -249,7 +249,7 @@ def select(columns=None, whereclause=None, from_obj=[], **kwargs): Certain database dialects also support alternate values for this parameter: - * With the MySQL dialect, the value ``"read"`` translates to + * With the MySQL dialect, the value ``"read"`` translates to ``LOCK IN SHARE MODE``. * With the Oracle and Postgresql dialects, the value ``"nowait"`` translates to ``FOR UPDATE NOWAIT``. @@ -306,7 +306,7 @@ def select(columns=None, whereclause=None, from_obj=[], **kwargs): **kwargs) def subquery(alias, *args, **kwargs): - """Return an :class:`.Alias` object derived + """Return an :class:`.Alias` object derived from a :class:`.Select`. name @@ -321,7 +321,7 @@ def subquery(alias, *args, **kwargs): return Select(*args, **kwargs).alias(alias) def insert(table, values=None, inline=False, **kwargs): - """Represent an ``INSERT`` statement via the :class:`.Insert` SQL + """Represent an ``INSERT`` statement via the :class:`.Insert` SQL construct. 
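The :func:`.outerjoin` / :func:`.join` functions above derive the ON clause from foreign keys when it is omitted, and the resulting :class:`.Join` chains further via its own methods; table names are illustrative::

    from sqlalchemy import (MetaData, Table, Column, Integer, String,
                            ForeignKey, select, outerjoin)

    metadata = MetaData()
    user = Table('user', metadata,
        Column('id', Integer, primary_key=True),
        Column('name', String(50)))
    address = Table('address', metadata,
        Column('id', Integer, primary_key=True),
        Column('user_id', Integer, ForeignKey('user.id')),
        Column('email', String(50)))

    # ON clause derived from the ForeignKey; use j.join()/j.outerjoin()
    # to chain additional tables onto the resulting Join
    j = outerjoin(user, address)
    stmt = select([user.c.name, address.c.email], from_obj=[j])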
Similar functionality is available via the :meth:`~.TableClause.insert` method on @@ -370,7 +370,7 @@ def insert(table, values=None, inline=False, **kwargs): return Insert(table, values, inline=inline, **kwargs) def update(table, whereclause=None, values=None, inline=False, **kwargs): - """Represent an ``UPDATE`` statement via the :class:`.Update` SQL + """Represent an ``UPDATE`` statement via the :class:`.Update` SQL construct. E.g.:: @@ -393,12 +393,12 @@ def update(table, whereclause=None, values=None, inline=False, **kwargs): :param whereclause: Optional SQL expression describing the ``WHERE`` condition of the ``UPDATE`` statement. Modern applications - may prefer to use the generative :meth:`~Update.where()` + may prefer to use the generative :meth:`~Update.where()` method to specify the ``WHERE`` clause. The WHERE clause can refer to multiple tables. For databases which support this, an ``UPDATE FROM`` clause will - be generated, or on MySQL, a multi-table update. The statement + be generated, or on MySQL, a multi-table update. The statement will fail on databases that don't have support for multi-table update statements. A SQL-standard method of referring to additional tables in the WHERE clause is to use a correlated @@ -416,20 +416,20 @@ def update(table, whereclause=None, values=None, inline=False, **kwargs): :param values: Optional dictionary which specifies the ``SET`` conditions of the ``UPDATE``. If left as ``None``, the ``SET`` - conditions are determined from those parameters passed to the - statement during the execution and/or compilation of the + conditions are determined from those parameters passed to the + statement during the execution and/or compilation of the statement. When compiled standalone without any parameters, the ``SET`` clause generates for all columns. - Modern applications may prefer to use the generative - :meth:`.Update.values` method to set the values of the + Modern applications may prefer to use the generative + :meth:`.Update.values` method to set the values of the UPDATE statement. :param inline: - if True, SQL defaults present on :class:`.Column` objects via + if True, SQL defaults present on :class:`.Column` objects via the ``default`` keyword will be compiled 'inline' into the statement and not pre-executed. This means that their values will not - be available in the dictionary returned from + be available in the dictionary returned from :meth:`.ResultProxy.last_updated_params`. If both ``values`` and compile-time bind parameters are present, the @@ -441,7 +441,7 @@ def update(table, whereclause=None, values=None, inline=False, **kwargs): :class:`.Column`, normally but not necessarily equivalent to its "name"). Normally, the :class:`.Column` objects used here are expected to be - part of the target :class:`.Table` that is the table + part of the target :class:`.Table` that is the table to be updated. However when using MySQL, a multiple-table UPDATE statement can refer to columns from any of the tables referred to in the WHERE clause. @@ -450,12 +450,12 @@ def update(table, whereclause=None, values=None, inline=False, **kwargs): * a literal data value (i.e. string, number, etc.) * a SQL expression, such as a related :class:`.Column`, - a scalar-returning :func:`.select` construct, + a scalar-returning :func:`.select` construct, etc. 
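A brief sketch of the generative style recommended above, assuming a hypothetical ``users`` table::

    from sqlalchemy.sql import table, column
    from sqlalchemy.sql.expression import update

    users = table('users', column('id'), column('name'))

    # where() and values() instead of the whereclause/values
    # constructor arguments.
    stmt = update(users).where(users.c.id == 5).values(name='ed')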
When combining :func:`.select` constructs within the values clause of an :func:`.update` construct, - the subquery represented by the :func:`.select` should be + the subquery represented by the :func:`.select` should be *correlated* to the parent table, that is, providing criterion which links the table inside the subquery to the outer table being updated:: @@ -468,20 +468,20 @@ def update(table, whereclause=None, values=None, inline=False, **kwargs): See also: - :ref:`inserts_and_updates` - SQL Expression + :ref:`inserts_and_updates` - SQL Expression Language Tutorial """ return Update( - table, - whereclause=whereclause, - values=values, - inline=inline, + table, + whereclause=whereclause, + values=values, + inline=inline, **kwargs) def delete(table, whereclause = None, **kwargs): - """Represent a ``DELETE`` statement via the :class:`.Delete` SQL + """Represent a ``DELETE`` statement via the :class:`.Delete` SQL construct. Similar functionality is available via the :meth:`~.TableClause.delete` method on @@ -582,7 +582,7 @@ def case(whens, value=None, else_=None): when specified as strings, will be interpreted as bound values. To specify textual SQL expressions for these, use the :func:`literal_column` - construct. + construct. The expressions used for the WHEN criterion may only be literal strings when "value" is @@ -607,7 +607,7 @@ def case(whens, value=None, else_=None): can be specified which determines the type of the :func:`case()` construct overall:: - case([(orderline.c.qty > 100, + case([(orderline.c.qty > 100, literal_column("'greaterthan100'", String)), (orderline.c.qty > 10, literal_column("'greaterthan10'", String)) @@ -653,8 +653,8 @@ def collate(expression, collation): expr = _literal_as_binds(expression) return _BinaryExpression( - expr, - _literal_as_text(collation), + expr, + _literal_as_text(collation), operators.collate, type_=expr.type) def exists(*args, **kwargs): @@ -785,7 +785,7 @@ def alias(selectable, name=None): with an alternate name assigned within SQL, typically using the ``AS`` clause when generated, e.g. ``SELECT * FROM table AS aliasname``. - Similar functionality is available via the + Similar functionality is available via the :meth:`~.FromClause.alias` method available on all :class:`.FromClause` subclasses. @@ -845,10 +845,10 @@ def tuple_(*expr): .. warning:: - The composite IN construct is not supported by all backends, + The composite IN construct is not supported by all backends, and is currently known to work on Postgresql and MySQL, but not SQLite. Unsupported backends will raise - a subclass of :class:`~sqlalchemy.exc.DBAPIError` when such + a subclass of :class:`~sqlalchemy.exc.DBAPIError` when such an expression is invoked. """ @@ -884,7 +884,7 @@ def type_coerce(expr, type_): conn.execute( select([type_coerce(mytable.c.ident, AsGuid)]).\\ where( - type_coerce(mytable.c.ident, AsGuid) == + type_coerce(mytable.c.ident, AsGuid) == uuid.uuid3(uuid.NAMESPACE_URL, 'bar') ) ) @@ -936,11 +936,11 @@ def column(text, type_=None): from sqlalchemy.sql import table, column - :param text: the name of the column. Quoting rules will be applied + :param text: the name of the column. Quoting rules will be applied to the clause like any other column name. For textual column constructs that are not to be quoted, use the :func:`literal_column` function. - :param type\_: an optional :class:`~sqlalchemy.types.TypeEngine` object + :param type\_: an optional :class:`~sqlalchemy.types.TypeEngine` object which will provide result-set translation for this column. 
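To illustrate the :func:`.case` behavior discussed above, a minimal sketch against a hypothetical ``orderline`` table; the plain string results render as bound parameters, per the note about literal values::

    from sqlalchemy.sql import table, column
    from sqlalchemy.sql.expression import case, select

    orderline = table('orderline', column('qty'))

    volume = case(
        [(orderline.c.qty > 100, 'high'),
         (orderline.c.qty > 10, 'medium')],
        else_='low')
    stmt = select([volume.label('volume')])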
See :class:`.ColumnClause` for further examples. @@ -974,8 +974,8 @@ def table(name, *columns): """Represent a textual table clause. The object returned is an instance of :class:`.TableClause`, which represents the - "syntactical" portion of the schema-level :class:`~.schema.Table` object. - It may be used to construct lightweight table constructs. + "syntactical" portion of the schema-level :class:`~.schema.Table` object. + It may be used to construct lightweight table constructs. Note that the :func:`~.expression.table` function is not part of the ``sqlalchemy`` namespace. It must be imported from the ``sql`` package:: @@ -1030,11 +1030,11 @@ def bindparam(key, value=None, type_=None, unique=False, required=False, callabl """ if isinstance(key, ColumnClause): - return _BindParamClause(key.name, value, type_=key.type, + return _BindParamClause(key.name, value, type_=key.type, callable_=callable_, unique=unique, required=required) else: - return _BindParamClause(key, value, type_=type_, + return _BindParamClause(key, value, type_=type_, callable_=callable_, unique=unique, required=required) @@ -1061,8 +1061,8 @@ def text(text, bind=None, *args, **kwargs): The advantages :func:`text` provides over a plain string are backend-neutral support for bind parameters, per-statement - execution options, as well as - bind parameter and result-column typing behavior, allowing + execution options, as well as + bind parameter and result-column typing behavior, allowing SQLAlchemy type constructs to play a role when executing a statement that is specified literally. @@ -1072,7 +1072,7 @@ def text(text, bind=None, *args, **kwargs): t = text("SELECT * FROM users WHERE id=:user_id") result = connection.execute(t, user_id=12) - To invoke SQLAlchemy typing logic for bind parameters, the + To invoke SQLAlchemy typing logic for bind parameters, the ``bindparams`` list allows specification of :func:`bindparam` constructs which specify the type for a given name:: @@ -1114,8 +1114,8 @@ def text(text, bind=None, *args, **kwargs): Note that SQLAlchemy's usual "autocommit" behavior applies to :func:`text` constructs - that is, statements which begin - with a phrase such as ``INSERT``, ``UPDATE``, ``DELETE``, - or a variety of other phrases specific to certain backends, will + with a phrase such as ``INSERT``, ``UPDATE``, ``DELETE``, + or a variety of other phrases specific to certain backends, will be eligible for autocommit if no transaction is in progress. :param text: @@ -1141,7 +1141,7 @@ def text(text, bind=None, *args, **kwargs): a dictionary mapping the names of columns represented in the columns clause of a ``SELECT`` statement to type objects, which will be used to perform post-processing on columns within - the result set. This argument applies to any expression + the result set. This argument applies to any expression that returns result sets. """ @@ -1184,14 +1184,14 @@ def null(): return _Null() def true(): - """Return a :class:`_True` object, which compiles to ``true``, or the + """Return a :class:`_True` object, which compiles to ``true``, or the boolean equivalent for the target dialect. """ return _True() def false(): - """Return a :class:`_False` object, which compiles to ``false``, or the + """Return a :class:`_False` object, which compiles to ``false``, or the boolean equivalent for the target dialect. 
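To sketch the ``bindparams`` typing behavior described above (the ``users`` table and the ``DateTime`` coercion are assumptions)::

    from sqlalchemy import DateTime
    from sqlalchemy.sql.expression import text, bindparam

    # The bindparam() entry attaches DateTime coercion to :created.
    t = text("SELECT * FROM users WHERE created_at > :created",
             bindparams=[bindparam('created', type_=DateTime)])
    # result = connection.execute(t, created=some_datetime)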
""" @@ -1302,23 +1302,23 @@ class _truncated_label(unicode): return self # for backwards compatibility in case -# someone is re-implementing the +# someone is re-implementing the # _truncated_identifier() sequence in a custom # compiler _generated_label = _truncated_label class _anonymous_label(_truncated_label): - """A unicode subclass used to identify anonymously + """A unicode subclass used to identify anonymously generated names.""" def __add__(self, other): return _anonymous_label( - unicode(self) + + unicode(self) + unicode(other)) def __radd__(self, other): return _anonymous_label( - unicode(other) + + unicode(other) + unicode(self)) def apply_map(self, map_): @@ -1327,7 +1327,7 @@ class _anonymous_label(_truncated_label): def _as_truncated(value): """coerce the given value to :class:`._truncated_label`. - Existing :class:`._truncated_label` and + Existing :class:`._truncated_label` and :class:`._anonymous_label` objects are passed unchanged. """ @@ -1357,7 +1357,7 @@ def _expand_cloned(elements): return itertools.chain(*[x._cloned_set for x in elements]) def _select_iterables(elements): - """expand tables into individual columns in the + """expand tables into individual columns in the given list of column expressions. """ @@ -1487,8 +1487,8 @@ def _corresponding_column_or_error(fromclause, column, raise exc.InvalidRequestError( "Given column '%s', attached to table '%s', " "failed to locate a corresponding column from table '%s'" - % - (column, + % + (column, getattr(column, 'table', None),fromclause.description) ) return c @@ -1546,7 +1546,7 @@ class ClauseElement(Visitable): def _constructor(self): """return the 'constructor' for this ClauseElement. - This is for the purposes for creating a new object of + This is for the purposes for creating a new object of this type. Usually, its just the element's __class__. However, the "Annotated" version of the object overrides to return the class of its proxied element. @@ -1683,21 +1683,21 @@ class ClauseElement(Visitable): def self_group(self, against=None): """Apply a 'grouping' to this :class:`.ClauseElement`. - This method is overridden by subclasses to return a + This method is overridden by subclasses to return a "grouping" construct, i.e. parenthesis. In particular it's used by "binary" expressions to provide a grouping - around themselves when placed into a larger expression, + around themselves when placed into a larger expression, as well as by :func:`.select` constructs when placed into - the FROM clause of another :func:`.select`. (Note that - subqueries should be normally created using the + the FROM clause of another :func:`.select`. (Note that + subqueries should be normally created using the :func:`.Select.alias` method, as many platforms require nested SELECT statements to be named). As expressions are composed together, the application of - :meth:`self_group` is automatic - end-user code should never + :meth:`self_group` is automatic - end-user code should never need to use this method directly. Note that SQLAlchemy's - clause constructs take operator precedence into account - - so parenthesis might not be needed, for example, in + clause constructs take operator precedence into account - + so parenthesis might not be needed, for example, in an expression like ``x OR (y AND z)`` - AND takes precedence over OR. 
@@ -1807,8 +1807,8 @@ class ClauseElement(Visitable): return self.negation_clause else: return _UnaryExpression( - self.self_group(against=operators.inv), - operator=operators.inv, + self.self_group(against=operators.inv), + operator=operators.inv, negate=None) def __repr__(self): @@ -1837,7 +1837,7 @@ class _CompareMixin(ColumnOperators): """Defines comparison and math operations for :class:`.ClauseElement` instances. - See :class:`.ColumnOperators` and :class:`.Operators` for descriptions + See :class:`.ColumnOperators` and :class:`.Operators` for descriptions of all operations. """ @@ -1859,16 +1859,16 @@ class _CompareMixin(ColumnOperators): obj = self._check_literal(op, obj) if reverse: - return _BinaryExpression(obj, - self, - op, - type_=sqltypes.BOOLEANTYPE, + return _BinaryExpression(obj, + self, + op, + type_=sqltypes.BOOLEANTYPE, negate=negate, modifiers=kwargs) else: - return _BinaryExpression(self, - obj, - op, - type_=sqltypes.BOOLEANTYPE, + return _BinaryExpression(self, + obj, + op, + type_=sqltypes.BOOLEANTYPE, negate=negate, modifiers=kwargs) def __operate(self, op, obj, reverse=False): @@ -1993,7 +1993,7 @@ class _CompareMixin(ColumnOperators): """See :meth:`.ColumnOperators.endswith`.""" return self.__compare( operators.like_op, - literal_column("'%'", type_=sqltypes.String) + + literal_column("'%'", type_=sqltypes.String) + self._check_literal(operators.like_op, other), escape=escape) @@ -2065,8 +2065,8 @@ class _CompareMixin(ColumnOperators): return lambda other: self.__operate(operator, other) def _bind_param(self, operator, obj): - return _BindParamClause(None, obj, - _compared_to_operator=operator, + return _BindParamClause(None, obj, + _compared_to_operator=operator, _compared_to_type=self.type, unique=True) def _check_literal(self, operator, other): @@ -2138,7 +2138,7 @@ class ColumnElement(ClauseElement, _CompareMixin): return s def shares_lineage(self, othercolumn): - """Return True if the given :class:`.ColumnElement` + """Return True if the given :class:`.ColumnElement` has a common ancestor to this :class:`.ColumnElement`.""" return bool(self.proxy_set.intersection(othercolumn.proxy_set)) @@ -2165,8 +2165,8 @@ class ColumnElement(ClauseElement, _CompareMixin): else: key = name - co = ColumnClause(_as_truncated(name), - selectable, + co = ColumnClause(_as_truncated(name), + selectable, type_=getattr(self, 'type', None)) co.proxies = [self] @@ -2377,7 +2377,7 @@ class Selectable(ClauseElement): __visit_name__ = 'selectable' class FromClause(Selectable): - """Represent an element that can be used within the ``FROM`` + """Represent an element that can be used within the ``FROM`` clause of a ``SELECT`` statement. """ @@ -2386,7 +2386,7 @@ class FromClause(Selectable): _hide_froms = [] quote = None schema = None - _memoized_property = util.group_expirable_memoized_property(["_columns"]) + _memoized_property = util.group_expirable_memoized_property(["_columns"]) def count(self, whereclause=None, **params): """return a SELECT COUNT generated against this @@ -2397,9 +2397,9 @@ class FromClause(Selectable): else: col = list(self.columns)[0] return select( - [func.count(col).label('tbl_row_count')], - whereclause, - from_obj=[self], + [func.count(col).label('tbl_row_count')], + whereclause, + from_obj=[self], **params) def select(self, whereclause=None, **params): @@ -2441,22 +2441,22 @@ class FromClause(Selectable): """ # this is essentially an "identity" check in the base class. 
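Since :class:`._CompareMixin` is what gives column expressions their comparison behavior, a small sketch (hypothetical table) of grouping and negation, per the ``self_group()`` notes above; the parenthesis around the OR operand is applied by SQLAlchemy, not by user code::

    from sqlalchemy.sql import table, column
    from sqlalchemy.sql.expression import and_, or_, not_

    t = table('t', column('x'), column('y'), column('z'))

    # renders: t.x = :x_1 AND (t.y = :y_1 OR t.z = :z_1)
    expr = and_(t.c.x == 1, or_(t.c.y == 2, t.c.z == 3))

    # operator-aware inversion; renders: t.x != :x_1
    negated = not_(t.c.x == 1)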
- # Other constructs override this to traverse through + # Other constructs override this to traverse through # contained elements. return fromclause in self._cloned_set def _is_lexical_equivalent(self, other): """Return True if this FromClause and the other represent the same lexical identity. - - This tests if either one is a copy of the other, or + + This tests if either one is a copy of the other, or if they are the same via annotation identity. """ - return self._cloned_set.intersection(other._cloned_set) + return self._cloned_set.intersection(other._cloned_set) def replace_selectable(self, old, alias): - """replace all occurrences of FromClause 'old' with the given Alias + """replace all occurrences of FromClause 'old' with the given Alias object, returning a copy of this :class:`.FromClause`. """ @@ -2614,9 +2614,9 @@ class _BindParamClause(ColumnElement): __visit_name__ = 'bindparam' quote = None - def __init__(self, key, value, type_=None, unique=False, + def __init__(self, key, value, type_=None, unique=False, callable_=None, - isoutparam=False, required=False, + isoutparam=False, required=False, _compared_to_operator=None, _compared_to_type=None): """Construct a _BindParamClause. @@ -2672,7 +2672,7 @@ class _BindParamClause(ColumnElement): # identity self._identifying_key = self.key - # key that was passed in the first place, used to + # key that was passed in the first place, used to # generate new keys self._orig_key = key or 'param' @@ -2696,7 +2696,7 @@ class _BindParamClause(ColumnElement): @property def effective_value(self): - """Return the value of this bound parameter, + """Return the value of this bound parameter, taking into account if the ``callable`` parameter was set. @@ -2788,12 +2788,12 @@ class Executable(_Generative): """ Set non-SQL options for the statement which take effect during execution. - Execution options can be set on a per-statement or - per :class:`.Connection` basis. Additionally, the + Execution options can be set on a per-statement or + per :class:`.Connection` basis. Additionally, the :class:`.Engine` and ORM :class:`~.orm.query.Query` objects provide access to execution options which they in turn configure upon connections. - The :meth:`execution_options` method is generative. A new + The :meth:`execution_options` method is generative. A new instance of this statement is returned that contains the options:: statement = select([table.c.x, table.c.y]) @@ -2802,7 +2802,7 @@ class Executable(_Generative): Note that only a subset of possible execution options can be applied to a statement - these include "autocommit" and "stream_results", but not "isolation_level" or "compiled_cache". - See :meth:`.Connection.execution_options` for a full list of + See :meth:`.Connection.execution_options` for a full list of possible options. See also: @@ -2847,7 +2847,7 @@ class Executable(_Generative): @property def bind(self): - """Returns the :class:`.Engine` or :class:`.Connection` to + """Returns the :class:`.Engine` or :class:`.Connection` to which this :class:`.Executable` is bound, or None if none found. 
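A minimal sketch of the generative ``execution_options()`` call documented above; the original statement is left unchanged and a new one carrying the option is returned::

    from sqlalchemy.sql import table, column
    from sqlalchemy.sql.expression import select

    t = table('t', column('x'), column('y'))

    stmt = select([t.c.x, t.c.y])
    # Per the note above, only statement-level options such as
    # "autocommit" are accepted here.
    stmt = stmt.execution_options(autocommit=True)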
This is a traversal which checks locally, then @@ -3110,12 +3110,12 @@ class _Case(ColumnElement): if value is not None: whenlist = [ - (_literal_as_binds(c).self_group(), + (_literal_as_binds(c).self_group(), _literal_as_binds(r)) for (c, r) in whens ] else: whenlist = [ - (_no_literals(c).self_group(), + (_no_literals(c).self_group(), _literal_as_binds(r)) for (c, r) in whens ] @@ -3139,7 +3139,7 @@ class _Case(ColumnElement): def _copy_internals(self, clone=_clone, **kw): if self.value is not None: self.value = clone(self.value, **kw) - self.whens = [(clone(x, **kw), clone(y, **kw)) + self.whens = [(clone(x, **kw), clone(y, **kw)) for x, y in self.whens] if self.else_ is not None: self.else_ = clone(self.else_, **kw) @@ -3151,7 +3151,7 @@ class _Case(ColumnElement): yield x yield y if self.else_ is not None: - yield self.else_ + yield self.else_ @property def _from_objects(self): @@ -3216,7 +3216,7 @@ class FunctionElement(Executable, ColumnElement, FromClause): return self.clauses._from_objects def get_children(self, **kwargs): - return self.clause_expr, + return self.clause_expr, def _copy_internals(self, clone=_clone, **kw): self.clause_expr = clone(self.clause_expr, **kw) @@ -3224,7 +3224,7 @@ class FunctionElement(Executable, ColumnElement, FromClause): util.reset_memoized(self, 'clauses') def select(self): - """Produce a :func:`~.expression.select` construct + """Produce a :func:`~.expression.select` construct against this :class:`.FunctionElement`. This is shorthand for:: @@ -3241,10 +3241,10 @@ class FunctionElement(Executable, ColumnElement, FromClause): """Execute this :class:`.FunctionElement` against an embedded 'bind' and return a scalar value. - This first calls :meth:`~.FunctionElement.select` to + This first calls :meth:`~.FunctionElement.select` to produce a SELECT construct. - Note that :class:`.FunctionElement` can be passed to + Note that :class:`.FunctionElement` can be passed to the :meth:`.Connectable.scalar` method of :class:`.Connection` or :class:`.Engine`. @@ -3255,10 +3255,10 @@ class FunctionElement(Executable, ColumnElement, FromClause): """Execute this :class:`.FunctionElement` against an embedded 'bind'. - This first calls :meth:`~.FunctionElement.select` to + This first calls :meth:`~.FunctionElement.select` to produce a SELECT construct. - Note that :class:`.FunctionElement` can be passed to + Note that :class:`.FunctionElement` can be passed to the :meth:`.Connectable.execute` method of :class:`.Connection` or :class:`.Engine`. @@ -3266,7 +3266,7 @@ class FunctionElement(Executable, ColumnElement, FromClause): return self.select().execute() def _bind_param(self, operator, obj): - return _BindParamClause(None, obj, _compared_to_operator=operator, + return _BindParamClause(None, obj, _compared_to_operator=operator, _compared_to_type=self.type, unique=True) @@ -3283,7 +3283,7 @@ class Function(FunctionElement): def __init__(self, name, *clauses, **kw): """Construct a :class:`.Function`. - The :attr:`.func` construct is normally used to construct + The :attr:`.func` construct is normally used to construct new :class:`.Function` instances. 
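To sketch the :meth:`~.FunctionElement.select` and :meth:`~.FunctionElement.scalar` shorthands just described (the bound engine is an assumption)::

    from sqlalchemy import func
    from sqlalchemy.sql import table, column

    t = table('t', column('x'))

    # Shorthand for select([func.count(t.c.x)]).
    stmt = func.count(t.c.x).select()

    # scalar() executes against an embedded bind, so this form
    # assumes a bound engine is available:
    # total = func.count(t.c.x).scalar()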
""" @@ -3346,7 +3346,7 @@ class _UnaryExpression(ColumnElement): __visit_name__ = 'unary' - def __init__(self, element, operator=None, modifier=None, + def __init__(self, element, operator=None, modifier=None, type_=None, negate=None): self.operator = operator self.modifier = modifier @@ -3401,7 +3401,7 @@ class _BinaryExpression(ColumnElement): __visit_name__ = 'binary' - def __init__(self, left, right, operator, type_=None, + def __init__(self, left, right, operator, type_=None, negate=None, modifiers=None): self.left = _literal_as_text(left).self_group(against=operator) self.right = _literal_as_text(right).self_group(against=operator) @@ -3431,7 +3431,7 @@ class _BinaryExpression(ColumnElement): return self.left, self.right def compare(self, other, **kw): - """Compare this :class:`_BinaryExpression` against the + """Compare this :class:`_BinaryExpression` against the given :class:`_BinaryExpression`.""" return ( @@ -3592,10 +3592,10 @@ class Join(FromClause): where(whereclause).\\ select_from(j) - :param whereclause: the WHERE criterion that will be sent to + :param whereclause: the WHERE criterion that will be sent to the :func:`select()` function - :param fold_equivalents: based on the join criterion of this + :param fold_equivalents: based on the join criterion of this :class:`.Join`, do not include repeat column names in the column list of the resulting select, for columns that are calculated to be "equivalent" @@ -3603,7 +3603,7 @@ class Join(FromClause): recursively apply to any joins directly nested by this one as well. - :param \**kwargs: all other kwargs are sent to the + :param \**kwargs: all other kwargs are sent to the underlying :func:`select()` function. """ @@ -3623,9 +3623,9 @@ class Join(FromClause): Used against a :class:`.Join` object, :meth:`~.Join.alias` calls the :meth:`~.Join.select` - method first so that a subquery against a + method first so that a subquery against a :func:`.select` construct is generated. - the :func:`~expression.select` construct also has the + the :func:`~expression.select` construct also has the ``correlate`` flag set to ``False`` and will not auto-correlate inside an enclosing :func:`~expression.select` construct. @@ -3642,7 +3642,7 @@ class Join(FromClause): name=name ) - See :func:`~.expression.alias` for further details on + See :func:`~.expression.alias` for further details on aliases. """ @@ -3755,9 +3755,9 @@ class CTE(Alias): """ __visit_name__ = 'cte' - def __init__(self, selectable, - name=None, - recursive=False, + def __init__(self, selectable, + name=None, + recursive=False, cte_alias=False, _restates=frozenset()): self.recursive = recursive @@ -3874,7 +3874,7 @@ class _FromGrouping(FromClause): class _Over(ColumnElement): """Represent an OVER clause. - This is a special operator against a so-called + This is a special operator against a so-called "window" function, as well as any aggregate function, which produces results relative to the result set itself. 
It's supported only by certain database @@ -3898,8 +3898,8 @@ class _Over(ColumnElement): return self.func.type def get_children(self, **kwargs): - return [c for c in - (self.func, self.partition_by, self.order_by) + return [c for c in + (self.func, self.partition_by, self.order_by) if c is not None] def _copy_internals(self, clone=_clone, **kw): @@ -3912,8 +3912,8 @@ class _Over(ColumnElement): @property def _from_objects(self): return list(itertools.chain( - *[c._from_objects for c in - (self.func, self.partition_by, self.order_by) + *[c._from_objects for c in + (self.func, self.partition_by, self.order_by) if c is not None] )) @@ -3958,8 +3958,8 @@ class _Label(ColumnElement): def self_group(self, against=None): sub_element = self._element.self_group(against=against) if sub_element is not self._element: - return _Label(self.name, - sub_element, + return _Label(self.name, + sub_element, type_=self._type) else: return self @@ -4004,7 +4004,7 @@ class ColumnClause(_Immutable, ColumnElement): s = select([c1, c2]).where(c1==5) There is also a variant on :func:`~.expression.column` known - as :func:`~.expression.literal_column` - the difference is that + as :func:`~.expression.literal_column` - the difference is that in the latter case, the string value is assumed to be an exact expression, rather than a column name, so that no quoting rules or similar are applied:: @@ -4013,8 +4013,8 @@ class ColumnClause(_Immutable, ColumnElement): s = select([literal_column("5 + 7")]) - :class:`.ColumnClause` can also be used in a table-like - fashion by combining the :func:`~.expression.column` function + :class:`.ColumnClause` can also be used in a table-like + fashion by combining the :func:`~.expression.column` function with the :func:`~.expression.table` function, to produce a "lightweight" form of table metadata:: @@ -4034,10 +4034,10 @@ class ColumnClause(_Immutable, ColumnElement): :param selectable: parent selectable. - :param type: :class:`.types.TypeEngine` object which can associate + :param type: :class:`.types.TypeEngine` object which can associate this :class:`.ColumnClause` with a type. - :param is_literal: if True, the :class:`.ColumnClause` is assumed to + :param is_literal: if True, the :class:`.ColumnClause` is assumed to be an exact expression that will be delivered to the output with no quoting rules applied regardless of case sensitive settings. the :func:`literal_column()` function is usually used to create such a @@ -4048,7 +4048,7 @@ class ColumnClause(_Immutable, ColumnElement): onupdate = default = server_default = server_onupdate = None - _memoized_property = util.group_expirable_memoized_property() + _memoized_property = util.group_expirable_memoized_property() def __init__(self, text, selectable=None, type_=None, is_literal=False): self.key = self.name = text @@ -4125,9 +4125,9 @@ class ColumnClause(_Immutable, ColumnElement): return name def label(self, name): - # currently, anonymous labels don't occur for + # currently, anonymous labels don't occur for # ColumnClause. The use at the moment - # is that they do not generate nicely for + # is that they do not generate nicely for # is_literal clauses. We would like to change # this so that label(None) acts as would be expected. # See [ticket:2168]. 
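A sketch contrasting the two constructs described above; ``column()`` is treated as an identifier subject to quoting rules, while ``literal_column()`` is rendered verbatim::

    from sqlalchemy.sql.expression import column, literal_column, select

    s = select([
        column('user name'),                    # quoted as an identifier
        literal_column("5 + 7").label('total')  # rendered as-is
    ])
    # renders roughly: SELECT "user name", 5 + 7 AS total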
@@ -4148,9 +4148,9 @@ class ColumnClause(_Immutable, ColumnElement): # otherwise its considered to be a label is_literal = self.is_literal and (name is None or name == self.name) c = self._constructor( - _as_truncated(name or self.name), - selectable=selectable, - type_=self.type, + _as_truncated(name or self.name), + selectable=selectable, + type_=self.type, is_literal=is_literal ) c.proxies = [self] @@ -4166,8 +4166,8 @@ class TableClause(_Immutable, FromClause): """Represents a minimal "table" construct. The constructor for :class:`.TableClause` is the - :func:`~.expression.table` function. This produces - a lightweight table object that has only a name and a + :func:`~.expression.table` function. This produces + a lightweight table object that has only a name and a collection of columns, which are typically produced by the :func:`~.expression.column` function:: @@ -4185,7 +4185,7 @@ class TableClause(_Immutable, FromClause): the ``.c.`` collection and statement generation methods. It does **not** provide all the additional schema-level services - of :class:`~.schema.Table`, including constraints, references to other + of :class:`~.schema.Table`, including constraints, references to other tables, or support for :class:`.MetaData`-level services. It's useful on its own as an ad-hoc construct used to generate quick SQL statements when a more fully fledged :class:`~.schema.Table` is not on hand. @@ -4235,9 +4235,9 @@ class TableClause(_Immutable, FromClause): else: col = list(self.columns)[0] return select( - [func.count(col).label('tbl_row_count')], - whereclause, - from_obj=[self], + [func.count(col).label('tbl_row_count')], + whereclause, + from_obj=[self], **params) def insert(self, values=None, inline=False, **kwargs): @@ -4266,7 +4266,7 @@ class TableClause(_Immutable, FromClause): """ - return update(self, whereclause=whereclause, + return update(self, whereclause=whereclause, values=values, inline=inline, **kwargs) def delete(self, whereclause=None, **kwargs): @@ -4331,7 +4331,7 @@ class _SelectBase(Executable, FromClause): Typically, a select statement which has only one column in its columns clause is eligible to be used as a scalar expression. - The returned object is an instance of + The returned object is an instance of :class:`_ScalarSelect`. """ @@ -4365,8 +4365,8 @@ class _SelectBase(Executable, FromClause): Common table expressions are a SQL standard whereby SELECT statements can draw upon secondary statements specified along with the primary statement, using a clause called "WITH". - Special semantics regarding UNION can also be employed to - allow "recursive" queries, where a SELECT statement can draw + Special semantics regarding UNION can also be employed to + allow "recursive" queries, where a SELECT statement can draw upon the set of rows that have previously been selected. SQLAlchemy detects :class:`.CTE` objects, which are treated @@ -4381,11 +4381,11 @@ class _SelectBase(Executable, FromClause): in which case an anonymous symbol will be used at query compile time. :param recursive: if ``True``, will render ``WITH RECURSIVE``. - A recursive common table expression is intended to be used in + A recursive common table expression is intended to be used in conjunction with UNION ALL in order to derive rows from those already selected. - The following examples illustrate two examples from + The following examples illustrate two examples from Postgresql's documentation at http://www.postgresql.org/docs/8.4/static/queries-with.html. 
@@ -4404,23 +4404,23 @@ class _SelectBase(Executable, FromClause): ) regional_sales = select([ - orders.c.region, + orders.c.region, func.sum(orders.c.amount).label('total_sales') ]).group_by(orders.c.region).cte("regional_sales") top_regions = select([regional_sales.c.region]).\\ where( - regional_sales.c.total_sales > + regional_sales.c.total_sales > select([ func.sum(regional_sales.c.total_sales)/10 ]) ).cte("top_regions") statement = select([ - orders.c.region, - orders.c.product, - func.sum(orders.c.quantity).label("product_units"), + orders.c.region, + orders.c.product, + func.sum(orders.c.quantity).label("product_units"), func.sum(orders.c.amount).label("product_sales") ]).where(orders.c.region.in_( select([top_regions.c.region]) @@ -4442,8 +4442,8 @@ class _SelectBase(Executable, FromClause): ) included_parts = select([ - parts.c.sub_part, - parts.c.part, + parts.c.sub_part, + parts.c.part, parts.c.quantity]).\\ where(parts.c.part=='our part').\\ cte(recursive=True) @@ -4453,15 +4453,15 @@ class _SelectBase(Executable, FromClause): parts_alias = parts.alias() included_parts = included_parts.union_all( select([ - parts_alias.c.part, - parts_alias.c.sub_part, + parts_alias.c.part, + parts_alias.c.sub_part, parts_alias.c.quantity ]). where(parts_alias.c.part==incl_alias.c.sub_part) ) statement = select([ - included_parts.c.sub_part, + included_parts.c.sub_part, func.sum(included_parts.c.quantity).label('total_quantity') ]).\ select_from(included_parts.join(parts, @@ -4589,7 +4589,7 @@ class _ScalarSelect(_Grouping): return list(self.inner_columns)[0]._make_proxy(selectable, name) class CompoundSelect(_SelectBase): - """Forms the basis of ``UNION``, ``UNION ALL``, and other + """Forms the basis of ``UNION``, ``UNION ALL``, and other SELECT-based set operations.""" __visit_name__ = 'compound_select' @@ -4709,14 +4709,14 @@ class Select(_SelectBase): _memoized_property = _SelectBase._memoized_property - def __init__(self, - columns, - whereclause=None, - from_obj=None, - distinct=False, - having=None, - correlate=True, - prefixes=None, + def __init__(self, + columns, + whereclause=None, + from_obj=None, + distinct=False, + having=None, + correlate=True, + prefixes=None, **kwargs): """Construct a Select object. @@ -4744,14 +4744,14 @@ class Select(_SelectBase): self._distinct = True else: self._distinct = [ - _literal_as_text(e) + _literal_as_text(e) for e in util.to_list(distinct) ] self._correlate = set() if from_obj is not None: self._from_obj = util.OrderedSet( - _literal_as_text(f) + _literal_as_text(f) for f in util.to_list(from_obj)) else: self._from_obj = util.OrderedSet() @@ -4791,7 +4791,7 @@ class Select(_SelectBase): def _froms(self): # would love to cache this, # but there's just enough edge cases, particularly now that - # declarative encourages construction of SQL expressions + # declarative encourages construction of SQL expressions # without tables present, to just regen this each time. froms = [] seen = set() @@ -4829,7 +4829,7 @@ class Select(_SelectBase): # clones that are lexical equivalents. 
if self._from_cloned: toremove.update( - self._from_cloned[f] for f in + self._from_cloned[f] for f in toremove.intersection(self._from_cloned) if self._from_cloned[f]._is_lexical_equivalent(f) ) @@ -4981,14 +4981,14 @@ class Select(_SelectBase): return (column_collections and list(self.columns) or []) + \ self._raw_columns + list(self._froms) + \ - [x for x in - (self._whereclause, self._having, - self._order_by_clause, self._group_by_clause) + [x for x in + (self._whereclause, self._having, + self._order_by_clause, self._group_by_clause) if x is not None] @_generative def column(self, column): - """return a new select() construct with the given column expression + """return a new select() construct with the given column expression added to its columns clause. """ @@ -4996,20 +4996,20 @@ class Select(_SelectBase): @_generative def with_only_columns(self, columns): - """Return a new :func:`.select` construct with its columns + """Return a new :func:`.select` construct with its columns clause replaced with the given columns. .. versionchanged:: 0.7.3 - Due to a bug fix, this method has a slight + Due to a bug fix, this method has a slight behavioral change as of version 0.7.3. - Prior to version 0.7.3, the FROM clause of + Prior to version 0.7.3, the FROM clause of a :func:`.select` was calculated upfront and as new columns - were added; in 0.7.3 and later it's calculated + were added; in 0.7.3 and later it's calculated at compile time, fixing an issue regarding late binding - of columns to parent tables. This changes the behavior of + of columns to parent tables. This changes the behavior of :meth:`.Select.with_only_columns` in that FROM clauses no - longer represented in the new list are dropped, - but this behavior is more consistent in + longer represented in the new list are dropped, + but this behavior is more consistent in that the FROM clauses are consistently derived from the current columns clause. The original intent of this method is to allow trimming of the existing columns list to be fewer @@ -5018,8 +5018,8 @@ class Select(_SelectBase): been anticipated until 0.7.3 was released; the usage guidelines below illustrate how this should be done. - This method is exactly equivalent to as if the original - :func:`.select` had been called with the given columns + This method is exactly equivalent to as if the original + :func:`.select` had been called with the given columns clause. I.e. a statement:: s = select([table1.c.a, table1.c.b]) @@ -5029,8 +5029,8 @@ class Select(_SelectBase): s = select([table1.c.b]) - This means that FROM clauses which are only derived - from the column list will be discarded if the new column + This means that FROM clauses which are only derived + from the column list will be discarded if the new column list no longer contains that FROM:: >>> table1 = table('t1', column('a'), column('b')) @@ -5044,7 +5044,7 @@ class Select(_SelectBase): The preferred way to maintain a specific FROM clause in the construct, assuming it won't be represented anywhere - else (i.e. not in the WHERE clause, etc.) is to set it using + else (i.e. not in the WHERE clause, etc.) is to set it using :meth:`.Select.select_from`:: >>> s1 = select([table1.c.a, table2.c.b]).\\ @@ -5056,9 +5056,9 @@ class Select(_SelectBase): Care should also be taken to use the correct set of column objects passed to :meth:`.Select.with_only_columns`. 
Since the method is essentially equivalent to calling the - :func:`.select` construct in the first place with the given - columns, the columns passed to :meth:`.Select.with_only_columns` - should usually be a subset of those which were passed + :func:`.select` construct in the first place with the given + columns, the columns passed to :meth:`.Select.with_only_columns` + should usually be a subset of those which were passed to the :func:`.select` construct, not those which are available from the ``.c`` collection of that :func:`.select`. That is:: @@ -5073,8 +5073,8 @@ class Select(_SelectBase): The latter would produce the SQL:: - SELECT b - FROM (SELECT t1.a AS a, t1.b AS b + SELECT b + FROM (SELECT t1.a AS a, t1.b AS b FROM t1), t1 Since the :func:`.select` construct is essentially being @@ -5129,14 +5129,14 @@ class Select(_SelectBase): @_generative def prefix_with(self, *expr): """return a new select() construct which will apply the given - expressions, typically strings, to the start of its columns clause, + expressions, typically strings, to the start of its columns clause, not using any commas. In particular is useful for MySQL keywords. e.g.:: - select(['a', 'b']).prefix_with('HIGH_PRIORITY', - 'SQL_SMALL_RESULT', + select(['a', 'b']).prefix_with('HIGH_PRIORITY', + 'SQL_SMALL_RESULT', 'ALL') Would render:: @@ -5164,14 +5164,14 @@ class Select(_SelectBase): The "from" list is a unique set on the identity of each element, so adding an already present :class:`.Table` or other selectable will have no effect. Passing a :class:`.Join` that refers - to an already present :class:`.Table` or other selectable will have - the effect of concealing the presence of that selectable as + to an already present :class:`.Table` or other selectable will have + the effect of concealing the presence of that selectable as an individual element in the rendered FROM list, instead rendering it into a JOIN clause. While the typical purpose of :meth:`.Select.select_from` is to replace the default, derived FROM clause with a join, it can also be called with - individual table elements, multiple times if desired, in the case that the + individual table elements, multiple times if desired, in the case that the FROM clause cannot be fully derived from the columns clause:: select([func.count('*')]).select_from(table1) @@ -5269,8 +5269,8 @@ class Select(_SelectBase): def _populate_column_collection(self): for c in self.inner_columns: if hasattr(c, '_make_proxy'): - c._make_proxy(self, - name=self.use_labels + c._make_proxy(self, + name=self.use_labels and c._label or None) def self_group(self, against=None): @@ -5414,17 +5414,17 @@ class UpdateBase(Executable, ClauseElement): column expression. :class:`~sqlalchemy.schema.Table` objects will be expanded into their individual columns. - Upon compilation, a RETURNING clause, or database equivalent, - will be rendered within the statement. For INSERT and UPDATE, - the values are the newly inserted/updated values. For DELETE, + Upon compilation, a RETURNING clause, or database equivalent, + will be rendered within the statement. For INSERT and UPDATE, + the values are the newly inserted/updated values. For DELETE, the values are those of the rows which were deleted. Upon execution, the values of the columns to be returned are made available via the result set and can be iterated using ``fetchone()`` and similar. For DBAPIs which do not - natively support returning values (i.e. cx_oracle), + natively support returning values (i.e. 
cx_oracle), SQLAlchemy will approximate this behavior at the result level - so that a reasonable amount of behavioral neutrality is + so that a reasonable amount of behavioral neutrality is provided. Note that not all databases/DBAPIs @@ -5432,8 +5432,8 @@ class UpdateBase(Executable, ClauseElement): an exception is raised upon compilation and/or execution. For those who do support it, the functionality across backends varies greatly, including restrictions on executemany() - and other statements which return multiple rows. Please - read the documentation notes for the database in use in + and other statements which return multiple rows. Please + read the documentation notes for the database in use in order to determine the availability of RETURNING. """ @@ -5441,20 +5441,20 @@ class UpdateBase(Executable, ClauseElement): @_generative def with_hint(self, text, selectable=None, dialect_name="*"): - """Add a table hint for a single table to this + """Add a table hint for a single table to this INSERT/UPDATE/DELETE statement. .. note:: - :meth:`.UpdateBase.with_hint` currently applies only to + :meth:`.UpdateBase.with_hint` currently applies only to Microsoft SQL Server. For MySQL INSERT hints, use - :meth:`.Insert.prefix_with`. UPDATE/DELETE hints for + :meth:`.Insert.prefix_with`. UPDATE/DELETE hints for MySQL will be added in a future release. The text of the hint is rendered in the appropriate location for the database backend in use, relative to the :class:`.Table` that is the subject of this - statement, or optionally to that of the given + statement, or optionally to that of the given :class:`.Table` passed as the ``selectable`` argument. The ``dialect_name`` option will limit the rendering of a particular @@ -5492,7 +5492,7 @@ class ValuesBase(UpdateBase): """specify the VALUES clause for an INSERT statement, or the SET clause for an UPDATE. 
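A brief sketch of ``with_hint()`` as documented above, against a hypothetical table; the hint text itself is an assumption, and per the note it only takes effect on SQL Server::

    from sqlalchemy.sql import table, column

    mytable = table('mytable', column('id'), column('x'))

    stmt = mytable.update().\
        where(mytable.c.id == 5).\
        values(x=10).\
        with_hint("WITH (PAGLOCK)", dialect_name="mssql")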
- :param \**kwargs: key value pairs representing the string key + :param \**kwargs: key value pairs representing the string key of a :class:`.Column` mapped to the value to be rendered into the VALUES or SET clause:: @@ -5510,7 +5510,7 @@ class ValuesBase(UpdateBase): See also: - :ref:`inserts_and_updates` - SQL Expression + :ref:`inserts_and_updates` - SQL Expression Language Tutorial :func:`~.expression.insert` - produce an ``INSERT`` statement @@ -5545,12 +5545,12 @@ class Insert(ValuesBase): _prefixes = () - def __init__(self, - table, - values=None, - inline=False, - bind=None, - prefixes=None, + def __init__(self, + table, + values=None, + inline=False, + bind=None, + prefixes=None, returning=None, **kwargs): ValuesBase.__init__(self, table, values) @@ -5593,12 +5593,12 @@ class Update(ValuesBase): """ __visit_name__ = 'update' - def __init__(self, - table, - whereclause, - values=None, - inline=False, - bind=None, + def __init__(self, + table, + whereclause, + values=None, + inline=False, + bind=None, returning=None, **kwargs): ValuesBase.__init__(self, table, values) @@ -5660,10 +5660,10 @@ class Delete(UpdateBase): __visit_name__ = 'delete' - def __init__(self, - table, - whereclause, - bind=None, + def __init__(self, + table, + whereclause, + bind=None, returning =None, **kwargs): self._bind = bind diff --git a/lib/sqlalchemy/sql/functions.py b/lib/sqlalchemy/sql/functions.py index aac97cff23..c4eb2042a2 100644 --- a/lib/sqlalchemy/sql/functions.py +++ b/lib/sqlalchemy/sql/functions.py @@ -33,11 +33,11 @@ class GenericFunction(Function): class next_value(Function): """Represent the 'next value', given a :class:`.Sequence` as it's single argument. - + Compiles into the appropriate function on each backend, or will raise NotImplementedError if used on a backend that does not provide support for sequences. - + """ type = sqltypes.Integer() name = "next_value" diff --git a/lib/sqlalchemy/sql/operators.py b/lib/sqlalchemy/sql/operators.py index 3fcbf7cf97..437358a104 100644 --- a/lib/sqlalchemy/sql/operators.py +++ b/lib/sqlalchemy/sql/operators.py @@ -21,25 +21,25 @@ from sqlalchemy.util import symbol class Operators(object): """Base of comparison and logical operators. - + Implements base methods :meth:`operate` and :meth:`reverse_operate`, as well as :meth:`__and__`, :meth:`__or__`, :meth:`__invert__`. - + Usually is used via its most common subclass :class:`.ColumnOperators`. - + """ def __and__(self, other): """Implement the ``&`` operator. - + When used with SQL expressions, results in an AND operation, equivalent to :func:`~.expression.and_`, that is:: - + a & b - + is equivalent to:: - + from sqlalchemy import and_ and_(a, b) @@ -47,7 +47,7 @@ class Operators(object): operator precedence; the ``&`` operator has the highest precedence. The operands should be enclosed in parenthesis if they contain further sub expressions:: - + (a == 2) & (b == 4) """ @@ -55,15 +55,15 @@ class Operators(object): def __or__(self, other): """Implement the ``|`` operator. - + When used with SQL expressions, results in an OR operation, equivalent to :func:`~.expression.or_`, that is:: - + a | b - + is equivalent to:: - + from sqlalchemy import or_ or_(a, b) @@ -71,7 +71,7 @@ class Operators(object): operator precedence; the ``|`` operator has the highest precedence. The operands should be enclosed in parenthesis if they contain further sub expressions:: - + (a == 2) | (b == 4) """ @@ -79,15 +79,15 @@ class Operators(object): def __invert__(self): """Implement the ``~`` operator. 
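The precedence caveat above is worth a concrete sketch (hypothetical table); each comparison is parenthesized because ``&``, ``|`` and ``~`` bind more tightly than comparison operators in Python::

    from sqlalchemy.sql import table, column

    t = table('t', column('a'), column('b'))

    criterion = (t.c.a == 2) & (t.c.b == 4)   # t.a = :a_1 AND t.b = :b_1
    alternate = (t.c.a == 2) | (t.c.b == 4)   # t.a = :a_1 OR t.b = :b_1
    negated = ~(t.c.a == 2)                   # t.a != :a_1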
- - When used with SQL expressions, results in a - NOT operation, equivalent to + + When used with SQL expressions, results in a + NOT operation, equivalent to :func:`~.expression.not_`, that is:: - + ~a - + is equivalent to:: - + from sqlalchemy import not_ not_(a) @@ -123,16 +123,16 @@ class Operators(object): def operate(self, op, *other, **kwargs): """Operate on an argument. - + This is the lowest level of operation, raises :class:`NotImplementedError` by default. - - Overriding this on a subclass can allow common - behavior to be applied to all operations. + + Overriding this on a subclass can allow common + behavior to be applied to all operations. For example, overriding :class:`.ColumnOperators` - to apply ``func.lower()`` to the left and right + to apply ``func.lower()`` to the left and right side:: - + class MyComparator(ColumnOperators): def operate(self, op, other): return op(func.lower(self), func.lower(other)) @@ -142,48 +142,48 @@ class Operators(object): be a single scalar for most operations. :param \**kwargs: modifiers. These may be passed by special operators such as :meth:`ColumnOperators.contains`. - - + + """ raise NotImplementedError(str(op)) def reverse_operate(self, op, other, **kwargs): """Reverse operate on an argument. - + Usage is the same as :meth:`operate`. - + """ raise NotImplementedError(str(op)) class ColumnOperators(Operators): """Defines comparison and math operations. - + By default all methods call down to :meth:`Operators.operate` or :meth:`Operators.reverse_operate` - passing in the appropriate operator function from the + passing in the appropriate operator function from the Python builtin ``operator`` module or - a SQLAlchemy-specific operator function from + a SQLAlchemy-specific operator function from :mod:`sqlalchemy.expression.operators`. For example the ``__eq__`` function:: - + def __eq__(self, other): return self.operate(operators.eq, other) Where ``operators.eq`` is essentially:: - + def eq(a, b): return a == b - + A SQLAlchemy construct like :class:`.ColumnElement` ultimately overrides :meth:`.Operators.operate` and others - to return further :class:`.ClauseElement` constructs, + to return further :class:`.ClauseElement` constructs, so that the ``==`` operation above is replaced by a clause construct. - + The docstrings here will describe column-oriented behavior of each operator. For ORM-based operators on related objects and collections, see :class:`.RelationshipProperty.Comparator`. - + """ timetuple = None @@ -191,17 +191,17 @@ class ColumnOperators(Operators): def __lt__(self, other): """Implement the ``<`` operator. - + In a column context, produces the clause ``a < b``. - + """ return self.operate(lt, other) def __le__(self, other): """Implement the ``<=`` operator. - + In a column context, produces the clause ``a <= b``. - + """ return self.operate(le, other) @@ -209,7 +209,7 @@ class ColumnOperators(Operators): def __eq__(self, other): """Implement the ``==`` operator. - + In a column context, produces the clause ``a = b``. If the target is ``None``, produces ``a IS NULL``. @@ -221,66 +221,66 @@ class ColumnOperators(Operators): In a column context, produces the clause ``a != b``. If the target is ``None``, produces ``a IS NOT NULL``. - + """ return self.operate(ne, other) def __gt__(self, other): """Implement the ``>`` operator. - + In a column context, produces the clause ``a > b``. - + """ return self.operate(gt, other) def __ge__(self, other): """Implement the ``>=`` operator. 
- + In a column context, produces the clause ``a >= b``. - + """ return self.operate(ge, other) def __neg__(self): """Implement the ``-`` operator. - + In a column context, produces the clause ``-a``. - + """ return self.operate(neg) def concat(self, other): """Implement the 'concat' operator. - + In a column context, produces the clause ``a || b``, or uses the ``concat()`` operator on MySQL. - + """ return self.operate(concat_op, other) def like(self, other, escape=None): """Implement the ``like`` operator. - + In a column context, produces the clause ``a LIKE other``. - + """ return self.operate(like_op, other, escape=escape) def ilike(self, other, escape=None): """Implement the ``ilike`` operator. - + In a column context, produces the clause ``a ILIKE other``. - + """ return self.operate(ilike_op, other, escape=escape) def in_(self, other): """Implement the ``in`` operator. - + In a column context, produces the clause ``a IN other``. "other" may be a tuple/list of column expressions, or a :func:`~.expression.select` construct. - + """ return self.operate(in_op, other) @@ -288,31 +288,31 @@ class ColumnOperators(Operators): """Implement the ``startswith`` operator. In a column context, produces the clause ``LIKE '<other>%'`` - + """ return self.operate(startswith_op, other, **kwargs) def endswith(self, other, **kwargs): """Implement the 'endswith' operator. - + In a column context, produces the clause ``LIKE '%<other>'`` - + """ return self.operate(endswith_op, other, **kwargs) def contains(self, other, **kwargs): """Implement the 'contains' operator. - + In a column context, produces the clause ``LIKE '%<other>%'`` - + """ return self.operate(contains_op, other, **kwargs) def match(self, other, **kwargs): """Implement the 'match' operator. - - In a column context, this produces a MATCH clause, i.e. - ``MATCH '<other>'``. The allowed contents of ``other`` + + In a column context, this produces a MATCH clause, i.e. + ``MATCH '<other>'``. The allowed contents of ``other`` are database backend specific. """ @@ -347,7 +347,7 @@ class ColumnOperators(Operators): """Implement the ``+`` operator in reverse. See :meth:`__add__`. - + """ return self.reverse_operate(add, other) @@ -355,7 +355,7 @@ """Implement the ``-`` operator in reverse. See :meth:`__sub__`. - + """ return self.reverse_operate(sub, other) @@ -363,7 +363,7 @@ """Implement the ``*`` operator in reverse. See :meth:`__mul__`. - + """ return self.reverse_operate(mul, other) @@ -371,7 +371,7 @@ """Implement the ``/`` operator in reverse. See :meth:`__div__`. - + """ return self.reverse_operate(div, other) @@ -386,61 +386,61 @@ def __add__(self, other): """Implement the ``+`` operator. - + In a column context, produces the clause ``a + b`` if the parent object has non-string affinity. - If the parent object has a string affinity, + If the parent object has a string affinity, produces the concatenation operator, ``a || b`` - see :meth:`concat`. - + """ return self.operate(add, other) def __sub__(self, other): """Implement the ``-`` operator. - + In a column context, produces the clause ``a - b``. - + """ return self.operate(sub, other) def __mul__(self, other): """Implement the ``*`` operator. - + In a column context, produces the clause ``a * b``. - + """ return self.operate(mul, other) def __div__(self, other): """Implement the ``/`` operator. - + In a column context, produces the clause ``a / b``.
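A combined sketch of several of the comparison methods above, using a hypothetical ``users`` table::

    from sqlalchemy.sql import table, column
    from sqlalchemy.sql.expression import select

    users = table('users', column('id'), column('name'))

    # renders: SELECT users.id FROM users
    #          WHERE users.name LIKE :name_1 OR users.id IN (...)
    stmt = select([users.c.id]).where(
        users.c.name.like('ed%') | users.c.id.in_([1, 2, 3]))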
- + """ return self.operate(div, other) def __mod__(self, other): """Implement the ``%`` operator. - + In a column context, produces the clause ``a % b``. - + """ return self.operate(mod, other) def __truediv__(self, other): """Implement the ``//`` operator. - + In a column context, produces the clause ``a / b``. - + """ return self.operate(truediv, other) def __rtruediv__(self, other): """Implement the ``//`` operator in reverse. - + See :meth:`__truediv__`. - + """ return self.reverse_operate(truediv, other) @@ -525,7 +525,7 @@ def is_commutative(op): return op in _commutative def is_ordering_modifier(op): - return op in (asc_op, desc_op, + return op in (asc_op, desc_op, nullsfirst_op, nullslast_op) _associative = _commutative.union([concat_op, and_, or_]) diff --git a/lib/sqlalchemy/sql/util.py b/lib/sqlalchemy/sql/util.py index 8d2b5ecfd8..c0fb878692 100644 --- a/lib/sqlalchemy/sql/util.py +++ b/lib/sqlalchemy/sql/util.py @@ -27,8 +27,8 @@ def sort_tables(tables): tuples.append((parent_table, child_table)) for table in tables: - visitors.traverse(table, - {'schema_visitor':True}, + visitors.traverse(table, + {'schema_visitor':True}, {'foreign_key':visit_foreign_key}) tuples.extend( @@ -38,9 +38,9 @@ def sort_tables(tables): return list(topological.sort(tuples, tables)) def find_join_source(clauses, join_to): - """Given a list of FROM clauses and a selectable, - return the first index and element from the list of - clauses which can be joined against the selectable. returns + """Given a list of FROM clauses and a selectable, + return the first index and element from the list of + clauses which can be joined against the selectable. returns None, None if no match is found. e.g.:: @@ -62,8 +62,8 @@ def find_join_source(clauses, join_to): else: return None, None -def find_tables(clause, check_columns=False, - include_aliases=False, include_joins=False, +def find_tables(clause, check_columns=False, + include_aliases=False, include_joins=False, include_selects=False, include_crud=False): """locate Table objects within the given expression.""" @@ -112,7 +112,7 @@ def unwrap_order_by(clause): ( not isinstance(t, expression._UnaryExpression) or \ not operators.is_ordering_modifier(t.modifier) - ): + ): cols.add(t) else: for c in t.get_children(): @@ -167,7 +167,7 @@ def _quote_ddl_expr(element): class _repr_params(object): """A string view of bound parameters, truncating display to the given number of 'multi' parameter sets. - + """ def __init__(self, params, batches): self.params = params @@ -187,7 +187,7 @@ class _repr_params(object): def expression_as_ddl(clause): - """Given a SQL expression, convert for usage in DDL, such as + """Given a SQL expression, convert for usage in DDL, such as CREATE INDEX and CHECK CONSTRAINT. Converts bind params into quoted literals, column identifiers @@ -259,7 +259,7 @@ def join_condition(a, b, ignore_nonexistent_tables=False, a_subset=None): if left is None: continue for fk in sorted( - b.foreign_keys, + b.foreign_keys, key=lambda fk:fk.parent._creation_order): try: col = fk.get_referent(left) @@ -274,7 +274,7 @@ def join_condition(a, b, ignore_nonexistent_tables=False, a_subset=None): constraints.add(fk.constraint) if left is not b: for fk in sorted( - left.foreign_keys, + left.foreign_keys, key=lambda fk:fk.parent._creation_order): try: col = fk.get_referent(b) @@ -317,12 +317,12 @@ def join_condition(a, b, ignore_nonexistent_tables=False, a_subset=None): class Annotated(object): """clones a ClauseElement and applies an 'annotations' dictionary. 
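To illustrate ``sort_tables()`` from the hunk above, a minimal sketch with two hypothetical tables linked by a foreign key; tables sort topologically on dependency, parent before child::

    from sqlalchemy import MetaData, Table, Column, Integer, ForeignKey
    from sqlalchemy.sql.util import sort_tables

    metadata = MetaData()
    parent = Table('parent', metadata,
                   Column('id', Integer, primary_key=True))
    child = Table('child', metadata,
                  Column('id', Integer, primary_key=True),
                  Column('parent_id', Integer, ForeignKey('parent.id')))

    ordered = sort_tables([child, parent])  # yields [parent, child]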
 def find_join_source(clauses, join_to):
-    """Given a list of FROM clauses and a selectable,
-    return the first index and element from the list of
-    clauses which can be joined against the selectable.  returns
+    """Given a list of FROM clauses and a selectable,
+    return the first index and element from the list of
+    clauses which can be joined against the selectable.  Returns
     None, None if no match is found.

     e.g.::
@@ -62,8 +62,8 @@ def find_join_source(clauses, join_to):
     else:
         return None, None

-def find_tables(clause, check_columns=False,
-                include_aliases=False, include_joins=False,
+def find_tables(clause, check_columns=False,
+                include_aliases=False, include_joins=False,
                 include_selects=False, include_crud=False):
     """locate Table objects within the given expression."""

@@ -112,7 +112,7 @@ def unwrap_order_by(clause):
             (
                 not isinstance(t, expression._UnaryExpression) or \
                 not operators.is_ordering_modifier(t.modifier)
-            ):
+            ):
             cols.add(t)
         else:
             for c in t.get_children():
@@ -167,7 +167,7 @@ def _quote_ddl_expr(element):
 class _repr_params(object):
     """A string view of bound parameters, truncating display
     to the given number of 'multi' parameter sets.
-
+
     """
     def __init__(self, params, batches):
         self.params = params
@@ -187,7 +187,7 @@ class _repr_params(object):


 def expression_as_ddl(clause):
-    """Given a SQL expression, convert for usage in DDL, such as
+    """Given a SQL expression, convert for usage in DDL, such as
     CREATE INDEX and CHECK CONSTRAINT.

     Converts bind params into quoted literals, column identifiers
@@ -259,7 +259,7 @@ def join_condition(a, b, ignore_nonexistent_tables=False, a_subset=None):
         if left is None:
             continue
         for fk in sorted(
-                    b.foreign_keys,
+                    b.foreign_keys,
                     key=lambda fk:fk.parent._creation_order):
             try:
                 col = fk.get_referent(left)
@@ -274,7 +274,7 @@ def join_condition(a, b, ignore_nonexistent_tables=False, a_subset=None):
                 constraints.add(fk.constraint)
         if left is not b:
             for fk in sorted(
-                        left.foreign_keys,
+                        left.foreign_keys,
                         key=lambda fk:fk.parent._creation_order):
                 try:
                     col = fk.get_referent(b)
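A similar sketch (assumed usage) for join_condition(), which derives an ON
clause from the fk.get_referent() probing shown in the hunks above:

    from sqlalchemy import MetaData, Table, Column, Integer, ForeignKey
    from sqlalchemy.sql.util import join_condition

    m = MetaData()
    a = Table('a', m, Column('id', Integer, primary_key=True))
    b = Table('b', m,
              Column('id', Integer, primary_key=True),
              Column('a_id', Integer, ForeignKey('a.id')))

    print join_condition(a, b)   # a.id = b.a_id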
+ """ + element = element._annotate(annotations) + element._copy_internals() + return element def splice_joins(left, right, stop_on=None): if left is None: @@ -538,7 +538,7 @@ def reduce_columns(columns, *clauses, **kw): return expression.ColumnSet(columns.difference(omit)) -def criterion_as_pairs(expression, consider_as_foreign_keys=None, +def criterion_as_pairs(expression, consider_as_foreign_keys=None, consider_as_referenced_keys=None, any_operator=False): """traverse an expression and locate binary criterion pairs.""" @@ -556,20 +556,20 @@ def criterion_as_pairs(expression, consider_as_foreign_keys=None, if consider_as_foreign_keys: if binary.left in consider_as_foreign_keys and \ - (binary.right is binary.left or + (binary.right is binary.left or binary.right not in consider_as_foreign_keys): pairs.append((binary.right, binary.left)) elif binary.right in consider_as_foreign_keys and \ - (binary.left is binary.right or + (binary.left is binary.right or binary.left not in consider_as_foreign_keys): pairs.append((binary.left, binary.right)) elif consider_as_referenced_keys: if binary.left in consider_as_referenced_keys and \ - (binary.right is binary.left or + (binary.right is binary.left or binary.right not in consider_as_referenced_keys): pairs.append((binary.left, binary.right)) elif binary.right in consider_as_referenced_keys and \ - (binary.left is binary.right or + (binary.left is binary.right or binary.left not in consider_as_referenced_keys): pairs.append((binary.right, binary.left)) else: @@ -586,17 +586,17 @@ def criterion_as_pairs(expression, consider_as_foreign_keys=None, def folded_equivalents(join, equivs=None): """Return a list of uniquely named columns. - The column list of the given Join will be narrowed + The column list of the given Join will be narrowed down to a list of all equivalently-named, equated columns folded into one column, where 'equated' means they are equated to each other in the ON clause of this join. This function is used by Join.select(fold_equivalents=True). - Deprecated. This function is used for a certain kind of + Deprecated. This function is used for a certain kind of "polymorphic_union" which is designed to achieve joined table inheritance where the base table has no "discriminator" - column; [ticket:1131] will provide a better way to + column; [ticket:1131] will provide a better way to achieve this. """ @@ -691,12 +691,12 @@ class ClauseAdapter(visitors.ReplacingCloningVisitor): def _corresponding_column(self, col, require_embedded, _seen=util.EMPTY_SET): newcol = self.selectable.corresponding_column( - col, + col, require_embedded=require_embedded) if newcol is None and col in self.equivalents and col not in _seen: for equiv in self.equivalents[col]: - newcol = self._corresponding_column(equiv, - require_embedded=require_embedded, + newcol = self._corresponding_column(equiv, + require_embedded=require_embedded, _seen=_seen.union([col])) if newcol is not None: return newcol @@ -722,14 +722,14 @@ class ClauseAdapter(visitors.ReplacingCloningVisitor): class ColumnAdapter(ClauseAdapter): """Extends ClauseAdapter with extra utility functions. - Provides the ability to "wrap" this ClauseAdapter + Provides the ability to "wrap" this ClauseAdapter around another, a columns dictionary which returns - adapted elements given an original, and an + adapted elements given an original, and an adapted_row() factory. 
""" - def __init__(self, selectable, equivalents=None, - chain_to=None, include=None, + def __init__(self, selectable, equivalents=None, + chain_to=None, include=None, exclude=None, adapt_required=False): ClauseAdapter.__init__(self, selectable, equivalents, include, exclude) if chain_to: @@ -765,7 +765,7 @@ class ColumnAdapter(ClauseAdapter): c = c.label(None) # adapt_required indicates that if we got the same column - # back which we put in (i.e. it passed through), + # back which we put in (i.e. it passed through), # it's not correct. this is used by eagerloading which # knows that all columns and expressions need to be adapted # to a result row, and a "passthrough" is definitely targeting diff --git a/lib/sqlalchemy/sql/visitors.py b/lib/sqlalchemy/sql/visitors.py index 5354fbcbb4..20c4b94294 100644 --- a/lib/sqlalchemy/sql/visitors.py +++ b/lib/sqlalchemy/sql/visitors.py @@ -8,15 +8,15 @@ SQLAlchemy schema and expression constructs rely on a Python-centric version of the classic "visitor" pattern as the primary way in which -they apply functionality. The most common use of this pattern -is statement compilation, where individual expression classes match -up to rendering methods that produce a string result. Beyond this, -the visitor system is also used to inspect expressions for various -information and patterns, as well as for usage in +they apply functionality. The most common use of this pattern +is statement compilation, where individual expression classes match +up to rendering methods that produce a string result. Beyond this, +the visitor system is also used to inspect expressions for various +information and patterns, as well as for usage in some kinds of expression transformation. Other kinds of transformation use a non-visitor traversal system. -For many examples of how the visit system is used, see the +For many examples of how the visit system is used, see the sqlalchemy.sql.util and the sqlalchemy.sql.compiler modules. For an introduction to clause adaption, see http://techspot.zzzeek.org/2008/01/23/expression-transformations/ @@ -28,18 +28,18 @@ import re from sqlalchemy import util import operator -__all__ = ['VisitableType', 'Visitable', 'ClauseVisitor', - 'CloningVisitor', 'ReplacingCloningVisitor', 'iterate', +__all__ = ['VisitableType', 'Visitable', 'ClauseVisitor', + 'CloningVisitor', 'ReplacingCloningVisitor', 'iterate', 'iterate_depthfirst', 'traverse_using', 'traverse', 'cloned_traverse', 'replacement_traverse'] class VisitableType(type): """Metaclass which assigns a `_compiler_dispatch` method to classes having a `__visit_name__` attribute. - + The _compiler_dispatch attribute becomes an instance method which looks approximately like the following:: - + def _compiler_dispatch (self, visitor, **kw): '''Look for an attribute named "visit_" + self.__visit_name__ on the visitor, and call it with the same kw params.''' @@ -92,7 +92,7 @@ class Visitable(object): __metaclass__ = VisitableType class ClauseVisitor(object): - """Base class for visitor objects which can traverse using + """Base class for visitor objects which can traverse using the traverse() function. """ @@ -144,7 +144,7 @@ class ClauseVisitor(object): return self class CloningVisitor(ClauseVisitor): - """Base class for visitor objects which can traverse using + """Base class for visitor objects which can traverse using the cloned_traverse() function. 
""" @@ -160,7 +160,7 @@ class CloningVisitor(ClauseVisitor): return cloned_traverse(obj, self.__traverse_options__, self._visitor_dict) class ReplacingCloningVisitor(CloningVisitor): - """Base class for visitor objects which can traverse using + """Base class for visitor objects which can traverse using the replacement_traverse() function. """ @@ -168,8 +168,8 @@ class ReplacingCloningVisitor(CloningVisitor): def replace(self, elem): """receive pre-copied elements during a cloning traversal. - If the method returns a new element, the element is used - instead of creating a simple copy of the element. Traversal + If the method returns a new element, the element is used + instead of creating a simple copy of the element. Traversal will halt on the newly returned element if it is re-encountered. """ return None @@ -232,7 +232,7 @@ def traverse_depthfirst(obj, opts, visitors): return traverse_using(iterate_depthfirst(obj, opts), obj, visitors) def cloned_traverse(obj, opts, visitors): - """clone the given expression structure, allowing + """clone the given expression structure, allowing modifications by visitors.""" cloned = util.column_dict() @@ -256,7 +256,7 @@ def cloned_traverse(obj, opts, visitors): def replacement_traverse(obj, opts, replace): - """clone the given expression structure, allowing element + """clone the given expression structure, allowing element replacement by a given replacement function.""" cloned = util.column_dict() diff --git a/lib/sqlalchemy/util/_collections.py b/lib/sqlalchemy/util/_collections.py index 1c407324ce..3f6cf51a6a 100644 --- a/lib/sqlalchemy/util/_collections.py +++ b/lib/sqlalchemy/util/_collections.py @@ -210,7 +210,7 @@ class OrderedDict(dict): try: self._list.append(key) except AttributeError: - # work around Python pickle loads() with + # work around Python pickle loads() with # dict subclass (seems to ignore __setstate__?) self._list = [key] dict.__setitem__(self, key, object) @@ -585,7 +585,7 @@ else: self[key] = value = self.creator(key) return value -# define collections that are capable of storing +# define collections that are capable of storing # ColumnElement objects as hashable keys/elements. column_set = set column_dict = dict @@ -595,12 +595,12 @@ populate_column_dict = PopulateDict def unique_list(seq, hashfunc=None): seen = {} if not hashfunc: - return [x for x in seq - if x not in seen + return [x for x in seq + if x not in seen and not seen.__setitem__(x, True)] else: - return [x for x in seq - if hashfunc(x) not in seen + return [x for x in seq + if hashfunc(x) not in seen and not seen.__setitem__(hashfunc(x), True)] class UniqueAppender(object): @@ -801,15 +801,15 @@ class LRUCache(dict): def _manage_size(self): while len(self) > self.capacity + self.capacity * self.threshold: - by_counter = sorted(dict.values(self), + by_counter = sorted(dict.values(self), key=operator.itemgetter(2), reverse=True) for item in by_counter[self.capacity:]: try: del self[item[0]] except KeyError: - # if we couldnt find a key, most - # likely some other thread broke in + # if we couldnt find a key, most + # likely some other thread broke in # on us. loop around and try again break @@ -870,7 +870,7 @@ class ScopedRegistry(object): pass class ThreadLocalRegistry(ScopedRegistry): - """A :class:`.ScopedRegistry` that uses a ``threading.local()`` + """A :class:`.ScopedRegistry` that uses a ``threading.local()`` variable for storage. 
""" diff --git a/lib/sqlalchemy/util/compat.py b/lib/sqlalchemy/util/compat.py index 534adc42b1..cf750b850f 100644 --- a/lib/sqlalchemy/util/compat.py +++ b/lib/sqlalchemy/util/compat.py @@ -55,7 +55,7 @@ else: # a controversial feature, required by MySQLdb currently def buffer(x): - return x + return x # Py2K buffer = buffer @@ -198,7 +198,7 @@ import time if win32 or jython: time_func = time.clock else: - time_func = time.time + time_func = time.time if sys.version_info >= (2, 5): any = any diff --git a/lib/sqlalchemy/util/langhelpers.py b/lib/sqlalchemy/util/langhelpers.py index 26c46dd112..94fbd5eb8e 100644 --- a/lib/sqlalchemy/util/langhelpers.py +++ b/lib/sqlalchemy/util/langhelpers.py @@ -242,7 +242,7 @@ def unbound_method_to_callable(func_or_cls): def generic_repr(obj, additional_kw=()): """Produce a __repr__() based on direct association of the __init__() specification vs. same-named attributes present. - + """ def genargs(): try: @@ -560,10 +560,10 @@ class importlater(object): from mypackage.somemodule import somesubmod except evaluted upon attribute access to "somesubmod". - + importlater() currently requires that resolve_all() be called, typically at the bottom of a package's __init__.py. - This is so that __import__ still called only at + This is so that __import__ still called only at module import time, and not potentially within a non-main thread later on. @@ -606,14 +606,14 @@ class importlater(object): importlater._unresolved.discard(self) if self._il_addtl: self._initial_import = __import__( - self._il_path, globals(), locals(), + self._il_path, globals(), locals(), [self._il_addtl]) else: self._initial_import = __import__(self._il_path) def __getattr__(self, key): if key == 'module': - raise ImportError("Could not resolve module %s" + raise ImportError("Could not resolve module %s" % self._full_path) try: attr = getattr(self.module, key) @@ -869,8 +869,8 @@ def warn(msg, stacklevel=3): If msg is a string, :class:`.exc.SAWarning` is used as the category. - .. note:: - + .. note:: + This function is swapped out when the test suite runs, with a compatible version that uses warnings.warn_explicit, so that the warnings registry can diff --git a/lib/sqlalchemy/util/topological.py b/lib/sqlalchemy/util/topological.py index 2ba86b23d0..58227af206 100644 --- a/lib/sqlalchemy/util/topological.py +++ b/lib/sqlalchemy/util/topological.py @@ -29,7 +29,7 @@ def sort_as_subsets(tuples, allitems): if not output: raise CircularDependencyError( "Circular dependency detected.", - find_cycles(tuples, allitems), + find_cycles(tuples, allitems), _gen_edges(edges) ) @@ -56,7 +56,7 @@ def find_cycles(tuples, allitems): output = set() - # we'd like to find all nodes that are + # we'd like to find all nodes that are # involved in cycles, so we do the full # pass through the whole thing for each # node in the original list. 
@@ -86,7 +86,7 @@ def find_cycles(tuples, allitems):

 def _gen_edges(edges):
     return set([
-                    (right, left)
-                    for left in edges
-                    for right in edges[left]
+                    (right, left)
+                    for left in edges
+                    for right in edges[left]
                 ])
diff --git a/setup.py b/setup.py
index 53e9b7cfc5..e27b78b16e 100644
--- a/setup.py
+++ b/setup.py
@@ -45,11 +45,11 @@ ext_modules = [
               sources=['lib/sqlalchemy/cextension/resultproxy.c'])
     ]

-ext_errors = (CCompilerError, DistutilsExecError, DistutilsPlatformError)
+ext_errors = (CCompilerError, DistutilsExecError, DistutilsPlatformError)
 if sys.platform == 'win32' and sys.version_info > (2, 6):
     # 2.6's distutils.msvc9compiler can raise an IOError when failing to
     # find the compiler
-    ext_errors += (IOError,)
+    ext_errors += (IOError,)

 class BuildFailed(Exception):

@@ -95,7 +95,7 @@ def find_packages(dir_):
             packages.append(fragment.replace(os.sep, '.'))
     return packages

-v_file = open(os.path.join(os.path.dirname(__file__),
+v_file = open(os.path.join(os.path.dirname(__file__),
                         'lib', 'sqlalchemy', '__init__.py'))
 VERSION = re.compile(r".*__version__ = '(.*?)'", re.S).match(v_file.read()).group(1)
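The VERSION regex in setup.py above, shown against an illustrative stand-in
for lib/sqlalchemy/__init__.py:

    import re

    source = "__version__ = '0.7.9'"   # stand-in input, not read from disk
    print re.compile(r".*__version__ = '(.*?)'", re.S).match(source).group(1)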
diff --git a/test/aaa_profiling/test_memusage.py b/test/aaa_profiling/test_memusage.py
index cffe8b71f7..3120683b35 100644
--- a/test/aaa_profiling/test_memusage.py
+++ b/test/aaa_profiling/test_memusage.py
@@ -267,7 +267,7 @@ class MemUsageTest(EnsureZeroed):
         x = counter[0]
         dec = 10
         while dec > 0:
-            # trying to count in binary here,
+            # trying to count in binary here,
             # works enough to trip the test case
             if pow(2, dec) < x:
                 setattr(w1, 'col%d' % dec, counter[0])
@@ -457,7 +457,7 @@ class MemUsageTest(EnsureZeroed):
             pass

         mapper(A, table1, properties={
-            'bs':relationship(B, secondary=table3,
+            'bs':relationship(B, secondary=table3,
                                 backref='as', order_by=table3.c.t1)
         })
         mapper(B, table2)
diff --git a/test/aaa_profiling/test_orm.py b/test/aaa_profiling/test_orm.py
index 8de217f046..0e9cda226f 100644
--- a/test/aaa_profiling/test_orm.py
+++ b/test/aaa_profiling/test_orm.py
@@ -90,7 +90,7 @@ class MergeTest(fixtures.MappedTest):
     @profiling.function_call_count(variance=0.10,
                             versions={'2.5':1050, '2.6':1050,
-                                    '2.6+cextension':1005,
+                                    '2.6+cextension':1005,
                                     '2.7':1005, '3':1050}
                             )
@@ -106,14 +106,14 @@ class MergeTest(fixtures.MappedTest):
 class LoadManyToOneFromIdentityTest(fixtures.MappedTest):
     """test overhead associated with many-to-one fetches.

-    Prior to the refactor of LoadLazyAttribute and
+    Prior to the refactor of LoadLazyAttribute and
     query._get(), the load from identity map took 2x
     as many calls (65K calls here instead of around 33K)
     to load 1000 related objects from the identity map.

     """

-    # only need to test for unexpected variance in a large call
+    # only need to test for unexpected variance in a large call
     # count here,
     # so remove some platforms that have wildly divergent
     # callcounts.
@@ -122,13 +122,13 @@ class LoadManyToOneFromIdentityTest(fixtures.MappedTest):

     @classmethod
     def define_tables(cls, metadata):
-        parent = Table('parent', metadata,
-                Column('id', Integer, primary_key=True),
+        parent = Table('parent', metadata,
+                Column('id', Integer, primary_key=True),
                Column('data', String(20)),
                Column('child_id', Integer, ForeignKey('child.id'))
            )

-        child = Table('child', metadata,
+        child = Table('child', metadata,
             Column('id', Integer,primary_key=True),
             Column('data', String(20))
         )
@@ -162,10 +162,10 @@ class LoadManyToOneFromIdentityTest(fixtures.MappedTest):
         ])
         parent.insert().execute([
             {
-                'id':i,
-                'data':'p%dc%d' % (i, (i % 250) + 1),
+                'id':i,
+                'data':'p%dc%d' % (i, (i % 250) + 1),
                 'child_id':(i % 250) + 1
-            }
+            }
             for i in xrange(1, 1000)
         ])

@@ -201,18 +201,18 @@ class MergeBackrefsTest(fixtures.MappedTest):
     @classmethod
     def define_tables(cls, metadata):
         Table('a', metadata,
-            Column('id', Integer, primary_key=True),
+            Column('id', Integer, primary_key=True),
             Column('c_id', Integer, ForeignKey('c.id'))
         )
         Table('b', metadata,
-            Column('id', Integer, primary_key=True),
+            Column('id', Integer, primary_key=True),
             Column('a_id', Integer, ForeignKey('a.id'))
         )
         Table('c', metadata,
-            Column('id', Integer, primary_key=True),
+            Column('id', Integer, primary_key=True),
         )
         Table('d', metadata,
-            Column('id', Integer, primary_key=True),
+            Column('id', Integer, primary_key=True),
             Column('a_id', Integer, ForeignKey('a.id'))
         )
@@ -248,7 +248,7 @@ class MergeBackrefsTest(fixtures.MappedTest):
             cls.classes.C, cls.classes.D
         s = Session()
         s.add_all([
-            A(id=i,
+            A(id=i,
                 bs=[B(id=(i * 50) + j) for j in xrange(1, 50)],
                 c=C(id=i),
                 ds=[D(id=(i * 50) + j) for j in xrange(1, 50)]
@@ -263,7 +263,7 @@ class MergeBackrefsTest(fixtures.MappedTest):
             self.classes.C, self.classes.D
         s = Session()
         for a in [
-            A(id=i,
+            A(id=i,
                 bs=[B(id=(i * 50) + j) for j in xrange(1, 50)],
                 c=C(id=i),
                 ds=[D(id=(i * 50) + j) for j in xrange(1, 50)]
diff --git a/test/aaa_profiling/test_resultset.py b/test/aaa_profiling/test_resultset.py
index 64fa27de00..e07cb3c502 100644
--- a/test/aaa_profiling/test_resultset.py
+++ b/test/aaa_profiling/test_resultset.py
@@ -48,7 +48,7 @@ class ResultSetTest(fixtures.TestBase, AssertsExecutionResults):
     @profiling.function_call_count(versions={
                                     '2.7':14396,
                                     '2.6':14396,
-                                    '2.6+cextension': 365,
+                                    '2.6+cextension': 365,
                                     '2.7+cextension':365})
     def test_unicode(self):
         [tuple(row) for row in t2.select().execute().fetchall()]
@@ -72,8 +72,8 @@ class ExecutionTest(fixtures.TestBase):
         # ensure initial connect activities complete
         c.execute("select 1")

-        @profiling.function_call_count(versions={'2.7':40, '2.6':40, '2.5':35,
-                                        '2.4':21, '3':40},
+        @profiling.function_call_count(versions={'2.7':40, '2.6':40, '2.5':35,
+                                        '2.4':21, '3':40},
                                         variance=.10)
         def go():
             c.execute("select 1")
@@ -85,10 +85,10 @@ class ExecutionTest(fixtures.TestBase):
         # ensure initial connect activities complete
         e.execute("select 1")

-        @profiling.function_call_count(versions={'2.4':41, '2.5':65,
+        @profiling.function_call_count(versions={'2.4':41, '2.5':65,
                                         '2.6':65, '3':61,
                                         '2.7':65,
-                                        '2.6+cextension':65},
+                                        '2.6+cextension':65},
                                         variance=.05)
         def go():
             e.execute("select 1")
diff --git a/test/aaa_profiling/test_zoomark.py b/test/aaa_profiling/test_zoomark.py
index 86baae22ec..564d35f0c0 100644
--- a/test/aaa_profiling/test_zoomark.py
+++ b/test/aaa_profiling/test_zoomark.py
@@ -393,16 +393,16 @@ class ZooMarkTest(fixtures.TestBase):

     def test_profile_5_aggregates(self):
         self.test_baseline_5_aggregates()
-    @profiling.function_call_count(1788, {'2.4': 1118, '3.2':1647,
+    @profiling.function_call_count(1788, {'2.4': 1118, '3.2':1647,
                                             '2.7+cextension':1698})
     def test_profile_6_editing(self):
         self.test_baseline_6_editing()

-    @profiling.function_call_count(2252, {'2.4': 1673,
+    @profiling.function_call_count(2252, {'2.4': 1673,
                                             '2.6':2412,
                                             '2.7':2412,
                                             '3.2':2396,
-                                            '2.7+cextension':2110,
+                                            '2.7+cextension':2110,
                                             '2.6+cextension': 2252})
     def test_profile_7_multiview(self):
         self.test_baseline_7_multiview()
diff --git a/test/aaa_profiling/test_zoomark_orm.py b/test/aaa_profiling/test_zoomark_orm.py
index 99b4d92240..9947e94f28 100644
--- a/test/aaa_profiling/test_zoomark_orm.py
+++ b/test/aaa_profiling/test_zoomark_orm.py
@@ -335,7 +335,7 @@ class ZooMarkTest(fixtures.TestBase):
     def test_profile_1_create_tables(self):
         self.test_baseline_1_create_tables()

-    @profiling.function_call_count(5786, {'2.7+cextension':5683,
+    @profiling.function_call_count(5786, {'2.7+cextension':5683,
                                         '2.6+cextension':5992})
     def test_profile_1a_populate(self):
         self.test_baseline_1a_populate()
diff --git a/test/base/test_dependency.py b/test/base/test_dependency.py
index 4be3c83901..f3e19982b0 100644
--- a/test/base/test_dependency.py
+++ b/test/base/test_dependency.py
@@ -86,7 +86,7 @@ class DependencySortTest(fixtures.TestBase):
             eq_(err.cycles, set(['node1', 'node3', 'node2', 'node5',
                 'node4']))
             eq_(err.edges, set([('node3', 'node1'), ('node4', 'node1'),
-                ('node2', 'node3'), ('node1', 'node2'),
+                ('node2', 'node3'), ('node1', 'node2'),
                 ('node4','node5'), ('node5', 'node4')]))

     def test_raise_on_cycle_two(self):
@@ -108,7 +108,7 @@ class DependencySortTest(fixtures.TestBase):
         except exc.CircularDependencyError, err:
             eq_(err.cycles, set(['node1', 'node3', 'node2']))
             eq_(err.edges, set([('node3', 'node1'), ('node2', 'node3'),
-                ('node3', 'node2'), ('node1', 'node2'),
+                ('node3', 'node2'), ('node1', 'node2'),
                 ('node2','node4')]))

     def test_raise_on_cycle_three(self):
@@ -224,7 +224,7 @@ class DependencySortTest(fixtures.TestBase):
         ])
         # node6 only became present here once [ticket:2282] was addressed.
         eq_(
-            topological.find_cycles(tuples, allnodes),
+            topological.find_cycles(tuples, allnodes),
             set(['node1','node2', 'node4', 'node6'])
         )
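A minimal sketch (assumed usage) of the error these tests assert on: a
two-node cycle makes the topological sort raise CircularDependencyError,
carrying the offending nodes in .cycles and the edges in .edges.

    from sqlalchemy import exc
    from sqlalchemy.util import topological

    try:
        list(topological.sort([('a', 'b'), ('b', 'a')], ['a', 'b']))
    except exc.CircularDependencyError, err:
        print sorted(err.cycles)   # ['a', 'b']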
@@ -258,23 +258,23 @@ class DependencySortTest(fixtures.TestBase):

     def test_find_multiple_cycles_four(self):
         tuples = [
-            ('node6', 'node2'),
-            ('node15', 'node19'),
+            ('node6', 'node2'),
+            ('node15', 'node19'),
             ('node19', 'node2'), ('node4', 'node10'), ('node15', 'node13'),
-            ('node17', 'node11'), ('node1', 'node19'), ('node15', 'node8'),
-            ('node6', 'node20'), ('node14', 'node11'), ('node6', 'node14'),
+            ('node17', 'node11'), ('node1', 'node19'), ('node15', 'node8'),
+            ('node6', 'node20'), ('node14', 'node11'), ('node6', 'node14'),
             ('node11', 'node2'), ('node10', 'node20'), ('node1', 'node11'),
             ('node20', 'node19'), ('node4', 'node20'), ('node15', 'node20'),
             ('node9', 'node19'), ('node11', 'node10'), ('node11', 'node19'),
             ('node13', 'node6'), ('node3', 'node15'), ('node9', 'node11'),
-            ('node4', 'node17'), ('node2', 'node20'), ('node19', 'node10'),
+            ('node4', 'node17'), ('node2', 'node20'), ('node19', 'node10'),
             ('node8', 'node4'), ('node11', 'node3'), ('node6', 'node1')
         ]
         allnodes = ['node%d' % i for i in xrange(1, 21)]
         eq_(
-            topological.find_cycles(tuples, allnodes),
-            set(['node11', 'node10', 'node13', 'node15', 'node14', 'node17',
-                'node19', 'node20', 'node8', 'node1', 'node3',
+            topological.find_cycles(tuples, allnodes),
+            set(['node11', 'node10', 'node13', 'node15', 'node14', 'node17',
+                'node19', 'node20', 'node8', 'node1', 'node3',
                 'node2', 'node4', 'node6'])
         )
diff --git a/test/base/test_events.py b/test/base/test_events.py
index 61a4b9c71b..57c06c328f 100644
--- a/test/base/test_events.py
+++ b/test/base/test_events.py
@@ -347,7 +347,7 @@ class TestCustomTargets(fixtures.TestBase):
         )

         assert_raises(
-            exc.InvalidRequestError,
+            exc.InvalidRequestError,
             event.listen,
             listen, "event_one", self.Target
         )
diff --git a/test/base/test_except.py b/test/base/test_except.py
index 18a06b9bd1..597f1132bd 100644
--- a/test/base/test_except.py
+++ b/test/base/test_except.py
@@ -5,8 +5,8 @@ from sqlalchemy import exc as sa_exceptions
 from test.lib import fixtures
 from test.lib.testing import eq_

-# Py3K
-#StandardError = BaseException
+# Py3K
+#StandardError = BaseException
 # Py2K
 from exceptions import StandardError, KeyboardInterrupt, SystemExit
 # end Py2K
@@ -54,7 +54,7 @@ class WrapTest(fixtures.TestBase):
     def test_tostring_large_dict(self):
         try:
             raise sa_exceptions.DBAPIError.instance('this is a message'
-                ,
+                ,
                 {'a': 1, 'b': 2, 'c': 3, 'd': 4, 'e': 5, 'f': 6, 'g': 7,
                 'h': 8, 'i': 9, 'j': 10, 'k': 11,
                 }, OperationalError(), DatabaseError)
@@ -64,8 +64,8 @@ class WrapTest(fixtures.TestBase):

     def test_tostring_large_list(self):
         try:
-            raise sa_exceptions.DBAPIError.instance('this is a message',
-                [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11,],
+            raise sa_exceptions.DBAPIError.instance('this is a message',
+                [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11,],
                 OperationalError(), DatabaseError)
         except sa_exceptions.DBAPIError, exc:
             assert str(exc).startswith("(OperationalError) 'this is a "
@@ -73,9 +73,9 @@ class WrapTest(fixtures.TestBase):

     def test_tostring_large_executemany(self):
         try:
-            raise sa_exceptions.DBAPIError.instance('this is a message',
-                [{1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1},
-                    {1: 1}, {1:1}, {1: 1}, {1: 1},],
+            raise sa_exceptions.DBAPIError.instance('this is a message',
+                [{1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1},
+                    {1: 1}, {1:1}, {1: 1}, {1: 1},],
                 OperationalError(), DatabaseError)
         except sa_exceptions.DBAPIError, exc:
             eq_(str(exc) ,
@@ -84,7 +84,7 @@ class WrapTest(fixtures.TestBase):
                 "1}, {1: 1}, {1: 1}]")
         try:
             raise sa_exceptions.DBAPIError.instance('this is a message', [
-                {1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1},
+                {1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1},
                 {1:1}, {1: 1}, {1: 1}, {1: 1},
                 ], OperationalError(), DatabaseError)
         except sa_exceptions.DBAPIError, exc:
@@ -95,7 +95,7 @@ class WrapTest(fixtures.TestBase):
                 "bound parameter sets ...  {1: 1}, {1: 1}]"
             )
         try:
-            raise sa_exceptions.DBAPIError.instance('this is a message',
+            raise sa_exceptions.DBAPIError.instance('this is a message',
                 [
                     (1, ), (1, ), (1, ), (1, ), (1, ), (1, ), (1, ), (1, ),
                     (1, ), (1, ),
@@ -127,7 +127,7 @@ class WrapTest(fixtures.TestBase):

     def test_db_error_noncompliant_dbapi(self):
         try:
-            raise sa_exceptions.DBAPIError.instance('', [], OutOfSpec(),
+            raise sa_exceptions.DBAPIError.instance('', [], OutOfSpec(),
                     DatabaseError)
         except sa_exceptions.DBAPIError, e:
             self.assert_(e.__class__ is sa_exceptions.DBAPIError)
diff --git a/test/base/test_utils.py b/test/base/test_utils.py
index 3a7ce07f27..28b389e344 100644
--- a/test/base/test_utils.py
+++ b/test/base/test_utils.py
@@ -299,7 +299,7 @@ class IdentitySetTest(fixtures.TestBase):
             ids2 - ids1,
             IdentitySet([o2, o3])
         )
-
+
         ids2 -= ids1
         eq_(ids2, IdentitySet([o2, o3]))
diff --git a/test/bootstrap/noseplugin.py b/test/bootstrap/noseplugin.py
index 89b164cbc5..d653fa5020 100644
--- a/test/bootstrap/noseplugin.py
+++ b/test/bootstrap/noseplugin.py
@@ -16,7 +16,7 @@ from test.bootstrap.config import (
     _create_testing_engine, _engine_pool, _engine_strategy, _engine_uri,
     _list_dbs, _log, _prep_testing_database, _require, _reverse_topological,
     _server_side_cursors, _monkeypatch_cdecimal, _zero_timeout,
-    _set_table_options, base_config, db, db_label, db_url, file_config, post_configure,
+    _set_table_options, base_config, db, db_label, db_url, file_config, post_configure,
     pre_configure)

 log = logging.getLogger('nose.plugins.sqlalchemy')
diff --git a/test/dialect/test_firebird.py b/test/dialect/test_firebird.py
index 3241db730a..41533dbbdc 100644
--- a/test/dialect/test_firebird.py
+++ b/test/dialect/test_firebird.py
@@ -94,7 +94,7 @@ class DomainReflectionTest(fixtures.TestBase, AssertsExecutionResults):

 class BuggyDomainReflectionTest(fixtures.TestBase, AssertsExecutionResults):
-    """Test Firebird domains (and some other reflection bumps),
+    """Test Firebird domains (and some other reflection bumps),
     see [ticket:1663] and
     http://tracker.firebirdsql.org/browse/CORE-356"""

     __only_on__ = 'firebird'
@@ -325,7 +325,7 @@ class TypesTest(fixtures.TestBase):
     @testing.provide_metadata
     def test_infinite_float(self):
         metadata = self.metadata
-        t = Table('t', metadata,
+        t = Table('t', metadata,
             Column('data', Float)
         )
         metadata.create_all()
diff --git a/test/dialect/test_maxdb.py b/test/dialect/test_maxdb.py
index 8d76529d78..e0c3eafbe9 100644
--- a/test/dialect/test_maxdb.py
+++ b/test/dialect/test_maxdb.py
@@ -10,7 +10,7 @@ from test.lib import *

 # TODO
-# - add "Database" test, a quick check for join behavior on different
+# - add "Database" test, a quick check for join behavior on different
 #   max versions
 # - full max-specific reflection suite
 # - datetime tests
diff --git a/test/dialect/test_mssql.py b/test/dialect/test_mssql.py
index 74e96c8efa..6d156cca17 100644
--- a/test/dialect/test_mssql.py
+++ b/test/dialect/test_mssql.py
@@ -84,8 +84,8 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
             self.assert_compile(
                 t.update().where(t.c.somecolumn=="q").
                         values(somecolumn="x").
- with_hint("WITH (PAGLOCK)", - selectable=targ, + with_hint("WITH (PAGLOCK)", + selectable=targ, dialect_name=darg), "UPDATE sometable WITH (PAGLOCK) " "SET somecolumn=:somecolumn " @@ -108,8 +108,8 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL): for darg in ("*", "mssql"): self.assert_compile( t.delete().where(t.c.somecolumn=="q"). - with_hint("WITH (PAGLOCK)", - selectable=targ, + with_hint("WITH (PAGLOCK)", + selectable=targ, dialect_name=darg), "DELETE FROM sometable WITH (PAGLOCK) " "WHERE sometable.somecolumn = :somecolumn_1" @@ -132,8 +132,8 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL): self.assert_compile( t.update().where(t.c.somecolumn==t2.c.somecolumn). values(somecolumn="x"). - with_hint("WITH (PAGLOCK)", - selectable=t2, + with_hint("WITH (PAGLOCK)", + selectable=t2, dialect_name=darg), "UPDATE sometable SET somecolumn=:somecolumn " "FROM sometable, othertable WITH (PAGLOCK) " @@ -147,8 +147,8 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL): # for darg in ("*", "mssql"): # self.assert_compile( # t.delete().where(t.c.somecolumn==t2.c.somecolumn). - # with_hint("WITH (PAGLOCK)", - # selectable=t2, + # with_hint("WITH (PAGLOCK)", + # selectable=t2, # dialect_name=darg), # "" # ) @@ -163,7 +163,7 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL): for expr, compile in [ ( - select([literal("x"), literal("y")]), + select([literal("x"), literal("y")]), "SELECT 'x' AS anon_1, 'y' AS anon_2", ), ( @@ -693,8 +693,8 @@ class ReflectionTest(fixtures.TestBase, ComparesTables): def test_indexes_cols_with_commas(self): metadata = self.metadata - t1 = Table('t', metadata, - Column('x, col', Integer, key='x'), + t1 = Table('t', metadata, + Column('x, col', Integer, key='x'), Column('y', Integer) ) Index('foo', t1.c.x, t1.c.y) @@ -712,7 +712,7 @@ class ReflectionTest(fixtures.TestBase, ComparesTables): def test_indexes_cols_with_spaces(self): metadata = self.metadata - t1 = Table('t', metadata, Column('x col', Integer, key='x'), + t1 = Table('t', metadata, Column('x col', Integer, key='x'), Column('y', Integer)) Index('foo', t1.c.x, t1.c.y) metadata.create_all() @@ -762,14 +762,14 @@ class QueryTest(testing.AssertsExecutionResults, fixtures.TestBase): case of a table having an identity (autoincrement) primary key column, and which also has a trigger configured to fire upon each insert and subsequently perform an - insert into a different table. + insert into a different table. SQLALchemy's MSSQL dialect by default will attempt to use an OUTPUT_INSERTED clause, which in this case will raise the following error: - ProgrammingError: (ProgrammingError) ('42000', 334, - "[Microsoft][SQL Server Native Client 10.0][SQL Server]The + ProgrammingError: (ProgrammingError) ('42000', 334, + "[Microsoft][SQL Server Native Client 10.0][SQL Server]The target table 't1' of the DML statement cannot have any enabled triggers if the statement contains an OUTPUT clause without INTO clause.", 7748) 'INSERT INTO t1 (descr) OUTPUT inserted.id @@ -796,7 +796,7 @@ class QueryTest(testing.AssertsExecutionResults, fixtures.TestBase): # though the ExecutionContext will still have a # _select_lastrowid, so the SELECT SCOPE_IDENTITY() will # hopefully be called instead. 
@@ -693,8 +693,8 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
     def test_indexes_cols_with_commas(self):
         metadata = self.metadata

-        t1 = Table('t', metadata,
-                Column('x, col', Integer, key='x'),
+        t1 = Table('t', metadata,
+                Column('x, col', Integer, key='x'),
                 Column('y', Integer)
             )
         Index('foo', t1.c.x, t1.c.y)
@@ -712,7 +712,7 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
     def test_indexes_cols_with_spaces(self):
         metadata = self.metadata

-        t1 = Table('t', metadata, Column('x col', Integer, key='x'),
+        t1 = Table('t', metadata, Column('x col', Integer, key='x'),
                 Column('y', Integer))
         Index('foo', t1.c.x, t1.c.y)
         metadata.create_all()
@@ -762,14 +762,14 @@ class QueryTest(testing.AssertsExecutionResults, fixtures.TestBase):
         case of a table having an identity (autoincrement) primary key
         column, and which also has a trigger configured to fire upon each
         insert and subsequently perform an
-        insert into a different table.
+        insert into a different table.

        SQLAlchemy's MSSQL dialect by default will attempt to
        use an OUTPUT_INSERTED clause, which in this case will
        raise the following error:

-        ProgrammingError: (ProgrammingError) ('42000', 334,
-           "[Microsoft][SQL Server Native Client 10.0][SQL Server]The
+        ProgrammingError: (ProgrammingError) ('42000', 334,
+           "[Microsoft][SQL Server Native Client 10.0][SQL Server]The
            target table 't1' of the DML statement cannot have any enabled
            triggers if the statement contains an OUTPUT clause without
            INTO clause.", 7748) 'INSERT INTO t1 (descr) OUTPUT inserted.id
@@ -796,7 +796,7 @@ class QueryTest(testing.AssertsExecutionResults, fixtures.TestBase):
                 # though the ExecutionContext will still have a
                 # _select_lastrowid, so the SELECT SCOPE_IDENTITY() will
                 # hopefully be called instead.
-                implicit_returning = False
+                implicit_returning = False
                 )
         t2 = Table('t2', meta,
                 Column('id', Integer, Sequence('fred', 200, 1),
@@ -834,7 +834,7 @@ class QueryTest(testing.AssertsExecutionResults, fixtures.TestBase):
             testing.db,
             lambda: engine.execute(t1.insert()),
             ExactSQL("INSERT INTO t1 DEFAULT VALUES"),
-            # we don't have an event for
+            # we don't have an event for
             # "SELECT @@IDENTITY" part here.
             # this will be in 0.8 with #2459
         )
@@ -844,7 +844,7 @@ class QueryTest(testing.AssertsExecutionResults, fixtures.TestBase):
         meta = MetaData(testing.db)
         con = testing.db.connect()
         con.execute('create schema paj')
-        tbl = Table('test', meta,
+        tbl = Table('test', meta,
                 Column('id', Integer, primary_key=True), schema='paj')
         tbl.create()
         try:
@@ -1005,12 +1005,12 @@ class MatchTest(fixtures.TestBase, AssertsCompiledSQL):
                Column('category_id', Integer, ForeignKey('cattable.id')),
                PrimaryKeyConstraint('id', name='PK_matchtable'),
                )
-        DDL("""CREATE FULLTEXT INDEX
-                       ON cattable (description)
+        DDL("""CREATE FULLTEXT INDEX
+                       ON cattable (description)
                        KEY INDEX PK_cattable""").execute_at('after-create'
                 , matchtable)
-        DDL("""CREATE FULLTEXT INDEX
-                       ON matchtable (title)
+        DDL("""CREATE FULLTEXT INDEX
+                       ON matchtable (title)
                        KEY INDEX PK_matchtable""").execute_at('after-create'
                 , matchtable)
         metadata.create_all()
@@ -1197,7 +1197,7 @@ class ParseConnectTest(fixtures.TestBase, AssertsCompiledSQL):
         url.make_url('mssql+pymssql://scott:tiger@somehost/test')
         connection = dialect.create_connect_args(u)
         eq_(
-            [[], {'host': 'somehost', 'password': 'tiger',
+            [[], {'host': 'somehost', 'password': 'tiger',
                     'user': 'scott', 'database': 'test'}], connection
         )

@@ -1205,7 +1205,7 @@ class ParseConnectTest(fixtures.TestBase, AssertsCompiledSQL):
         url.make_url('mssql+pymssql://scott:tiger@somehost:5000/test')
         connection = dialect.create_connect_args(u)
         eq_(
-            [[], {'host': 'somehost:5000', 'password': 'tiger',
+            [[], {'host': 'somehost:5000', 'password': 'tiger',
                     'user': 'scott', 'database': 'test'}], connection
        )
@@ -1890,7 +1890,7 @@ class ReflectHugeViewTest(fixtures.TestBase):
         )
         self.view_str = view_str = \
             "CREATE VIEW huge_named_view AS SELECT %s FROM base_table" % (
-            ",".join("long_named_column_number_%d" % i
+            ",".join("long_named_column_number_%d" % i
                         for i in xrange(self.col_num))
         )
         assert len(view_str) > 4000
diff --git a/test/dialect/test_mysql.py b/test/dialect/test_mysql.py
index 8a880645c2..d794fd2b8c 100644
--- a/test/dialect/test_mysql.py
+++ b/test/dialect/test_mysql.py
@@ -26,7 +26,7 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
                 Column("master_ssl_verify_server_cert", Integer))
         x = select([table.c.col1, table.c.master_ssl_verify_server_cert])

-        self.assert_compile(x,
+        self.assert_compile(x,
             '''SELECT mysql_table.col1, mysql_table.`master_ssl_verify_server_cert` FROM mysql_table''')

     def test_create_index_simple(self):
@@ -66,7 +66,7 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):

     def test_create_pk_plain(self):
         m = MetaData()
-        tbl = Table('testtbl', m, Column('data', String(255)),
+        tbl = Table('testtbl', m, Column('data', String(255)),
                     PrimaryKeyConstraint('data'))

         self.assert_compile(schema.CreateTable(tbl),
@@ -75,7 +75,7 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):

     def test_create_pk_with_using(self):
         m = MetaData()
-        tbl = Table('testtbl', m, Column('data', String(255)),
+        tbl = Table('testtbl', m, Column('data', String(255)),
                     PrimaryKeyConstraint('data', mysql_using='btree'))

         self.assert_compile(schema.CreateTable(tbl),
@@ -86,7 +86,7 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
 class DialectTest(fixtures.TestBase):
     __only_on__ = 'mysql'

-    @testing.only_on(['mysql+mysqldb', 'mysql+oursql'],
+    @testing.only_on(['mysql+mysqldb', 'mysql+oursql'],
                     'requires particular SSL arguments')
     def test_ssl_arguments(self):
         dialect = testing.db.dialect
@@ -98,12 +98,12 @@ class DialectTest(fixtures.TestBase):
         for k in ('use_unicode', 'found_rows', 'client_flag'):
             kwarg.pop(k, None)
         eq_(
-            kwarg,
+            kwarg,
             {
-                'passwd': 'tiger', 'db': 'test',
-                'ssl': {'ca': '/ca.pem', 'cert': '/cert.pem',
-                        'key': '/key.pem'},
-                'host': 'localhost', 'user': 'scott',
+                'passwd': 'tiger', 'db': 'test',
+                'ssl': {'ca': '/ca.pem', 'cert': '/cert.pem',
+                        'key': '/key.pem'},
+                'host': 'localhost', 'user': 'scott',
                 'port': 3306
             }
         )
@@ -158,7 +158,7 @@ class TypesTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL):
         columns = [
             # column type, args, kwargs, expected ddl
-            # e.g. Column(Integer(10, unsigned=True)) ==
+            # e.g. Column(Integer(10, unsigned=True)) ==
             # 'INTEGER(10) UNSIGNED'
             (mysql.MSNumeric, [], {},
              'NUMERIC'),
@@ -592,7 +592,7 @@ class TypesTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL):
         # there's a slight assumption here that this test can
         # complete within the scope of a single second.
         # if needed, can break out the eq_() just to check for
-        # timestamps that are within a few seconds of "now"
+        # timestamps that are within a few seconds of "now"
         # using timedelta.

         now = testing.db.execute("select now()").scalar()
@@ -730,7 +730,7 @@ class TypesTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL):
         enum_table.drop(checkfirst=True)
         enum_table.create()

-        assert_raises(exc.DBAPIError, enum_table.insert().execute,
+        assert_raises(exc.DBAPIError, enum_table.insert().execute,
                         e1=None, e2=None, e3=None, e4=None)

         assert_raises(exc.StatementError, enum_table.insert().execute,
@@ -745,8 +745,8 @@ class TypesTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL):

         res = enum_table.select().execute().fetchall()

-        expected = [(None, 'a', 'a', None, 'a', None, None, None),
-                    ('a', 'a', 'a', 'a', 'a', 'a', 'a', "'a'"),
+        expected = [(None, 'a', 'a', None, 'a', None, None, None),
+                    ('a', 'a', 'a', 'a', 'a', 'a', 'a', "'a'"),
                     ('b', 'b', 'b', 'b', 'b', 'b', 'b', 'b')]

         # This is known to fail with MySQLDB 1.2.2 beta versions
@@ -786,8 +786,8 @@ class TypesTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL):
         t1.insert().execute(value=u'drôle', value2=u'drôle')
         t1.insert().execute(value=u'réveillé', value2=u'réveillé')
         t1.insert().execute(value=u'S’il', value2=u'S’il')
-        eq_(t1.select().order_by(t1.c.id).execute().fetchall(),
-            [(1, u'drôle', u'drôle'), (2, u'réveillé', u'réveillé'),
+        eq_(t1.select().order_by(t1.c.id).execute().fetchall(),
+            [(1, u'drôle', u'drôle'), (2, u'réveillé', u'réveillé'),
                 (3, u'S’il', u'S’il')]
         )
@@ -802,7 +802,7 @@ class TypesTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL):
         assert t2.c.value.type.enums[0:2] == \
                 (u'réveillé', u'drôle') #, u'S’il') # eh ?

         assert t2.c.value2.type.enums[0:2] == \
-                (u'réveillé', u'drôle') #, u'S’il') # eh ?
+                (u'réveillé', u'drôle') #, u'S’il') # eh ?
     finally:
         metadata.drop_all()
@@ -1153,7 +1153,7 @@ class SQLTest(fixtures.TestBase, AssertsCompiledSQL):
             )

         eq_(gen(prefixes=['ALL']), 'SELECT ALL q')
-        eq_(gen(prefixes=['DISTINCTROW']),
+        eq_(gen(prefixes=['DISTINCTROW']),
                 'SELECT DISTINCTROW q')

         # Interaction with MySQL prefix extensions
@@ -1182,7 +1182,7 @@ class SQLTest(fixtures.TestBase, AssertsCompiledSQL):
         )

         self.assert_compile(
-            select(['q'], distinct='ALL',
+            select(['q'], distinct='ALL',
                     prefixes=['HIGH_PRIORITY', 'SQL_SMALL_RESULT']),
             'SELECT HIGH_PRIORITY SQL_SMALL_RESULT ALL q'
         )
@@ -1211,7 +1211,7 @@ class SQLTest(fixtures.TestBase, AssertsCompiledSQL):
         )
         self.assert_compile(
             select([t]).limit(10),
-            "SELECT t.col1, t.col2 FROM t  LIMIT %s",
+            "SELECT t.col1, t.col2 FROM t  LIMIT %s",
             {'param_1':10})

         self.assert_compile(
@@ -1232,9 +1232,9 @@ class SQLTest(fixtures.TestBase, AssertsCompiledSQL):
         ):
             type_ = sqltypes.to_instance(type_)
             assert_raises_message(
-                exc.CompileError,
+                exc.CompileError,
                 "VARCHAR requires a length on dialect mysql",
-                type_.compile,
+                type_.compile,
                 dialect=mysql.dialect())

             t1 = Table('sometable', MetaData(),
@@ -1289,7 +1289,7 @@ class SQLTest(fixtures.TestBase, AssertsCompiledSQL):
             # 'SIGNED INTEGER' is a bigint, so this is ok.
             (m.MSBigInteger, "CAST(t.col AS SIGNED INTEGER)"),
             (m.MSBigInteger(unsigned=False), "CAST(t.col AS SIGNED INTEGER)"),
-            (m.MSBigInteger(unsigned=True),
+            (m.MSBigInteger(unsigned=True),
              "CAST(t.col AS UNSIGNED INTEGER)"),
             (m.MSBit, "t.col"),
@@ -1412,7 +1412,7 @@ class SQLTest(fixtures.TestBase, AssertsCompiledSQL):
         tname = 'zyrenian_zyme_zyzzogeton_zyzzogeton'
         cname = 'zyrenian_zyme_zyzzogeton_zo'

-        t1 = Table(tname, MetaData(),
+        t1 = Table(tname, MetaData(),
                     Column(cname, Integer, index=True),
                 )
         ix1 = list(t1.indexes)[0]
diff --git a/test/dialect/test_oracle.py b/test/dialect/test_oracle.py
index 9540fa9635..07214ed348 100644
--- a/test/dialect/test_oracle.py
+++ b/test/dialect/test_oracle.py
@@ -440,7 +440,7 @@ class CompatFlagsTest(fixtures.TestBase, AssertsCompiledSQL):
         self.assert_compile(Unicode(50),"VARCHAR2(50)",dialect=dialect)
         self.assert_compile(UnicodeText(),"CLOB",dialect=dialect)

-        dialect = oracle.dialect(implicit_returning=True,
+        dialect = oracle.dialect(implicit_returning=True,
                                     dbapi=testing.db.dialect.dbapi)
         dialect._get_server_version_info = server_version_info
         dialect.initialize(testing.db.connect())
@@ -483,23 +483,23 @@ class MultiSchemaTest(fixtures.TestBase, AssertsCompiledSQL):

         for stmt in """
 create table test_schema.parent(
-    id integer primary key,
+    id integer primary key,
     data varchar2(50)
 );
 create table test_schema.child(
     id integer primary key,
-    data varchar2(50),
+    data varchar2(50),
     parent_id integer references test_schema.parent(id)
 );
 create synonym test_schema.ptable for test_schema.parent;
 create synonym test_schema.ctable for test_schema.child;

--- can't make a ref from local schema to the
--- remote schema's table without this,
+-- can't make a ref from local schema to the
+-- remote schema's table without this,
 -- *and* can't give yourself a grant !
--- so we give it to public.  ideas welcome.
+-- so we give it to public.  ideas welcome.
 grant references on test_schema.parent to public;
 grant references on test_schema.child to public;
 """.split(";"):
@@ -520,11 +520,11 @@ drop synonym test_schema.ptable;
     def test_create_same_names_explicit_schema(self):
         schema = testing.db.dialect.default_schema_name
         meta = MetaData(testing.db)
-        parent = Table('parent', meta,
+        parent = Table('parent', meta,
             Column('pid', Integer, primary_key=True),
             schema=schema
         )
-        child = Table('child', meta,
+        child = Table('child', meta,
             Column('cid', Integer, primary_key=True),
             Column('pid', Integer, ForeignKey('%s.parent.pid' % schema)),
             schema=schema
@@ -539,10 +539,10 @@ drop synonym test_schema.ptable;

     def test_create_same_names_implicit_schema(self):
         meta = MetaData(testing.db)
-        parent = Table('parent', meta,
+        parent = Table('parent', meta,
             Column('pid', Integer, primary_key=True),
         )
-        child = Table('child', meta,
+        child = Table('child', meta,
             Column('cid', Integer, primary_key=True),
             Column('pid', Integer, ForeignKey('parent.pid')),
         )
@@ -560,7 +560,7 @@ drop synonym test_schema.ptable;
         parent = Table('parent', meta, autoload=True, schema='test_schema')
         child = Table('child', meta, autoload=True, schema='test_schema')

-        self.assert_compile(parent.join(child),
+        self.assert_compile(parent.join(child),
             "test_schema.parent JOIN test_schema.child ON "
             "test_schema.parent.id = test_schema.child.parent_id")
         select([parent, child]).\
@@ -659,8 +659,8 @@ class DialectTypesTest(fixtures.TestBase, AssertsCompiledSQL):
     __dialect__ = oracle.OracleDialect()

     def test_no_clobs_for_string_params(self):
-        """test that simple string params get a DBAPI type of
-        VARCHAR, not CLOB. This is to prevent setinputsizes
+        """test that simple string params get a DBAPI type of
+        VARCHAR, not CLOB. This is to prevent setinputsizes
         from setting up cx_oracle.CLOBs on string-based bind params [ticket:793]."""

@@ -746,7 +746,7 @@ class TypesTest(fixtures.TestBase):
     @testing.fails_on('+zxjdbc', 'zxjdbc lacks the FIXED_CHAR dbapi type')
     def test_fixed_char(self):
         m = MetaData(testing.db)
-        t = Table('t1', m,
+        t = Table('t1', m,
             Column('id', Integer, primary_key=True),
             Column('data', CHAR(30), nullable=False)
         )
@@ -759,14 +759,14 @@ class TypesTest(fixtures.TestBase):
             dict(id=3, data="value 3")
         )

-        eq_(t.select().where(t.c.data=='value 2').execute().fetchall(),
+        eq_(t.select().where(t.c.data=='value 2').execute().fetchall(),
            [(2, 'value 2                       ')]
         )

         m2 = MetaData(testing.db)
         t2 = Table('t1', m2, autoload=True)
         assert type(t2.c.data.type) is CHAR
-        eq_(t2.select().where(t2.c.data=='value 2').execute().fetchall(),
+        eq_(t2.select().where(t2.c.data=='value 2').execute().fetchall(),
            [(2, 'value 2                       ')]
         )
@@ -833,7 +833,7 @@ class TypesTest(fixtures.TestBase):

     def test_numerics(self):
         m = MetaData(testing.db)
-        t1 = Table('t1', m,
+        t1 = Table('t1', m,
             Column('intcol', Integer),
             Column('numericcol', Numeric(precision=9, scale=2)),
             Column('floatcol1', Float()),
@@ -847,11 +847,11 @@ class TypesTest(fixtures.TestBase):
         t1.create()
         try:
             t1.insert().execute(
-                intcol=1,
-                numericcol=5.2,
-                floatcol1=6.5,
+                intcol=1,
+                numericcol=5.2,
+                floatcol1=6.5,
                 floatcol2 = 8.5,
-                doubleprec = 9.5,
+                doubleprec = 9.5,
                 numbercol1=12,
                 numbercol2=14.85,
                 numbercol3=15.76
@@ -862,7 +862,7 @@ class TypesTest(fixtures.TestBase):

         for row in (
             t1.select().execute().first(),
-            t2.select().execute().first()
+            t2.select().execute().first()
         ):
             for i, (val, type_) in enumerate((
                 (1, int),
@@ -912,13 +912,13 @@ class TypesTest(fixtures.TestBase):
         foo.create()

         foo.insert().execute(
-            {'idata':5, 'ndata':decimal.Decimal("45.6"),
-                'ndata2':decimal.Decimal("45.0"),
+            {'idata':5, 'ndata':decimal.Decimal("45.6"),
+                'ndata2':decimal.Decimal("45.0"),
                 'nidata':decimal.Decimal('53'), 'fdata':45.68392},
         )

         stmt = """
-        SELECT
+        SELECT
             idata,
             ndata,
             ndata2,
@@ -931,11 +931,11 @@ class TypesTest(fixtures.TestBase):
         row = testing.db.execute(stmt).fetchall()[0]
         eq_([type(x) for x in row], [int, decimal.Decimal, decimal.Decimal, int, float])
         eq_(
-            row,
+            row,
             (5, decimal.Decimal('45.6'), decimal.Decimal('45'), 53, 45.683920000000001)
         )

-        # with a nested subquery,
+        # with a nested subquery,
         # both Numeric values that don't have decimal places, regardless
         # of their originating type, come back as ints with no useful
         # typing information beyond "numeric".  So native handler
@@ -944,7 +944,7 @@ class TypesTest(fixtures.TestBase):
         # totally sucks.

         stmt = """
-        SELECT
+        SELECT
             (SELECT (SELECT idata FROM foo) FROM DUAL) AS idata,
             (SELECT CAST((SELECT ndata FROM foo) AS NUMERIC(20, 2)) FROM DUAL)
             AS ndata,
@@ -958,25 +958,25 @@ class TypesTest(fixtures.TestBase):
         row = testing.db.execute(stmt).fetchall()[0]
         eq_([type(x) for x in row], [int, decimal.Decimal, int, int, decimal.Decimal])
         eq_(
-            row,
+            row,
             (5, decimal.Decimal('45.6'), 45, 53, decimal.Decimal('45.68392'))
         )

-        row = testing.db.execute(text(stmt,
+        row = testing.db.execute(text(stmt,
                                 typemap={
-                                    'idata':Integer(),
-                                    'ndata':Numeric(20, 2),
-                                    'ndata2':Numeric(20, 2),
+                                    'idata':Integer(),
+                                    'ndata':Numeric(20, 2),
+                                    'ndata2':Numeric(20, 2),
                                     'nidata':Numeric(5, 0),
                                     'fdata':Float()
                                 })).fetchall()[0]
         eq_([type(x) for x in row], [int, decimal.Decimal, decimal.Decimal, decimal.Decimal, float])
-        eq_(row,
+        eq_(row,
             (5, decimal.Decimal('45.6'), decimal.Decimal('45'), decimal.Decimal('53'), 45.683920000000001)
         )

         stmt = """
-        SELECT
+        SELECT
                 anon_1.idata AS anon_1_idata,
                 anon_1.ndata AS anon_1_ndata,
                 anon_1.ndata2 AS anon_1_ndata2,
@@ -984,15 +984,15 @@ class TypesTest(fixtures.TestBase):
                 anon_1.fdata AS anon_1_fdata
         FROM (SELECT idata, ndata, ndata2, nidata, fdata
         FROM (
-            SELECT
+            SELECT
                 (SELECT (SELECT idata FROM foo) FROM DUAL) AS idata,
-                (SELECT CAST((SELECT ndata FROM foo) AS NUMERIC(20, 2))
+                (SELECT CAST((SELECT ndata FROM foo) AS NUMERIC(20, 2))
                 FROM DUAL) AS ndata,
-                (SELECT CAST((SELECT ndata2 FROM foo) AS NUMERIC(20, 2))
+                (SELECT CAST((SELECT ndata2 FROM foo) AS NUMERIC(20, 2))
                 FROM DUAL) AS ndata2,
-                (SELECT CAST((SELECT nidata FROM foo) AS NUMERIC(5, 0))
+                (SELECT CAST((SELECT nidata FROM foo) AS NUMERIC(5, 0))
                 FROM DUAL) AS nidata,
-                (SELECT CAST((SELECT fdata FROM foo) AS FLOAT) FROM DUAL)
+                (SELECT CAST((SELECT fdata FROM foo) AS FLOAT) FROM DUAL)
                 AS fdata
         FROM dual
         )
@@ -1002,29 +1002,29 @@ class TypesTest(fixtures.TestBase):
         eq_([type(x) for x in row], [int, decimal.Decimal, int, int, decimal.Decimal])
         eq_(row,
             (5, decimal.Decimal('45.6'), 45, 53, decimal.Decimal('45.68392')))

-        row = testing.db.execute(text(stmt,
+        row = testing.db.execute(text(stmt,
                                 typemap={
-                                    'anon_1_idata':Integer(),
-                                    'anon_1_ndata':Numeric(20, 2),
-                                    'anon_1_ndata2':Numeric(20, 2),
-                                    'anon_1_nidata':Numeric(5, 0),
+                                    'anon_1_idata':Integer(),
+                                    'anon_1_ndata':Numeric(20, 2),
+                                    'anon_1_ndata2':Numeric(20, 2),
+                                    'anon_1_nidata':Numeric(5, 0),
                                     'anon_1_fdata':Float()
                                 })).fetchall()[0]
         eq_([type(x) for x in row], [int, decimal.Decimal, decimal.Decimal, decimal.Decimal, float])
-        eq_(row,
+        eq_(row,
             (5, decimal.Decimal('45.6'), decimal.Decimal('45'), decimal.Decimal('53'), 45.683920000000001)
         )
-        row = testing.db.execute(text(stmt,
+        row = testing.db.execute(text(stmt,
                                 typemap={
-                                    'anon_1_idata':Integer(),
-                                    'anon_1_ndata':Numeric(20, 2, asdecimal=False),
-                                    'anon_1_ndata2':Numeric(20, 2, asdecimal=False),
-                                    'anon_1_nidata':Numeric(5, 0, asdecimal=False),
+                                    'anon_1_idata':Integer(),
+                                    'anon_1_ndata':Numeric(20, 2, asdecimal=False),
+                                    'anon_1_ndata2':Numeric(20, 2, asdecimal=False),
+                                    'anon_1_nidata':Numeric(5, 0, asdecimal=False),
                                     'anon_1_fdata':Float(asdecimal=True)
                                 })).fetchall()[0]
         eq_([type(x) for x in row], [int, float, float, float, decimal.Decimal])
-        eq_(row,
+        eq_(row,
             (5, 45.6, 45, 53, decimal.Decimal('45.68392'))
         )

@@ -1092,7 +1092,7 @@ class TypesTest(fixtures.TestBase):
         # nvarchar returns unicode natively.  cx_oracle
         # _OracleNVarChar type should be at play here.
         assert isinstance(
-            t2.c.data.type.dialect_impl(testing.db.dialect),
+            t2.c.data.type.dialect_impl(testing.db.dialect),
             cx_oracle._OracleNVarChar)

         data = u'm’a réveillé.'
@@ -1140,12 +1140,12 @@ class TypesTest(fixtures.TestBase):
     def test_lobs_without_convert(self):
         engine = testing_engine(options=dict(auto_convert_lobs=False))
         metadata = MetaData()
-        t = Table("z_test", metadata, Column('id', Integer, primary_key=True),
+        t = Table("z_test", metadata, Column('id', Integer, primary_key=True),
                  Column('data', Text), Column('bindata', LargeBinary))
         t.create(engine)
         try:
-            engine.execute(t.insert(), id=1,
-                                data='this is text',
+            engine.execute(t.insert(), id=1,
+                                data='this is text',
                                 bindata='this is binary')
             row = engine.execute(t.select()).first()
            eq_(row['data'].read(), 'this is text')
@@ -1192,17 +1192,17 @@ class DontReflectIOTTest(fixtures.TestBase):
     """test that index overflow tables aren't included in
     table_names."""

-    __only_on__ = 'oracle'
+    __only_on__ = 'oracle'

     def setup(self):
         testing.db.execute("""
         CREATE TABLE admin_docindex(
-                token char(20),
+                token char(20),
                 doc_id NUMBER,
                 token_frequency NUMBER,
                 token_offsets VARCHAR2(2000),
                 CONSTRAINT pk_admin_docindex PRIMARY KEY (token, doc_id))
-            ORGANIZATION INDEX
+            ORGANIZATION INDEX
             TABLESPACE users
             PCTTHRESHOLD 20
             OVERFLOW TABLESPACE users
@@ -1226,13 +1226,13 @@ class BufferedColumnTest(fixtures.TestBase, AssertsCompiledSQL):
     def setup_class(cls):
         global binary_table, stream, meta
         meta = MetaData(testing.db)
-        binary_table = Table('binary_table', meta,
+        binary_table = Table('binary_table', meta,
            Column('id', Integer, primary_key=True),
            Column('data', LargeBinary)
         )
         meta.create_all()
         stream = os.path.join(
-                        os.path.dirname(__file__), "..",
+                        os.path.dirname(__file__), "..",
                         'binary_data_one.dat')
         stream = file(stream).read(12000)

@@ -1259,7 +1259,7 @@ class UnsupportedIndexReflectTest(fixtures.TestBase):
     def setup(self):
         global metadata
         metadata = MetaData(testing.db)
-        t1 = Table('test_index_reflect', metadata,
+        t1 = Table('test_index_reflect', metadata,
                     Column('data', String(20), primary_key=True)
                 )
         metadata.create_all()
@@ -1290,7 +1290,7 @@ class RoundTripIndexTest(fixtures.TestBase):
         )

         # "group" is a keyword, so lower case
-        normalind = Index('tableind', table.c.id_b, table.c.group)
+        normalind = Index('tableind', table.c.id_b, table.c.group)

         # create
         metadata.create_all()
diff --git a/test/dialect/test_postgresql.py b/test/dialect/test_postgresql.py
index 769f18ce9a..acfcedc90b 100644
--- a/test/dialect/test_postgresql.py
+++ b/test/dialect/test_postgresql.py
@@ -155,7 +155,7 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
     def test_create_index_with_ops(self):
         m = MetaData()
         tbl = Table('testtbl', m,
-                    Column('data', String),
+                    Column('data', String),
                     Column('data2', Integer, key='d2'))

         idx = Index('test_idx1', tbl.c.data,
@@ -269,7 +269,7 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
                 Column("variadic", Integer))
         x = select([table.c.col1, table.c.variadic])

-        self.assert_compile(x,
+        self.assert_compile(x,
             '''SELECT pg_table.col1, pg_table."variadic" FROM pg_table''')

@@ -332,7 +332,7 @@ class FloatCoercionTest(fixtures.TablesTest, AssertsExecutionResults):
     @testing.provide_metadata
     def test_arrays(self):
         metadata = self.metadata
-        t1 = Table('t', metadata,
+        t1 = Table('t', metadata,
             Column('x', postgresql.ARRAY(Float)),
             Column('y', postgresql.ARRAY(REAL)),
             Column('z', postgresql.ARRAY(postgresql.DOUBLE_PRECISION)),
@@ -342,7 +342,7 @@ class FloatCoercionTest(fixtures.TablesTest, AssertsExecutionResults):
         t1.insert().execute(x=[5], y=[5], z=[6], q=[decimal.Decimal("6.4")])
         row = t1.select().execute().first()
         eq_(
-            row,
+            row,
             ([5], [5], [6], [decimal.Decimal("6.4")])
         )

@@ -415,7 +415,7 @@ class EnumTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL):
         metadata = MetaData(testing.db)
         t1 = Table('table', metadata,
             Column('id', Integer, primary_key=True),
-            Column('value',
+            Column('value',
                     Enum(u'réveillé', u'drôle', u'S’il',
                             name='onetwothreetype'))
         )
@@ -425,7 +425,7 @@ class EnumTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL):
             t1.insert().execute(value=u'drôle')
             t1.insert().execute(value=u'réveillé')
             t1.insert().execute(value=u'S’il')
-            eq_(t1.select().order_by(t1.c.id).execute().fetchall(),
+            eq_(t1.select().order_by(t1.c.id).execute().fetchall(),
                 [(1, u'drôle'), (2, u'réveillé'), (3, u'S’il')]
             )
             m2 = MetaData(testing.db)
@@ -454,11 +454,11 @@ class EnumTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL):
     def test_disable_create(self):
         metadata = self.metadata

-        e1 = postgresql.ENUM('one', 'two', 'three',
+        e1 = postgresql.ENUM('one', 'two', 'three',
                             name="myenum",
                             create_type=False)

-        t1 = Table('e1', metadata,
+        t1 = Table('e1', metadata,
             Column('c1', e1)
         )
         # table can be created separately
@@ -472,14 +472,14 @@ class EnumTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL):
     def test_generate_multiple(self):
         """Test that the same enum twice only generates once
         for the create_all() call, without using checkfirst.
-
+
         A 'memo' collection held by the DDL runner
         now handles this.
-
+
         """
         metadata = self.metadata

-        e1 = Enum('one', 'two', 'three',
+        e1 = Enum('one', 'two', 'three',
                     name="myenum")
         t1 = Table('e1', metadata,
             Column('c1', e1)
@@ -616,9 +616,9 @@ class NumericInterpretationTest(fixtures.TestBase):
     @testing.provide_metadata
     def test_numeric_default(self):
         metadata = self.metadata
-        # pg8000 appears to fail when the value is 0,
+        # pg8000 appears to fail when the value is 0,
         # returns an int instead of decimal.
- t =Table('t', metadata, + t =Table('t', metadata, Column('id', Integer, primary_key=True), Column('nd', Numeric(asdecimal=True), default=1), Column('nf', Numeric(asdecimal=False), default=1), @@ -1195,8 +1195,8 @@ class DistinctOnTest(fixtures.TestBase, AssertsCompiledSQL): __dialect__ = postgresql.dialect() def setup(self): - self.table = Table('t', MetaData(), - Column('id',Integer, primary_key=True), + self.table = Table('t', MetaData(), + Column('id',Integer, primary_key=True), Column('a', String), Column('b', String), ) @@ -1228,7 +1228,7 @@ class DistinctOnTest(fixtures.TestBase, AssertsCompiledSQL): def test_on_columns_inline_list(self): self.assert_compile( - select([self.table], + select([self.table], distinct=[self.table.c.a, self.table.c.b]). order_by(self.table.c.a, self.table.c.b), "SELECT DISTINCT ON (t.a, t.b) t.id, " @@ -1488,28 +1488,28 @@ class ReflectionTest(fixtures.TestBase): m1 = MetaData() - t2_schema = Table('some_other_table', - m1, - schema="test_schema_2", - autoload=True, + t2_schema = Table('some_other_table', + m1, + schema="test_schema_2", + autoload=True, autoload_with=conn) - t1_schema = Table('some_table', - m1, - schema="test_schema", + t1_schema = Table('some_table', + m1, + schema="test_schema", autoload=True, autoload_with=conn) - t2_no_schema = Table('some_other_table', - m1, - autoload=True, + t2_no_schema = Table('some_other_table', + m1, + autoload=True, autoload_with=conn) - t1_no_schema = Table('some_table', - m1, - autoload=True, + t1_no_schema = Table('some_table', + m1, + autoload=True, autoload_with=conn) - # OK, this because, "test_schema" is + # OK, this because, "test_schema" is # in the search path, and might as well be # the default too. why would we assign # a "schema" to the Table ? @@ -1574,7 +1574,7 @@ class ReflectionTest(fixtures.TestBase): @testing.provide_metadata def test_index_reflection_modified(self): - """reflect indexes when a column name has changed - PG 9 + """reflect indexes when a column name has changed - PG 9 does not update the name of the column in the index def. 
[ticket:2141] @@ -1671,7 +1671,7 @@ class MiscTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL): current_encoding = c.connection.connection.encoding c.close() - # attempt to use an encoding that's not + # attempt to use an encoding that's not # already set if current_encoding == 'UTF8': test_encoding = 'LATIN1' @@ -2003,10 +2003,10 @@ class ArrayTest(fixtures.TestBase, AssertsExecutionResults): class Foo(object): pass - footable = Table('foo', metadata, - Column('id', Integer,primary_key=True), - Column('intarr', - postgresql.ARRAY(Integer, mutable=True), + footable = Table('foo', metadata, + Column('id', Integer,primary_key=True), + Column('intarr', + postgresql.ARRAY(Integer, mutable=True), nullable=True)) mapper(Foo, footable) metadata.create_all() @@ -2047,9 +2047,9 @@ class ArrayTest(fixtures.TestBase, AssertsExecutionResults): def test_tuple_flag(self): metadata = self.metadata assert_raises_message( - exc.ArgumentError, + exc.ArgumentError, "mutable must be set to False if as_tuple is True.", - postgresql.ARRAY, Integer, mutable=True, + postgresql.ARRAY, Integer, mutable=True, as_tuple=True) t1 = Table('t1', metadata, @@ -2064,10 +2064,10 @@ class ArrayTest(fixtures.TestBase, AssertsExecutionResults): r = testing.db.execute(t1.select().order_by(t1.c.id)).fetchall() eq_( - r, + r, [ - (1, ('1', '2', '3'), (5.4, 5.6)), - (2, ('4', '5', '6'), (1.0,)), + (1, ('1', '2', '3'), (5.4, 5.6)), + (2, ('4', '5', '6'), (1.0,)), (3, (('4', '5'), ('6', '7')), ((5.4, 5.6), (1.0, 1.1))) ] ) @@ -2319,7 +2319,7 @@ class UUIDTest(fixtures.TestBase): def test_uuid_string(self): import uuid self._test_round_trip( - Table('utable', MetaData(), + Table('utable', MetaData(), Column('data', postgresql.UUID()) ), str(uuid.uuid4()), @@ -2333,7 +2333,7 @@ class UUIDTest(fixtures.TestBase): def test_uuid_uuid(self): import uuid self._test_round_trip( - Table('utable', MetaData(), + Table('utable', MetaData(), Column('data', postgresql.UUID(as_uuid=True)) ), uuid.uuid4(), @@ -2392,16 +2392,16 @@ class MatchTest(fixtures.TestBase, AssertsCompiledSQL): {'id': 2, 'description': 'Ruby'}]) matchtable.insert().execute([{'id': 1, 'title' : 'Agile Web Development with Rails' - , 'category_id': 2}, + , 'category_id': 2}, {'id': 2, 'title': 'Dive Into Python', - 'category_id': 1}, + 'category_id': 1}, {'id': 3, 'title' : "Programming Matz's Ruby", - 'category_id': 2}, + 'category_id': 2}, {'id': 4, 'title' : 'The Definitive Guide to Django', - 'category_id': 1}, + 'category_id': 1}, {'id': 5, 'title' : 'Python in a Nutshell', 'category_id': 1}]) @@ -2494,12 +2494,12 @@ class TupleTest(fixtures.TestBase): testing.db.execute( select([ tuple_( - literal_column("'a'"), + literal_column("'a'"), literal_column("'b'") ).\ in_([ tuple_(*[ - literal_column("'%s'" % letter) + literal_column("'%s'" % letter) for letter in elem ]) for elem in test ]) diff --git a/test/dialect/test_pyodbc.py b/test/dialect/test_pyodbc.py index c2aec726bc..52d6bc7c45 100644 --- a/test/dialect/test_pyodbc.py +++ b/test/dialect/test_pyodbc.py @@ -12,6 +12,6 @@ class PyODBCTest(fixtures.TestBase): ("crap.crap.crap", ()), ]: eq_( - connector._parse_dbapi_version(vers), + connector._parse_dbapi_version(vers), expected ) \ No newline at end of file diff --git a/test/dialect/test_sqlite.py b/test/dialect/test_sqlite.py index 4fe67fd2ea..b76ac3ed85 100644 --- a/test/dialect/test_sqlite.py +++ b/test/dialect/test_sqlite.py @@ -84,11 +84,11 @@ class TestTypes(fixtures.TestBase, AssertsExecutionResults): t.create(engine) try: 
engine.execute(t.insert(), {'d1': datetime.date(2010, 5, - 10), + 10), 'd2': datetime.datetime( 2010, 5, 10, 12, 15, 25, )}) row = engine.execute(t.select()).first() - eq_(row, (1, datetime.date(2010, 5, 10), + eq_(row, (1, datetime.date(2010, 5, 10), datetime.datetime( 2010, 5, 10, 12, 15, 25, ))) r = engine.execute(func.current_date()).scalar() assert isinstance(r, basestring) @@ -251,7 +251,7 @@ class DefaultsTest(fixtures.TestBase, AssertsCompiledSQL): m2 = MetaData(db) t2 = Table('r_defaults', m2, autoload=True) self.assert_compile( - CreateTable(t2), + CreateTable(t2), "CREATE TABLE r_defaults (data VARCHAR(40) " "DEFAULT 'my_default', val INTEGER DEFAULT 0 " "NOT NULL)" @@ -261,7 +261,7 @@ class DefaultsTest(fixtures.TestBase, AssertsCompiledSQL): @testing.provide_metadata def test_boolean_default(self): - t= Table("t", self.metadata, + t= Table("t", self.metadata, Column("x", Boolean, server_default=sql.false())) t.create(testing.db) testing.db.execute(t.insert()) @@ -314,7 +314,7 @@ class DialectTest(fixtures.TestBase, AssertsExecutionResults): CREATE TABLE "django_admin_log" ( "id" integer NOT NULL PRIMARY KEY, "action_time" datetime NOT NULL, - "content_type_id" integer NULL + "content_type_id" integer NULL REFERENCES "django_content_type" ("id"), "object_id" text NULL, "change_message" text NOT NULL @@ -457,26 +457,26 @@ class SQLTest(fixtures.TestBase, AssertsCompiledSQL): sql.false(), "0" ) self.assert_compile( - sql.true(), + sql.true(), "1" ) def test_constraints_with_schemas(self): metadata = MetaData() - t1 = Table('t1', metadata, + t1 = Table('t1', metadata, Column('id', Integer, primary_key=True), schema='master') - t2 = Table('t2', metadata, + t2 = Table('t2', metadata, Column('id', Integer, primary_key=True), Column('t1_id', Integer, ForeignKey('master.t1.id')), schema='master' ) - t3 = Table('t3', metadata, + t3 = Table('t3', metadata, Column('id', Integer, primary_key=True), Column('t1_id', Integer, ForeignKey('master.t1.id')), schema='alternate' ) - t4 = Table('t4', metadata, + t4 = Table('t4', metadata, Column('id', Integer, primary_key=True), Column('t1_id', Integer, ForeignKey('master.t1.id')), ) @@ -607,17 +607,17 @@ class MatchTest(fixtures.TestBase, AssertsCompiledSQL): metadata = MetaData(testing.db) testing.db.execute(""" CREATE VIRTUAL TABLE cattable using FTS3 ( - id INTEGER NOT NULL, - description VARCHAR(50), + id INTEGER NOT NULL, + description VARCHAR(50), PRIMARY KEY (id) ) """) cattable = Table('cattable', metadata, autoload=True) testing.db.execute(""" CREATE VIRTUAL TABLE matchtable using FTS3 ( - id INTEGER NOT NULL, + id INTEGER NOT NULL, title VARCHAR(200), - category_id INTEGER NOT NULL, + category_id INTEGER NOT NULL, PRIMARY KEY (id) ) """) @@ -782,7 +782,7 @@ class ReflectFKConstraintTest(fixtures.TestBase): def test_name_not_none(self): # we don't have names for PK constraints, - # it appears we get back None in the pragma for + # it appears we get back None in the pragma for # FKs also (also it doesn't even appear to be documented on sqlite's docs # at http://www.sqlite.org/pragma.html#pragma_foreign_key_list # how did we ever know that's the "name" field ??) 
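The ReflectFKConstraintTest comment just above points at SQLite's PRAGMA foreign_key_list, whose rows carry no constraint-name column at all. A minimal stdlib-only sketch of that pragma's shape, independent of SQLAlchemy — the tables a and b and the constraint name fk_a are invented here purely for illustration:

    import sqlite3

    conn = sqlite3.connect(":memory:")
    conn.executescript("""
        CREATE TABLE a (id INTEGER PRIMARY KEY);
        CREATE TABLE b (
            id INTEGER PRIMARY KEY,
            a_id INTEGER CONSTRAINT fk_a REFERENCES a (id)
        );
    """)
    # Each row comes back as (id, seq, table, from, to, on_update,
    # on_delete, match); none of those fields is the constraint's name,
    # so the "fk_a" declared above is simply not recoverable from the
    # pragma and reflection has nothing better than None to report.
    for row in conn.execute("PRAGMA foreign_key_list(b)"):
        print(row)

Running this prints a single tuple such as (0, 0, 'a', 'a_id', 'id', 'NO ACTION', 'NO ACTION', 'NONE'), which is the raw material the SQLite dialect has to work with when reflecting foreign keys.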
diff --git a/test/engine/test_ddlevents.py b/test/engine/test_ddlevents.py index c1616fcfbd..f910dd5eaa 100644 --- a/test/engine/test_ddlevents.py +++ b/test/engine/test_ddlevents.py @@ -264,16 +264,16 @@ class DDLExecutionTest(fixtures.TestBase): def test_deprecated_append_ddl_listener_table(self): metadata, users, engine = self.metadata, self.users, self.engine canary = [] - users.append_ddl_listener('before-create', + users.append_ddl_listener('before-create', lambda e, t, b:canary.append('mxyzptlk') ) - users.append_ddl_listener('after-create', + users.append_ddl_listener('after-create', lambda e, t, b:canary.append('klptzyxm') ) - users.append_ddl_listener('before-drop', + users.append_ddl_listener('before-drop', lambda e, t, b:canary.append('xyzzy') ) - users.append_ddl_listener('after-drop', + users.append_ddl_listener('after-drop', lambda e, t, b:canary.append('fnord') ) @@ -293,16 +293,16 @@ class DDLExecutionTest(fixtures.TestBase): def test_deprecated_append_ddl_listener_metadata(self): metadata, users, engine = self.metadata, self.users, self.engine canary = [] - metadata.append_ddl_listener('before-create', + metadata.append_ddl_listener('before-create', lambda e, t, b, tables=None:canary.append('mxyzptlk') ) - metadata.append_ddl_listener('after-create', + metadata.append_ddl_listener('after-create', lambda e, t, b, tables=None:canary.append('klptzyxm') ) - metadata.append_ddl_listener('before-drop', + metadata.append_ddl_listener('before-drop', lambda e, t, b, tables=None:canary.append('xyzzy') ) - metadata.append_ddl_listener('after-drop', + metadata.append_ddl_listener('after-drop', lambda e, t, b, tables=None:canary.append('fnord') ) @@ -541,7 +541,7 @@ class DDLTest(fixtures.TestBase, AssertsCompiledSQL): assert DDL('').execute_if(callable_=lambda d, y,z, **kw: True).\ _should_execute(tbl, cx) assert(DDL('').execute_if( - callable_=lambda d, y,z, **kw: z.engine.name + callable_=lambda d, y,z, **kw: z.engine.name != 'bogus'). _should_execute(tbl, cx)) diff --git a/test/engine/test_execute.py b/test/engine/test_execute.py index 7ccd42b73f..39d2b9a634 100644 --- a/test/engine/test_execute.py +++ b/test/engine/test_execute.py @@ -41,7 +41,7 @@ class ExecuteTest(fixtures.TestBase): def teardown_class(cls): metadata.drop_all() - @testing.fails_on("postgresql+pg8000", + @testing.fails_on("postgresql+pg8000", "pg8000 still doesn't allow single % without params") def test_no_params_option(self): stmt = "SELECT '%'" @@ -85,7 +85,7 @@ class ExecuteTest(fixtures.TestBase): ]: res = conn.execute( "select * from users where user_name=? or " - "user_name=? order by user_id", + "user_name=? 
order by user_id", *multiparam, **param) assert res.fetchall() == [ (1, 'jack'), @@ -126,7 +126,7 @@ class ExecuteTest(fixtures.TestBase): ]: res = conn.execute( "select * from users where user_name=%s or " - "user_name=%s order by user_id", + "user_name=%s order by user_id", *multiparam, **param) assert res.fetchall() == [ (1, 'jack'), @@ -152,7 +152,7 @@ class ExecuteTest(fixtures.TestBase): @testing.skip_if(lambda : testing.against('mysql+mysqldb'), 'db-api flaky') @testing.fails_on_everything_except('postgresql+psycopg2', - 'postgresql+pypostgresql', 'mysql+mysqlconnector', + 'postgresql+pypostgresql', 'mysql+mysqlconnector', 'mysql+pymysql') def test_raw_python(self): def go(conn): @@ -238,11 +238,11 @@ class ExecuteTest(fixtures.TestBase): def test_stmt_exception_pickleable_no_dbapi(self): self._test_stmt_exception_pickleable(Exception("hello world")) - @testing.fails_on("postgresql+psycopg2", + @testing.fails_on("postgresql+psycopg2", "Packages the cursor in the exception") - @testing.fails_on("mysql+oursql", + @testing.fails_on("mysql+oursql", "Exception doesn't come back exactly the same from pickle") - @testing.fails_on("oracle+cx_oracle", + @testing.fails_on("oracle+cx_oracle", "cx_oracle exception seems to be having " "some issue with pickling") def test_stmt_exception_pickleable_plus_dbapi(self): @@ -261,12 +261,12 @@ class ExecuteTest(fixtures.TestBase): def _test_stmt_exception_pickleable(self, orig): for sa_exc in ( - tsa.exc.StatementError("some error", - "select * from table", - {"foo":"bar"}, + tsa.exc.StatementError("some error", + "select * from table", + {"foo":"bar"}, orig), - tsa.exc.InterfaceError("select * from table", - {"foo":"bar"}, + tsa.exc.InterfaceError("select * from table", + {"foo":"bar"}, orig), tsa.exc.NoReferencedTableError("message", "tname"), tsa.exc.NoReferencedColumnError("message", "tname", "cname"), @@ -279,7 +279,7 @@ class ExecuteTest(fixtures.TestBase): eq_(repickled.params, {"foo":"bar"}) eq_(repickled.statement, sa_exc.statement) if hasattr(sa_exc, "connection_invalidated"): - eq_(repickled.connection_invalidated, + eq_(repickled.connection_invalidated, sa_exc.connection_invalidated) eq_(repickled.orig.args[0], orig.args[0]) @@ -403,7 +403,7 @@ class ConvenienceExecuteTest(fixtures.TablesTest): engine._connection_cls = MockConnection fn = self._trans_fn() assert_raises( - Exception, + Exception, engine.begin ) assert MockConnection.closed @@ -412,7 +412,7 @@ class ConvenienceExecuteTest(fixtures.TablesTest): fn = self._trans_rollback_fn() ctx = testing.db.begin() assert_raises_message( - Exception, + Exception, "breakage", testing.run_as_contextmanager, ctx, fn, 5, value=8 ) @@ -421,7 +421,7 @@ class ConvenienceExecuteTest(fixtures.TablesTest): def test_transaction_tlocal_engine_ctx_commit(self): fn = self._trans_fn() engine = engines.testing_engine(options=dict( - strategy='threadlocal', + strategy='threadlocal', pool=testing.db.pool)) ctx = engine.begin() testing.run_as_contextmanager(ctx, fn, 5, value=8) @@ -430,11 +430,11 @@ class ConvenienceExecuteTest(fixtures.TablesTest): def test_transaction_tlocal_engine_ctx_rollback(self): fn = self._trans_rollback_fn() engine = engines.testing_engine(options=dict( - strategy='threadlocal', + strategy='threadlocal', pool=testing.db.pool)) ctx = engine.begin() assert_raises_message( - Exception, + Exception, "breakage", testing.run_as_contextmanager, ctx, fn, 5, value=8 ) @@ -452,7 +452,7 @@ class ConvenienceExecuteTest(fixtures.TablesTest): conn = testing.db.connect() ctx = conn.begin() 
assert_raises_message( - Exception, + Exception, "breakage", testing.run_as_contextmanager, ctx, fn, 5, value=8 ) @@ -482,7 +482,7 @@ class ConvenienceExecuteTest(fixtures.TablesTest): def test_transaction_engine_fn_rollback(self): fn = self._trans_rollback_fn() assert_raises_message( - Exception, + Exception, "breakage", testing.db.transaction, fn, 5, value=8 ) @@ -498,7 +498,7 @@ class ConvenienceExecuteTest(fixtures.TablesTest): fn = self._trans_rollback_fn() conn = testing.db.connect() assert_raises( - Exception, + Exception, conn.transaction, fn, 5, value=8 ) self._assert_no_data() @@ -559,7 +559,7 @@ class LogParamsTest(fixtures.TestBase): def test_log_large_dict(self): self.eng.execute( - "INSERT INTO foo (data) values (:data)", + "INSERT INTO foo (data) values (:data)", [{"data":str(i)} for i in xrange(100)] ) eq_( @@ -572,7 +572,7 @@ class LogParamsTest(fixtures.TestBase): def test_log_large_list(self): self.eng.execute( - "INSERT INTO foo (data) values (?)", + "INSERT INTO foo (data) values (?)", [(str(i), ) for i in xrange(100)] ) eq_( @@ -591,7 +591,7 @@ class LogParamsTest(fixtures.TestBase): "{'data': '6'}, {'data': '7'} ... displaying 10 of " "100 total bound parameter sets ... {'data': '98'}, {'data': '99'}\]", lambda: self.eng.execute( - "INSERT INTO nonexistent (data) values (:data)", + "INSERT INTO nonexistent (data) values (:data)", [{"data":str(i)} for i in xrange(100)] ) ) @@ -605,7 +605,7 @@ class LogParamsTest(fixtures.TestBase): "10 of 100 total bound parameter sets ... " "\('98',\), \('99',\)\]", lambda: self.eng.execute( - "INSERT INTO nonexistent (data) values (?)", + "INSERT INTO nonexistent (data) values (?)", [(str(i), ) for i in xrange(100)] ) ) @@ -619,7 +619,7 @@ class LoggingNameTest(fixtures.TestBase): for name in [b.name for b in self.buf.buffer]: assert name in ( 'sqlalchemy.engine.base.Engine.%s' % eng_name, - 'sqlalchemy.pool.%s.%s' % + 'sqlalchemy.pool.%s.%s' % (eng.pool.__class__.__name__, pool_name) ) @@ -801,7 +801,7 @@ class MockStrategyTest(fixtures.TestBase): class ResultProxyTest(fixtures.TestBase): def test_nontuple_row(self): - """ensure the C version of BaseRowProxy handles + """ensure the C version of BaseRowProxy handles duck-type-dependent rows.""" from sqlalchemy.engine import RowProxy @@ -847,9 +847,9 @@ class ResultProxyTest(fixtures.TestBase): assert False execution_ctx_cls = engine.dialect.execution_ctx_cls - engine.dialect.execution_ctx_cls = type("FakeCtx", - (BreakRowcountMixin, - execution_ctx_cls), + engine.dialect.execution_ctx_cls = type("FakeCtx", + (BreakRowcountMixin, + execution_ctx_cls), {}) try: @@ -904,7 +904,7 @@ class AlternateResultProxyTest(fixtures.TestBase): from sqlalchemy.engine import base, default cls.engine = engine = testing_engine('sqlite://') m = MetaData() - cls.table = t = Table('test', m, + cls.table = t = Table('test', m, Column('x', Integer, primary_key=True), Column('y', String(50, convert_unicode='force')) ) @@ -958,7 +958,7 @@ class AlternateResultProxyTest(fixtures.TestBase): self._test_proxy(base.BufferedColumnResultProxy) class EngineEventsTest(fixtures.TestBase): - __requires__ = 'ad_hoc_engines', + __requires__ = 'ad_hoc_engines', def tearDown(self): Engine.dispatch._clear() @@ -1061,13 +1061,13 @@ class EngineEventsTest(fixtures.TestBase): params ): stmts.append((str(clauseelement), params, multiparams)) - def cursor_execute(conn, cursor, statement, parameters, + def cursor_execute(conn, cursor, statement, parameters, context, executemany): cursor_stmts.append((str(statement), parameters, 
None)) for engine in [ - engines.testing_engine(options=dict(implicit_returning=False)), + engines.testing_engine(options=dict(implicit_returning=False)), engines.testing_engine(options=dict(implicit_returning=False, strategy='threadlocal')) ]: @@ -1075,8 +1075,8 @@ class EngineEventsTest(fixtures.TestBase): event.listen(engine, 'before_cursor_execute', cursor_execute) m = MetaData(engine) - t1 = Table('t1', m, - Column('c1', Integer, primary_key=True), + t1 = Table('t1', m, + Column('c1', Integer, primary_key=True), Column('c2', String(50), default=func.lower('Foo'), primary_key=True) ) @@ -1152,7 +1152,7 @@ class EngineEventsTest(fixtures.TestBase): canary.append('execute') return clauseelement, multiparams, params - def cursor_execute(conn, cursor, statement, + def cursor_execute(conn, cursor, statement, parameters, context, executemany): canary.append('cursor_execute') return statement, parameters @@ -1208,9 +1208,9 @@ class EngineEventsTest(fixtures.TestBase): return go engine = engines.testing_engine() - for name in ['begin', 'savepoint', + for name in ['begin', 'savepoint', 'rollback_savepoint', 'release_savepoint', - 'rollback', 'begin_twophase', + 'rollback', 'begin_twophase', 'prepare_twophase', 'commit_twophase']: event.listen(engine, '%s' % name, tracker(name)) @@ -1230,9 +1230,9 @@ class EngineEventsTest(fixtures.TestBase): trans.prepare() trans.commit() - eq_(canary, ['begin', 'savepoint', + eq_(canary, ['begin', 'savepoint', 'rollback_savepoint', 'savepoint', 'release_savepoint', - 'rollback', 'begin_twophase', + 'rollback', 'begin_twophase', 'prepare_twophase', 'commit_twophase'] ) @@ -1242,7 +1242,7 @@ class ProxyConnectionTest(fixtures.TestBase): the deprecated ConnectionProxy interface. """ - __requires__ = 'ad_hoc_engines', + __requires__ = 'ad_hoc_engines', @testing.uses_deprecated(r'.*Use event.listen') @testing.fails_on('firebird', 'Data type unknown') @@ -1295,8 +1295,8 @@ class ProxyConnectionTest(fixtures.TestBase): proxy=MyProxy(), strategy='threadlocal')): m = MetaData(engine) - t1 = Table('t1', m, - Column('c1', Integer, primary_key=True), + t1 = Table('t1', m, + Column('c1', Integer, primary_key=True), Column('c2', String(50), default=func.lower('Foo'), primary_key=True) ) @@ -1418,9 +1418,9 @@ class ProxyConnectionTest(fixtures.TestBase): trans.commit() canary = [t for t in canary if t not in ('cursor_execute', 'execute')] - eq_(canary, ['begin', 'savepoint', + eq_(canary, ['begin', 'savepoint', 'rollback_savepoint', 'savepoint', 'release_savepoint', - 'rollback', 'begin_twophase', + 'rollback', 'begin_twophase', 'prepare_twophase', 'commit_twophase'] ) diff --git a/test/engine/test_parseconnect.py b/test/engine/test_parseconnect.py index 0326395578..5e6350d3ce 100644 --- a/test/engine/test_parseconnect.py +++ b/test/engine/test_parseconnect.py @@ -139,7 +139,7 @@ pool_timeout=10 assert e.echo is True for param, values in [ - ('convert_unicode', ('true', 'false', 'force')), + ('convert_unicode', ('true', 'false', 'force')), ('echo', ('true', 'false', 'debug')), ('echo_pool', ('true', 'false', 'debug')), ('use_native_unicode', ('true', 'false')), @@ -190,7 +190,7 @@ pool_timeout=10 assert e.pool._reset_on_return is expected assert_raises( - exc.ArgumentError, + exc.ArgumentError, create_engine, "postgresql://", pool_reset_on_return='hi', module=dbapi, _initialize=False @@ -249,7 +249,7 @@ pool_timeout=10 every backend. """ - # pretend pysqlite throws the + # pretend pysqlite throws the # "Cannot operate on a closed database." error # on connect. 
IRL we'd be getting Oracle's "shutdown in progress" diff --git a/test/engine/test_reconnect.py b/test/engine/test_reconnect.py index b545aca523..5496cb6d61 100644 --- a/test/engine/test_reconnect.py +++ b/test/engine/test_reconnect.py @@ -58,7 +58,7 @@ class MockReconnectTest(fixtures.TestBase): # note - using straight create_engine here # since we are testing gc db = create_engine( - 'postgresql://foo:bar@localhost/test', + 'postgresql://foo:bar@localhost/test', module=dbapi, _initialize=False) # monkeypatch disconnect checker @@ -205,7 +205,7 @@ class CursorErrTest(fixtures.TestBase): dbapi = MDBAPI() db = testing_engine( - 'postgresql://foo:bar@localhost/test', + 'postgresql://foo:bar@localhost/test', options=dict(module=dbapi, _initialize=False)) def test_cursor_explode(self): @@ -424,7 +424,7 @@ class RecycleTest(fixtures.TestBase): # set the pool recycle down to 1. # we aren't doing this inline with the - # engine create since cx_oracle takes way + # engine create since cx_oracle takes way # too long to create the 1st connection and don't # want to build a huge delay into this test. diff --git a/test/engine/test_reflection.py b/test/engine/test_reflection.py index f385a0fa23..f2fe9e3497 100644 --- a/test/engine/test_reflection.py +++ b/test/engine/test_reflection.py @@ -137,11 +137,11 @@ class ReflectionTest(fixtures.TestBase, ComparesTables): t2 = Table('t', m2, old_z, old_q) eq_(t2.primary_key.columns, (t2.c.z, )) t2 = Table('t', m2, old_y, - extend_existing=True, - autoload=True, + extend_existing=True, + autoload=True, autoload_with=testing.db) eq_( - set(t2.columns.keys()), + set(t2.columns.keys()), set(['x', 'y', 'z', 'q', 'id']) ) eq_(t2.primary_key.columns, (t2.c.id, )) @@ -152,11 +152,11 @@ class ReflectionTest(fixtures.TestBase, ComparesTables): m3 = MetaData() t3 = Table('t', m3, Column('z', Integer)) - t3 = Table('t', m3, extend_existing=False, - autoload=True, + t3 = Table('t', m3, extend_existing=False, + autoload=True, autoload_with=testing.db) eq_( - set(t3.columns.keys()), + set(t3.columns.keys()), set(['z']) ) @@ -167,12 +167,12 @@ class ReflectionTest(fixtures.TestBase, ComparesTables): t4 = Table('t', m4, old_z, old_q) eq_(t4.primary_key.columns, (t4.c.z, )) t4 = Table('t', m4, old_y, - extend_existing=True, - autoload=True, + extend_existing=True, + autoload=True, autoload_replace=False, autoload_with=testing.db) eq_( - set(t4.columns.keys()), + set(t4.columns.keys()), set(['x', 'y', 'z', 'q', 'id']) ) eq_(t4.primary_key.columns, (t4.c.id, )) @@ -212,8 +212,8 @@ class ReflectionTest(fixtures.TestBase, ComparesTables): m2 = MetaData() b2 = Table('b', m2, Column('a_id', Integer, sa.ForeignKey('a.id'))) a2 = Table('a', m2, autoload=True, autoload_with=testing.db) - b2 = Table('b', m2, extend_existing=True, autoload=True, - autoload_with=testing.db, + b2 = Table('b', m2, extend_existing=True, autoload=True, + autoload_with=testing.db, autoload_replace=False) assert b2.c.id is not None @@ -387,7 +387,7 @@ class ReflectionTest(fixtures.TestBase, ComparesTables): meta4 = MetaData(testing.db) - u4 = Table('users', meta4, + u4 = Table('users', meta4, Column('id', sa.Integer, key='u_id', primary_key=True), autoload=True) @@ -449,7 +449,7 @@ class ReflectionTest(fixtures.TestBase, ComparesTables): @testing.provide_metadata def test_override_keys(self): - """test that columns can be overridden with a 'key', + """test that columns can be overridden with a 'key', and that ForeignKey targeting during reflection still works.""" meta = self.metadata @@ -464,7 +464,7 @@ class 
ReflectionTest(fixtures.TestBase, ComparesTables): ) meta.create_all() m2 = MetaData(testing.db) - a2 = Table('a', m2, + a2 = Table('a', m2, Column('x', sa.Integer, primary_key=True, key='x1'), autoload=True) b2 = Table('b', m2, autoload=True) @@ -515,7 +515,7 @@ class ReflectionTest(fixtures.TestBase, ComparesTables): meta.create_all() meta2 = MetaData(testing.db) - a2 = Table('addresses', meta2, + a2 = Table('addresses', meta2, Column('user_id',sa.Integer, sa.ForeignKey('users.id')), autoload=True) u2 = Table('users', meta2, autoload=True) @@ -654,7 +654,7 @@ class ReflectionTest(fixtures.TestBase, ComparesTables): @testing.crashes('oracle', 'FIXME: unknown, confirm not fails_on') - @testing.fails_on('+informixdb', + @testing.fails_on('+informixdb', "FIXME: should be supported via the " "DELIMITED env var but that breaks " "everything else for now") @@ -680,15 +680,15 @@ class ReflectionTest(fixtures.TestBase, ComparesTables): check_col = 'true' quoter = meta.bind.dialect.identifier_preparer.quote_identifier - table_b = Table('false', meta, - Column('create', sa.Integer, primary_key=True), + table_b = Table('false', meta, + Column('create', sa.Integer, primary_key=True), Column('true', sa.Integer,sa.ForeignKey('select.not')), sa.CheckConstraint('%s <> 1' % quoter(check_col), name='limit') ) - table_c = Table('is', meta, - Column('or', sa.Integer, nullable=False, primary_key=True), + table_c = Table('is', meta, + Column('or', sa.Integer, nullable=False, primary_key=True), Column('join', sa.Integer, nullable=False, primary_key=True), sa.PrimaryKeyConstraint('or', 'join', name='to') ) @@ -838,15 +838,15 @@ class ReflectionTest(fixtures.TestBase, ComparesTables): m2.reflect(views=False) eq_( - set(m2.tables), + set(m2.tables), set(['users', 'email_addresses', 'dingalings']) ) m2 = MetaData(testing.db) m2.reflect(views=True) eq_( - set(m2.tables), - set(['email_addresses_v', 'users_v', + set(m2.tables), + set(['email_addresses_v', 'users_v', 'users', 'dingalings', 'email_addresses']) ) finally: @@ -858,16 +858,16 @@ class CreateDropTest(fixtures.TestBase): def setup_class(cls): global metadata, users metadata = MetaData() - users = Table('users', metadata, + users = Table('users', metadata, Column('user_id', sa.Integer, sa.Sequence('user_id_seq', optional=True), - primary_key=True), + primary_key=True), Column('user_name',sa.String(40))) addresses = Table('email_addresses', metadata, Column('address_id', sa.Integer, sa.Sequence('address_id_seq', optional=True), - primary_key=True), + primary_key=True), Column('user_id', sa.Integer, sa.ForeignKey(users.c.user_id)), Column('email_address', sa.String(40))) @@ -942,8 +942,8 @@ class SchemaManipulationTest(fixtures.TestBase): meta = MetaData() users = Table('users', meta, Column('id', sa.Integer)) - addresses = Table('addresses', meta, - Column('id', sa.Integer), + addresses = Table('addresses', meta, + Column('id', sa.Integer), Column('user_id', sa.Integer)) fk = sa.ForeignKeyConstraint(['user_id'],[users.c.id]) @@ -980,7 +980,7 @@ class UnicodeReflectionTest(fixtures.TestBase): (u'\u6e2c\u8a66', u'col_\u6e2c\u8a66', u'ix_\u6e2c\u8a66'), ] - # as you can see, our options for this kind of thing + # as you can see, our options for this kind of thing # are really limited unless you're on PG or SQLite # forget about it on these backends @@ -990,7 +990,7 @@ class UnicodeReflectionTest(fixtures.TestBase): elif testing.against("mysql") and \ not testing.requires._has_mysql_fully_case_sensitive(): names = no_multibyte_period.union(no_case_sensitivity) - 
# mssql + pyodbc + freetds can't compare multibyte names to + # mssql + pyodbc + freetds can't compare multibyte names to # information_schema.tables.table_name elif testing.against("mssql"): names = no_multibyte_period.union(no_has_table) @@ -1161,8 +1161,8 @@ class SchemaTest(fixtures.TestBase): m2 = MetaData(schema="test_schema", bind=testing.db) m2.reflect() eq_( - set(m2.tables), - set(['test_schema.dingalings', 'test_schema.users', + set(m2.tables), + set(['test_schema.dingalings', 'test_schema.users', 'test_schema.email_addresses']) ) @@ -1239,7 +1239,7 @@ def createTables(meta, schema=None): ) dingalings = Table("dingalings", meta, Column('dingaling_id', sa.Integer, primary_key=True), - Column('address_id', sa.Integer, + Column('address_id', sa.Integer, sa.ForeignKey('%semail_addresses.address_id' % schema_prefix)), Column('data', sa.String(30)), schema=schema, @@ -1317,11 +1317,11 @@ class CaseSensitiveTest(fixtures.TablesTest): @classmethod def define_tables(cls, metadata): - Table('SomeTable', metadata, + Table('SomeTable', metadata, Column('x', Integer, primary_key=True), test_needs_fk=True ) - Table('SomeOtherTable', metadata, + Table('SomeOtherTable', metadata, Column('x', Integer, primary_key=True), Column('y', Integer, sa.ForeignKey("SomeTable.x")), test_needs_fk=True @@ -1340,8 +1340,8 @@ class CaseSensitiveTest(fixtures.TablesTest): eq_(t1.name, "SomeTable") assert t1.c.x is not None - @testing.fails_if(lambda: - testing.against(('mysql', '<', (5, 5))) and + @testing.fails_if(lambda: + testing.against(('mysql', '<', (5, 5))) and not testing.requires._has_mysql_fully_case_sensitive() ) def test_reflect_via_fk(self): diff --git a/test/engine/test_transaction.py b/test/engine/test_transaction.py index 04a3e642cf..709f0d2f1a 100644 --- a/test/engine/test_transaction.py +++ b/test/engine/test_transaction.py @@ -1204,7 +1204,7 @@ class IsolationLevelTest(fixtures.TestBase): eng = testing_engine(options=dict()) conn = eng.connect() eq_( - eng.dialect.get_isolation_level(conn.connection), + eng.dialect.get_isolation_level(conn.connection), self._default_isolation_level() ) @@ -1212,13 +1212,13 @@ class IsolationLevelTest(fixtures.TestBase): conn.connection, self._non_default_isolation_level() ) eq_( - eng.dialect.get_isolation_level(conn.connection), + eng.dialect.get_isolation_level(conn.connection), self._non_default_isolation_level() ) eng.dialect.reset_isolation_level(conn.connection) eq_( - eng.dialect.get_isolation_level(conn.connection), + eng.dialect.get_isolation_level(conn.connection), self._default_isolation_level() ) @@ -1243,17 +1243,17 @@ class IsolationLevelTest(fixtures.TestBase): def test_invalid_level(self): eng = testing_engine(options=dict(isolation_level='FOO')) assert_raises_message( - exc.ArgumentError, + exc.ArgumentError, "Invalid value '%s' for isolation_level. 
" - "Valid isolation levels for %s are %s" % - ("FOO", eng.dialect.name, + "Valid isolation levels for %s are %s" % + ("FOO", eng.dialect.name, ", ".join(eng.dialect._isolation_lookup)), eng.connect) def test_per_connection(self): from sqlalchemy.pool import QueuePool eng = testing_engine(options=dict( - poolclass=QueuePool, + poolclass=QueuePool, pool_size=2, max_overflow=0)) c1 = eng.connect() @@ -1292,7 +1292,7 @@ class IsolationLevelTest(fixtures.TestBase): r"on Connection.execution_options\(\), or " r"per-engine using the isolation_level " r"argument to create_engine\(\).", - select([1]).execution_options, + select([1]).execution_options, isolation_level=self._non_default_isolation_level() ) @@ -1305,7 +1305,7 @@ class IsolationLevelTest(fixtures.TestBase): r"To set engine-wide isolation level, " r"use the isolation_level argument to create_engine\(\).", create_engine, - testing.db.url, + testing.db.url, execution_options={'isolation_level': self._non_default_isolation_level} ) diff --git a/test/ext/test_associationproxy.py b/test/ext/test_associationproxy.py index c9feddf4c9..af703d2549 100644 --- a/test/ext/test_associationproxy.py +++ b/test/ext/test_associationproxy.py @@ -641,8 +641,8 @@ class ProxyFactoryTest(ListTest): ) class Parent(object): - children = association_proxy('_children', 'name', - proxy_factory=CustomProxy, + children = association_proxy('_children', 'name', + proxy_factory=CustomProxy, proxy_bulk_set=CustomProxy.extend ) @@ -1017,17 +1017,17 @@ class ComparatorTest(fixtures.MappedTest, AssertsCompiledSQL): @classmethod def define_tables(cls, metadata): - Table('userkeywords', metadata, + Table('userkeywords', metadata, Column('keyword_id', Integer,ForeignKey('keywords.id'), primary_key=True), Column('user_id', Integer, ForeignKey('users.id')) ) - Table('users', metadata, + Table('users', metadata, Column('id', Integer, primary_key=True, test_needs_autoincrement=True), Column('name', String(64)), Column('singular_id', Integer, ForeignKey('singular.id')) ) - Table('keywords', metadata, + Table('keywords', metadata, Column('id', Integer, primary_key=True, test_needs_autoincrement=True), Column('keyword', String(64)), @@ -1090,7 +1090,7 @@ class ComparatorTest(fixtures.MappedTest, AssertsCompiledSQL): }) mapper(UserKeyword, userkeywords, properties={ - 'user' : relationship(User, backref='user_keywords'), + 'user' : relationship(User, backref='user_keywords'), 'keyword' : relationship(Keyword) }) mapper(Singular, singular, properties={ @@ -1288,7 +1288,7 @@ class ComparatorTest(fixtures.MappedTest, AssertsCompiledSQL): User = self.classes.User self.assert_compile( self.session.query(User).join( - User.keywords.local_attr, + User.keywords.local_attr, User.keywords.remote_attr), "SELECT users.id AS users_id, users.name AS users_name, " "users.singular_id AS users_singular_id " @@ -1321,7 +1321,7 @@ class DictOfTupleUpdateTest(fixtures.TestBase): m = MetaData() a = Table('a', m, Column('id', Integer, primary_key=True)) - b = Table('b', m, Column('id', Integer, primary_key=True), + b = Table('b', m, Column('id', Integer, primary_key=True), Column('aid', Integer, ForeignKey('a.id'))) mapper(A, a, properties={ 'orig':relationship(B, collection_class=attribute_mapped_collection('key')) diff --git a/test/ext/test_compiler.py b/test/ext/test_compiler.py index 318a1e76c4..1098b5926f 100644 --- a/test/ext/test_compiler.py +++ b/test/ext/test_compiler.py @@ -123,7 +123,7 @@ class UserDefinedTest(fixtures.TestBase, AssertsCompiledSQL): ) def test_annotations(self): - """test 
that annotated clause constructs use the + """test that annotated clause constructs use the decorated class' compiler. """ @@ -356,7 +356,7 @@ class DefaultOnExistingTest(fixtures.TestBase, AssertsCompiledSQL): return "BIND(%s)" % compiler.visit_bindparam(element, **kw) self.assert_compile( - t.select().where(t.c.c == 5), + t.select().where(t.c.c == 5), "SELECT t.a, t.b, t.c FROM t WHERE t.c = BIND(:c_1)", use_default_dialect=True ) @@ -373,7 +373,7 @@ class DefaultOnExistingTest(fixtures.TestBase, AssertsCompiledSQL): return "BIND(%s)" % compiler.visit_bindparam(element, **kw) self.assert_compile( - t.insert(), + t.insert(), "INSERT INTO t (a, b) VALUES (BIND(:a), BIND(:b))", {'a':1, 'b':2}, use_default_dialect=True diff --git a/test/ext/test_hybrid.py b/test/ext/test_hybrid.py index b85d4b0fc2..7f5667fd2e 100644 --- a/test/ext/test_hybrid.py +++ b/test/ext/test_hybrid.py @@ -214,7 +214,7 @@ class PropertyValueTest(fixtures.TestBase, AssertsCompiledSQL): A = self._fixture(False) a1 = A(_value=5) assert_raises_message( - AttributeError, + AttributeError, "can't set attribute", setattr, a1, 'value', 10 ) @@ -223,7 +223,7 @@ class PropertyValueTest(fixtures.TestBase, AssertsCompiledSQL): A = self._fixture(False) a1 = A(_value=5) assert_raises_message( - AttributeError, + AttributeError, "can't delete attribute", delattr, a1, 'value' ) diff --git a/test/ext/test_sqlsoup.py b/test/ext/test_sqlsoup.py index 09e7535b2c..94e246a411 100644 --- a/test/ext/test_sqlsoup.py +++ b/test/ext/test_sqlsoup.py @@ -87,7 +87,7 @@ class SQLSoupTest(fixtures.TestBase): exc.ArgumentError, "'tablename' or 'selectable' argument is " "required.", - db.map_to, 'users', + db.map_to, 'users', ) def test_map_to_string_not_selectable(self): diff --git a/test/lib/__init__.py b/test/lib/__init__.py index 68876c4479..b36db71fca 100644 --- a/test/lib/__init__.py +++ b/test/lib/__init__.py @@ -1,6 +1,6 @@ """Testing environment and utilities. -This package contains base classes and routines used by +This package contains base classes and routines used by the unit tests. Tests are based on Nose and bootstrapped by noseplugin.NoseSQLAlchemy. diff --git a/test/lib/engines.py b/test/lib/engines.py index c8a44dc44e..e226d11bc0 100644 --- a/test/lib/engines.py +++ b/test/lib/engines.py @@ -214,7 +214,7 @@ def utf8_engine(url=None, options=None): if config.db.dialect.name == 'mysql' and \ config.db.driver in ['mysqldb', 'pymysql']: - # note 1.2.1.gamma.6 or greater of MySQLdb + # note 1.2.1.gamma.6 or greater of MySQLdb # needed here url = url or config.db_url url = engine_url.make_url(url) @@ -231,7 +231,7 @@ def mock_engine(dialect_name=None): by an Engine. It should not be used in other cases, as assert_compile() and - assert_sql_execution() are much better choices with fewer + assert_sql_execution() are much better choices with fewer moving parts. """ @@ -250,7 +250,7 @@ def mock_engine(dialect_name=None): def print_sql(): d = engine.dialect return "\n".join( - str(s.compile(dialect=d)) + str(s.compile(dialect=d)) for s in engine.mock ) engine = create_engine(dialect_name + '://', @@ -263,10 +263,10 @@ def mock_engine(dialect_name=None): class DBAPIProxyCursor(object): """Proxy a DBAPI cursor. - + Tests can provide subclasses of this to intercept DBAPI-level cursor operations. - + """ def __init__(self, engine, conn): self.engine = engine @@ -287,10 +287,10 @@ class DBAPIProxyCursor(object): class DBAPIProxyConnection(object): """Proxy a DBAPI connection. 
- + Tests can provide subclasses of this to intercept DBAPI-level connection operations. - + """ def __init__(self, engine, cursor_cls): self.conn = self._sqla_unwrap = engine.pool._creator() @@ -307,9 +307,9 @@ class DBAPIProxyConnection(object): return getattr(self.conn, key) def proxying_engine(conn_cls=DBAPIProxyConnection, cursor_cls=DBAPIProxyCursor): - """Produce an engine that provides proxy hooks for + """Produce an engine that provides proxy hooks for common methods. - + """ def mock_conn(): return conn_cls(config.db, cursor_cls) @@ -330,7 +330,7 @@ class ReplayableSession(object): # Py3K #Natives = set([getattr(types, t) # for t in dir(types) if not t.startswith('_')]). \ - # union([type(t) if not isinstance(t, type) + # union([type(t) if not isinstance(t, type) # else t for t in __builtins__.values()]).\ # difference([getattr(types, t) # for t in ('FunctionType', 'BuiltinFunctionType', diff --git a/test/lib/fixtures.py b/test/lib/fixtures.py index 41a72c9a49..451eeb43b2 100644 --- a/test/lib/fixtures.py +++ b/test/lib/fixtures.py @@ -268,7 +268,7 @@ class MappedTest(_ORMTest, TablesTest, testing.AssertsExecutionResults): """Run a setup method, framing the operation with a Base class that will catch new subclasses to be established within the "classes" registry. - + """ cls_registry = cls.classes class FindFixture(type): @@ -289,7 +289,7 @@ class MappedTest(_ORMTest, TablesTest, testing.AssertsExecutionResults): def _teardown_each_mappers(self): # some tests create mappers in the test bodies - # and will define setup_mappers as None - + # and will define setup_mappers as None - # clear mappers in any case if self.run_setup_mappers != 'once': sa.orm.clear_mappers() @@ -328,7 +328,7 @@ class DeclarativeMappedTest(MappedTest): cls, classname, bases, dict_) class DeclarativeBasic(object): __table_cls__ = schema.Table - _DeclBase = declarative_base(metadata=cls.declarative_meta, + _DeclBase = declarative_base(metadata=cls.declarative_meta, metaclass=FindFixtureDeclarative, cls=DeclarativeBasic) cls.DeclarativeBasic = _DeclBase diff --git a/test/lib/profiling.py b/test/lib/profiling.py index bac9e549fe..f471457185 100644 --- a/test/lib/profiling.py +++ b/test/lib/profiling.py @@ -69,12 +69,12 @@ def profiled(target=None, **target_opts): else: stats.print_stats() - print_callers = target_opts.get('print_callers', + print_callers = target_opts.get('print_callers', profile_config['print_callers']) if print_callers: stats.print_callers() - print_callees = target_opts.get('print_callees', + print_callees = target_opts.get('print_callees', profile_config['print_callees']) if print_callees: stats.print_callees() diff --git a/test/lib/requires.py b/test/lib/requires.py index d52d26e324..9c0526514a 100644 --- a/test/lib/requires.py +++ b/test/lib/requires.py @@ -136,7 +136,7 @@ def update_from(fn): """Target must support UPDATE..FROM syntax""" return _chain_decorators_on( fn, - only_on(('postgresql', 'mssql', 'mysql'), + only_on(('postgresql', 'mssql', 'mysql'), "Backend does not support UPDATE..FROM") ) @@ -373,7 +373,7 @@ def python25(fn): def cpython(fn): return _chain_decorators_on( fn, - skip_if(lambda: util.jython or util.pypy, + skip_if(lambda: util.jython or util.pypy, "cPython interpreter needed" ) ) @@ -409,11 +409,11 @@ def sqlite(fn): def ad_hoc_engines(fn): """Test environment must allow ad-hoc engine/connection creation. - + DBs that scale poorly for many connections, even when closed, i.e. 
Oracle, may use the "--low-connections" option which flags this requirement as not present. - + """ return _chain_decorators_on( fn, diff --git a/test/lib/testing.py b/test/lib/testing.py index 4dae400701..92817b6040 100644 --- a/test/lib/testing.py +++ b/test/lib/testing.py @@ -95,7 +95,7 @@ def db_spec(*dbs): def fails_on(dbs, reason): - """Mark a test as expected to fail on the specified database + """Mark a test as expected to fail on the specified database implementation. Unlike ``crashes``, tests marked as ``fails_on`` will be run @@ -425,7 +425,7 @@ def resetwarnings(): util.warn = util.langhelpers.warn = testing_warn warnings.filterwarnings('ignore', - category=sa_exc.SAPendingDeprecationWarning) + category=sa_exc.SAPendingDeprecationWarning) warnings.filterwarnings('error', category=sa_exc.SADeprecationWarning) warnings.filterwarnings('error', category=sa_exc.SAWarning) @@ -479,9 +479,9 @@ def _chain_decorators_on(fn, *decorators): def run_as_contextmanager(ctx, fn, *arg, **kw): """Run the given function under the given contextmanager, - simulating the behavior of 'with' to support older + simulating the behavior of 'with' to support older Python versions. - + """ obj = ctx.__enter__() @@ -576,8 +576,8 @@ class adict(dict): class AssertsCompiledSQL(object): - def assert_compile(self, clause, result, params=None, - checkparams=None, dialect=None, + def assert_compile(self, clause, result, params=None, + checkparams=None, dialect=None, checkpositional=None, use_default_dialect=False, allow_dialect_select=False): diff --git a/test/orm/inheritance/test_assorted_poly.py b/test/orm/inheritance/test_assorted_poly.py index 985d892d81..6ff989e736 100644 --- a/test/orm/inheritance/test_assorted_poly.py +++ b/test/orm/inheritance/test_assorted_poly.py @@ -29,17 +29,17 @@ class RelationshipTest1(fixtures.MappedTest): global people, managers people = Table('people', metadata, - Column('person_id', Integer, Sequence('person_id_seq', - optional=True), + Column('person_id', Integer, Sequence('person_id_seq', + optional=True), primary_key=True), - Column('manager_id', Integer, - ForeignKey('managers.person_id', + Column('manager_id', Integer, + ForeignKey('managers.person_id', use_alter=True, name="mpid_fq")), Column('name', String(50)), Column('type', String(30))) managers = Table('managers', metadata, - Column('person_id', Integer, ForeignKey('people.person_id'), + Column('person_id', Integer, ForeignKey('people.person_id'), primary_key=True), Column('status', String(30)), Column('manager_name', String(50)) @@ -64,7 +64,7 @@ class RelationshipTest1(fixtures.MappedTest): mapper(Manager, managers, inherits=Person, inherit_condition=people.c.person_id==managers.c.person_id) - eq_(class_mapper(Person).get_property('manager').synchronize_pairs, + eq_(class_mapper(Person).get_property('manager').synchronize_pairs, [(managers.c.person_id,people.c.manager_id)]) session = create_session() @@ -86,9 +86,9 @@ class RelationshipTest1(fixtures.MappedTest): pass mapper(Person, people) - mapper(Manager, managers, inherits=Person, + mapper(Manager, managers, inherits=Person, inherit_condition=people.c.person_id== - managers.c.person_id, + managers.c.person_id, properties={ 'employee':relationship(Person, primaryjoin=( people.c.manager_id == @@ -115,7 +115,7 @@ class RelationshipTest2(fixtures.MappedTest): def define_tables(cls, metadata): global people, managers, data people = Table('people', metadata, - Column('person_id', Integer, primary_key=True, + Column('person_id', Integer, primary_key=True, 
test_needs_autoincrement=True), Column('name', String(50)), Column('type', String(30))) @@ -128,7 +128,7 @@ class RelationshipTest2(fixtures.MappedTest): ) data = Table('data', metadata, - Column('person_id', Integer, ForeignKey('managers.person_id'), + Column('person_id', Integer, ForeignKey('managers.person_id'), primary_key=True), Column('data', String(30)) ) @@ -155,14 +155,14 @@ class RelationshipTest2(fixtures.MappedTest): if jointype == "join1": poly_union = polymorphic_union({ 'person':people.select(people.c.type=='person'), - 'manager':join(people, managers, + 'manager':join(people, managers, people.c.person_id==managers.c.person_id) }, None) polymorphic_on=poly_union.c.type elif jointype == "join2": poly_union = polymorphic_union({ 'person':people.select(people.c.type=='person'), - 'manager':managers.join(people, + 'manager':managers.join(people, people.c.person_id==managers.c.person_id) }, None) polymorphic_on=poly_union.c.type @@ -176,35 +176,35 @@ class RelationshipTest2(fixtures.MappedTest): self.data = data mapper(Data, data) - mapper(Person, people, - with_polymorphic=('*', poly_union), - polymorphic_identity='person', + mapper(Person, people, + with_polymorphic=('*', poly_union), + polymorphic_identity='person', polymorphic_on=polymorphic_on) if usedata: - mapper(Manager, managers, - inherits=Person, + mapper(Manager, managers, + inherits=Person, inherit_condition=people.c.person_id== - managers.c.person_id, + managers.c.person_id, polymorphic_identity='manager', properties={ 'colleague':relationship( - Person, + Person, primaryjoin=managers.c.manager_id== - people.c.person_id, + people.c.person_id, lazy='select', uselist=False), 'data':relationship(Data, uselist=False) } ) else: - mapper(Manager, managers, inherits=Person, + mapper(Manager, managers, inherits=Person, inherit_condition=people.c.person_id== - managers.c.person_id, + managers.c.person_id, polymorphic_identity='manager', properties={ - 'colleague':relationship(Person, + 'colleague':relationship(Person, primaryjoin=managers.c.manager_id== - people.c.person_id, + people.c.person_id, lazy='select', uselist=False) } ) @@ -231,20 +231,20 @@ class RelationshipTest3(fixtures.MappedTest): def define_tables(cls, metadata): global people, managers, data people = Table('people', metadata, - Column('person_id', Integer, primary_key=True, + Column('person_id', Integer, primary_key=True, test_needs_autoincrement=True), Column('colleague_id', Integer, ForeignKey('people.person_id')), Column('name', String(50)), Column('type', String(30))) managers = Table('managers', metadata, - Column('person_id', Integer, ForeignKey('people.person_id'), + Column('person_id', Integer, ForeignKey('people.person_id'), primary_key=True), Column('status', String(30)), ) data = Table('data', metadata, - Column('person_id', Integer, ForeignKey('people.person_id'), + Column('person_id', Integer, ForeignKey('people.person_id'), primary_key=True), Column('data', String(30)) ) @@ -263,13 +263,13 @@ def _generate_test(jointype="join1", usedata=False): if jointype == "join1": poly_union = polymorphic_union({ - 'manager':managers.join(people, + 'manager':managers.join(people, people.c.person_id==managers.c.person_id), 'person':people.select(people.c.type=='person') }, None) elif jointype =="join2": poly_union = polymorphic_union({ - 'manager':join(people, managers, + 'manager':join(people, managers, people.c.person_id==managers.c.person_id), 'person':people.select(people.c.type=='person') }, None) @@ -282,34 +282,34 @@ def 
_generate_test(jointype="join1", usedata=False): mapper(Data, data) if usedata: - mapper(Person, people, - with_polymorphic=('*', poly_union), - polymorphic_identity='person', + mapper(Person, people, + with_polymorphic=('*', poly_union), + polymorphic_identity='person', polymorphic_on=people.c.type, properties={ - 'colleagues':relationship(Person, + 'colleagues':relationship(Person, primaryjoin=people.c.colleague_id== - people.c.person_id, - remote_side=people.c.colleague_id, + people.c.person_id, + remote_side=people.c.colleague_id, uselist=True), 'data':relationship(Data, uselist=False) } ) else: - mapper(Person, people, - with_polymorphic=('*', poly_union), - polymorphic_identity='person', + mapper(Person, people, + with_polymorphic=('*', poly_union), + polymorphic_identity='person', polymorphic_on=people.c.type, properties={ - 'colleagues':relationship(Person, + 'colleagues':relationship(Person, primaryjoin=people.c.colleague_id==people.c.person_id, remote_side=people.c.colleague_id, uselist=True) } ) - mapper(Manager, managers, inherits=Person, + mapper(Manager, managers, inherits=Person, inherit_condition=people.c.person_id== - managers.c.person_id, + managers.c.person_id, polymorphic_identity='manager') sess = create_session() @@ -355,22 +355,22 @@ class RelationshipTest4(fixtures.MappedTest): def define_tables(cls, metadata): global people, engineers, managers, cars people = Table('people', metadata, - Column('person_id', Integer, primary_key=True, + Column('person_id', Integer, primary_key=True, test_needs_autoincrement=True), Column('name', String(50))) engineers = Table('engineers', metadata, - Column('person_id', Integer, ForeignKey('people.person_id'), + Column('person_id', Integer, ForeignKey('people.person_id'), primary_key=True), Column('status', String(30))) managers = Table('managers', metadata, - Column('person_id', Integer, ForeignKey('people.person_id'), + Column('person_id', Integer, ForeignKey('people.person_id'), primary_key=True), Column('longer_status', String(70))) cars = Table('cars', metadata, - Column('car_id', Integer, primary_key=True, + Column('car_id', Integer, primary_key=True, test_needs_autoincrement=True), Column('owner', Integer, ForeignKey('people.person_id'))) @@ -411,17 +411,17 @@ class RelationshipTest4(fixtures.MappedTest): 'manager':people.join(managers), }, "type", 'employee_join') - person_mapper = mapper(Person, people, - with_polymorphic=('*', employee_join), - polymorphic_on=employee_join.c.type, + person_mapper = mapper(Person, people, + with_polymorphic=('*', employee_join), + polymorphic_on=employee_join.c.type, polymorphic_identity='person') - engineer_mapper = mapper(Engineer, engineers, - inherits=person_mapper, + engineer_mapper = mapper(Engineer, engineers, + inherits=person_mapper, polymorphic_identity='engineer') - manager_mapper = mapper(Manager, managers, - inherits=person_mapper, + manager_mapper = mapper(Manager, managers, + inherits=person_mapper, polymorphic_identity='manager') - car_mapper = mapper(Car, cars, + car_mapper = mapper(Car, cars, properties= {'employee': relationship(person_mapper)}) @@ -485,23 +485,23 @@ class RelationshipTest5(fixtures.MappedTest): def define_tables(cls, metadata): global people, engineers, managers, cars people = Table('people', metadata, - Column('person_id', Integer, primary_key=True, + Column('person_id', Integer, primary_key=True, test_needs_autoincrement=True), Column('name', String(50)), Column('type', String(50))) engineers = Table('engineers', metadata, - Column('person_id', Integer, 
ForeignKey('people.person_id'), + Column('person_id', Integer, ForeignKey('people.person_id'), primary_key=True), Column('status', String(30))) managers = Table('managers', metadata, - Column('person_id', Integer, ForeignKey('people.person_id'), + Column('person_id', Integer, ForeignKey('people.person_id'), primary_key=True), Column('longer_status', String(70))) cars = Table('cars', metadata, - Column('car_id', Integer, primary_key=True, + Column('car_id', Integer, primary_key=True, test_needs_autoincrement=True), Column('owner', Integer, ForeignKey('people.person_id'))) @@ -530,14 +530,14 @@ class RelationshipTest5(fixtures.MappedTest): def __repr__(self): return "Car number %d" % self.car_id - person_mapper = mapper(Person, people, - polymorphic_on=people.c.type, + person_mapper = mapper(Person, people, + polymorphic_on=people.c.type, polymorphic_identity='person') - engineer_mapper = mapper(Engineer, engineers, - inherits=person_mapper, + engineer_mapper = mapper(Engineer, engineers, + inherits=person_mapper, polymorphic_identity='engineer') - manager_mapper = mapper(Manager, managers, - inherits=person_mapper, + manager_mapper = mapper(Manager, managers, + inherits=person_mapper, polymorphic_identity='manager') car_mapper = mapper(Car, cars, properties= { 'manager':relationship( @@ -564,15 +564,15 @@ class RelationshipTest6(fixtures.MappedTest): def define_tables(cls, metadata): global people, managers, data people = Table('people', metadata, - Column('person_id', Integer, primary_key=True, + Column('person_id', Integer, primary_key=True, test_needs_autoincrement=True), Column('name', String(50)), ) managers = Table('managers', metadata, - Column('person_id', Integer, ForeignKey('people.person_id'), + Column('person_id', Integer, ForeignKey('people.person_id'), primary_key=True), - Column('colleague_id', Integer, + Column('colleague_id', Integer, ForeignKey('managers.person_id')), Column('status', String(30)), ) @@ -585,13 +585,13 @@ class RelationshipTest6(fixtures.MappedTest): mapper(Person, people) - mapper(Manager, managers, inherits=Person, + mapper(Manager, managers, inherits=Person, inherit_condition=people.c.person_id==\ managers.c.person_id, properties={ - 'colleague':relationship(Manager, + 'colleague':relationship(Manager, primaryjoin=managers.c.colleague_id==\ - managers.c.person_id, + managers.c.person_id, lazy='select', uselist=False) } ) @@ -613,7 +613,7 @@ class RelationshipTest7(fixtures.MappedTest): def define_tables(cls, metadata): global people, engineers, managers, cars, offroad_cars cars = Table('cars', metadata, - Column('car_id', Integer, primary_key=True, + Column('car_id', Integer, primary_key=True, test_needs_autoincrement=True), Column('name', String(30))) @@ -622,20 +622,20 @@ class RelationshipTest7(fixtures.MappedTest): nullable=False,primary_key=True)) people = Table('people', metadata, - Column('person_id', Integer, primary_key=True, + Column('person_id', Integer, primary_key=True, test_needs_autoincrement=True), - Column('car_id', Integer, ForeignKey('cars.car_id'), + Column('car_id', Integer, ForeignKey('cars.car_id'), nullable=False), Column('name', String(50))) engineers = Table('engineers', metadata, - Column('person_id', Integer, ForeignKey('people.person_id'), + Column('person_id', Integer, ForeignKey('people.person_id'), primary_key=True), Column('field', String(30))) managers = Table('managers', metadata, - Column('person_id', Integer, ForeignKey('people.person_id'), + Column('person_id', Integer, ForeignKey('people.person_id'), 
primary_key=True), Column('category', String(70))) @@ -659,12 +659,12 @@ class RelationshipTest7(fixtures.MappedTest): class Engineer(Person): def __repr__(self): - return "Engineer %s, field %s" % (self.name, + return "Engineer %s, field %s" % (self.name, self.field) class Manager(Person): def __repr__(self): - return "Manager %s, category %s" % (self.name, + return "Manager %s, category %s" % (self.name, self.category) class Car(PersistentObject): @@ -686,7 +686,7 @@ class RelationshipTest7(fixtures.MappedTest): car_join = polymorphic_union( { 'car' : cars.outerjoin(offroad_cars).\ - select(offroad_cars.c.car_id == None, + select(offroad_cars.c.car_id == None, fold_equivalents=True), 'offroad' : cars.join(offroad_cars) }, "type", 'car_join') @@ -695,20 +695,20 @@ class RelationshipTest7(fixtures.MappedTest): with_polymorphic=('*', car_join) ,polymorphic_on=car_join.c.type, polymorphic_identity='car', ) - offroad_car_mapper = mapper(Offraod_Car, offroad_cars, + offroad_car_mapper = mapper(Offraod_Car, offroad_cars, inherits=car_mapper, polymorphic_identity='offroad') person_mapper = mapper(Person, people, - with_polymorphic=('*', employee_join), + with_polymorphic=('*', employee_join), polymorphic_on=employee_join.c.type, polymorphic_identity='person', properties={ 'car':relationship(car_mapper) }) - engineer_mapper = mapper(Engineer, engineers, - inherits=person_mapper, + engineer_mapper = mapper(Engineer, engineers, + inherits=person_mapper, polymorphic_identity='engineer') - manager_mapper = mapper(Manager, managers, - inherits=person_mapper, + manager_mapper = mapper(Manager, managers, + inherits=person_mapper, polymorphic_identity='manager') session = create_session() @@ -735,13 +735,13 @@ class RelationshipTest8(fixtures.MappedTest): def define_tables(cls, metadata): global taggable, users taggable = Table('taggable', metadata, - Column('id', Integer, primary_key=True, + Column('id', Integer, primary_key=True, test_needs_autoincrement=True), Column('type', String(30)), Column('owner_id', Integer, ForeignKey('taggable.id')), ) users = Table ('users', metadata, - Column('id', Integer, ForeignKey('taggable.id'), + Column('id', Integer, ForeignKey('taggable.id'), primary_key=True), Column('data', String(50)), ) @@ -753,9 +753,9 @@ class RelationshipTest8(fixtures.MappedTest): class User(Taggable): pass - mapper( Taggable, taggable, - polymorphic_on=taggable.c.type, - polymorphic_identity='taggable', + mapper( Taggable, taggable, + polymorphic_on=taggable.c.type, + polymorphic_identity='taggable', properties = { 'owner' : relationship (User, primaryjoin=taggable.c.owner_id ==taggable.c.id, @@ -764,7 +764,7 @@ class RelationshipTest8(fixtures.MappedTest): }) - mapper(User, users, inherits=Taggable, + mapper(User, users, inherits=Taggable, polymorphic_identity='user', inherit_condition=users.c.id == taggable.c.id, ) @@ -796,33 +796,33 @@ class GenerativeTest(fixtures.TestBase, AssertsExecutionResults): metadata = MetaData(testing.db) # table definitions status = Table('status', metadata, - Column('status_id', Integer, primary_key=True, + Column('status_id', Integer, primary_key=True, test_needs_autoincrement=True), Column('name', String(20))) people = Table('people', metadata, - Column('person_id', Integer, primary_key=True, + Column('person_id', Integer, primary_key=True, test_needs_autoincrement=True), - Column('status_id', Integer, ForeignKey('status.status_id'), + Column('status_id', Integer, ForeignKey('status.status_id'), nullable=False), Column('name', String(50))) engineers = 
Table('engineers', metadata, - Column('person_id', Integer, ForeignKey('people.person_id'), + Column('person_id', Integer, ForeignKey('people.person_id'), primary_key=True), Column('field', String(30))) managers = Table('managers', metadata, - Column('person_id', Integer, ForeignKey('people.person_id'), + Column('person_id', Integer, ForeignKey('people.person_id'), primary_key=True), Column('category', String(70))) cars = Table('cars', metadata, - Column('car_id', Integer, primary_key=True, + Column('car_id', Integer, primary_key=True, test_needs_autoincrement=True), - Column('status_id', Integer, ForeignKey('status.status_id'), + Column('status_id', Integer, ForeignKey('status.status_id'), nullable=False), - Column('owner', Integer, ForeignKey('people.person_id'), + Column('owner', Integer, ForeignKey('people.person_id'), nullable=False)) metadata.create_all() @@ -868,18 +868,18 @@ class GenerativeTest(fixtures.TestBase, AssertsExecutionResults): status_mapper = mapper(Status, status) person_mapper = mapper(Person, people, - with_polymorphic=('*', employee_join), + with_polymorphic=('*', employee_join), polymorphic_on=employee_join.c.type, - polymorphic_identity='person', + polymorphic_identity='person', properties={'status':relationship(status_mapper)}) - engineer_mapper = mapper(Engineer, engineers, - inherits=person_mapper, + engineer_mapper = mapper(Engineer, engineers, + inherits=person_mapper, polymorphic_identity='engineer') - manager_mapper = mapper(Manager, managers, - inherits=person_mapper, + manager_mapper = mapper(Manager, managers, + inherits=person_mapper, polymorphic_identity='manager') car_mapper = mapper(Car, cars, properties= { - 'employee':relationship(person_mapper), + 'employee':relationship(person_mapper), 'status':relationship(status_mapper)}) session = create_session() @@ -891,10 +891,10 @@ class GenerativeTest(fixtures.TestBase, AssertsExecutionResults): session.add(dead) session.flush() - # TODO: we haven't created assertions for all + # TODO: we haven't created assertions for all # the data combinations created here - # creating 5 managers named from M1 to M5 + # creating 5 managers named from M1 to M5 # and 5 engineers named from E1 to E5 # M4, M5, E4 and E5 are dead for i in range(1,5): @@ -933,13 +933,13 @@ class GenerativeTest(fixtures.TestBase, AssertsExecutionResults): "status Status active]") r = session.query(Engineer).join('status').\ filter(Person.name.in_( - ['E2', 'E3', 'E4', 'M4', 'M2', 'M1']) & + ['E2', 'E3', 'E4', 'M4', 'M2', 'M1']) & (status.c.name=="active")).order_by(Person.name) eq_(str(list(r)), "[Engineer E2, field X, status Status " "active, Engineer E3, field X, status " "Status active]") - r = session.query(Person).filter(exists([1], + r = session.query(Person).filter(exists([1], Car.owner==Person.person_id)) eq_(str(list(r)), "[Engineer E4, field X, status Status dead]") @@ -949,20 +949,20 @@ class MultiLevelTest(fixtures.MappedTest): global table_Employee, table_Engineer, table_Manager table_Employee = Table( 'Employee', metadata, Column( 'name', type_= String(100), ), - Column( 'id', primary_key= True, type_= Integer, + Column( 'id', primary_key= True, type_= Integer, test_needs_autoincrement=True), Column( 'atype', type_= String(100), ), ) table_Engineer = Table( 'Engineer', metadata, Column( 'machine', type_= String(100), ), - Column( 'id', Integer, ForeignKey( 'Employee.id', ), + Column( 'id', Integer, ForeignKey( 'Employee.id', ), primary_key= True), ) table_Manager = Table( 'Manager', metadata, Column( 'duties', type_= 
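The exists([1], ...) criterion appearing near this hunk compiles to a correlated EXISTS subquery; a small Core-level sketch with illustrative table names:

    from sqlalchemy import MetaData, Table, Column, Integer, ForeignKey
    from sqlalchemy.sql import exists

    metadata = MetaData()
    people = Table('people', metadata,
        Column('person_id', Integer, primary_key=True))
    cars = Table('cars', metadata,
        Column('car_id', Integer, primary_key=True),
        Column('owner', Integer, ForeignKey('people.person_id')))

    # renders: ... WHERE EXISTS (SELECT 1 FROM cars
    #                            WHERE cars.owner = people.person_id)
    print(people.select(exists([1], cars.c.owner == people.c.person_id)))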
String(100), ), - Column( 'id', Integer, ForeignKey( 'Engineer.id', ), + Column( 'id', Integer, ForeignKey( 'Engineer.id', ), primary_key= True, ), ) @@ -971,23 +971,23 @@ class MultiLevelTest(fixtures.MappedTest): def set( me, **kargs): for k,v in kargs.iteritems(): setattr( me, k, v) return me - def __str__(me): + def __str__(me): return str(me.__class__.__name__)+':'+str(me.name) __repr__ = __str__ - class Engineer(Employee): + class Engineer(Employee): pass - class Manager(Engineer): + class Manager(Engineer): pass pu_Employee = polymorphic_union( { - 'Manager': table_Employee.join( + 'Manager': table_Employee.join( table_Engineer).join( table_Manager), - 'Engineer': select([table_Employee, - table_Engineer.c.machine], - table_Employee.c.atype == 'Engineer', + 'Engineer': select([table_Employee, + table_Engineer.c.machine], + table_Employee.c.atype == 'Engineer', from_obj=[ table_Employee.join(table_Engineer)]), - 'Employee': table_Employee.select( + 'Employee': table_Employee.select( table_Employee.c.atype == 'Employee'), }, None, 'pu_employee', ) @@ -1000,9 +1000,9 @@ class MultiLevelTest(fixtures.MappedTest): pu_Engineer = polymorphic_union( { 'Manager': table_Employee.join( table_Engineer). join( table_Manager), - 'Engineer': select([table_Employee, - table_Engineer.c.machine], - table_Employee.c.atype == 'Engineer', + 'Engineer': select([table_Employee, + table_Engineer.c.machine], + table_Employee.c.atype == 'Engineer', from_obj=[ table_Employee.join(table_Engineer) ]), @@ -1025,7 +1025,7 @@ class MultiLevelTest(fixtures.MappedTest): a = Employee().set( name= 'one') b = Engineer().set( egn= 'two', machine= 'any') - c = Manager().set( name= 'head', machine= 'fast', + c = Manager().set( name= 'head', machine= 'fast', duties= 'many') session = create_session() @@ -1044,13 +1044,13 @@ class ManyToManyPolyTest(fixtures.MappedTest): collection_table base_item_table = Table( 'base_item', metadata, - Column('id', Integer, primary_key=True, + Column('id', Integer, primary_key=True, test_needs_autoincrement=True), Column('child_name', String(255), default=None)) item_table = Table( 'item', metadata, - Column('id', Integer, ForeignKey('base_item.id'), + Column('id', Integer, ForeignKey('base_item.id'), primary_key=True), Column('dummy', Integer, default=0)) @@ -1061,7 +1061,7 @@ class ManyToManyPolyTest(fixtures.MappedTest): collection_table = Table( 'collection', metadata, - Column('id', Integer, primary_key=True, + Column('id', Integer, primary_key=True, test_needs_autoincrement=True), Column('name', Unicode(255))) @@ -1084,8 +1084,8 @@ class ManyToManyPolyTest(fixtures.MappedTest): with_polymorphic=('*', item_join), polymorphic_on=base_item_table.c.child_name, polymorphic_identity='BaseItem', - properties=dict(collections=relationship(Collection, - secondary=base_item_collection_table, + properties=dict(collections=relationship(Collection, + secondary=base_item_collection_table, backref="items"))) mapper( @@ -1102,7 +1102,7 @@ class CustomPKTest(fixtures.MappedTest): def define_tables(cls, metadata): global t1, t2 t1 = Table('t1', metadata, - Column('id', Integer, primary_key=True, + Column('id', Integer, primary_key=True, test_needs_autoincrement=True), Column('type', String(30), nullable=False), Column('data', String(30))) @@ -1127,9 +1127,9 @@ class CustomPKTest(fixtures.MappedTest): d['t2'] = t1.join(t2) pjoin = polymorphic_union(d, None, 'pjoin') - mapper(T1, t1, polymorphic_on=t1.c.type, - polymorphic_identity='t1', - with_polymorphic=('*', pjoin), + mapper(T1, t1, 
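MultiLevelTest layers polymorphic_union selectables over a three-level joined chain; the underlying chain by itself looks roughly like this (names illustrative, the unions omitted):

    from sqlalchemy import (MetaData, Table, Column, Integer, String,
                            ForeignKey)
    from sqlalchemy.orm import mapper, create_session

    metadata = MetaData()
    employee_t = Table('employee', metadata,
        Column('id', Integer, primary_key=True),
        Column('name', String(100)),
        Column('atype', String(100)))
    engineer_t = Table('engineer', metadata,
        Column('id', Integer, ForeignKey('employee.id'),
               primary_key=True),
        Column('machine', String(100)))
    manager_t = Table('manager', metadata,
        Column('id', Integer, ForeignKey('engineer.id'),
               primary_key=True),
        Column('duties', String(100)))

    class Employee(object): pass
    class Engineer(Employee): pass
    class Manager(Engineer): pass

    # each level joins to the one above through its foreign-key PK
    mapper(Employee, employee_t, polymorphic_on=employee_t.c.atype,
           polymorphic_identity='Employee')
    mapper(Engineer, engineer_t, inherits=Employee,
           polymorphic_identity='Engineer')
    mapper(Manager, manager_t, inherits=Engineer,
           polymorphic_identity='Manager')

    # SELECT ... FROM employee JOIN engineer ON ... JOIN manager ON ...
    print(create_session().query(Manager))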
polymorphic_on=t1.c.type, + polymorphic_identity='t1', + with_polymorphic=('*', pjoin), primary_key=[pjoin.c.id]) mapper(T2, t2, inherits=T1, polymorphic_identity='t2') ot1 = T1() @@ -1140,7 +1140,7 @@ class CustomPKTest(fixtures.MappedTest): sess.flush() sess.expunge_all() - # query using get(), using only one value. + # query using get(), using only one value. # this requires the select_table mapper # has the same single-col primary key. assert sess.query(T1).get(ot1.id).id == ot1.id @@ -1165,8 +1165,8 @@ class CustomPKTest(fixtures.MappedTest): d['t2'] = t1.join(t2) pjoin = polymorphic_union(d, None, 'pjoin') - mapper(T1, t1, polymorphic_on=t1.c.type, - polymorphic_identity='t1', + mapper(T1, t1, polymorphic_on=t1.c.type, + polymorphic_identity='t1', with_polymorphic=('*', pjoin)) mapper(T2, t2, inherits=T1, polymorphic_identity='t2') assert len(class_mapper(T1).primary_key) == 1 @@ -1179,7 +1179,7 @@ class CustomPKTest(fixtures.MappedTest): sess.flush() sess.expunge_all() - # query using get(), using only one value. this requires the + # query using get(), using only one value. this requires the # select_table mapper # has the same single-col primary key. assert sess.query(T1).get(ot1.id).id == ot1.id @@ -1194,7 +1194,7 @@ class InheritingEagerTest(fixtures.MappedTest): global people, employees, tags, peopleTags people = Table('people', metadata, - Column('id', Integer, primary_key=True, + Column('id', Integer, primary_key=True, test_needs_autoincrement=True), Column('_type', String(30), nullable=False), ) @@ -1206,7 +1206,7 @@ class InheritingEagerTest(fixtures.MappedTest): ) tags = Table('tags', metadata, - Column('id', Integer, primary_key=True, + Column('id', Integer, primary_key=True, test_needs_autoincrement=True), Column('label', String(50), nullable=False), ) @@ -1233,10 +1233,10 @@ class InheritingEagerTest(fixtures.MappedTest): def __init__(self, label): self.label = label - mapper(Person, people, polymorphic_on=people.c._type, + mapper(Person, people, polymorphic_on=people.c._type, polymorphic_identity='person', properties={ - 'tags': relationship(Tag, - secondary=peopleTags, + 'tags': relationship(Tag, + secondary=peopleTags, backref='people', lazy='joined') }) mapper(Employee, employees, inherits=Person, @@ -1264,24 +1264,24 @@ class InheritingEagerTest(fixtures.MappedTest): class MissingPolymorphicOnTest(fixtures.MappedTest): @classmethod def define_tables(cls, metadata): - tablea = Table('tablea', metadata, - Column('id', Integer, primary_key=True, + tablea = Table('tablea', metadata, + Column('id', Integer, primary_key=True, test_needs_autoincrement=True), Column('adata', String(50)), ) - tableb = Table('tableb', metadata, - Column('id', Integer, primary_key=True, + tableb = Table('tableb', metadata, + Column('id', Integer, primary_key=True, test_needs_autoincrement=True), Column('aid', Integer, ForeignKey('tablea.id')), Column('data', String(50)), ) - tablec = Table('tablec', metadata, - Column('id', Integer, ForeignKey('tablea.id'), + tablec = Table('tablec', metadata, + Column('id', Integer, ForeignKey('tablea.id'), primary_key=True), Column('cdata', String(50)), ) - tabled = Table('tabled', metadata, - Column('id', Integer, ForeignKey('tablec.id'), + tabled = Table('tabled', metadata, + Column('id', Integer, ForeignKey('tablec.id'), primary_key=True), Column('ddata', String(50)), ) @@ -1303,13 +1303,13 @@ class MissingPolymorphicOnTest(fixtures.MappedTest): A, B, C, D = self.classes.A, self.classes.B, self.classes.C, \ self.classes.D poly_select = select( - [tablea, 
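The comment in this hunk notes that Query.get() against a union-backed mapper requires the mapper to expose a single-column primary key; the primary_key argument forces that. A sketch mirroring the test's shape (the t2 columns here are illustrative):

    from sqlalchemy import (MetaData, Table, Column, Integer, String,
                            ForeignKey, create_engine)
    from sqlalchemy.orm import (mapper, create_session, class_mapper,
                                polymorphic_union)

    metadata = MetaData()
    t1 = Table('t1', metadata,
        Column('id', Integer, primary_key=True),
        Column('type', String(30), nullable=False),
        Column('data', String(30)))
    t2 = Table('t2', metadata,
        Column('id', Integer, ForeignKey('t1.id'), primary_key=True),
        Column('moredata', String(30)))

    pjoin = polymorphic_union({
        't1': t1.select(t1.c.type == 't1'),
        't2': t1.join(t2),
    }, None, 'pjoin')

    class T1(object): pass
    class T2(T1): pass

    mapper(T1, t1, polymorphic_on=t1.c.type, polymorphic_identity='t1',
           with_polymorphic=('*', pjoin),
           primary_key=[pjoin.c.id])   # collapse the union to one PK col
    mapper(T2, t2, inherits=T1, polymorphic_identity='t2')

    assert len(class_mapper(T1).primary_key) == 1

    engine = create_engine('sqlite://')
    metadata.create_all(engine)
    sess = create_session(bind=engine)
    ot1 = T1()
    sess.add(ot1)
    sess.flush()
    sess.expunge_all()
    # get() with a single value works against the union-based mapper
    assert sess.query(T1).get(ot1.id).id == ot1.id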
tableb.c.data.label('discriminator')], + [tablea, tableb.c.data.label('discriminator')], from_obj=tablea.join(tableb)).alias('poly') mapper(B, tableb) - mapper(A, tablea, + mapper(A, tablea, with_polymorphic=('*', poly_select), - polymorphic_on=poly_select.c.discriminator, + polymorphic_on=poly_select.c.discriminator, properties={ 'b':relationship(B, uselist=False) }) @@ -1324,9 +1324,9 @@ class MissingPolymorphicOnTest(fixtures.MappedTest): sess.flush() sess.expunge_all() eq_( - sess.query(A).all(), + sess.query(A).all(), [ - C(cdata='c1', adata='a1'), + C(cdata='c1', adata='a1'), D(cdata='c2', adata='a2', ddata='d2') ] ) @@ -1335,17 +1335,17 @@ class JoinedInhAdjacencyTest(fixtures.MappedTest): @classmethod def define_tables(cls, metadata): Table('people', metadata, - Column('id', Integer, primary_key=True, + Column('id', Integer, primary_key=True, test_needs_autoincrement=True), Column('type', String(30)), ) Table('users', metadata, - Column('id', Integer, ForeignKey('people.id'), + Column('id', Integer, ForeignKey('people.id'), primary_key=True), Column('supervisor_id', Integer, ForeignKey('people.id')), ) Table('dudes', metadata, - Column('id', Integer, ForeignKey('users.id'), + Column('id', Integer, ForeignKey('users.id'), primary_key=True), ) @@ -1463,13 +1463,13 @@ class Ticket2419Test(fixtures.DeclarativeMappedTest): class A(Base): __tablename__ = "a" - id = Column(Integer, primary_key=True, + id = Column(Integer, primary_key=True, test_needs_autoincrement=True) class B(Base): __tablename__ = "b" - id = Column(Integer, primary_key=True, + id = Column(Integer, primary_key=True, test_needs_autoincrement=True) ds = relationship("D") es = relationship("E") @@ -1494,7 +1494,7 @@ class Ticket2419Test(fixtures.DeclarativeMappedTest): test_needs_autoincrement=True) b_id = Column(Integer, ForeignKey('b.id')) - @testing.fails_on("oracle", + @testing.fails_on("oracle", "seems like oracle's query engine can't " "handle this, not clear if there's an " "expression-level bug on our end though") diff --git a/test/orm/inheritance/test_basic.py b/test/orm/inheritance/test_basic.py index 3785fcdaa4..a8cfe5e9a6 100644 --- a/test/orm/inheritance/test_basic.py +++ b/test/orm/inheritance/test_basic.py @@ -77,15 +77,15 @@ class O2MTest(fixtures.MappedTest): class PolymorphicOnNotLocalTest(fixtures.MappedTest): @classmethod def define_tables(cls, metadata): - t1 = Table('t1', metadata, + t1 = Table('t1', metadata, Column('id', Integer, primary_key=True, - test_needs_autoincrement=True), + test_needs_autoincrement=True), Column('x', String(10)), Column('q', String(10))) - t2 = Table('t2', metadata, + t2 = Table('t2', metadata, Column('id', Integer, primary_key=True, - test_needs_autoincrement=True), - Column('y', String(10)), + test_needs_autoincrement=True), + Column('y', String(10)), Column('xid', ForeignKey('t1.id'))) @classmethod @@ -183,7 +183,7 @@ class PolymorphicOnNotLocalTest(fixtures.MappedTest): "discriminator":column_property(expr) }, polymorphic_identity="parent", polymorphic_on=expr) - mapper(Child, t2, inherits=Parent, + mapper(Child, t2, inherits=Parent, polymorphic_identity="child") self._roundtrip(parent_ident='p', child_ident='c') @@ -215,7 +215,7 @@ class PolymorphicOnNotLocalTest(fixtures.MappedTest): self._roundtrip(parent_ident='p', child_ident='c') def test_polymorphic_on_expr_implicit_map_no_label_single(self): - """test that single_table_criterion is propagated + """test that single_table_criterion is propagated with a standalone expr""" t2, t1 = self.tables.t2, self.tables.t1 
Parent, Child = self.classes.Parent, self.classes.Child @@ -230,7 +230,7 @@ class PolymorphicOnNotLocalTest(fixtures.MappedTest): self._roundtrip(parent_ident='p', child_ident='c') def test_polymorphic_on_expr_implicit_map_w_label_single(self): - """test that single_table_criterion is propagated + """test that single_table_criterion is propagated with a standalone expr""" t2, t1 = self.tables.t2, self.tables.t1 Parent, Child = self.classes.Parent, self.classes.Child @@ -256,7 +256,7 @@ class PolymorphicOnNotLocalTest(fixtures.MappedTest): "discriminator":cprop }, polymorphic_identity="parent", polymorphic_on=cprop) - mapper(Child, t2, inherits=Parent, + mapper(Child, t2, inherits=Parent, polymorphic_identity="child") self._roundtrip(parent_ident='p', child_ident='c') @@ -273,7 +273,7 @@ class PolymorphicOnNotLocalTest(fixtures.MappedTest): "discriminator":cprop }, polymorphic_identity="parent", polymorphic_on="discriminator") - mapper(Child, t2, inherits=Parent, + mapper(Child, t2, inherits=Parent, polymorphic_identity="child") self._roundtrip(parent_ident='p', child_ident='c') @@ -330,8 +330,8 @@ class FalseDiscriminatorTest(fixtures.MappedTest): @classmethod def define_tables(cls, metadata): global t1 - t1 = Table('t1', metadata, - Column('id', Integer, primary_key=True, test_needs_autoincrement=True), + t1 = Table('t1', metadata, + Column('id', Integer, primary_key=True, test_needs_autoincrement=True), Column('type', Boolean, nullable=False)) def test_false_on_sub(self): @@ -407,12 +407,12 @@ class PolymorphicAttributeManagementTest(fixtures.MappedTest): @classmethod def define_tables(cls, metadata): Table('table_a', metadata, - Column('id', Integer, primary_key=True, + Column('id', Integer, primary_key=True, test_needs_autoincrement=True), Column('class_name', String(50)) ) Table('table_b', metadata, - Column('id', Integer, ForeignKey('table_a.id'), + Column('id', Integer, ForeignKey('table_a.id'), primary_key=True), Column('class_name', String(50)) ) @@ -434,13 +434,13 @@ class PolymorphicAttributeManagementTest(fixtures.MappedTest): class C(B): pass - mapper(A, table_a, - polymorphic_on=table_a.c.class_name, + mapper(A, table_a, + polymorphic_on=table_a.c.class_name, polymorphic_identity='a') - mapper(B, table_b, inherits=A, - polymorphic_on=table_b.c.class_name, + mapper(B, table_b, inherits=A, + polymorphic_on=table_b.c.class_name, polymorphic_identity='b') - mapper(C, table_c, inherits=B, + mapper(C, table_c, inherits=B, polymorphic_identity='c') def test_poly_configured_immediate(self): @@ -491,25 +491,25 @@ class CascadeTest(fixtures.MappedTest): def define_tables(cls, metadata): global t1, t2, t3, t4 t1= Table('t1', metadata, - Column('id', Integer, primary_key=True, + Column('id', Integer, primary_key=True, test_needs_autoincrement=True), Column('data', String(30)) ) t2 = Table('t2', metadata, - Column('id', Integer, primary_key=True, + Column('id', Integer, primary_key=True, test_needs_autoincrement=True), Column('t1id', Integer, ForeignKey('t1.id')), Column('type', String(30)), Column('data', String(30)) ) t3 = Table('t3', metadata, - Column('id', Integer, ForeignKey('t2.id'), + Column('id', Integer, ForeignKey('t2.id'), primary_key=True), Column('moredata', String(30))) t4 = Table('t4', metadata, - Column('id', Integer, primary_key=True, + Column('id', Integer, primary_key=True, test_needs_autoincrement=True), Column('t3id', Integer, ForeignKey('t3.id')), Column('data', String(30))) @@ -759,7 +759,7 @@ class EagerLazyTest(fixtures.MappedTest): 
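PolymorphicOnNotLocalTest maps the discriminator as a SQL expression rather than a plain table column; a condensed sketch of that variant (names illustrative, and instances would still need to set x to 'p' or 'c' themselves, as the test's _roundtrip does):

    from sqlalchemy import (MetaData, Table, Column, Integer, String,
                            ForeignKey, case)
    from sqlalchemy.orm import mapper, column_property

    metadata = MetaData()
    t1 = Table('t1', metadata,
        Column('id', Integer, primary_key=True),
        Column('x', String(10)))
    t2 = Table('t2', metadata,
        Column('id', Integer, primary_key=True),
        Column('y', String(10)),
        Column('xid', Integer, ForeignKey('t1.id')))

    class Parent(object): pass
    class Child(Parent): pass

    # the discriminator is an expression over the base table, mapped
    # explicitly as a column_property and handed to polymorphic_on
    expr = case([(t1.c.x == 'p', 'parent'), (t1.c.x == 'c', 'child')])
    mapper(Parent, t1, properties={
            'discriminator': column_property(expr)},
           polymorphic_identity='parent', polymorphic_on=expr)
    mapper(Child, t2, inherits=Parent, polymorphic_identity='child')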
self.assert_(len(q.first().eager) == 1) class EagerTargetingTest(fixtures.MappedTest): - """test a scenario where joined table inheritance might be + """test a scenario where joined table inheritance might be confused as an eagerly loaded joined table.""" @classmethod @@ -784,7 +784,7 @@ class EagerTargetingTest(fixtures.MappedTest): class B(A): pass - mapper(A, a_table, polymorphic_on=a_table.c.type, polymorphic_identity='A', + mapper(A, a_table, polymorphic_on=a_table.c.type, polymorphic_identity='A', properties={ 'children': relationship(A, order_by=a_table.c.name) }) @@ -947,9 +947,9 @@ class VersioningTest(fixtures.MappedTest): class Stuff(Base): pass mapper(Stuff, stuff) - mapper(Base, base, - polymorphic_on=base.c.discriminator, - version_id_col=base.c.version_id, + mapper(Base, base, + polymorphic_on=base.c.discriminator, + version_id_col=base.c.version_id, polymorphic_identity=1, properties={ 'stuff':relationship(Stuff) }) @@ -973,7 +973,7 @@ class VersioningTest(fixtures.MappedTest): sess.flush() assert_raises(orm_exc.StaleDataError, - sess2.query(Base).with_lockmode('read').get, + sess2.query(Base).with_lockmode('read').get, s1.id) if not testing.db.dialect.supports_sane_rowcount: @@ -996,8 +996,8 @@ class VersioningTest(fixtures.MappedTest): class Sub(Base): pass - mapper(Base, base, - polymorphic_on=base.c.discriminator, + mapper(Base, base, + polymorphic_on=base.c.discriminator, version_id_col=base.c.version_id, polymorphic_identity=1) mapper(Sub, subtable, inherits=Base, polymorphic_identity=2) @@ -1075,16 +1075,16 @@ class DistinctPKTest(fixtures.MappedTest): def test_explicit_props(self): person_mapper = mapper(Person, person_table) mapper(Employee, employee_table, inherits=person_mapper, - properties={'pid':person_table.c.id, + properties={'pid':person_table.c.id, 'eid':employee_table.c.id}) self._do_test(False) def test_explicit_composite_pk(self): person_mapper = mapper(Person, person_table) - mapper(Employee, employee_table, - inherits=person_mapper, + mapper(Employee, employee_table, + inherits=person_mapper, primary_key=[person_table.c.id, employee_table.c.id]) - assert_raises_message(sa_exc.SAWarning, + assert_raises_message(sa_exc.SAWarning, r"On mapper Mapper\|Employee\|employees, " "primary key column 'persons.id' is being " "combined with distinct primary key column 'employees.id' " @@ -1190,7 +1190,7 @@ class OverrideColKeyTest(fixtures.MappedTest): def define_tables(cls, metadata): global base, subtable - base = Table('base', metadata, + base = Table('base', metadata, Column('base_id', Integer, primary_key=True, test_needs_autoincrement=True), Column('data', String(255)), Column('sqlite_fixer', String(10)) @@ -1241,7 +1241,7 @@ class OverrideColKeyTest(fixtures.MappedTest): class_mapper(Sub).get_property('id').columns, [base.c.base_id, subtable.c.base_id] ) - + s1 = Sub() s1.id = 10 sess = create_session() @@ -1414,7 +1414,7 @@ class OptimizedLoadTest(fixtures.MappedTest): Column('type', String(50)), Column('counter', Integer, server_default="1") ) - Table('sub', metadata, + Table('sub', metadata, Column('id', Integer, ForeignKey('base.id'), primary_key=True), Column('sub', String(50)), Column('counter', Integer, server_default="1"), @@ -1431,7 +1431,7 @@ class OptimizedLoadTest(fixtures.MappedTest): ) def test_optimized_passes(self): - """"test that the 'optimized load' routine doesn't crash when + """"test that the 'optimized load' routine doesn't crash when a column in the join condition is not available.""" base, sub = self.tables.base, self.tables.sub @@ 
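VersioningTest relies on version_id_col: every UPDATE filters on the last-seen version and increments it, so a concurrent writer surfaces as StaleDataError at flush time. A self-contained sketch (names illustrative):

    from sqlalchemy import (MetaData, Table, Column, Integer, String,
                            create_engine)
    from sqlalchemy.orm import mapper, sessionmaker
    from sqlalchemy.orm.exc import StaleDataError

    metadata = MetaData()
    stuff = Table('stuff', metadata,
        Column('id', Integer, primary_key=True),
        Column('version_id', Integer, nullable=False),
        Column('data', String(50)))

    class Stuff(object): pass

    # UPDATEs render "WHERE version_id = <last seen>" and bump the value
    mapper(Stuff, stuff, version_id_col=stuff.c.version_id)

    engine = create_engine('sqlite://')
    metadata.create_all(engine)
    Session = sessionmaker(bind=engine)

    s1, s2 = Session(), Session()
    r1 = Stuff()
    r1.data = 'd1'
    s1.add(r1)
    s1.commit()

    r2 = s2.query(Stuff).get(r1.id)     # s2 sees version_id 1
    r1.data = 'changed in s1'
    s1.commit()                         # version_id goes 1 -> 2

    r2.data = 'changed in s2'           # still holds version_id 1
    try:
        s2.commit()                     # UPDATE matches zero rows
    except StaleDataError:
        s2.rollback()
        print('stale row detected')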
-1446,7 +1446,7 @@ class OptimizedLoadTest(fixtures.MappedTest): # redefine Sub's "id" to favor the "id" col in the subtable. # "id" is also part of the primary join condition - mapper(Sub, sub, inherits=Base, + mapper(Sub, sub, inherits=Base, polymorphic_identity='sub', properties={'id':[sub.c.id, base.c.id]}) sess = sessionmaker()() @@ -1455,8 +1455,8 @@ class OptimizedLoadTest(fixtures.MappedTest): sess.commit() sess.expunge_all() - # load s1 via Base. s1.id won't populate since it's relative to - # the "sub" table. The optimized load kicks in and tries to + # load s1 via Base. s1.id won't populate since it's relative to + # the "sub" table. The optimized load kicks in and tries to # generate on the primary join, but cannot since "id" is itself unloaded. # the optimized load needs to return "None" so regular full-row loading proceeds s1 = sess.query(Base).first() @@ -1501,7 +1501,7 @@ class OptimizedLoadTest(fixtures.MappedTest): sess.expunge_all() # query a bunch of rows to ensure there's no cartesian # product against "base" occurring, it is in fact - # detecting that "base" needs to be in the join + # detecting that "base" needs to be in the join # criterion eq_( sess.query(Base).order_by(Base.id).all(), @@ -1587,24 +1587,24 @@ class OptimizedLoadTest(fixtures.MappedTest): pass class Sub(Base): pass - mapper(Base, base, polymorphic_on=base.c.type, + mapper(Base, base, polymorphic_on=base.c.type, polymorphic_identity='base') m = mapper(Sub, sub, inherits=Base, polymorphic_identity='sub') s1 = Sub() - assert m._optimized_get_statement(attributes.instance_state(s1), + assert m._optimized_get_statement(attributes.instance_state(s1), ['counter2']) is None # loads s1.id as None eq_(s1.id, None) # this now will come up with a value of None for id - should reject - assert m._optimized_get_statement(attributes.instance_state(s1), + assert m._optimized_get_statement(attributes.instance_state(s1), ['counter2']) is None s1.id = 1 attributes.instance_state(s1).commit_all(s1.__dict__, None) - assert m._optimized_get_statement(attributes.instance_state(s1), + assert m._optimized_get_statement(attributes.instance_state(s1), ['counter2']) is not None def test_load_expired_on_pending_twolevel(self): @@ -1619,7 +1619,7 @@ class OptimizedLoadTest(fixtures.MappedTest): class SubSub(Sub): pass - mapper(Base, base, polymorphic_on=base.c.type, + mapper(Base, base, polymorphic_on=base.c.type, polymorphic_identity='base') mapper(Sub, sub, inherits=Base, polymorphic_identity='sub') mapper(SubSub, subsub, inherits=Sub, polymorphic_identity='subsub') @@ -1722,7 +1722,7 @@ class InhCondTest(fixtures.TestBase): mapper(Base, base_table) # succeeds, despite "owner" table not configured yet - m2 = mapper(Derived, derived_table, + m2 = mapper(Derived, derived_table, inherits=Base) assert m2.inherit_condition.compare( base_table.c.id==derived_table.c.id @@ -1734,7 +1734,7 @@ class InhCondTest(fixtures.TestBase): Column("id", Integer, primary_key=True) ) derived_table = Table("derived", m, - Column("id", Integer, ForeignKey('base.id'), + Column("id", Integer, ForeignKey('base.id'), primary_key=True), Column('order_id', Integer, ForeignKey('order.foo')) ) @@ -1784,7 +1784,7 @@ class InhCondTest(fixtures.TestBase): Column("id", Integer, primary_key=True) ) derived_table = Table("derived", m2, - Column("id", Integer, ForeignKey('base.id'), + Column("id", Integer, ForeignKey('base.id'), primary_key=True), ) @@ -1815,7 +1815,7 @@ class InhCondTest(fixtures.TestBase): Column("id", Integer, primary_key=True) ) derived_table = 
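The "optimized load" these comments discuss is the narrow second SELECT emitted when a subclass attribute is touched on an instance that was loaded through the base mapper; with echo enabled the two statements are visible. A sketch under those assumptions (names illustrative):

    from sqlalchemy import (MetaData, Table, Column, Integer, String,
                            ForeignKey, create_engine)
    from sqlalchemy.orm import mapper, create_session

    metadata = MetaData()
    base = Table('base', metadata,
        Column('id', Integer, primary_key=True),
        Column('type', String(50)),
        Column('data', String(50)))
    sub = Table('sub', metadata,
        Column('id', Integer, ForeignKey('base.id'), primary_key=True),
        Column('subdata', String(50)))

    class Base(object): pass
    class Sub(Base): pass

    mapper(Base, base, polymorphic_on=base.c.type,
           polymorphic_identity='base')
    mapper(Sub, sub, inherits=Base, polymorphic_identity='sub')

    engine = create_engine('sqlite://', echo=True)
    metadata.create_all(engine)
    sess = create_session(bind=engine)
    s1 = Sub()
    s1.data, s1.subdata = 'd', 'sd'
    sess.add(s1)
    sess.flush()
    sess.expunge_all()

    obj = sess.query(Base).first()  # SELECT against "base" only
    obj.subdata                     # second SELECT against "sub" alone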
Table("derived", m, - Column("id", Integer, ForeignKey('base.q'), + Column("id", Integer, ForeignKey('base.q'), primary_key=True), ) @@ -1840,12 +1840,12 @@ class PKDiscriminatorTest(fixtures.MappedTest): @classmethod def define_tables(cls, metadata): parents = Table('parents', metadata, - Column('id', Integer, primary_key=True, + Column('id', Integer, primary_key=True, test_needs_autoincrement=True), Column('name', String(60))) children = Table('children', metadata, - Column('id', Integer, ForeignKey('parents.id'), + Column('id', Integer, ForeignKey('parents.id'), primary_key=True), Column('type', Integer,primary_key=True), Column('name', String(60))) @@ -1894,7 +1894,7 @@ class NoPolyIdentInMiddleTest(fixtures.MappedTest): @classmethod def define_tables(cls, metadata): Table('base', metadata, - Column('id', Integer, primary_key=True, + Column('id', Integer, primary_key=True, test_needs_autoincrement=True), Column('type', String(50), nullable=False), ) @@ -1971,7 +1971,7 @@ class DeleteOrphanTest(fixtures.MappedTest): """Test the fairly obvious, that an error is raised when attempting to insert an orphan. - Previous SQLA versions would check this constraint + Previous SQLA versions would check this constraint in memory which is the original rationale for this test. """ @@ -2016,14 +2016,14 @@ class PolymorphicUnionTest(fixtures.TestBase, testing.AssertsCompiledSQL): __dialect__ = 'default' def _fixture(self): - t1 = table('t1', column('c1', Integer), - column('c2', Integer), + t1 = table('t1', column('c1', Integer), + column('c2', Integer), column('c3', Integer)) - t2 = table('t2', column('c1', Integer), column('c2', Integer), - column('c3', Integer), + t2 = table('t2', column('c1', Integer), column('c2', Integer), + column('c3', Integer), column('c4', Integer)) - t3 = table('t3', column('c1', Integer), - column('c3', Integer), + t3 = table('t3', column('c1', Integer), + column('c3', Integer), column('c5', Integer)) return t1, t2, t3 @@ -2074,12 +2074,12 @@ class NameConflictTest(fixtures.MappedTest): @classmethod def define_tables(cls, metadata): content = Table('content', metadata, - Column('id', Integer, primary_key=True, + Column('id', Integer, primary_key=True, test_needs_autoincrement=True), Column('type', String(30)) ) foo = Table('foo', metadata, - Column('id', Integer, ForeignKey('content.id'), + Column('id', Integer, ForeignKey('content.id'), primary_key=True), Column('content_type', String(30)) ) @@ -2089,9 +2089,9 @@ class NameConflictTest(fixtures.MappedTest): pass class Foo(Content): pass - mapper(Content, self.tables.content, + mapper(Content, self.tables.content, polymorphic_on=self.tables.content.c.type) - mapper(Foo, self.tables.foo, inherits=Content, + mapper(Foo, self.tables.foo, inherits=Content, polymorphic_identity='foo') sess = create_session() f = Foo() diff --git a/test/orm/inheritance/test_magazine.py b/test/orm/inheritance/test_magazine.py index 840270e589..a1118aa86f 100644 --- a/test/orm/inheritance/test_magazine.py +++ b/test/orm/inheritance/test_magazine.py @@ -176,10 +176,10 @@ def _generate_round_trip_test(use_unions=False, use_joins=False): 'magazine': relationship(Magazine, backref=backref('pages', order_by=page_table.c.page_no)) }) - classified_page_mapper = mapper(ClassifiedPage, - classified_page_table, - inherits=magazine_page_mapper, - polymorphic_identity='c', + classified_page_mapper = mapper(ClassifiedPage, + classified_page_table, + inherits=magazine_page_mapper, + polymorphic_identity='c', primary_key=[page_table.c.id]) diff --git 
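PolymorphicUnionTest's _fixture uses the lightweight table()/column() constructs, which skip MetaData entirely and exist only for SQL compilation; for example:

    from sqlalchemy import Integer
    from sqlalchemy.sql import table, column

    # uninstrumented constructs: no MetaData, no CREATE support --
    # just enough for compilation assertions
    t1 = table('t1', column('c1', Integer), column('c2', Integer))
    print(t1.select(t1.c.c1 == 5))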
a/test/orm/inheritance/test_poly_persistence.py b/test/orm/inheritance/test_poly_persistence.py index 6939479b11..5b5844b703 100644 --- a/test/orm/inheritance/test_poly_persistence.py +++ b/test/orm/inheritance/test_poly_persistence.py @@ -27,20 +27,20 @@ class PolymorphTest(fixtures.MappedTest): global companies, people, engineers, managers, boss companies = Table('companies', metadata, - Column('company_id', Integer, primary_key=True, + Column('company_id', Integer, primary_key=True, test_needs_autoincrement=True), Column('name', String(50))) people = Table('people', metadata, - Column('person_id', Integer, primary_key=True, + Column('person_id', Integer, primary_key=True, test_needs_autoincrement=True), - Column('company_id', Integer, ForeignKey('companies.company_id'), + Column('company_id', Integer, ForeignKey('companies.company_id'), nullable=False), Column('name', String(50)), Column('type', String(30))) engineers = Table('engineers', metadata, - Column('person_id', Integer, ForeignKey('people.person_id'), + Column('person_id', Integer, ForeignKey('people.person_id'), primary_key=True), Column('status', String(30)), Column('engineer_name', String(50)), @@ -48,14 +48,14 @@ class PolymorphTest(fixtures.MappedTest): ) managers = Table('managers', metadata, - Column('person_id', Integer, ForeignKey('people.person_id'), + Column('person_id', Integer, ForeignKey('people.person_id'), primary_key=True), Column('status', String(30)), Column('manager_name', String(50)) ) boss = Table('boss', metadata, - Column('boss_id', Integer, ForeignKey('managers.person_id'), + Column('boss_id', Integer, ForeignKey('managers.person_id'), primary_key=True), Column('golf_swing', String(30)), ) @@ -74,14 +74,14 @@ class InsertOrderTest(PolymorphTest): 'person':people.select(people.c.type=='person'), }, None, 'pjoin') - person_mapper = mapper(Person, people, - with_polymorphic=('*', person_join), - polymorphic_on=person_join.c.type, + person_mapper = mapper(Person, people, + with_polymorphic=('*', person_join), + polymorphic_on=person_join.c.type, polymorphic_identity='person') - mapper(Engineer, engineers, inherits=person_mapper, + mapper(Engineer, engineers, inherits=person_mapper, polymorphic_identity='engineer') - mapper(Manager, managers, inherits=person_mapper, + mapper(Manager, managers, inherits=person_mapper, polymorphic_identity='manager') mapper(Company, companies, properties={ 'employees': relationship(Person, @@ -113,16 +113,16 @@ class RoundTripTest(PolymorphTest): def _generate_round_trip_test(include_base, lazy_relationship, redefine_colprop, with_polymorphic): """generates a round trip test. - + include_base - whether or not to include the base 'person' type in the union. - + lazy_relationship - whether or not the Company relationship to People is lazy or eager. 
- + redefine_colprop - if we redefine the 'name' column to be 'people_name' on the base Person class - + use_literal_join - primary join condition is explicitly specified """ def test_roundtrip(self): @@ -158,21 +158,21 @@ def _generate_round_trip_test(include_base, lazy_relationship, manager_with_polymorphic = None if redefine_colprop: - person_mapper = mapper(Person, people, - with_polymorphic=person_with_polymorphic, - polymorphic_on=people.c.type, - polymorphic_identity='person', + person_mapper = mapper(Person, people, + with_polymorphic=person_with_polymorphic, + polymorphic_on=people.c.type, + polymorphic_identity='person', properties= {'person_name':people.c.name}) else: - person_mapper = mapper(Person, people, - with_polymorphic=person_with_polymorphic, - polymorphic_on=people.c.type, + person_mapper = mapper(Person, people, + with_polymorphic=person_with_polymorphic, + polymorphic_on=people.c.type, polymorphic_identity='person') - mapper(Engineer, engineers, inherits=person_mapper, + mapper(Engineer, engineers, inherits=person_mapper, polymorphic_identity='engineer') - mapper(Manager, managers, inherits=person_mapper, - with_polymorphic=manager_with_polymorphic, + mapper(Manager, managers, inherits=person_mapper, + with_polymorphic=manager_with_polymorphic, polymorphic_identity='manager') mapper(Boss, boss, inherits=Manager, polymorphic_identity='boss') @@ -190,19 +190,19 @@ def _generate_round_trip_test(include_base, lazy_relationship, person_attribute_name = 'name' employees = [ - Manager(status='AAB', manager_name='manager1', + Manager(status='AAB', manager_name='manager1', **{person_attribute_name:'pointy haired boss'}), - Engineer(status='BBA', engineer_name='engineer1', - primary_language='java', + Engineer(status='BBA', engineer_name='engineer1', + primary_language='java', **{person_attribute_name:'dilbert'}), ] if include_base: employees.append(Person(**{person_attribute_name:'joesmith'})) employees += [ - Engineer(status='CGG', engineer_name='engineer2', - primary_language='python', + Engineer(status='CGG', engineer_name='engineer2', + primary_language='python', **{person_attribute_name:'wally'}), - Manager(status='ABA', manager_name='manager2', + Manager(status='ABA', manager_name='manager2', **{person_attribute_name:'jsmith'}) ] @@ -222,7 +222,7 @@ def _generate_round_trip_test(include_base, lazy_relationship, session.expunge_all() eq_(session.query(Person).filter( - Person.person_id==dilbert.person_id).one(), + Person.person_id==dilbert.person_id).one(), dilbert) session.expunge_all() @@ -242,9 +242,9 @@ def _generate_round_trip_test(include_base, lazy_relationship, else: self.assert_sql_count(testing.db, go, 6) - # test selecting from the query, using the base + # test selecting from the query, using the base # mapped table (people) as the selection criterion. - # in the case of the polymorphic Person query, + # in the case of the polymorphic Person query, # the "people" selectable should be adapted to be "person_join" eq_( session.query(Person).filter( @@ -264,9 +264,9 @@ def _generate_round_trip_test(include_base, lazy_relationship, dilbert ) - # test selecting from the query, joining against + # test selecting from the query, joining against # an alias of the base "people" table. test that - # the "palias" alias does *not* get sucked up + # the "palias" alias does *not* get sucked up # into the "person_join" conversion. 
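The lazy_relationship flag in these generated tests toggles between the default per-access SELECT and eager loading; the difference in miniature (names illustrative):

    from sqlalchemy import (MetaData, Table, Column, Integer, String,
                            ForeignKey, create_engine)
    from sqlalchemy.orm import (mapper, relationship, create_session,
                                joinedload)

    metadata = MetaData()
    companies = Table('companies', metadata,
        Column('company_id', Integer, primary_key=True),
        Column('name', String(50)))
    people = Table('people', metadata,
        Column('person_id', Integer, primary_key=True),
        Column('company_id', Integer,
               ForeignKey('companies.company_id')),
        Column('name', String(50)))

    class Company(object): pass
    class Person(object): pass

    mapper(Person, people)
    mapper(Company, companies, properties={
        'employees': relationship(Person, lazy='select')})

    engine = create_engine('sqlite://', echo=True)
    metadata.create_all(engine)
    sess = create_session(bind=engine)
    c = Company()
    c.name = 'c1'
    p = Person()
    p.name = 'ed'
    c.employees.append(p)
    sess.add(c)
    sess.flush()
    sess.expunge_all()

    c = sess.query(Company).first()   # one SELECT for the company
    c.employees                       # lazy: a second SELECT fires here

    sess.expunge_all()
    c = sess.query(Company).options(
        joinedload('employees')).first()  # eager: one LEFT OUTER JOIN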
palias = people.alias("palias") dilbert = session.query(Person).get(dilbert.person_id) @@ -287,35 +287,35 @@ def _generate_round_trip_test(include_base, lazy_relationship, session.expunge_all() def go(): - session.query(Person).filter(getattr(Person, + session.query(Person).filter(getattr(Person, person_attribute_name)=='dilbert').first() self.assert_sql_count(testing.db, go, 1) session.expunge_all() - dilbert = session.query(Person).filter(getattr(Person, + dilbert = session.query(Person).filter(getattr(Person, person_attribute_name)=='dilbert').first() def go(): - # assert that only primary table is queried for + # assert that only primary table is queried for # already-present-in-session - d = session.query(Person).filter(getattr(Person, + d = session.query(Person).filter(getattr(Person, person_attribute_name)=='dilbert').first() self.assert_sql_count(testing.db, go, 1) # test standalone orphans - daboss = Boss(status='BBB', - manager_name='boss', - golf_swing='fore', + daboss = Boss(status='BBB', + manager_name='boss', + golf_swing='fore', **{person_attribute_name:'daboss'}) session.add(daboss) assert_raises(sa_exc.DBAPIError, session.flush) c = session.query(Company).first() daboss.company = c - manager_list = [e for e in c.employees + manager_list = [e for e in c.employees if isinstance(e, Manager)] session.flush() session.expunge_all() - eq_(session.query(Manager).order_by(Manager.person_id).all(), + eq_(session.query(Manager).order_by(Manager.person_id).all(), manager_list) c = session.query(Company).first() @@ -337,11 +337,11 @@ for lazy_relationship in [True, False]: for with_polymorphic in ['unions', 'joins', 'auto', 'none']: if with_polymorphic == 'unions': for include_base in [True, False]: - _generate_round_trip_test(include_base, - lazy_relationship, + _generate_round_trip_test(include_base, + lazy_relationship, redefine_colprop, with_polymorphic) else: - _generate_round_trip_test(False, - lazy_relationship, + _generate_round_trip_test(False, + lazy_relationship, redefine_colprop, with_polymorphic) diff --git a/test/orm/inheritance/test_polymorphic_rel.py b/test/orm/inheritance/test_polymorphic_rel.py index 792bd81099..f34b8439fe 100644 --- a/test/orm/inheritance/test_polymorphic_rel.py +++ b/test/orm/inheritance/test_polymorphic_rel.py @@ -42,7 +42,7 @@ class _PolymorphicTestBase(object): def test_loads_at_once(self): """ - Test that all objects load from the full query, when + Test that all objects load from the full query, when with_polymorphic is used. """ @@ -53,7 +53,7 @@ class _PolymorphicTestBase(object): self.assert_sql_count(testing.db, go, count) def test_primary_eager_aliasing_one(self): - # For both joinedload() and subqueryload(), if the original q is + # For both joinedload() and subqueryload(), if the original q is # not loading the subclass table, the joinedload doesn't happen. sess = create_session() @@ -92,7 +92,7 @@ class _PolymorphicTestBase(object): def test_get_one(self): """ - For all mappers, ensure the primary key has been calculated as + For all mappers, ensure the primary key has been calculated as just the "person_id" column. """ sess = create_session() @@ -438,7 +438,7 @@ class _PolymorphicTestBase(object): def test_join_from_columns_or_subclass_six(self): sess = create_session() if self.select_type == '': - # this now raises, due to [ticket:1892]. Manager.person_id + # this now raises, due to [ticket:1892]. Manager.person_id # is now the "person_id" column on Manager. SQL is incorrect. 
assert_raises( sa_exc.DBAPIError, @@ -447,8 +447,8 @@ class _PolymorphicTestBase(object): Manager.person_id == paperwork.c.person_id) .order_by(Person.name).all) elif self.select_type == 'Unions': - # with the union, not something anyone would really be using - # here, it joins to the full result set. This is 0.6's + # with the union, not something anyone would really be using + # here, it joins to the full result set. This is 0.6's # behavior and is more or less wrong. expected = [ (u'dilbert',), @@ -465,7 +465,7 @@ class _PolymorphicTestBase(object): .order_by(Person.name).all(), expected) else: - # when a join is present and managers.person_id is available, + # when a join is present and managers.person_id is available, # you get the managers. expected = [ (u'dogbert',), @@ -531,7 +531,7 @@ class _PolymorphicTestBase(object): def test_polymorphic_option(self): """ - Test that polymorphic loading sets state.load_path with its + Test that polymorphic loading sets state.load_path with its actual mapper on a subclass, and not the superclass mapper. """ @@ -558,7 +558,7 @@ class _PolymorphicTestBase(object): def test_expire(self): """ - Test that individual column refresh doesn't get tripped up by + Test that individual column refresh doesn't get tripped up by the select_table mapper. """ @@ -614,7 +614,7 @@ class _PolymorphicTestBase(object): def test_with_polymorphic_five(self): sess = create_session() def go(): - # limit the polymorphic join down to just "Person", + # limit the polymorphic join down to just "Person", # overriding select_table eq_(sess.query(Person) .with_polymorphic(Person).all(), @@ -633,7 +633,7 @@ class _PolymorphicTestBase(object): def test_with_polymorphic_seven(self): sess = create_session() - # compare to entities without related collections to prevent + # compare to entities without related collections to prevent # additional lazy SQL from firing on loaded entities eq_(sess.query(Person).with_polymorphic('*').all(), self._emps_wo_relationships_fixture()) @@ -687,8 +687,8 @@ class _PolymorphicTestBase(object): sess = create_session() def go(): - # currently, it doesn't matter if we say Company.employees, - # or Company.employees.of_type(Engineer). joinedloader + # currently, it doesn't matter if we say Company.employees, + # or Company.employees.of_type(Engineer). joinedloader # doesn't pick up on the "of_type()" as of yet. eq_(sess.query(Company) .options(joinedload_all( @@ -696,8 +696,8 @@ class _PolymorphicTestBase(object): Engineer.machines)) .all(), expected) - # in the case of select_type='', the joinedload - # doesn't take in this case; it joinedloads company->people, + # in the case of select_type='', the joinedload + # doesn't take in this case; it joinedloads company->people, # then a load for each of 5 rows, then lazyload of "machines" count = {'':7, 'Polymorphic':1}.get(self.select_type, 2) self.assert_sql_count(testing.db, go, count) @@ -1011,8 +1011,8 @@ class _PolymorphicTestBase(object): .filter(Engineer.engineer_name == 'vlad').one(), c2) - # same, using explicit join condition. Query.join() must - # adapt the on clause here to match the subquery wrapped around + # same, using explicit join condition. Query.join() must + # adapt the on clause here to match the subquery wrapped around # "people join engineers". 
eq_(sess.query(Company) .join(Engineer, Company.company_id == Engineer.company_id) @@ -1055,10 +1055,10 @@ class _PolymorphicTestBase(object): expected) def test_nesting_queries(self): - # query.statement places a flag "no_adapt" on the returned - # statement. This prevents the polymorphic adaptation in the - # second "filter" from hitting it, which would pollute the - # subquery and usually results in recursion overflow errors + # query.statement places a flag "no_adapt" on the returned + # statement. This prevents the polymorphic adaptation in the + # second "filter" from hitting it, which would pollute the + # subquery and usually results in recursion overflow errors # within the adaption. sess = create_session() subq = (sess.query(engineers.c.person_id) @@ -1230,8 +1230,8 @@ class _PolymorphicTestBase(object): #def test_mixed_entities(self): # sess = create_session() - # TODO: I think raise error on these for now. different - # inheritance/loading schemes have different results here, + # TODO: I think raise error on these for now. different + # inheritance/loading schemes have different results here, # all incorrect # # eq_( @@ -1241,8 +1241,8 @@ class _PolymorphicTestBase(object): #def test_mixed_entities(self): # sess = create_session() # eq_(sess.query( - # Person.name, - # Engineer.primary_language, + # Person.name, + # Engineer.primary_language, # Manager.manager_name) # .all(), # []) diff --git a/test/orm/inheritance/test_relationship.py b/test/orm/inheritance/test_relationship.py index 8db5f6b3b2..a4e19b988b 100644 --- a/test/orm/inheritance/test_relationship.py +++ b/test/orm/inheritance/test_relationship.py @@ -562,7 +562,7 @@ class SelfReferentialM2MTest(fixtures.MappedTest, AssertsCompiledSQL): sess.add(c1) sess.flush() - # test that the splicing of the join works here, doesn't break in + # test that the splicing of the join works here, doesn't break in # the middle of "parent join child1" q = sess.query(Child1).options(joinedload('left_child2')) self.assert_compile(q.limit(1).with_labels().statement, diff --git a/test/orm/inheritance/test_single.py b/test/orm/inheritance/test_single.py index d05551ef4a..774626c48e 100644 --- a/test/orm/inheritance/test_single.py +++ b/test/orm/inheritance/test_single.py @@ -93,7 +93,7 @@ class SingleInheritanceTest(testing.AssertsCompiledSQL, fixtures.MappedTest): ealias = aliased(Engineer) eq_( - session.query(Manager, ealias).all(), + session.query(Manager, ealias).all(), [(m1, e1), (m1, e2)] ) @@ -124,7 +124,7 @@ class SingleInheritanceTest(testing.AssertsCompiledSQL, fixtures.MappedTest): # TODO: I think raise error on this for now # self.assertEquals( - # session.query(Employee.name, Manager.manager_data, Engineer.engineer_info).all(), + # session.query(Employee.name, Manager.manager_data, Engineer.engineer_info).all(), # [] # ) @@ -169,7 +169,7 @@ class SingleInheritanceTest(testing.AssertsCompiledSQL, fixtures.MappedTest): sess.flush() eq_( - sess.query(Manager).select_from(employees.select().limit(10)).all(), + sess.query(Manager).select_from(employees.select().limit(10)).all(), [m1, m2] ) @@ -389,7 +389,7 @@ class RelationshipToSingleTest(testing.AssertsCompiledSQL, fixtures.MappedTest): "SELECT companies.company_id AS companies_company_id, " "companies.name AS companies_name, employees.name AS employees_name " "FROM companies LEFT OUTER JOIN employees ON companies.company_id " - "= employees.company_id AND employees.type IN (:type_1)" + "= employees.company_id AND employees.type IN (:type_1)" ) def test_outer_join_alias(self): @@ 
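Per the comment in this hunk, Query.statement returns the SELECT with a flag that shields it from later polymorphic adaptation when nested inside another query; the nesting mechanism itself looks like this (a plain, non-polymorphic sketch with illustrative names):

    from sqlalchemy import MetaData, Table, Column, Integer, String
    from sqlalchemy.orm import mapper, create_session

    metadata = MetaData()
    users = Table('users', metadata,
        Column('id', Integer, primary_key=True),
        Column('name', String(50)))

    class User(object): pass
    mapper(User, users)

    sess = create_session()
    subq = sess.query(users.c.id).filter(
        users.c.name == 'ed').statement
    # the inner statement is embedded verbatim in the outer query
    print(sess.query(User).filter(User.id.in_(subq)))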
-450,7 +450,7 @@ class RelationshipToSingleTest(testing.AssertsCompiledSQL, fixtures.MappedTest): eq_(c2.engineers, [e1]) sess.expunge_all() - eq_(sess.query(Company).order_by(Company.name).all(), + eq_(sess.query(Company).order_by(Company.name).all(), [ Company(name='c1', engineers=[JuniorEngineer(name='Ed')]), Company(name='c2', engineers=[Engineer(name='Kurt')]) @@ -459,7 +459,7 @@ class RelationshipToSingleTest(testing.AssertsCompiledSQL, fixtures.MappedTest): # eager load join should limit to only "Engineer" sess.expunge_all() - eq_(sess.query(Company).options(joinedload('engineers')).order_by(Company.name).all(), + eq_(sess.query(Company).options(joinedload('engineers')).order_by(Company.name).all(), [ Company(name='c1', engineers=[JuniorEngineer(name='Ed')]), Company(name='c2', engineers=[Engineer(name='Kurt')]) diff --git a/test/orm/test_assorted_eager.py b/test/orm/test_assorted_eager.py index 2eddfde9c2..dded00256f 100644 --- a/test/orm/test_assorted_eager.py +++ b/test/orm/test_assorted_eager.py @@ -2,7 +2,7 @@ Derived from mailing list-reported problems and trac tickets. -These are generally very old 0.1-era tests and at some point should +These are generally very old 0.1-era tests and at some point should be cleaned up and modernized. """ diff --git a/test/orm/test_attributes.py b/test/orm/test_attributes.py index e94afdb1be..4702cc231b 100644 --- a/test/orm/test_attributes.py +++ b/test/orm/test_attributes.py @@ -45,7 +45,7 @@ class AttributeImplAPITest(fixtures.MappedTest): b2 = B() A.b.impl.append( - attributes.instance_state(a1), + attributes.instance_state(a1), attributes.instance_dict(a1), b1, None ) @@ -56,7 +56,7 @@ class AttributeImplAPITest(fixtures.MappedTest): "Object not " "associated with on attribute 'b'", A.b.impl.remove, - attributes.instance_state(a1), + attributes.instance_state(a1), attributes.instance_dict(a1), b2, None ) @@ -68,14 +68,14 @@ class AttributeImplAPITest(fixtures.MappedTest): b2 = B() A.b.impl.append( - attributes.instance_state(a1), + attributes.instance_state(a1), attributes.instance_dict(a1), b1, None ) assert a1.b is b1 A.b.impl.pop( - attributes.instance_state(a1), + attributes.instance_state(a1), attributes.instance_dict(a1), b2, None ) assert a1.b is b1 @@ -87,14 +87,14 @@ class AttributeImplAPITest(fixtures.MappedTest): b1 = B() A.b.impl.append( - attributes.instance_state(a1), + attributes.instance_state(a1), attributes.instance_dict(a1), b1, None ) assert a1.b is b1 A.b.impl.pop( - attributes.instance_state(a1), + attributes.instance_state(a1), attributes.instance_dict(a1), b1, None ) assert a1.b is None @@ -107,7 +107,7 @@ class AttributeImplAPITest(fixtures.MappedTest): b2 = B() A.b.impl.append( - attributes.instance_state(a1), + attributes.instance_state(a1), attributes.instance_dict(a1), b1, None ) @@ -117,7 +117,7 @@ class AttributeImplAPITest(fixtures.MappedTest): ValueError, r"list.remove\(x\): x not in list", A.b.impl.remove, - attributes.instance_state(a1), + attributes.instance_state(a1), attributes.instance_dict(a1), b2, None ) @@ -129,14 +129,14 @@ class AttributeImplAPITest(fixtures.MappedTest): b2 = B() A.b.impl.append( - attributes.instance_state(a1), + attributes.instance_state(a1), attributes.instance_dict(a1), b1, None ) assert a1.b == [b1] A.b.impl.pop( - attributes.instance_state(a1), + attributes.instance_state(a1), attributes.instance_dict(a1), b2, None ) assert a1.b == [b1] @@ -148,14 +148,14 @@ class AttributeImplAPITest(fixtures.MappedTest): b1 = B() A.b.impl.append( - attributes.instance_state(a1), + 
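RelationshipToSingleTest checks that a relationship targeting a single-table-inheritance subclass folds the discriminator criterion into the join's ON clause, as in the compiled SQL asserted above; a sketch (names illustrative):

    from sqlalchemy import (MetaData, Table, Column, Integer, String,
                            ForeignKey)
    from sqlalchemy.orm import mapper, relationship, create_session

    metadata = MetaData()
    companies = Table('companies', metadata,
        Column('company_id', Integer, primary_key=True),
        Column('name', String(50)))
    employees = Table('employees', metadata,
        Column('employee_id', Integer, primary_key=True),
        Column('company_id', Integer,
               ForeignKey('companies.company_id')),
        Column('name', String(50)),
        Column('type', String(20)))

    class Company(object): pass
    class Employee(object): pass
    class Engineer(Employee): pass   # single-table: no table of its own

    mapper(Employee, employees, polymorphic_on=employees.c.type,
           polymorphic_identity='employee')
    mapper(Engineer, inherits=Employee, polymorphic_identity='engineer')
    mapper(Company, companies, properties={
        'engineers': relationship(Engineer)})

    # renders ... ON companies.company_id = employees.company_id
    #             AND employees.type IN (:type_1)
    print(create_session().query(Company).outerjoin(Company.engineers))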
attributes.instance_state(a1), attributes.instance_dict(a1), b1, None ) assert a1.b == [b1] A.b.impl.pop( - attributes.instance_state(a1), + attributes.instance_state(a1), attributes.instance_dict(a1), b1, None ) assert a1.b == [] @@ -299,9 +299,9 @@ class AttributesTest(fixtures.ORMTest): instrumentation.register_class(Foo) instrumentation.register_class(Bar) - attributes.register_attribute(Foo, - 'bars', - uselist=True, + attributes.register_attribute(Foo, + 'bars', + uselist=True, useobject=True) assert_raises_message( @@ -466,20 +466,20 @@ class AttributesTest(fixtures.ORMTest): return attributes.PASSIVE_NO_RESULT return b2 - attributes.register_attribute(Foo, 'bars', - uselist=True, - useobject=True, + attributes.register_attribute(Foo, 'bars', + uselist=True, + useobject=True, callable_=loadcollection, extension=[ReceiveEvents('bars')]) - attributes.register_attribute(Foo, 'bar', - uselist=False, - useobject=True, + attributes.register_attribute(Foo, 'bar', + uselist=False, + useobject=True, callable_=loadscalar, extension=[ReceiveEvents('bar')]) - attributes.register_attribute(Foo, 'scalar', - uselist=False, + attributes.register_attribute(Foo, 'scalar', + uselist=False, useobject=False, extension=[ReceiveEvents('scalar')]) @@ -610,7 +610,7 @@ class AttributesTest(fixtures.ORMTest): ]) def test_lazytrackparent(self): - """test that the "hasparent" flag works properly + """test that the "hasparent" flag works properly when lazy loaders and backrefs are used """ @@ -631,9 +631,9 @@ class AttributesTest(fixtures.ORMTest): # create objects as if they'd been freshly loaded from the database (without history) b = Blog() p1 = Post() - attributes.instance_state(b).set_callable(attributes.instance_dict(b), + attributes.instance_state(b).set_callable(attributes.instance_dict(b), 'posts', lambda passive:[p1]) - attributes.instance_state(p1).set_callable(attributes.instance_dict(p1), + attributes.instance_state(p1).set_callable(attributes.instance_dict(p1), 'blog', lambda passive:b) p1, attributes.instance_state(b).commit_all(attributes.instance_dict(b)) @@ -687,11 +687,11 @@ class AttributesTest(fixtures.ORMTest): return "this is the bar attr" def func3(state, passive): return "this is the shared attr" - attributes.register_attribute(Foo, 'element', uselist=False, + attributes.register_attribute(Foo, 'element', uselist=False, callable_=func1, useobject=True) - attributes.register_attribute(Foo, 'element2', uselist=False, + attributes.register_attribute(Foo, 'element2', uselist=False, callable_=func3, useobject=True) - attributes.register_attribute(Bar, 'element', uselist=False, + attributes.register_attribute(Bar, 'element', uselist=False, callable_=func2, useobject=True) x = Foo() @@ -945,12 +945,12 @@ class GetNoValueTest(fixtures.ORMTest): instrumentation.register_class(Foo) instrumentation.register_class(Bar) if expected is not None: - attributes.register_attribute(Foo, - "attr", useobject=True, + attributes.register_attribute(Foo, + "attr", useobject=True, uselist=False, callable_=lazy_callable) else: - attributes.register_attribute(Foo, - "attr", useobject=True, + attributes.register_attribute(Foo, + "attr", useobject=True, uselist=False) f1 = Foo() @@ -1102,7 +1102,7 @@ class BackrefTest(fixtures.ORMTest): instrumentation.register_class(Port) instrumentation.register_class(Jack) - attributes.register_attribute(Port, 'jack', uselist=False, + attributes.register_attribute(Port, 'jack', uselist=False, useobject=True, backref="port") attributes.register_attribute(Jack, 'port', 
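These tests drive the attribute instrumentation API directly, with no mappers or tables involved; backrefs are wired at this level as well. A minimal sketch mirroring the Port/Jack case:

    from sqlalchemy.orm import attributes, instrumentation

    class Port(object): pass
    class Jack(object): pass

    instrumentation.register_class(Port)
    instrumentation.register_class(Jack)

    # scalar object attributes wired as a mutual backref, no ORM mapping
    attributes.register_attribute(Port, 'jack', uselist=False,
                                  useobject=True, backref='port')
    attributes.register_attribute(Jack, 'port', uselist=False,
                                  useobject=True, backref='jack')

    p, j = Port(), Jack()
    p.jack = j
    assert j.port is p   # the backref fired in both directions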
uselist=False, @@ -1275,7 +1275,7 @@ class PendingBackrefTest(fixtures.ORMTest): p4.blog = b assert called[0] == 0 eq_(attributes.instance_state(b). - get_history('posts', attributes.PASSIVE_OFF), + get_history('posts', attributes.PASSIVE_OFF), ([p, p4], [p1, p2, p3], [])) assert called[0] == 1 @@ -1298,7 +1298,7 @@ class PendingBackrefTest(fixtures.ORMTest): lazy_load = [p, p2] # lazy loaded + pending get added together. # This isn't seen often with the ORM due - # to usual practices surrounding the + # to usual practices surrounding the # load/flush/load cycle. eq_(b.posts, [p, p2, p]) eq_(called[0], 1) @@ -1343,7 +1343,7 @@ class HistoryTest(fixtures.TestBase): instrumentation.register_class(Foo) attributes.register_attribute( - Foo, 'someattr', + Foo, 'someattr', uselist=uselist, useobject=useobject, active_history=active_history, @@ -1379,20 +1379,20 @@ class HistoryTest(fixtures.TestBase): attributes.instance_dict(f)) def test_committed_value_init(self): - Foo = self._fixture(uselist=False, useobject=False, + Foo = self._fixture(uselist=False, useobject=False, active_history=False) f = Foo() eq_(self._someattr_committed_state(f), None) def test_committed_value_set(self): - Foo = self._fixture(uselist=False, useobject=False, + Foo = self._fixture(uselist=False, useobject=False, active_history=False) f = Foo() f.someattr = 3 eq_(self._someattr_committed_state(f), None) def test_committed_value_set_commit(self): - Foo = self._fixture(uselist=False, useobject=False, + Foo = self._fixture(uselist=False, useobject=False, active_history=False) f = Foo() f.someattr = 3 @@ -1400,25 +1400,25 @@ class HistoryTest(fixtures.TestBase): eq_(self._someattr_committed_state(f), 3) def test_scalar_init(self): - Foo = self._fixture(uselist=False, useobject=False, + Foo = self._fixture(uselist=False, useobject=False, active_history=False) f = Foo() eq_(self._someattr_history(f), ((), (), ())) def test_object_init(self): - Foo = self._fixture(uselist=False, useobject=True, + Foo = self._fixture(uselist=False, useobject=True, active_history=False) f = Foo() eq_(self._someattr_history(f), ((), (), ())) def test_object_init_active_history(self): - Foo = self._fixture(uselist=False, useobject=True, + Foo = self._fixture(uselist=False, useobject=True, active_history=True) f = Foo() eq_(self._someattr_history(f), ((), (), ())) def test_scalar_no_init_side_effect(self): - Foo = self._fixture(uselist=False, useobject=False, + Foo = self._fixture(uselist=False, useobject=False, active_history=False) f = Foo() self._someattr_history(f) @@ -1427,14 +1427,14 @@ class HistoryTest(fixtures.TestBase): assert 'someattr' not in attributes.instance_state(f).committed_state def test_scalar_set(self): - Foo = self._fixture(uselist=False, useobject=False, + Foo = self._fixture(uselist=False, useobject=False, active_history=False) f = Foo() f.someattr = 'hi' eq_(self._someattr_history(f), (['hi'], (), ())) def test_scalar_set_commit(self): - Foo = self._fixture(uselist=False, useobject=False, + Foo = self._fixture(uselist=False, useobject=False, active_history=False) f = Foo() f.someattr = 'hi' @@ -1442,7 +1442,7 @@ class HistoryTest(fixtures.TestBase): eq_(self._someattr_history(f), ((), ['hi'], ())) def test_scalar_set_commit_reset(self): - Foo = self._fixture(uselist=False, useobject=False, + Foo = self._fixture(uselist=False, useobject=False, active_history=False) f = Foo() f.someattr = 'hi' @@ -1451,7 +1451,7 @@ class HistoryTest(fixtures.TestBase): eq_(self._someattr_history(f), (['there'], (), ['hi'])) def 
test_scalar_set_commit_reset_commit(self): - Foo = self._fixture(uselist=False, useobject=False, + Foo = self._fixture(uselist=False, useobject=False, active_history=False) f = Foo() f.someattr = 'hi' @@ -1461,7 +1461,7 @@ class HistoryTest(fixtures.TestBase): eq_(self._someattr_history(f), ((), ['there'], ())) def test_scalar_set_commit_reset_commit_del(self): - Foo = self._fixture(uselist=False, useobject=False, + Foo = self._fixture(uselist=False, useobject=False, active_history=False) f = Foo() f.someattr = 'there' @@ -1470,14 +1470,14 @@ class HistoryTest(fixtures.TestBase): eq_(self._someattr_history(f), ((), (), ['there'])) def test_scalar_set_dict(self): - Foo = self._fixture(uselist=False, useobject=False, + Foo = self._fixture(uselist=False, useobject=False, active_history=False) f = Foo() f.__dict__['someattr'] = 'new' eq_(self._someattr_history(f), ((), ['new'], ())) def test_scalar_set_dict_set(self): - Foo = self._fixture(uselist=False, useobject=False, + Foo = self._fixture(uselist=False, useobject=False, active_history=False) f = Foo() f.__dict__['someattr'] = 'new' @@ -1486,7 +1486,7 @@ class HistoryTest(fixtures.TestBase): eq_(self._someattr_history(f), (['old'], (), ['new'])) def test_scalar_set_dict_set_commit(self): - Foo = self._fixture(uselist=False, useobject=False, + Foo = self._fixture(uselist=False, useobject=False, active_history=False) f = Foo() f.__dict__['someattr'] = 'new' @@ -1496,14 +1496,14 @@ class HistoryTest(fixtures.TestBase): eq_(self._someattr_history(f), ((), ['old'], ())) def test_scalar_set_None(self): - Foo = self._fixture(uselist=False, useobject=False, + Foo = self._fixture(uselist=False, useobject=False, active_history=False) f = Foo() f.someattr = None eq_(self._someattr_history(f), ([None], (), ())) def test_scalar_set_None_from_dict_set(self): - Foo = self._fixture(uselist=False, useobject=False, + Foo = self._fixture(uselist=False, useobject=False, active_history=False) f = Foo() f.__dict__['someattr'] = 'new' @@ -1511,7 +1511,7 @@ class HistoryTest(fixtures.TestBase): eq_(self._someattr_history(f), ([None], (), ['new'])) def test_scalar_set_twice_no_commit(self): - Foo = self._fixture(uselist=False, useobject=False, + Foo = self._fixture(uselist=False, useobject=False, active_history=False) f = Foo() f.someattr = 'one' @@ -1520,13 +1520,13 @@ class HistoryTest(fixtures.TestBase): eq_(self._someattr_history(f), (['two'], (), ())) def test_scalar_active_init(self): - Foo = self._fixture(uselist=False, useobject=False, + Foo = self._fixture(uselist=False, useobject=False, active_history=True) f = Foo() eq_(self._someattr_history(f), ((), (), ())) def test_scalar_active_no_init_side_effect(self): - Foo = self._fixture(uselist=False, useobject=False, + Foo = self._fixture(uselist=False, useobject=False, active_history=True) f = Foo() self._someattr_history(f) @@ -1535,14 +1535,14 @@ class HistoryTest(fixtures.TestBase): assert 'someattr' not in attributes.instance_state(f).committed_state def test_scalar_active_set(self): - Foo = self._fixture(uselist=False, useobject=False, + Foo = self._fixture(uselist=False, useobject=False, active_history=True) f = Foo() f.someattr = 'hi' eq_(self._someattr_history(f), (['hi'], (), ())) def test_scalar_active_set_commit(self): - Foo = self._fixture(uselist=False, useobject=False, + Foo = self._fixture(uselist=False, useobject=False, active_history=True) f = Foo() f.someattr = 'hi' @@ -1550,7 +1550,7 @@ class HistoryTest(fixtures.TestBase): eq_(self._someattr_history(f), ((), ['hi'], ())) def 
test_scalar_active_set_commit_reset(self): - Foo = self._fixture(uselist=False, useobject=False, + Foo = self._fixture(uselist=False, useobject=False, active_history=True) f = Foo() f.someattr = 'hi' @@ -1559,7 +1559,7 @@ class HistoryTest(fixtures.TestBase): eq_(self._someattr_history(f), (['there'], (), ['hi'])) def test_scalar_active_set_commit_reset_commit(self): - Foo = self._fixture(uselist=False, useobject=False, + Foo = self._fixture(uselist=False, useobject=False, active_history=True) f = Foo() f.someattr = 'hi' @@ -1569,7 +1569,7 @@ class HistoryTest(fixtures.TestBase): eq_(self._someattr_history(f), ((), ['there'], ())) def test_scalar_active_set_commit_reset_commit_del(self): - Foo = self._fixture(uselist=False, useobject=False, + Foo = self._fixture(uselist=False, useobject=False, active_history=True) f = Foo() f.someattr = 'there' @@ -1578,14 +1578,14 @@ class HistoryTest(fixtures.TestBase): eq_(self._someattr_history(f), ((), (), ['there'])) def test_scalar_active_set_dict(self): - Foo = self._fixture(uselist=False, useobject=False, + Foo = self._fixture(uselist=False, useobject=False, active_history=True) f = Foo() f.__dict__['someattr'] = 'new' eq_(self._someattr_history(f), ((), ['new'], ())) def test_scalar_active_set_dict_set(self): - Foo = self._fixture(uselist=False, useobject=False, + Foo = self._fixture(uselist=False, useobject=False, active_history=True) f = Foo() f.__dict__['someattr'] = 'new' @@ -1594,7 +1594,7 @@ class HistoryTest(fixtures.TestBase): eq_(self._someattr_history(f), (['old'], (), ['new'])) def test_scalar_active_set_dict_set_commit(self): - Foo = self._fixture(uselist=False, useobject=False, + Foo = self._fixture(uselist=False, useobject=False, active_history=True) f = Foo() f.__dict__['someattr'] = 'new' @@ -1604,14 +1604,14 @@ class HistoryTest(fixtures.TestBase): eq_(self._someattr_history(f), ((), ['old'], ())) def test_scalar_active_set_None(self): - Foo = self._fixture(uselist=False, useobject=False, + Foo = self._fixture(uselist=False, useobject=False, active_history=True) f = Foo() f.someattr = None eq_(self._someattr_history(f), ([None], (), ())) def test_scalar_active_set_None_from_dict_set(self): - Foo = self._fixture(uselist=False, useobject=False, + Foo = self._fixture(uselist=False, useobject=False, active_history=True) f = Foo() f.__dict__['someattr'] = 'new' @@ -1619,7 +1619,7 @@ class HistoryTest(fixtures.TestBase): eq_(self._someattr_history(f), ([None], (), ['new'])) def test_scalar_active_set_twice_no_commit(self): - Foo = self._fixture(uselist=False, useobject=False, + Foo = self._fixture(uselist=False, useobject=False, active_history=True) f = Foo() f.someattr = 'one' @@ -1629,14 +1629,14 @@ class HistoryTest(fixtures.TestBase): def test_mutable_scalar_init(self): - Foo = self._fixture(uselist=False, useobject=False, + Foo = self._fixture(uselist=False, useobject=False, active_history=False, mutable_scalars=True,copy_function=dict) f = Foo() eq_(self._someattr_history(f), ((), (), ())) def test_mutable_scalar_no_init_side_effect(self): - Foo = self._fixture(uselist=False, useobject=False, + Foo = self._fixture(uselist=False, useobject=False, active_history=False, mutable_scalars=True,copy_function=dict) f = Foo() @@ -1645,7 +1645,7 @@ class HistoryTest(fixtures.TestBase): assert 'someattr' not in attributes.instance_state(f).committed_state def test_mutable_scalar_set(self): - Foo = self._fixture(uselist=False, useobject=False, + Foo = self._fixture(uselist=False, useobject=False, active_history=False, 
mutable_scalars=True,copy_function=dict) f = Foo() @@ -1653,7 +1653,7 @@ class HistoryTest(fixtures.TestBase): eq_(self._someattr_history(f), ([{'foo': 'hi'}], (), ())) def test_mutable_scalar_set_commit(self): - Foo = self._fixture(uselist=False, useobject=False, + Foo = self._fixture(uselist=False, useobject=False, active_history=False, mutable_scalars=True,copy_function=dict) f = Foo() @@ -1664,7 +1664,7 @@ class HistoryTest(fixtures.TestBase): {'foo': 'hi'}) def test_mutable_scalar_set_commit_reset(self): - Foo = self._fixture(uselist=False, useobject=False, + Foo = self._fixture(uselist=False, useobject=False, active_history=False, mutable_scalars=True,copy_function=dict) f = Foo() @@ -1676,7 +1676,7 @@ class HistoryTest(fixtures.TestBase): 'someattr'), ([{'foo': 'there'}], (), [{'foo': 'hi'}])) def test_mutable_scalar_set_commit_reset_commit(self): - Foo = self._fixture(uselist=False, useobject=False, + Foo = self._fixture(uselist=False, useobject=False, active_history=False, mutable_scalars=True,copy_function=dict) f = Foo() @@ -1687,7 +1687,7 @@ class HistoryTest(fixtures.TestBase): eq_(self._someattr_history(f), ((), [{'foo': 'there'}], ())) def test_mutable_scalar_set_dict(self): - Foo = self._fixture(uselist=False, useobject=False, + Foo = self._fixture(uselist=False, useobject=False, active_history=False, mutable_scalars=True,copy_function=dict) f = Foo() @@ -1695,7 +1695,7 @@ class HistoryTest(fixtures.TestBase): eq_(self._someattr_history(f), ((), [{'foo': 'new'}], ())) def test_mutable_scalar_set_dict_set(self): - Foo = self._fixture(uselist=False, useobject=False, + Foo = self._fixture(uselist=False, useobject=False, active_history=False, mutable_scalars=True,copy_function=dict) f = Foo() @@ -1705,7 +1705,7 @@ class HistoryTest(fixtures.TestBase): eq_(self._someattr_history(f), ([{'foo': 'old'}], (), [{'foo': 'new'}])) def test_mutable_scalar_set_dict_set_commit(self): - Foo = self._fixture(uselist=False, useobject=False, + Foo = self._fixture(uselist=False, useobject=False, active_history=False, mutable_scalars=True,copy_function=dict) f = Foo() @@ -1715,14 +1715,14 @@ class HistoryTest(fixtures.TestBase): eq_(self._someattr_history(f), ((), [{'foo': 'old'}], ())) def test_scalar_inplace_mutation_set(self): - Foo = self._fixture(uselist=False, useobject=False, + Foo = self._fixture(uselist=False, useobject=False, active_history=False) f = Foo() f.someattr = {'a': 'b'} eq_(self._someattr_history(f), ([{'a': 'b'}], (), ())) def test_scalar_inplace_mutation_set_commit(self): - Foo = self._fixture(uselist=False, useobject=False, + Foo = self._fixture(uselist=False, useobject=False, active_history=False) f = Foo() f.someattr = {'a': 'b'} @@ -1730,7 +1730,7 @@ class HistoryTest(fixtures.TestBase): eq_(self._someattr_history(f), ((), [{'a': 'b'}], ())) def test_scalar_inplace_mutation_set_commit_set(self): - Foo = self._fixture(uselist=False, useobject=False, + Foo = self._fixture(uselist=False, useobject=False, active_history=False) f = Foo() f.someattr = {'a': 'b'} @@ -1739,7 +1739,7 @@ class HistoryTest(fixtures.TestBase): eq_(self._someattr_history(f), ((), [{'a': 'c'}], ())) def test_scalar_inplace_mutation_set_commit_flag_modified(self): - Foo = self._fixture(uselist=False, useobject=False, + Foo = self._fixture(uselist=False, useobject=False, active_history=False) f = Foo() f.someattr = {'a': 'b'} @@ -1748,7 +1748,7 @@ class HistoryTest(fixtures.TestBase): eq_(self._someattr_history(f), ([{'a': 'b'}], (), ())) def 
test_scalar_inplace_mutation_set_commit_set_flag_modified(self): - Foo = self._fixture(uselist=False, useobject=False, + Foo = self._fixture(uselist=False, useobject=False, active_history=False) f = Foo() f.someattr = {'a': 'b'} @@ -1758,7 +1758,7 @@ class HistoryTest(fixtures.TestBase): eq_(self._someattr_history(f), ([{'a': 'c'}], (), ())) def test_scalar_inplace_mutation_set_commit_flag_modified_set(self): - Foo = self._fixture(uselist=False, useobject=False, + Foo = self._fixture(uselist=False, useobject=False, active_history=False) f = Foo() f.someattr = {'a': 'b'} @@ -2439,8 +2439,8 @@ class ListenerTest(fixtures.ORMTest): (make_a, make_b), (make_b, make_c) ] - elements = [make_a, make_b, make_c, - instrument_a, instrument_b, instrument_c, + elements = [make_a, make_b, make_c, + instrument_a, instrument_b, instrument_c, attr_a, attr_b, attr_c, events_a] for i, series in enumerate(all_partial_orderings(ordering, elements)): diff --git a/test/orm/test_backref_mutations.py b/test/orm/test_backref_mutations.py index c633cb8eec..ce635e61c8 100644 --- a/test/orm/test_backref_mutations.py +++ b/test/orm/test_backref_mutations.py @@ -121,7 +121,7 @@ class O2MCollectionTest(_fixtures.FixtureTest): # backref fires assert a1.user is u2 - # everything expires, no changes in + # everything expires, no changes in # u1.addresses, so all is fine sess.commit() assert a1 not in u1.addresses @@ -143,7 +143,7 @@ class O2MCollectionTest(_fixtures.FixtureTest): u1.addresses # direct set - the "old" is "fetched", - # but only from the local session - not the + # but only from the local session - not the # database, due to the PASSIVE_NO_FETCH flag. # this is a more fine grained behavior introduced # in 0.6 @@ -207,7 +207,7 @@ class O2MCollectionTest(_fixtures.FixtureTest): sess.add_all([u1, u2, a1]) sess.commit() - # direct set - the fetching of the + # direct set - the fetching of the # "old" u1 here allows the backref # to remove it from the addresses collection a1.user = u2 @@ -230,7 +230,7 @@ class O2MCollectionTest(_fixtures.FixtureTest): # u1.addresses is loaded u1.addresses - # direct set - the fetching of the + # direct set - the fetching of the # "old" u1 here allows the backref # to remove it from the addresses collection a1.user = u2 @@ -455,8 +455,8 @@ class O2OScalarOrphanTest(_fixtures.FixtureTest): mapper(Address, addresses) mapper(User, users, properties = { - 'address':relationship(Address, uselist=False, - backref=backref('user', single_parent=True, + 'address':relationship(Address, uselist=False, + backref=backref('user', single_parent=True, cascade="all, delete-orphan")) }) @@ -491,7 +491,7 @@ class M2MCollectionMoveTest(_fixtures.FixtureTest): cls.classes.Item) mapper(Item, items, properties={ - 'keywords':relationship(Keyword, secondary=item_keywords, + 'keywords':relationship(Keyword, secondary=item_keywords, backref='items') }) mapper(Keyword, keywords) @@ -603,8 +603,8 @@ class M2MScalarMoveTest(_fixtures.FixtureTest): cls.classes.Item) mapper(Item, items, properties={ - 'keyword':relationship(Keyword, secondary=item_keywords, - uselist=False, + 'keyword':relationship(Keyword, secondary=item_keywords, + uselist=False, backref=backref("item", uselist=False)) }) mapper(Keyword, keywords) @@ -718,7 +718,7 @@ class M2MStaleBackrefTest(_fixtures.FixtureTest): cls.classes.Item) mapper(Item, items, properties={ - 'keywords':relationship(Keyword, secondary=item_keywords, + 'keywords':relationship(Keyword, secondary=item_keywords, backref='items') }) mapper(Keyword, keywords) diff --git 
a/test/orm/test_cascade.py b/test/orm/test_cascade.py index ebd1e4476e..ab547f92ac 100644 --- a/test/orm/test_cascade.py +++ b/test/orm/test_cascade.py @@ -64,7 +64,7 @@ class CascadeArgTest(fixtures.MappedTest): def test_cascade_immutable(self): assert isinstance( - orm_util.CascadeOptions("all, delete-orphan"), + orm_util.CascadeOptions("all, delete-orphan"), frozenset) class O2MCascadeDeleteOrphanTest(fixtures.MappedTest): @@ -387,7 +387,7 @@ class O2MCascadeTest(fixtures.MappedTest): @classmethod def setup_mappers(cls): users, User, Address, addresses = ( - cls.tables.users, cls.classes.User, + cls.tables.users, cls.classes.User, cls.classes.Address, cls.tables.addresses) mapper(Address, addresses) @@ -507,8 +507,8 @@ class NoSaveCascadeFlushTest(_fixtures.FixtureTest): run_inserts = None - def _one_to_many_fixture(self, o2m_cascade=True, - m2o_cascade=True, + def _one_to_many_fixture(self, o2m_cascade=True, + m2o_cascade=True, o2m=False, m2o=False, o2m_cascade_backrefs=True, @@ -522,10 +522,10 @@ class NoSaveCascadeFlushTest(_fixtures.FixtureTest): if o2m: if m2o: addresses_rel = {'addresses':relationship( - Address, + Address, cascade_backrefs=o2m_cascade_backrefs, cascade=o2m_cascade and 'save-update' or '', - backref=backref('user', + backref=backref('user', cascade=m2o_cascade and 'save-update' or '', cascade_backrefs=m2o_cascade_backrefs ) @@ -533,7 +533,7 @@ class NoSaveCascadeFlushTest(_fixtures.FixtureTest): else: addresses_rel = {'addresses':relationship( - Address, + Address, cascade=o2m_cascade and 'save-update' or '', cascade_backrefs=o2m_cascade_backrefs, )} @@ -551,8 +551,8 @@ class NoSaveCascadeFlushTest(_fixtures.FixtureTest): mapper(User, users, properties=addresses_rel) mapper(Address, addresses, properties=user_rel) - def _many_to_many_fixture(self, fwd_cascade=True, - bkd_cascade=True, + def _many_to_many_fixture(self, fwd_cascade=True, + bkd_cascade=True, fwd=False, bkd=False, fwd_cascade_backrefs=True, @@ -567,11 +567,11 @@ class NoSaveCascadeFlushTest(_fixtures.FixtureTest): if fwd: if bkd: keywords_rel = {'keywords':relationship( - Keyword, + Keyword, secondary=item_keywords, cascade_backrefs=fwd_cascade_backrefs, cascade=fwd_cascade and 'save-update' or '', - backref=backref('items', + backref=backref('items', cascade=bkd_cascade and 'save-update' or '', cascade_backrefs=bkd_cascade_backrefs ) @@ -579,7 +579,7 @@ class NoSaveCascadeFlushTest(_fixtures.FixtureTest): else: keywords_rel = {'keywords':relationship( - Keyword, + Keyword, secondary=item_keywords, cascade=fwd_cascade and 'save-update' or '', cascade_backrefs=fwd_cascade_backrefs, @@ -663,7 +663,7 @@ class NoSaveCascadeFlushTest(_fixtures.FixtureTest): def test_o2m_backref_child_transient(self): User, Address = self.classes.User, self.classes.Address - self._one_to_many_fixture(o2m=True, m2o=True, + self._one_to_many_fixture(o2m=True, m2o=True, o2m_cascade=False) sess = Session() u1 = User(name='u1') @@ -679,7 +679,7 @@ class NoSaveCascadeFlushTest(_fixtures.FixtureTest): def test_o2m_backref_child_transient_nochange(self): User, Address = self.classes.User, self.classes.Address - self._one_to_many_fixture(o2m=True, m2o=True, + self._one_to_many_fixture(o2m=True, m2o=True, o2m_cascade=False) sess = Session() u1 = User(name='u1') @@ -697,7 +697,7 @@ class NoSaveCascadeFlushTest(_fixtures.FixtureTest): def test_o2m_backref_child_expunged(self): User, Address = self.classes.User, self.classes.Address - self._one_to_many_fixture(o2m=True, m2o=True, + self._one_to_many_fixture(o2m=True, m2o=True, 
o2m_cascade=False) sess = Session() u1 = User(name='u1') @@ -717,7 +717,7 @@ class NoSaveCascadeFlushTest(_fixtures.FixtureTest): def test_o2m_backref_child_expunged_nochange(self): User, Address = self.classes.User, self.classes.Address - self._one_to_many_fixture(o2m=True, m2o=True, + self._one_to_many_fixture(o2m=True, m2o=True, o2m_cascade=False) sess = Session() u1 = User(name='u1') @@ -935,7 +935,7 @@ class NoSaveCascadeFlushTest(_fixtures.FixtureTest): def test_m2m_backref_child_transient(self): Item, Keyword = self.classes.Item, self.classes.Keyword - self._many_to_many_fixture(fwd=True, bkd=True, + self._many_to_many_fixture(fwd=True, bkd=True, fwd_cascade=False) sess = Session() i1 = Item(description='i1') @@ -951,7 +951,7 @@ class NoSaveCascadeFlushTest(_fixtures.FixtureTest): def test_m2m_backref_child_transient_nochange(self): Item, Keyword = self.classes.Item, self.classes.Keyword - self._many_to_many_fixture(fwd=True, bkd=True, + self._many_to_many_fixture(fwd=True, bkd=True, fwd_cascade=False) sess = Session() i1 = Item(description='i1') @@ -969,7 +969,7 @@ class NoSaveCascadeFlushTest(_fixtures.FixtureTest): def test_m2m_backref_child_expunged(self): Item, Keyword = self.classes.Item, self.classes.Keyword - self._many_to_many_fixture(fwd=True, bkd=True, + self._many_to_many_fixture(fwd=True, bkd=True, fwd_cascade=False) sess = Session() i1 = Item(description='i1') @@ -989,7 +989,7 @@ class NoSaveCascadeFlushTest(_fixtures.FixtureTest): def test_m2m_backref_child_expunged_nochange(self): Item, Keyword = self.classes.Item, self.classes.Keyword - self._many_to_many_fixture(fwd=True, bkd=True, + self._many_to_many_fixture(fwd=True, bkd=True, fwd_cascade=False) sess = Session() i1 = Item(description='i1') @@ -1432,19 +1432,19 @@ class M2OCascadeDeleteNoOrphanTest(fixtures.MappedTest): @classmethod def define_tables(cls, metadata): Table('t1', metadata, Column('id', Integer, primary_key=True, - test_needs_autoincrement=True), - Column('data',String(50)), + test_needs_autoincrement=True), + Column('data',String(50)), Column('t2id', Integer, ForeignKey('t2.id'))) - Table('t2', metadata, + Table('t2', metadata, Column('id', Integer, primary_key=True, - test_needs_autoincrement=True), - Column('data',String(50)), + test_needs_autoincrement=True), + Column('data',String(50)), Column('t3id', Integer, ForeignKey('t3.id'))) - Table('t3', metadata, + Table('t3', metadata, Column('id', Integer, primary_key=True, - test_needs_autoincrement=True), + test_needs_autoincrement=True), Column('data', String(50))) @classmethod @@ -1757,8 +1757,8 @@ class M2MCascadeTest(fixtures.MappedTest): mapper(A, a, properties={ - 'bs':relationship(B, - secondary=atob, + 'bs':relationship(B, + secondary=atob, cascade="all, delete-orphan", single_parent=True, backref=backref('a', uselist=False)) }) @@ -1782,7 +1782,7 @@ class O2MSelfReferentialDetelOrphanTest(fixtures.MappedTest): @classmethod def define_tables(cls, metadata): Table('node', metadata, - Column('id', Integer, primary_key=True, + Column('id', Integer, primary_key=True, test_needs_autoincrement=True), Column('parent_id', Integer, ForeignKey('node.id')) ) @@ -1798,10 +1798,10 @@ class O2MSelfReferentialDetelOrphanTest(fixtures.MappedTest): node = cls.tables.node mapper(Node, node, properties={ "children":relationship( - Node, - cascade="all, delete-orphan", + Node, + cascade="all, delete-orphan", backref=backref( - "parent", + "parent", remote_side=node.c.id ) ) @@ -1837,12 +1837,12 @@ class NoBackrefCascadeTest(_fixtures.FixtureTest): 
mapper(Address, addresses) mapper(User, users, properties={ - 'addresses':relationship(Address, backref='user', + 'addresses':relationship(Address, backref='user', cascade_backrefs=False) }) mapper(Dingaling, dingalings, properties={ - 'address' : relationship(Address, backref='dingalings', + 'address' : relationship(Address, backref='dingalings', cascade_backrefs=False) }) @@ -1965,7 +1965,7 @@ class PendingOrphanTestSingleLevel(fixtures.MappedTest): pass def test_pending_standalone_orphan(self): - """Standalone 'orphan' objects can now be persisted, if the underlying + """Standalone 'orphan' objects can now be persisted, if the underlying constraints of the database allow it. This now supports persisting of objects based on foreign key @@ -2002,7 +2002,7 @@ class PendingOrphanTestSingleLevel(fixtures.MappedTest): assert_raises(sa_exc.DBAPIError, s.commit) s.rollback() - # can assign o.user_id by foreign key, + # can assign o.user_id by foreign key, # flush succeeds u = User() s.add(u) @@ -2025,7 +2025,7 @@ class PendingOrphanTestSingleLevel(fixtures.MappedTest): mapper(Address, addresses) mapper(User, users, properties=dict( - addresses=relationship(Address, cascade="all,delete-orphan", + addresses=relationship(Address, cascade="all,delete-orphan", backref="user") )) s = create_session() @@ -2379,14 +2379,14 @@ class DoubleParentM2OOrphanTest(fixtures.MappedTest): class CollectionAssignmentOrphanTest(fixtures.MappedTest): @classmethod def define_tables(cls, metadata): - Table('table_a', metadata, + Table('table_a', metadata, Column('id', Integer, primary_key=True, test_needs_autoincrement=True), Column('name', String(30))) - Table('table_b', metadata, + Table('table_b', metadata, Column('id', Integer, primary_key=True, test_needs_autoincrement=True), - Column('name', String(30)), + Column('name', String(30)), Column('a_id', Integer, ForeignKey('table_a.id'))) def test_basic(self): @@ -2534,7 +2534,7 @@ class O2MConflictTest(fixtures.MappedTest): self.tables.child) mapper(Parent, parent, properties={ - 'child':relationship(Child, uselist=False, + 'child':relationship(Child, uselist=False, cascade="all, delete, delete-orphan") }) mapper(Child, child) @@ -2548,8 +2548,8 @@ class O2MConflictTest(fixtures.MappedTest): self.tables.child) mapper(Parent, parent, properties={ - 'child':relationship(Child, uselist=False, - cascade="all, delete, delete-orphan", + 'child':relationship(Child, uselist=False, + cascade="all, delete, delete-orphan", backref='parent') }) mapper(Child, child) @@ -2564,8 +2564,8 @@ class O2MConflictTest(fixtures.MappedTest): mapper(Parent, parent) mapper(Child, child, properties = { - 'parent' : relationship(Parent, uselist=False, single_parent=True, - backref=backref('child', uselist=False), + 'parent' : relationship(Parent, uselist=False, single_parent=True, + backref=backref('child', uselist=False), cascade="all,delete,delete-orphan") }) self._do_move_test(True) @@ -2579,8 +2579,8 @@ class O2MConflictTest(fixtures.MappedTest): mapper(Parent, parent) mapper(Child, child, properties = { - 'parent' : relationship(Parent, uselist=False, single_parent=True, - backref=backref('child', uselist=True), + 'parent' : relationship(Parent, uselist=False, single_parent=True, + backref=backref('child', uselist=True), cascade="all,delete,delete-orphan") }) self._do_move_test(True) @@ -2597,7 +2597,7 @@ class PartialFlushTest(fixtures.MappedTest): Column("descr", String(50)) ) - Table("noninh_child", metadata, + Table("noninh_child", metadata, Column('id', Integer, primary_key=True, 
test_needs_autoincrement=True), Column('base_id', Integer, ForeignKey('base.id')) @@ -2635,7 +2635,7 @@ class PartialFlushTest(fixtures.MappedTest): sess.flush([b1]) # c1, c2 get cascaded into the session on o2m. - # not sure if this is how I like this + # not sure if this is how I like this # to work but that's how it works for now. assert c1 in sess and c1 not in sess.new assert c2 in sess and c2 not in sess.new @@ -2682,7 +2682,7 @@ class PartialFlushTest(fixtures.MappedTest): inherits=Base, properties={'parent': relationship( Parent, - backref='children', + backref='children', primaryjoin=inh_child.c.parent_id == parent.c.id )} ) diff --git a/test/orm/test_collection.py b/test/orm/test_collection.py index 42a0ded34b..b3de03aaef 100644 --- a/test/orm/test_collection.py +++ b/test/orm/test_collection.py @@ -1567,7 +1567,7 @@ class DictHelpersTest(fixtures.MappedTest): ((Foo.id, Foo.bar_id), Foo(id=3, bar_id=12), (3, 12)) ): eq_( - collections.column_mapped_collection(spec)().keyfunc(obj), + collections.column_mapped_collection(spec)().keyfunc(obj), expected ) @@ -1622,11 +1622,11 @@ class ColumnMappedWSerialize(fixtures.MappedTest): @classmethod def define_tables(cls, metadata): - Table('foo', metadata, + Table('foo', metadata, Column('id', Integer(), primary_key=True), Column('b', String(128)) ) - Table('bar', metadata, + Table('bar', metadata, Column('id', Integer(), primary_key=True), Column('foo_id', Integer, ForeignKey('foo.id')), Column('bat_id', Integer), @@ -1673,7 +1673,7 @@ class ColumnMappedWSerialize(fixtures.MappedTest): for spec, obj, expected in specs: coll = collections.column_mapped_collection(spec)() eq_( - coll.keyfunc(obj), + coll.keyfunc(obj), expected ) # ensure we do the right thing with __reduce__ diff --git a/test/orm/test_compile.py b/test/orm/test_compile.py index 68505b0e69..1b2714d700 100644 --- a/test/orm/test_compile.py +++ b/test/orm/test_compile.py @@ -164,7 +164,7 @@ class CompileTest(fixtures.ORMTest): meta = MetaData() a = Table('a', meta, Column('id', Integer, primary_key=True)) - b = Table('b', meta, Column('id', Integer, primary_key=True), + b = Table('b', meta, Column('id', Integer, primary_key=True), Column('a_id', Integer, ForeignKey('a.id'))) class A(object):pass diff --git a/test/orm/test_composites.py b/test/orm/test_composites.py index b4e3d016cf..5e5775dc28 100644 --- a/test/orm/test_composites.py +++ b/test/orm/test_composites.py @@ -19,14 +19,14 @@ class PointTest(fixtures.MappedTest): @classmethod def define_tables(cls, metadata): Table('graphs', metadata, - Column('id', Integer, primary_key=True, + Column('id', Integer, primary_key=True, test_needs_autoincrement=True), Column('name', String(30))) Table('edges', metadata, - Column('id', Integer, primary_key=True, + Column('id', Integer, primary_key=True, test_needs_autoincrement=True), - Column('graph_id', Integer, + Column('graph_id', Integer, ForeignKey('graphs.id')), Column('x1', Integer), Column('y1', Integer), @@ -120,7 +120,7 @@ class PointTest(fixtures.MappedTest): self.classes.Point) # current contract. the composite is None - # when hasn't been populated etc. on a + # when hasn't been populated etc. on a # pending/transient object. e1 = Edge() assert e1.end is None @@ -134,7 +134,7 @@ class PointTest(fixtures.MappedTest): # created unconditionally in all cases. 
# but as we are just trying to fix [ticket:2308] and # [ticket:2309] without changing behavior we maintain - # that only "persistent" gets the composite with the + # that only "persistent" gets the composite with the # Nones sess.flush() @@ -180,7 +180,7 @@ class PointTest(fixtures.MappedTest): g.edges[1] eq_( - sess.query(Edge).filter(Edge.start==None).all(), + sess.query(Edge).filter(Edge.start==None).all(), [] ) @@ -207,7 +207,7 @@ class PointTest(fixtures.MappedTest): sess = self._fixture() eq_( - sess.query(Edge.start, Edge.end).all(), + sess.query(Edge.start, Edge.end).all(), [(3, 4, 5, 6), (14, 5, 2, 7)] ) @@ -221,7 +221,7 @@ class PointTest(fixtures.MappedTest): del e.end sess.flush() eq_( - sess.query(Edge.start, Edge.end).all(), + sess.query(Edge.start, Edge.end).all(), [(3, 4, 5, 6), (14, 5, None, None)] ) @@ -267,9 +267,9 @@ class PrimaryKeyTest(fixtures.MappedTest): @classmethod def define_tables(cls, metadata): Table('graphs', metadata, - Column('id', Integer, primary_key=True, + Column('id', Integer, primary_key=True, test_needs_autoincrement=True), - Column('version_id', Integer, primary_key=True, + Column('version_id', Integer, primary_key=True, nullable=True), Column('name', String(30))) @@ -360,7 +360,7 @@ class DefaultsTest(fixtures.MappedTest): @classmethod def define_tables(cls, metadata): Table('foobars', metadata, - Column('id', Integer, primary_key=True, + Column('id', Integer, primary_key=True, test_needs_autoincrement=True), Column('x1', Integer, default=2), Column('x2', Integer), @@ -396,10 +396,10 @@ class DefaultsTest(fixtures.MappedTest): self.goofy_x1, self.x2, self.x3, self.x4 ) mapper(Foobar, foobars, properties=dict( - foob=sa.orm.composite(FBComposite, - foobars.c.x1, - foobars.c.x2, - foobars.c.x3, + foob=sa.orm.composite(FBComposite, + foobars.c.x1, + foobars.c.x2, + foobars.c.x3, foobars.c.x4) )) @@ -436,16 +436,16 @@ class MappedSelectTest(fixtures.MappedTest): @classmethod def define_tables(cls, metadata): Table('descriptions', metadata, - Column('id', Integer, primary_key=True, + Column('id', Integer, primary_key=True, test_needs_autoincrement=True), Column('d1', String(20)), Column('d2', String(20)), ) Table('values', metadata, - Column('id', Integer, primary_key=True, + Column('id', Integer, primary_key=True, test_needs_autoincrement=True), - Column('description_id', Integer, + Column('description_id', Integer, ForeignKey('descriptions.id'), nullable=False), Column('v1', String(20)), @@ -472,7 +472,7 @@ class MappedSelectTest(fixtures.MappedTest): desc_values = select( [values, descriptions.c.d1, descriptions.c.d2], descriptions.c.id == values.c.description_id - ).alias('descriptions_values') + ).alias('descriptions_values') mapper(Descriptions, descriptions, properties={ 'values': relationship(Values, lazy='dynamic'), @@ -484,7 +484,7 @@ class MappedSelectTest(fixtures.MappedTest): }) mapper(Values, desc_values, properties={ - 'custom_values': composite(CustomValues, + 'custom_values': composite(CustomValues, desc_values.c.v1, desc_values.c.v2), @@ -520,16 +520,16 @@ class MappedSelectTest(fixtures.MappedTest): class ManyToOneTest(fixtures.MappedTest): @classmethod def define_tables(cls, metadata): - Table('a', + Table('a', metadata, - Column('id', Integer, primary_key=True, + Column('id', Integer, primary_key=True, test_needs_autoincrement=True), Column('b1', String(20)), Column('b2_id', Integer, ForeignKey('b.id')) ) Table('b', metadata, - Column('id', Integer, primary_key=True, + Column('id', Integer, primary_key=True, 
test_needs_autoincrement=True), Column('data', String(20)) ) @@ -595,7 +595,7 @@ class ConfigurationTest(fixtures.MappedTest): @classmethod def define_tables(cls, metadata): Table('edge', metadata, - Column('id', Integer, primary_key=True, + Column('id', Integer, primary_key=True, test_needs_autoincrement=True), Column('x1', Integer), Column('y1', Integer), @@ -674,9 +674,9 @@ class ConfigurationTest(fixtures.MappedTest): self.classes.Edge, self.classes.Point) mapper(Edge, edge, properties={ - 'start':sa.orm.composite(Point, edge.c.x1, edge.c.y1, + 'start':sa.orm.composite(Point, edge.c.x1, edge.c.y1, deferred=True, group='s'), - 'end': sa.orm.composite(Point, edge.c.x2, edge.c.y2, + 'end': sa.orm.composite(Point, edge.c.x2, edge.c.y2, deferred=True) }) self._test_roundtrip() @@ -685,7 +685,7 @@ class ComparatorTest(fixtures.MappedTest): @classmethod def define_tables(cls, metadata): Table('edge', metadata, - Column('id', Integer, primary_key=True, + Column('id', Integer, primary_key=True, test_needs_autoincrement=True), Column('x1', Integer), Column('y1', Integer), @@ -732,7 +732,7 @@ class ComparatorTest(fixtures.MappedTest): return diff_x * diff_x + diff_y * diff_y <= d * d mapper(Edge, edge, properties={ - 'start': sa.orm.composite(Point, edge.c.x1, edge.c.y1, + 'start': sa.orm.composite(Point, edge.c.x1, edge.c.y1, comparator_factory=CustomComparator), 'end': sa.orm.composite(Point, edge.c.x2, edge.c.y2) }) @@ -769,7 +769,7 @@ class ComparatorTest(fixtures.MappedTest): e2 eq_( - sess.query(Edge).filter(Edge.start==None).all(), + sess.query(Edge).filter(Edge.start==None).all(), [] ) diff --git a/test/orm/test_cycles.py b/test/orm/test_cycles.py index 81f9c1ccde..dce8e04deb 100644 --- a/test/orm/test_cycles.py +++ b/test/orm/test_cycles.py @@ -478,7 +478,7 @@ class BiDirectionalOneToManyTest(fixtures.MappedTest): class BiDirectionalOneToManyTest2(fixtures.MappedTest): - """Two mappers with a one-to-many relationship to each other, + """Two mappers with a one-to-many relationship to each other, with a second one-to-many on one of the mappers""" run_define_tables = 'each' @@ -667,8 +667,8 @@ class OneToManyManyToOneTest(fixtures.MappedTest): sess.delete(p) self.assert_sql_execution( - testing.db, - sess.flush, + testing.db, + sess.flush, ExactSQL("UPDATE person SET favorite_ball_id=:favorite_ball_id " "WHERE person.id = :person_id", lambda ctx: {'person_id': p.id, 'favorite_ball_id': None}), @@ -718,7 +718,7 @@ class OneToManyManyToOneTest(fixtures.MappedTest): p2, b1.person ) - # do it the other way + # do it the other way p3.balls.append(b1) sess.commit() eq_( @@ -798,7 +798,7 @@ class OneToManyManyToOneTest(fixtures.MappedTest): sess.delete(p) - self.assert_sql_execution(testing.db, sess.flush, + self.assert_sql_execution(testing.db, sess.flush, CompiledSQL("UPDATE ball SET person_id=:person_id " "WHERE ball.id = :ball_id", lambda ctx:[ @@ -912,7 +912,7 @@ class SelfReferentialPostUpdateTest(fixtures.MappedTest): # pre-trigger lazy loader on 'cats' to make the test easier cats.children self.assert_sql_execution( - testing.db, + testing.db, session.flush, AllOf( CompiledSQL("UPDATE node SET prev_sibling_id=:prev_sibling_id " @@ -935,12 +935,12 @@ class SelfReferentialPostUpdateTest(fixtures.MappedTest): session.delete(root) self.assert_sql_execution( - testing.db, + testing.db, session.flush, CompiledSQL("UPDATE node SET next_sibling_id=:next_sibling_id " - "WHERE node.id = :node_id", + "WHERE node.id = :node_id", lambda ctx: [ - {'node_id': about.id, 'next_sibling_id': None}, + 
{'node_id': about.id, 'next_sibling_id': None}, {'node_id': stories.id, 'next_sibling_id': None} ] ), @@ -1180,7 +1180,7 @@ class PostUpdateBatchingTest(fixtures.MappedTest): p1.c3 = c31 self.assert_sql_execution( - testing.db, + testing.db, sess.flush, CompiledSQL( "UPDATE parent SET c1_id=:c1_id, c2_id=:c2_id, " @@ -1192,7 +1192,7 @@ class PostUpdateBatchingTest(fixtures.MappedTest): p1.c1 = p1.c2 = p1.c3 = None self.assert_sql_execution( - testing.db, + testing.db, sess.flush, CompiledSQL( "UPDATE parent SET c1_id=:c1_id, c2_id=:c2_id, " diff --git a/test/orm/test_default_strategies.py b/test/orm/test_default_strategies.py index 7dbc9adcb4..675cebda83 100644 --- a/test/orm/test_default_strategies.py +++ b/test/orm/test_default_strategies.py @@ -16,11 +16,11 @@ class DefaultStrategyOptionsTest(_fixtures.FixtureTest): # keywords are not part of self.static.user_all_result, so # verify all the item keywords were loaded, with no more sql. # 'any' verifies at least some items have keywords; we build - # a list for any([...]) instead of any(...) to prove we've + # a list for any([...]) instead of any(...) to prove we've # iterated all the items with no sql. f = util.flatten_iterator - assert any([i.keywords for i in - f([o.items for o in f([u.orders for u in users])])]) + assert any([i.keywords for i in + f([o.items for o in f([u.orders for u in users])])]) self.assert_sql_count(testing.db, go, 0) def _assert_addresses_loaded(self, users): @@ -85,13 +85,13 @@ class DefaultStrategyOptionsTest(_fixtures.FixtureTest): mapper(User, users, properties=dict( addresses=relationship(Address, lazy=True, order_by=addresses.c.id), - orders=relationship(Order, + orders=relationship(Order, order_by=orders.c.id))) return create_session() def test_downgrade_baseline(self): - """Mapper strategy defaults load as expected + """Mapper strategy defaults load as expected (compare to rest of DefaultStrategyOptionsTest downgrade tests).""" sess = self._downgrade_fixture() users = [] @@ -107,11 +107,11 @@ class DefaultStrategyOptionsTest(_fixtures.FixtureTest): self._assert_fully_loaded(users) def test_disable_eagerloads(self): - """Mapper eager load strategy defaults can be shut off + """Mapper eager load strategy defaults can be shut off with enable_eagerloads(False).""" - # While this isn't testing a mapper option, it is included - # as baseline reference for how XYZload('*') option + # While this isn't testing a mapper option, it is included + # as baseline reference for how XYZload('*') option # should work, namely, it shouldn't affect later queries # (see other test_select_s) sess = self._downgrade_fixture() @@ -156,8 +156,8 @@ class DefaultStrategyOptionsTest(_fixtures.FixtureTest): sa.orm.subqueryload, '*', User.addresses ) def test_select_with_joinedload(self): - """Mapper load strategy defaults can be downgraded with - lazyload('*') option, while explicit joinedload() option + """Mapper load strategy defaults can be downgraded with + lazyload('*') option, while explicit joinedload() option is still honored""" sess = self._downgrade_fixture() users = [] @@ -181,8 +181,8 @@ class DefaultStrategyOptionsTest(_fixtures.FixtureTest): self.assert_sql_count(testing.db, go, 3) def test_select_with_subqueryload(self): - """Mapper load strategy defaults can be downgraded with - lazyload('*') option, while explicit subqueryload() option + """Mapper load strategy defaults can be downgraded with + lazyload('*') option, while explicit subqueryload() option is still honored""" sess = self._downgrade_fixture() users = [] @@ 
-215,8 +215,8 @@ class DefaultStrategyOptionsTest(_fixtures.FixtureTest): eq_(users, self.static.user_all_result) def test_noload_with_joinedload(self): - """Mapper load strategy defaults can be downgraded with - noload('*') option, while explicit joinedload() option + """Mapper load strategy defaults can be downgraded with + noload('*') option, while explicit joinedload() option is still honored""" sess = self._downgrade_fixture() users = [] @@ -240,8 +240,8 @@ class DefaultStrategyOptionsTest(_fixtures.FixtureTest): self.assert_sql_count(testing.db, go, 0) def test_noload_with_subqueryload(self): - """Mapper load strategy defaults can be downgraded with - noload('*') option, while explicit subqueryload() option + """Mapper load strategy defaults can be downgraded with + noload('*') option, while explicit subqueryload() option is still honored""" sess = self._downgrade_fixture() users = [] @@ -268,7 +268,7 @@ class DefaultStrategyOptionsTest(_fixtures.FixtureTest): self.assert_sql_count(testing.db, go, 0) def test_joined(self): - """Mapper load strategy defaults can be upgraded with + """Mapper load strategy defaults can be upgraded with joinedload('*') option.""" sess = self._upgrade_fixture() users = [] @@ -285,7 +285,7 @@ class DefaultStrategyOptionsTest(_fixtures.FixtureTest): self._assert_fully_loaded(users) def test_joined_with_lazyload(self): - """Mapper load strategy defaults can be upgraded with + """Mapper load strategy defaults can be upgraded with joinedload('*') option, while explicit lazyload() option is still honored""" sess = self._upgrade_fixture() @@ -316,7 +316,7 @@ class DefaultStrategyOptionsTest(_fixtures.FixtureTest): self.assert_sql_count(testing.db, go, 1) def test_joined_with_subqueryload(self): - """Mapper load strategy defaults can be upgraded with + """Mapper load strategy defaults can be upgraded with joinedload('*') option, while explicit subqueryload() option is still honored""" sess = self._upgrade_fixture() @@ -335,7 +335,7 @@ class DefaultStrategyOptionsTest(_fixtures.FixtureTest): self._assert_fully_loaded(users) def test_subquery(self): - """Mapper load strategy defaults can be upgraded with + """Mapper load strategy defaults can be upgraded with subqueryload('*') option.""" sess = self._upgrade_fixture() users = [] @@ -352,7 +352,7 @@ class DefaultStrategyOptionsTest(_fixtures.FixtureTest): self._assert_fully_loaded(users) def test_subquery_with_lazyload(self): - """Mapper load strategy defaults can be upgraded with + """Mapper load strategy defaults can be upgraded with subqueryload('*') option, while explicit lazyload() option is still honored""" sess = self._upgrade_fixture() @@ -382,8 +382,8 @@ class DefaultStrategyOptionsTest(_fixtures.FixtureTest): self.assert_sql_count(testing.db, go, 1) def test_subquery_with_joinedload(self): - """Mapper load strategy defaults can be upgraded with - subqueryload('*') option, while multiple explicit + """Mapper load strategy defaults can be upgraded with + subqueryload('*') option, while multiple explicit joinedload() options are still honored""" sess = self._upgrade_fixture() users = [] diff --git a/test/orm/test_defaults.py b/test/orm/test_defaults.py index 8063d92b71..a246cddaea 100644 --- a/test/orm/test_defaults.py +++ b/test/orm/test_defaults.py @@ -42,7 +42,7 @@ class TriggerDefaultsTest(fixtures.MappedTest): sa.DDL("CREATE TRIGGER dt_ins BEFORE INSERT ON dt " "FOR EACH ROW BEGIN " "SET NEW.col2='ins'; SET NEW.col4='ins'; END", - on=lambda ddl, event, target, bind, **kw: + on=lambda ddl, event, target, 
bind, **kw: bind.engine.name not in ('oracle', 'mssql', 'sqlite') ), ): @@ -67,7 +67,7 @@ class TriggerDefaultsTest(fixtures.MappedTest): sa.DDL("CREATE TRIGGER dt_up BEFORE UPDATE ON dt " "FOR EACH ROW BEGIN " "SET NEW.col3='up'; SET NEW.col4='up'; END", - on=lambda ddl, event, target, bind, **kw: + on=lambda ddl, event, target, bind, **kw: bind.engine.name not in ('oracle', 'mssql', 'sqlite') ), ): diff --git a/test/orm/test_descriptor.py b/test/orm/test_descriptor.py index c306ebd05c..33308880ea 100644 --- a/test/orm/test_descriptor.py +++ b/test/orm/test_descriptor.py @@ -9,7 +9,7 @@ from test.lib import fixtures from test.lib.testing import eq_ class TestDescriptor(descriptor_props.DescriptorProperty): - def __init__(self, cls, key, descriptor=None, doc=None, + def __init__(self, cls, key, descriptor=None, doc=None, comparator_factory = None): self.parent = cls.__mapper__ self.key = key diff --git a/test/orm/test_eager_relations.py b/test/orm/test_eager_relations.py index a0d8fd3820..8f0f109e90 100644 --- a/test/orm/test_eager_relations.py +++ b/test/orm/test_eager_relations.py @@ -88,7 +88,7 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL): self.classes.User) mapper(User, users, properties = { - 'addresses':relationship(mapper(Address, addresses), + 'addresses':relationship(mapper(Address, addresses), lazy='joined', order_by=addresses.c.email_address), }) q = create_session().query(User) @@ -114,8 +114,8 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL): self.classes.User) mapper(User, users, properties = { - 'addresses':relationship(mapper(Address, addresses), - lazy='joined', + 'addresses':relationship(mapper(Address, addresses), + lazy='joined', order_by=[addresses.c.email_address, addresses.c.id]), }) q = create_session().query(User) @@ -135,7 +135,7 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL): ], q.order_by(User.id).all()) def test_orderby_related(self): - """A regular mapper select on a single table can + """A regular mapper select on a single table can order by a relationship to a second table""" Address, addresses, users, User = (self.classes.Address, @@ -300,11 +300,11 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL): 'orders':relationship(Order, order_by=orders.c.id), # o2m, m2o }) mapper(Order, orders, properties={ - 'items':relationship(Item, + 'items':relationship(Item, secondary=order_items, order_by=items.c.id), #m2m }) mapper(Item, items, properties={ - 'keywords':relationship(Keyword, + 'keywords':relationship(Keyword, secondary=item_keywords, order_by=keywords.c.id) #m2m }) @@ -312,20 +312,20 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL): for opt, count in [ (( - joinedload(User.orders, Order.items), + joinedload(User.orders, Order.items), ), 10), ((joinedload("orders.items"), ), 10), (( - joinedload(User.orders, ), - joinedload(User.orders, Order.items), - joinedload(User.orders, Order.items, Item.keywords), + joinedload(User.orders, ), + joinedload(User.orders, Order.items), + joinedload(User.orders, Order.items, Item.keywords), ), 1), (( - joinedload(User.orders, Order.items, Item.keywords), + joinedload(User.orders, Order.items, Item.keywords), ), 10), (( - joinedload(User.orders, Order.items), - joinedload(User.orders, Order.items, Item.keywords), + joinedload(User.orders, Order.items), + joinedload(User.orders, Order.items, Item.keywords), ), 5), ]: sess = create_session() @@ -428,7 +428,7 @@ class EagerTest(_fixtures.FixtureTest, 
testing.AssertsCompiledSQL): eq_(self.static.user_address_result, sess.query(User).order_by(User.id).all()) def test_double(self): - """Eager loading with two relationships simultaneously, + """Eager loading with two relationships simultaneously, from the same table, using aliases.""" users, orders, User, Address, Order, addresses = (self.tables.users, @@ -489,7 +489,7 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL): self.assert_sql_count(testing.db, go, 1) def test_double_same_mappers(self): - """Eager loading with two relationships simultaneously, + """Eager loading with two relationships simultaneously, from the same table, using aliases.""" addresses, items, order_items, orders, Item, User, Address, Order, users = (self.tables.addresses, @@ -566,7 +566,7 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL): self.assert_sql_count(testing.db, go, 1) def test_no_false_hits(self): - """Eager loaders don't interpret main table columns as + """Eager loaders don't interpret main table columns as part of their eager load.""" addresses, orders, User, Address, Order, users = (self.tables.addresses, @@ -665,8 +665,8 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL): sess = create_session() q = sess.query(Item) - l = q.filter((Item.description=='item 2') | - (Item.description=='item 5') | + l = q.filter((Item.description=='item 2') | + (Item.description=='item 5') | (Item.description=='item 3')).\ order_by(Item.id).limit(2).all() @testing.fails_on('maxdb', 'FIXME: unknown') def test_limit_3(self): - """test that the ORDER BY is propagated from the inner + """test that the ORDER BY is propagated from the inner select to the outer select, when using the - 'wrapped' select statement resulting from the combination of + 'wrapped' select statement resulting from the combination of eager loading and limit/offset clauses.""" addresses, items, order_items, orders, Item, User, Address, Order, users = (self.tables.addresses, @@ -732,12 +732,12 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL): self.tables.users, self.tables.orders) - # tests the LIMIT/OFFSET aliasing on a mapper + # tests the LIMIT/OFFSET aliasing on a mapper # against a select.
original issue from ticket #904 sel = sa.select([users, addresses.c.email_address], users.c.id==addresses.c.user_id).alias('useralias') mapper(User, sel, properties={ - 'orders':relationship(Order, primaryjoin=sel.c.id==orders.c.user_id, + 'orders':relationship(Order, primaryjoin=sel.c.id==orders.c.user_id, lazy='joined', order_by=orders.c.id) }) mapper(Order, orders) @@ -770,7 +770,7 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL): u1 = sess.query(User).filter(User.id==8).one() def go(): eq_(u1.addresses[0].user, u1) - self.assert_sql_execution(testing.db, go, + self.assert_sql_execution(testing.db, go, CompiledSQL( "SELECT addresses.id AS addresses_id, addresses.user_id AS " "addresses_user_id, addresses.email_address AS " @@ -781,7 +781,7 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL): def test_manytoone_limit(self): - """test that the subquery wrapping only occurs with + """test that the subquery wrapping only occurs with limit/offset and m2m or o2m joins present.""" users, items, order_items, Order, Item, User, Address, orders, addresses = (self.tables.users, @@ -884,7 +884,7 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL): ) self.assert_compile( - sess.query(User).options(joinedload("orders", innerjoin=True), + sess.query(User).options(joinedload("orders", innerjoin=True), joinedload("orders.address", innerjoin=True)).limit(10), "SELECT anon_1.users_id AS anon_1_users_id, anon_1.users_name AS anon_1_users_name, " "addresses_1.id AS addresses_1_id, addresses_1.user_id AS addresses_1_user_id, " @@ -906,7 +906,7 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL): self.classes.User) mapper(User, users, properties = dict( - address = relationship(mapper(Address, addresses), + address = relationship(mapper(Address, addresses), lazy='joined', uselist=False) )) q = create_session().query(User) @@ -948,10 +948,10 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL): self.tables.orders) - # use a primaryjoin intended to defeat SA's usage of + # use a primaryjoin intended to defeat SA's usage of # query.get() for a many-to-one lazyload mapper(Order, orders, properties = dict( - address = relationship(mapper(Address, addresses), + address = relationship(mapper(Address, addresses), primaryjoin=and_( addresses.c.id==orders.c.address_id, addresses.c.email_address != None @@ -1019,7 +1019,7 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL): 'orders':relationship(Order, backref='user', lazy='joined', order_by=orders.c.id), 'max_order':relationship( - mapper(Order, max_orders, non_primary=True), + mapper(Order, max_orders, non_primary=True), lazy='joined', uselist=False) }) @@ -1043,7 +1043,7 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL): self.assert_sql_count(testing.db, go, 1) def test_uselist_false_warning(self): - """test that multiple rows received by a + """test that multiple rows received by a uselist=False raises a warning.""" User, users, orders, Order = (self.classes.User, @@ -1114,7 +1114,7 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL): ], q.all()) def test_aliasing(self): - """test that eager loading uses aliases to insulate the eager + """test that eager loading uses aliases to insulate the eager load from regular criterion against those tables.""" Address, addresses, users, User = (self.classes.Address, @@ -1124,7 +1124,7 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL): mapper(User, users, 
properties = dict( - addresses = relationship(mapper(Address, addresses), + addresses = relationship(mapper(Address, addresses), lazy='joined', order_by=addresses.c.id) )) q = create_session().query(User) @@ -1139,20 +1139,20 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL): self.classes.User) mapper(User, users, properties = dict( - addresses = relationship(mapper(Address, addresses), lazy='joined', + addresses = relationship(mapper(Address, addresses), lazy='joined', innerjoin=True, order_by=addresses.c.id) )) sess = create_session() eq_( [User(id=7, addresses=[ Address(id=1) ]), - User(id=8, - addresses=[ Address(id=2, email_address='ed@wood.com'), - Address(id=3, email_address='ed@bettyboop.com'), + User(id=8, + addresses=[ Address(id=2, email_address='ed@wood.com'), + Address(id=3, email_address='ed@bettyboop.com'), Address(id=4, email_address='ed@lala.com'), ]), User(id=9, addresses=[ Address(id=5) ])] ,sess.query(User).all() ) - self.assert_compile(sess.query(User), + self.assert_compile(sess.query(User), "SELECT users.id AS users_id, users.name AS users_name, " "addresses_1.id AS addresses_1_id, addresses_1.user_id AS addresses_1_user_id, " "addresses_1.email_address AS addresses_1_email_address FROM users JOIN " @@ -1169,11 +1169,11 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL): self.tables.orders) mapper(User, users, properties = dict( - orders =relationship(Order, innerjoin=True, + orders =relationship(Order, innerjoin=True, lazy=False) )) mapper(Order, orders, properties=dict( - items=relationship(Item, secondary=order_items, lazy=False, + items=relationship(Item, secondary=order_items, lazy=False, innerjoin=True) )) mapper(Item, items) @@ -1231,7 +1231,7 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL): orders =relationship(Order, lazy=False) )) mapper(Order, orders, properties=dict( - items=relationship(Item, secondary=order_items, lazy=False, + items=relationship(Item, secondary=order_items, lazy=False, innerjoin=True) )) mapper(Item, items) @@ -1284,14 +1284,14 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL): )) mapper(Item, items) sess = create_session() - self.assert_compile(sess.query(User).options(joinedload(User.orders, innerjoin=True)), + self.assert_compile(sess.query(User).options(joinedload(User.orders, innerjoin=True)), "SELECT users.id AS users_id, users.name AS users_name, orders_1.id AS orders_1_id, " "orders_1.user_id AS orders_1_user_id, orders_1.address_id AS orders_1_address_id, " "orders_1.description AS orders_1_description, orders_1.isopen AS orders_1_isopen " "FROM users JOIN orders AS orders_1 ON users.id = orders_1.user_id ORDER BY orders_1.id" , use_default_dialect=True) - self.assert_compile(sess.query(User).options(joinedload_all(User.orders, Order.items, innerjoin=True)), + self.assert_compile(sess.query(User).options(joinedload_all(User.orders, Order.items, innerjoin=True)), "SELECT users.id AS users_id, users.name AS users_name, items_1.id AS items_1_id, " "items_1.description AS items_1_description, orders_1.id AS orders_1_id, " "orders_1.user_id AS orders_1_user_id, orders_1.address_id AS orders_1_address_id, " @@ -1304,17 +1304,17 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL): def go(): eq_( sess.query(User).options( - joinedload(User.orders, innerjoin=True), + joinedload(User.orders, innerjoin=True), joinedload(User.orders, Order.items, innerjoin=True)). 
order_by(User.id).all(), - [User(id=7, - orders=[ - Order(id=1, items=[ Item(id=1), Item(id=2), Item(id=3)]), - Order(id=3, items=[ Item(id=3), Item(id=4), Item(id=5)]), + [User(id=7, + orders=[ + Order(id=1, items=[ Item(id=1), Item(id=2), Item(id=3)]), + Order(id=3, items=[ Item(id=3), Item(id=4), Item(id=5)]), Order(id=5, items=[Item(id=5)])]), User(id=9, orders=[ - Order(id=2, items=[ Item(id=1), Item(id=2), Item(id=3)]), + Order(id=2, items=[ Item(id=1), Item(id=2), Item(id=3)]), Order(id=4, items=[ Item(id=1), Item(id=5)])]) ] ) @@ -1531,7 +1531,7 @@ class LoadOnExistingTest(_fixtures.FixtureTest): User, Order, Item = self.classes.User, \ self.classes.Order, self.classes.Item mapper(User, self.tables.users, properties={ - 'orders':relationship(Order), + 'orders':relationship(Order), }) mapper(Order, self.tables.orders, properties={ 'items':relationship(Item, secondary=self.tables.order_items), @@ -1807,8 +1807,8 @@ class SelfReferentialEagerTest(fixtures.MappedTest): self.children.append(node) mapper(Node, nodes, properties={ - 'children':relationship(Node, - lazy='joined', + 'children':relationship(Node, + lazy='joined', join_depth=3, order_by=nodes.c.id) }) sess = create_session() @@ -1913,7 +1913,7 @@ class SelfReferentialEagerTest(fixtures.MappedTest): sess.expunge_all() def go(): - eq_( + eq_( Node(data='n1', children=[Node(data='n11'), Node(data='n12')]), sess.query(Node).order_by(Node.id).first(), ) @@ -1975,7 +1975,7 @@ class SelfReferentialEagerTest(fixtures.MappedTest): options(joinedload('children.children')).first() # test that the query isn't wrapping the initial query for eager loading. - self.assert_sql_execution(testing.db, go, + self.assert_sql_execution(testing.db, go, CompiledSQL( "SELECT nodes.id AS nodes_id, nodes.parent_id AS " "nodes_parent_id, nodes.data AS nodes_data FROM nodes " @@ -2220,25 +2220,25 @@ class MixedEntitiesTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL): eq_( [ ( - User(addresses=[Address(email_address=u'fred@fred.com')], name=u'fred'), + User(addresses=[Address(email_address=u'fred@fred.com')], name=u'fred'), Order(description=u'order 2', isopen=0, items=[Item(description=u'item 1'), Item(description=u'item 2'), Item(description=u'item 3')]), - User(addresses=[Address(email_address=u'jack@bean.com')], name=u'jack'), + User(addresses=[Address(email_address=u'jack@bean.com')], name=u'jack'), Order(description=u'order 3', isopen=1, items=[Item(description=u'item 3'), Item(description=u'item 4'), Item(description=u'item 5')]) - ), + ), ( - User(addresses=[Address(email_address=u'fred@fred.com')], name=u'fred'), + User(addresses=[Address(email_address=u'fred@fred.com')], name=u'fred'), Order(description=u'order 2', isopen=0, items=[Item(description=u'item 1'), Item(description=u'item 2'), Item(description=u'item 3')]), - User(addresses=[Address(email_address=u'jack@bean.com')], name=u'jack'), + User(addresses=[Address(email_address=u'jack@bean.com')], name=u'jack'), Order(address_id=None, description=u'order 5', isopen=0, items=[Item(description=u'item 5')]) - ), + ), ( - User(addresses=[Address(email_address=u'fred@fred.com')], name=u'fred'), + User(addresses=[Address(email_address=u'fred@fred.com')], name=u'fred'), Order(description=u'order 4', isopen=1, items=[Item(description=u'item 1'), Item(description=u'item 5')]), - User(addresses=[Address(email_address=u'jack@bean.com')], name=u'jack'), + User(addresses=[Address(email_address=u'jack@bean.com')], name=u'jack'), Order(address_id=None, description=u'order 5', isopen=0, 
items=[Item(description=u'item 5')]) - ), + ), ], sess.query(User, Order, u1, o1).\ join(Order, User.orders).options(joinedload(User.addresses), joinedload(Order.items)).filter(User.id==9).\ @@ -2281,7 +2281,7 @@ class MixedEntitiesTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL): (User(id=9, addresses=[Address(id=5)]), Order(id=4, items=[Item(id=1), Item(id=5)])), ], sess.query(User, oalias).join(oalias, User.orders). - options(joinedload(User.addresses), + options(joinedload(User.addresses), joinedload(oalias.items)). filter(User.id==9). order_by(User.id, oalias.id).all(), @@ -2410,10 +2410,10 @@ class SubqueryTest(fixtures.MappedTest): }) session = create_session() - session.add(User(name='joe', tags=[Tag(score1=5.0, score2=3.0), + session.add(User(name='joe', tags=[Tag(score1=5.0, score2=3.0), Tag(score1=55.0, score2=1.0)])) - session.add(User(name='bar', tags=[Tag(score1=5.0, score2=4.0), - Tag(score1=50.0, score2=1.0), + session.add(User(name='bar', tags=[Tag(score1=5.0, score2=4.0), + Tag(score1=50.0, score2=1.0), Tag(score1=15.0, score2=2.0)])) session.flush() session.expunge_all() @@ -2525,7 +2525,7 @@ class CorrelatedSubqueryTest(fixtures.MappedTest): if aliasstuff: salias = stuff.alias() else: - # if we don't alias the 'stuff' table within the correlated subquery, + # if we don't alias the 'stuff' table within the correlated subquery, # it gets aliased in the eager load along with the "stuff" table to "stuff_1". # but it's a scalar subquery, and this doesn't actually matter salias = stuff diff --git a/test/orm/test_events.py b/test/orm/test_events.py index f8158369c2..2a9d67a28c 100644 --- a/test/orm/test_events.py +++ b/test/orm/test_events.py @@ -129,11 +129,11 @@ class MapperEventsTest(_RemoveListeners, _fixtures.FixtureTest): sess.flush() eq_(canary, ['init', 'before_insert', - 'after_insert', 'expire', 'translate_row', + 'after_insert', 'expire', 'translate_row', 'populate_instance', 'refresh', 'append_result', 'translate_row', 'create_instance', 'populate_instance', 'load', 'append_result', - 'before_update', 'after_update', 'before_delete', + 'before_update', 'after_update', 'before_delete', 'after_delete']) def test_merge(self): @@ -226,10 +226,10 @@ class MapperEventsTest(_RemoveListeners, _fixtures.FixtureTest): sess.add(k1) sess.flush() eq_(canary1, - ['init', + ['init', 'before_insert', 'after_insert']) eq_(canary2, - ['init', + ['init', 'before_insert', 'after_insert']) canary1[:]= [] @@ -468,7 +468,7 @@ class SessionEventsTest(_RemoveListeners, _fixtures.FixtureTest): assert my_listener in s.dispatch.before_flush def test_sessionmaker_listen(self): - """test that listen can be applied to individual + """test that listen can be applied to individual scoped_session() classes.""" def my_listener_one(*arg, **kw): @@ -567,16 +567,16 @@ class SessionEventsTest(_RemoveListeners, _fixtures.FixtureTest): mapper(User, users) - sess, canary = self._listener_fixture(autoflush=False, + sess, canary = self._listener_fixture(autoflush=False, autocommit=True, expire_on_commit=False) u = User(name='u1') sess.add(u) sess.flush() eq_( - canary, + canary, [ 'after_attach', 'before_flush', 'after_begin', - 'after_flush', 'after_flush_postexec', + 'after_flush', 'after_flush_postexec', 'before_commit', 'after_commit',] ) @@ -596,10 +596,10 @@ class SessionEventsTest(_RemoveListeners, _fixtures.FixtureTest): sess.commit ) sess.rollback() - eq_(canary, ['after_attach', 'before_commit', 'before_flush', - 'after_begin', 'after_flush', 'after_flush_postexec', - 'after_commit', 
'after_attach', 'before_commit', - 'before_flush', 'after_begin', 'after_rollback', + eq_(canary, ['after_attach', 'before_commit', 'before_flush', + 'after_begin', 'after_flush', 'after_flush_postexec', + 'after_commit', 'after_attach', 'before_commit', + 'before_flush', 'after_begin', 'after_rollback', 'after_soft_rollback', 'after_soft_rollback']) def test_can_use_session_in_outer_rollback_hook(self): @@ -721,7 +721,7 @@ class SessionEventsTest(_RemoveListeners, _fixtures.FixtureTest): u = User(name='u1') sess.add(u) sess.flush() - eq_(sess.query(User).order_by(User.name).all(), + eq_(sess.query(User).order_by(User.name).all(), [ User(name='another u1'), User(name='u1') @@ -729,7 +729,7 @@ class SessionEventsTest(_RemoveListeners, _fixtures.FixtureTest): ) sess.flush() - eq_(sess.query(User).order_by(User.name).all(), + eq_(sess.query(User).order_by(User.name).all(), [ User(name='another u1'), User(name='u1') @@ -738,7 +738,7 @@ class SessionEventsTest(_RemoveListeners, _fixtures.FixtureTest): u.name='u2' sess.flush() - eq_(sess.query(User).order_by(User.name).all(), + eq_(sess.query(User).order_by(User.name).all(), [ User(name='another u1'), User(name='another u2'), @@ -748,7 +748,7 @@ class SessionEventsTest(_RemoveListeners, _fixtures.FixtureTest): sess.delete(u) sess.flush() - eq_(sess.query(User).order_by(User.name).all(), + eq_(sess.query(User).order_by(User.name).all(), [ User(name='another u1'), ] @@ -769,14 +769,14 @@ class SessionEventsTest(_RemoveListeners, _fixtures.FixtureTest): u = User(name='u1') sess.add(u) sess.flush() - eq_(sess.query(User).order_by(User.name).all(), + eq_(sess.query(User).order_by(User.name).all(), [User(name='u1')] ) sess.add(User(name='u2')) sess.flush() sess.expunge_all() - eq_(sess.query(User).order_by(User.name).all(), + eq_(sess.query(User).order_by(User.name).all(), [ User(name='u1 modified'), User(name='u2') @@ -786,7 +786,7 @@ class SessionEventsTest(_RemoveListeners, _fixtures.FixtureTest): class MapperExtensionTest(_fixtures.FixtureTest): - """Superseded by MapperEventsTest - test backwards + """Superseded by MapperEventsTest - test backwards compatibility of MapperExtension.""" run_inserts = None @@ -938,10 +938,10 @@ class MapperExtensionTest(_fixtures.FixtureTest): sess.add(k1) sess.flush() eq_(methods1, - ['instrument_class', 'init_instance', + ['instrument_class', 'init_instance', 'before_insert', 'after_insert']) eq_(methods2, - ['instrument_class', 'init_instance', + ['instrument_class', 'init_instance', 'before_insert', 'after_insert']) del methods1[:] @@ -1021,7 +1021,7 @@ class MapperExtensionTest(_fixtures.FixtureTest): class AttributeExtensionTest(fixtures.MappedTest): @classmethod def define_tables(cls, metadata): - Table('t1', + Table('t1', metadata, Column('id', Integer, primary_key=True), Column('type', String(40)), @@ -1074,7 +1074,7 @@ class AttributeExtensionTest(fixtures.MappedTest): eq_(b1.data, 'ex1b2') eq_(c1.data, 'ex2c2') - eq_(ext_msg, ["Ex1 'a1'", "Ex1 'b1'", "Ex2 'c1'", + eq_(ext_msg, ["Ex1 'a1'", "Ex1 'b1'", "Ex2 'c1'", "Ex1 'a2'", "Ex1 'b2'", "Ex2 'c2'"]) diff --git a/test/orm/test_expire.py b/test/orm/test_expire.py index c73306665c..16ca8b0ba2 100644 --- a/test/orm/test_expire.py +++ b/test/orm/test_expire.py @@ -67,7 +67,7 @@ class ExpireTest(_fixtures.FixtureTest): u = s.query(User).get(7) s.expunge_all() - assert_raises_message(sa_exc.InvalidRequestError, + assert_raises_message(sa_exc.InvalidRequestError, r"is not persistent within this Session", s.expire, u) def test_get_refreshes(self): @@ -138,12 
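AttributeExtensionTest above verifies that attribute-level hooks propagate to subclasses and can rewrite incoming values (the 'ex1'/'ex2' prefixes). The event-based successor to AttributeExtension is the attribute 'set' event; a minimal sketch, assuming the 0.7 event API, where retval=True lets the listener substitute the stored value:

from sqlalchemy import Column, Integer, String, event
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()

class A(Base):
    __tablename__ = 'a'
    id = Column(Integer, primary_key=True)
    data = Column(String(40))

# retval=True makes the listener's return value the stored value,
# like AttributeExtension.set() returning a replacement
@event.listens_for(A.data, 'set', retval=True)
def prefix_value(target, value, oldvalue, initiator):
    return 'ex1' + value

a1 = A()
a1.data = 'a1'
print(a1.data)   # 'ex1a1'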
+138,12 @@ class ExpireTest(_fixtures.FixtureTest): s.rollback() assert u in s - # but now its back, rollback has occurred, the + # but now its back, rollback has occurred, the # _remove_newly_deleted is reverted eq_(u.name, 'chuck') def test_deferred(self): - """test that unloaded, deferred attributes aren't included in the + """test that unloaded, deferred attributes aren't included in the expiry list.""" Order, orders = self.classes.Order, self.tables.orders @@ -185,7 +185,7 @@ class ExpireTest(_fixtures.FixtureTest): self.classes.User) mapper(User, users, properties={ - 'addresses':relationship(Address, + 'addresses':relationship(Address, order_by=addresses.c.email_address) }) mapper(Address, addresses) @@ -193,21 +193,21 @@ class ExpireTest(_fixtures.FixtureTest): u = s.query(User).get(8) adlist = u.addresses eq_(adlist, [ - Address(email_address='ed@bettyboop.com'), + Address(email_address='ed@bettyboop.com'), Address(email_address='ed@lala.com'), - Address(email_address='ed@wood.com'), + Address(email_address='ed@wood.com'), ]) a1 = u.addresses[2] a1.email_address = 'aaaaa' s.expire(u, ['addresses']) eq_(u.addresses, [ - Address(email_address='aaaaa'), - Address(email_address='ed@bettyboop.com'), + Address(email_address='aaaaa'), + Address(email_address='ed@bettyboop.com'), Address(email_address='ed@lala.com'), ]) def test_refresh_collection_exception(self): - """test graceful failure for currently unsupported + """test graceful failure for currently unsupported immediate refresh of a collection""" users, Address, addresses, User = (self.tables.users, @@ -222,12 +222,12 @@ class ExpireTest(_fixtures.FixtureTest): mapper(Address, addresses) s = create_session(autoflush=True, autocommit=False) u = s.query(User).get(8) - assert_raises_message(sa_exc.InvalidRequestError, - "properties specified for refresh", + assert_raises_message(sa_exc.InvalidRequestError, + "properties specified for refresh", s.refresh, u, ['addresses']) # in contrast to a regular query with no columns - assert_raises_message(sa_exc.InvalidRequestError, + assert_raises_message(sa_exc.InvalidRequestError, "no columns with which to SELECT", s.query().all) def test_refresh_cancels_expire(self): @@ -862,7 +862,7 @@ class ExpireTest(_fixtures.FixtureTest): self.classes.User) mapper(User, users, properties={ - 'addresses':relationship(Address, backref='user', lazy='joined', + 'addresses':relationship(Address, backref='user', lazy='joined', order_by=addresses.c.id), }) mapper(Address, addresses) @@ -941,7 +941,7 @@ class ExpireTest(_fixtures.FixtureTest): u1 = sess.query(User).options(undefer(User.name)).first() assert 'name' not in attributes.instance_state(u1).callables - # mass expire, the attribute was loaded, + # mass expire, the attribute was loaded, # the attribute gets the callable sess.expire(u1) assert isinstance( @@ -954,7 +954,7 @@ class ExpireTest(_fixtures.FixtureTest): assert 'name' not in attributes.instance_state(u1).callables # mass expire, attribute was loaded but then deleted, - # the callable goes away - the state wants to flip + # the callable goes away - the state wants to flip # it back to its "deferred" loader. sess.expunge_all() u1 = sess.query(User).options(undefer(User.name)).first() @@ -1164,7 +1164,7 @@ class ExpiredPendingTest(_fixtures.FixtureTest): # which attach to u1 will expect to be "pending" sess.expire(u1, ['addresses']) - # attach an Address. now its "pending" + # attach an Address. 
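The ExpireTest hunks above exercise Session.expire(): expiring an instance, or a named subset of its attributes, discards the loaded (and any pending) value and arranges a reload on next access, while unloaded deferred attributes stay out of the expiry list. A minimal sketch of the partial-expire behavior, assuming a declarative stand-in for the fixture User:

from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker

Base = declarative_base()

class User(Base):
    __tablename__ = 'users'
    id = Column(Integer, primary_key=True)
    name = Column(String(30))

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
sess = sessionmaker(bind=engine)()

sess.add(User(name='jack'))
sess.commit()

u = sess.query(User).first()
u.name = 'ed'              # pending in-memory change
sess.expire(u, ['name'])   # discard it; 'name' is now unloaded
# next access emits a SELECT and returns the database value
print(u.name)              # 'jack'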
now its "pending" # in user.addresses a2 = Address(email_address='a2') a2.user = u1 diff --git a/test/orm/test_froms.py b/test/orm/test_froms.py index c8fc0af79b..73b1b095c5 100644 --- a/test/orm/test_froms.py +++ b/test/orm/test_froms.py @@ -56,7 +56,7 @@ class QueryTest(_fixtures.FixtureTest): mapper(Keyword, keywords) mapper(Node, nodes, properties={ - 'children':relationship(Node, + 'children':relationship(Node, backref=backref('parent', remote_side=[nodes.c.id]) ) }) @@ -77,13 +77,13 @@ class RawSelectTest(QueryTest, AssertsCompiledSQL): sess = create_session() - self.assert_compile(sess.query(users).select_from(users.select()).with_labels().statement, + self.assert_compile(sess.query(users).select_from(users.select()).with_labels().statement, "SELECT users.id AS users_id, users.name AS users_name FROM users, " "(SELECT users.id AS id, users.name AS name FROM users) AS anon_1", dialect=default.DefaultDialect() ) - self.assert_compile(sess.query(users, exists([1], from_obj=addresses)).with_labels().statement, + self.assert_compile(sess.query(users, exists([1], from_obj=addresses)).with_labels().statement, "SELECT users.id AS users_id, users.name AS users_name, EXISTS " "(SELECT 1 FROM addresses) AS anon_1 FROM users", dialect=default.DefaultDialect() @@ -95,7 +95,7 @@ class RawSelectTest(QueryTest, AssertsCompiledSQL): s = sess.query(addresses.c.id.label('id'), addresses.c.email_address.label('email')).\ filter(addresses.c.user_id==users.c.id).correlate(users).statement.alias() - self.assert_compile(sess.query(users, s.c.email).select_from(users.join(s, s.c.id==users.c.id)).with_labels().statement, + self.assert_compile(sess.query(users, s.c.email).select_from(users.join(s, s.c.id==users.c.id)).with_labels().statement, "SELECT users.id AS users_id, users.name AS users_name, anon_1.email AS anon_1_email " "FROM users JOIN (SELECT addresses.id AS id, addresses.email_address AS email FROM addresses " "WHERE addresses.user_id = users.id) AS anon_1 ON anon_1.id = users.id", @@ -103,11 +103,11 @@ class RawSelectTest(QueryTest, AssertsCompiledSQL): ) x = func.lala(users.c.id).label('foo') - self.assert_compile(sess.query(x).filter(x==5).statement, + self.assert_compile(sess.query(x).filter(x==5).statement, "SELECT lala(users.id) AS foo FROM users WHERE lala(users.id) = :param_1", dialect=default.DefaultDialect()) self.assert_compile(sess.query(func.sum(x).label('bar')).statement, - "SELECT sum(lala(users.id)) AS bar FROM users", dialect=default.DefaultDialect()) + "SELECT sum(lala(users.id)) AS bar FROM users", dialect=default.DefaultDialect()) class FromSelfTest(QueryTest, AssertsCompiledSQL): @@ -162,7 +162,7 @@ class FromSelfTest(QueryTest, AssertsCompiledSQL): Address = self.classes.Address eq_( - create_session().query(Address.user_id, + create_session().query(Address.user_id, func.count(Address.id).label('count')).\ group_by(Address.user_id). order_by(Address.user_id).all(), @@ -171,7 +171,7 @@ class FromSelfTest(QueryTest, AssertsCompiledSQL): eq_( create_session().query(Address.user_id, Address.id).\ - from_self(Address.user_id, + from_self(Address.user_id, func.count(Address.id)).\ group_by(Address.user_id). order_by(Address.user_id).all(), @@ -225,11 +225,11 @@ class FromSelfTest(QueryTest, AssertsCompiledSQL): from_self(User.name, ualias.name). 
order_by(User.name, ualias.name).all(), [ - (u'chuck', u'ed'), - (u'chuck', u'fred'), - (u'chuck', u'jack'), - (u'ed', u'jack'), - (u'fred', u'ed'), + (u'chuck', u'ed'), + (u'chuck', u'fred'), + (u'chuck', u'jack'), + (u'ed', u'jack'), + (u'fred', u'ed'), (u'fred', u'jack') ] ) @@ -250,10 +250,10 @@ class FromSelfTest(QueryTest, AssertsCompiledSQL): join(ualias.addresses). order_by(ualias.name, Address.email_address).all(), [ - (u'ed', u'fred@fred.com'), - (u'jack', u'ed@bettyboop.com'), - (u'jack', u'ed@lala.com'), - (u'jack', u'ed@wood.com'), + (u'ed', u'fred@fred.com'), + (u'jack', u'ed@bettyboop.com'), + (u'jack', u'ed@lala.com'), + (u'jack', u'ed@wood.com'), (u'jack', u'fred@fred.com')] ) @@ -280,8 +280,8 @@ class FromSelfTest(QueryTest, AssertsCompiledSQL): from_self().\ options(joinedload('addresses')).first(), - (User(id=8, - addresses=[Address(), Address(), Address()]), + (User(id=8, + addresses=[Address(), Address(), Address()]), Address(id=2)), ) @@ -427,7 +427,7 @@ class ColumnAccessTest(QueryTest, AssertsCompiledSQL): ) def test_anonymous_expression_plus_aliased_join(self): - """test that the 'dont alias non-ORM' rule remains for other + """test that the 'dont alias non-ORM' rule remains for other kinds of aliasing when _from_selectable() is used.""" User = self.classes.User @@ -495,15 +495,15 @@ class AddEntityEquivalenceTest(fixtures.MappedTest, AssertsCompiledSQL): class D(A): pass - mapper(A, a, - polymorphic_identity='a', + mapper(A, a, + polymorphic_identity='a', polymorphic_on=a.c.type, with_polymorphic= ('*', None), properties={ 'link':relation( B, uselist=False, backref='back') }) - mapper(B, b, - polymorphic_identity='b', + mapper(B, b, + polymorphic_identity='b', polymorphic_on=b.c.type, with_polymorphic= ('*', None) ) @@ -518,9 +518,9 @@ class AddEntityEquivalenceTest(fixtures.MappedTest, AssertsCompiledSQL): sess = create_session() sess.add_all([ - B(name='b1'), - A(name='a1', link= C(name='c1',age=3)), - C(name='c2',age=6), + B(name='b1'), + A(name='a1', link= C(name='c1',age=3)), + C(name='c2',age=6), A(name='a2') ]) sess.flush() @@ -539,7 +539,7 @@ class AddEntityEquivalenceTest(fixtures.MappedTest, AssertsCompiledSQL): eq_( q.all(), [( - A(bid=2, id=1, name=u'a1', type=u'a'), + A(bid=2, id=1, name=u'a1', type=u'a'), C(age=3, id=2, name=u'c1', type=u'c') )] ) @@ -552,7 +552,7 @@ class AddEntityEquivalenceTest(fixtures.MappedTest, AssertsCompiledSQL): eq_( q.all(), [( - C(age=3, id=2, name=u'c1', type=u'c'), + C(age=3, id=2, name=u'c1', type=u'c'), A(bid=2, id=1, name=u'a1', type=u'a') )] ) @@ -576,7 +576,7 @@ class InstancesTest(QueryTest, AssertsCompiledSQL): q = sess.query(User) def go(): - l = list(q.options(contains_alias('ulist'), + l = list(q.options(contains_alias('ulist'), contains_eager('addresses')).\ instances(query.execute())) assert self.static.user_address_result == l @@ -585,7 +585,7 @@ class InstancesTest(QueryTest, AssertsCompiledSQL): sess.expunge_all() def go(): - l = q.options(contains_alias('ulist'), + l = q.options(contains_alias('ulist'), contains_eager('addresses')).\ from_statement(query).all() assert self.static.user_address_result == l @@ -598,7 +598,7 @@ class InstancesTest(QueryTest, AssertsCompiledSQL): assert self.static.user_address_result == l self.assert_sql_count(testing.db, go, 1) - # same thing, but alias addresses, so that the adapter + # same thing, but alias addresses, so that the adapter # generated by select_from() is wrapped within # the adapter created by contains_eager() adalias = addresses.alias() @@ -653,8 +653,8 
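The InstancesTest hunks above pair hand-written joins with contains_eager(), which tells the Query that the joined columns should populate a relationship collection instead of triggering a second SELECT; the "note we only load..." comments flag that an inner join silently filters out parents with no child rows. A minimal sketch, assuming a declarative User/Address pair shaped like the fixtures:

from sqlalchemy import Column, ForeignKey, Integer, String, create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import contains_eager, relationship, sessionmaker

Base = declarative_base()

class User(Base):
    __tablename__ = 'users'
    id = Column(Integer, primary_key=True)
    name = Column(String(30))
    addresses = relationship('Address')

class Address(Base):
    __tablename__ = 'addresses'
    id = Column(Integer, primary_key=True)
    user_id = Column(Integer, ForeignKey('users.id'))
    email = Column(String(50))

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
sess = sessionmaker(bind=engine)()
sess.add(User(name='ed', addresses=[Address(email='ed@wood.com')]))
sess.commit()

# the explicit join supplies the rows; contains_eager() routes the
# Address columns into User.addresses rather than re-querying.
# being an INNER join, users with no addresses drop out of the result
q = sess.query(User).join(User.addresses).\
    options(contains_eager(User.addresses))
for user in q:
    print(user.name, [a.email for a in user.addresses])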
@@ class InstancesTest(QueryTest, AssertsCompiledSQL): selectquery = users.\ outerjoin(addresses).\ - select(users.c.id<10, - use_labels=True, + select(users.c.id<10, + use_labels=True, order_by=[users.c.id, addresses.c.id]) q = sess.query(User) @@ -692,7 +692,7 @@ class InstancesTest(QueryTest, AssertsCompiledSQL): adalias = addresses.alias('adalias') selectquery = users.outerjoin(adalias).\ - select(use_labels=True, + select(use_labels=True, order_by=[users.c.id, adalias.c.id]) # string alias name @@ -713,7 +713,7 @@ class InstancesTest(QueryTest, AssertsCompiledSQL): adalias = addresses.alias('adalias') selectquery = users.outerjoin(adalias).\ - select(use_labels=True, + select(use_labels=True, order_by=[users.c.id, adalias.c.id]) # expression.Alias object @@ -762,7 +762,7 @@ class InstancesTest(QueryTest, AssertsCompiledSQL): # test using string alias with more than one level deep def go(): l = list(q.options( - contains_eager('orders', alias='o1'), + contains_eager('orders', alias='o1'), contains_eager('orders.items', alias='i1') ).instances(query.execute())) assert self.static.user_order_result == l @@ -789,7 +789,7 @@ class InstancesTest(QueryTest, AssertsCompiledSQL): # test using Alias with more than one level deep def go(): l = list(q.options( - contains_eager('orders', alias=oalias), + contains_eager('orders', alias=oalias), contains_eager('orders.items', alias=ialias) ).instances(query.execute())) assert self.static.user_order_result == l @@ -808,7 +808,7 @@ class InstancesTest(QueryTest, AssertsCompiledSQL): ialias = aliased(Item) def go(): l = q.options( - contains_eager(User.orders, alias=oalias), + contains_eager(User.orders, alias=oalias), contains_eager(User.orders, Order.items, alias=ialias) ).\ outerjoin(oalias, User.orders).\ @@ -830,21 +830,21 @@ class InstancesTest(QueryTest, AssertsCompiledSQL): join(User.addresses).\ join(Address.dingaling).\ options( - contains_eager(User.addresses, Address.dingaling), + contains_eager(User.addresses, Address.dingaling), ) def go(): eq_( q.all(), - # note we only load the Address records that - # have a Dingaling here due to using the inner + # note we only load the Address records that + # have a Dingaling here due to using the inner # join for the eager load [ User(name=u'ed', addresses=[ - Address(email_address=u'ed@wood.com', - dingaling=Dingaling(data='ding 1/2')), - ]), + Address(email_address=u'ed@wood.com', + dingaling=Dingaling(data='ding 1/2')), + ]), User(name=u'fred', addresses=[ - Address(email_address=u'fred@fred.com', + Address(email_address=u'fred@fred.com', dingaling=Dingaling(data='ding 2/5')) ]) ] @@ -866,21 +866,21 @@ class InstancesTest(QueryTest, AssertsCompiledSQL): join(User.addresses).\ join(da, Address.dingaling).\ options( - contains_eager(User.addresses, Address.dingaling, alias=da), + contains_eager(User.addresses, Address.dingaling, alias=da), ) def go(): eq_( q.all(), - # note we only load the Address records that - # have a Dingaling here due to using the inner + # note we only load the Address records that + # have a Dingaling here due to using the inner # join for the eager load [ User(name=u'ed', addresses=[ - Address(email_address=u'ed@wood.com', - dingaling=Dingaling(data='ding 1/2')), - ]), + Address(email_address=u'ed@wood.com', + dingaling=Dingaling(data='ding 1/2')), + ]), User(name=u'fred', addresses=[ - Address(email_address=u'fred@fred.com', + Address(email_address=u'fred@fred.com', dingaling=Dingaling(data='ding 2/5')) ]) ] @@ -961,8 +961,8 @@ class MixedEntitiesTest(QueryTest, 
AssertsCompiledSQL): q2 = q.order_by(User.id).\ values(User.name, User.name + " " + cast(User.id, String(50))) eq_( - list(q2), - [(u'jack', u'jack 7'), (u'ed', u'ed 8'), + list(q2), + [(u'jack', u'jack 7'), (u'ed', u'ed 8'), (u'fred', u'fred 9'), (u'chuck', u'chuck 10')] ) @@ -970,8 +970,8 @@ class MixedEntitiesTest(QueryTest, AssertsCompiledSQL): filter(User.name.like('%e%')).\ order_by(User.id, Address.id).\ values(User.name, Address.email_address) - eq_(list(q2), - [(u'ed', u'ed@wood.com'), (u'ed', u'ed@bettyboop.com'), + eq_(list(q2), + [(u'ed', u'ed@wood.com'), (u'ed', u'ed@bettyboop.com'), (u'ed', u'ed@lala.com'), (u'fred', u'fred@fred.com')]) q2 = q.join('addresses').\ @@ -1033,9 +1033,9 @@ class MixedEntitiesTest(QueryTest, AssertsCompiledSQL): filter(u2.id>1).\ order_by(User.id, sel.c.id, u2.id).\ values(User.name, sel.c.name, u2.name) - eq_(list(q2), [(u'jack', u'jack', u'jack'), (u'jack', u'jack', u'ed'), - (u'jack', u'jack', u'fred'), (u'jack', u'jack', u'chuck'), - (u'ed', u'ed', u'jack'), (u'ed', u'ed', u'ed'), + eq_(list(q2), [(u'jack', u'jack', u'jack'), (u'jack', u'jack', u'ed'), + (u'jack', u'jack', u'fred'), (u'jack', u'jack', u'chuck'), + (u'ed', u'ed', u'jack'), (u'ed', u'ed', u'ed'), (u'ed', u'ed', u'fred'), (u'ed', u'ed', u'chuck')]) @testing.fails_on('mssql', 'FIXME: unknown') @@ -1067,7 +1067,7 @@ class MixedEntitiesTest(QueryTest, AssertsCompiledSQL): def test_correlated_subquery(self): - """test that a subquery constructed from ORM attributes doesn't leak out + """test that a subquery constructed from ORM attributes doesn't leak out those entities to the outermost query. """ @@ -1086,7 +1086,7 @@ class MixedEntitiesTest(QueryTest, AssertsCompiledSQL): # we don't want Address to be outside of the subquery here eq_( list(sess.query(User, subq)[0:3]), - [(User(id=7,name=u'jack'), 1), (User(id=8,name=u'ed'), 3), + [(User(id=7,name=u'jack'), 1), (User(id=8,name=u'ed'), 3), (User(id=9,name=u'fred'), 1)] ) @@ -1099,7 +1099,7 @@ class MixedEntitiesTest(QueryTest, AssertsCompiledSQL): # we don't want Address to be outside of the subquery here eq_( list(sess.query(User, subq)[0:3]), - [(User(id=7,name=u'jack'), 1), (User(id=8,name=u'ed'), 3), + [(User(id=7,name=u'jack'), 1), (User(id=8,name=u'ed'), 3), (User(id=9,name=u'fred'), 1)] ) @@ -1119,43 +1119,43 @@ class MixedEntitiesTest(QueryTest, AssertsCompiledSQL): eq_(list(q2), [(u'jack',), (u'ed',)]) eq_(sess.query(User.name, Address.email_address).filter(User.id==Address.user_id).all(), [ - (u'jack', u'jack@bean.com'), (u'ed', u'ed@wood.com'), - (u'ed', u'ed@bettyboop.com'), (u'ed', u'ed@lala.com'), + (u'jack', u'jack@bean.com'), (u'ed', u'ed@wood.com'), + (u'ed', u'ed@bettyboop.com'), (u'ed', u'ed@lala.com'), (u'fred', u'fred@fred.com') ]) eq_(sess.query(User.name, func.count(Address.email_address)).\ outerjoin(User.addresses).group_by(User.id, User.name).\ - order_by(User.id).all(), + order_by(User.id).all(), [(u'jack', 1), (u'ed', 3), (u'fred', 1), (u'chuck', 0)] ) eq_(sess.query(User, func.count(Address.email_address)).\ outerjoin(User.addresses).group_by(User).\ - order_by(User.id).all(), - [(User(name='jack',id=7), 1), (User(name='ed',id=8), 3), + order_by(User.id).all(), + [(User(name='jack',id=7), 1), (User(name='ed',id=8), 3), (User(name='fred',id=9), 1), (User(name='chuck',id=10), 0)] ) eq_(sess.query(func.count(Address.email_address), User).\ outerjoin(User.addresses).group_by(User).\ - order_by(User.id).all(), - [(1, User(name='jack',id=7)), (3, User(name='ed',id=8)), + order_by(User.id).all(), + [(1, 
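test_correlated_subquery above asserts that a scalar subquery built from ORM attributes stays correlated, so Address never leaks into the outer FROM list. A minimal sketch of the pattern; Query.as_scalar() and correlate() are used here as in the 0.7-era API, and the sample rows are invented:

from sqlalchemy import Column, ForeignKey, Integer, String, create_engine, func
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker

Base = declarative_base()

class User(Base):
    __tablename__ = 'users'
    id = Column(Integer, primary_key=True)
    name = Column(String(30))

class Address(Base):
    __tablename__ = 'addresses'
    id = Column(Integer, primary_key=True)
    user_id = Column(Integer, ForeignKey('users.id'))

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
sess = sessionmaker(bind=engine)()
sess.add(User(id=7, name='jack'))
sess.add_all([Address(user_id=7), Address(user_id=7)])
sess.commit()

# the subquery references both Address and User; correlate() plus
# as_scalar() keep Address inside it, so the outer FROM list holds
# users alone
subq = sess.query(func.count(Address.id)).\
    filter(Address.user_id == User.id).\
    correlate(User.__table__).\
    as_scalar()

for user, count in sess.query(User, subq):
    print(user.name, count)   # jack 2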
User(name='jack',id=7)), (3, User(name='ed',id=8)), (1, User(name='fred',id=9)), (0, User(name='chuck',id=10))] ) adalias = aliased(Address) eq_(sess.query(User, func.count(adalias.email_address)).\ outerjoin(adalias, 'addresses').group_by(User).\ - order_by(User.id).all(), - [(User(name='jack',id=7), 1), (User(name='ed',id=8), 3), + order_by(User.id).all(), + [(User(name='jack',id=7), 1), (User(name='ed',id=8), 3), (User(name='fred',id=9), 1), (User(name='chuck',id=10), 0)] ) eq_(sess.query(func.count(adalias.email_address), User).\ outerjoin(adalias, User.addresses).group_by(User).\ order_by(User.id).all(), - [(1, User(name=u'jack',id=7)), (3, User(name=u'ed',id=8)), + [(1, User(name=u'jack',id=7)), (3, User(name=u'ed',id=8)), (1, User(name=u'fred',id=9)), (0, User(name=u'chuck',id=10))] ) @@ -1166,11 +1166,11 @@ class MixedEntitiesTest(QueryTest, AssertsCompiledSQL): from_self(User, adalias.email_address).\ order_by(User.id, adalias.id).all(), [ - (User(name=u'jack',id=7), u'jack@bean.com'), - (User(name=u'ed',id=8), u'ed@wood.com'), + (User(name=u'jack',id=7), u'jack@bean.com'), + (User(name=u'ed',id=8), u'ed@wood.com'), (User(name=u'ed',id=8), u'ed@bettyboop.com'), - (User(name=u'ed',id=8), u'ed@lala.com'), - (User(name=u'fred',id=9), u'fred@fred.com'), + (User(name=u'ed',id=8), u'ed@lala.com'), + (User(name=u'fred',id=9), u'fred@fred.com'), (User(name=u'chuck',id=10), None) ] ) @@ -1181,8 +1181,8 @@ class MixedEntitiesTest(QueryTest, AssertsCompiledSQL): filter(Address.email_address.like('%ed%')).\ from_self().all(), [ - User(name=u'ed',id=8), - User(name=u'fred',id=9), + User(name=u'ed',id=8), + User(name=u'fred',id=9), ] ) @@ -1203,24 +1203,24 @@ class MixedEntitiesTest(QueryTest, AssertsCompiledSQL): q.all(), [(User(addresses=[ Address(user_id=7,email_address=u'jack@bean.com',id=1)], - name=u'jack',id=7), u'jack@bean.com'), + name=u'jack',id=7), u'jack@bean.com'), (User(addresses=[ - Address(user_id=8,email_address=u'ed@wood.com',id=2), - Address(user_id=8,email_address=u'ed@bettyboop.com',id=3), + Address(user_id=8,email_address=u'ed@wood.com',id=2), + Address(user_id=8,email_address=u'ed@bettyboop.com',id=3), Address(user_id=8,email_address=u'ed@lala.com',id=4)], - name=u'ed',id=8), u'ed@wood.com'), + name=u'ed',id=8), u'ed@wood.com'), (User(addresses=[ - Address(user_id=8,email_address=u'ed@wood.com',id=2), - Address(user_id=8,email_address=u'ed@bettyboop.com',id=3), - Address(user_id=8,email_address=u'ed@lala.com',id=4)],name=u'ed',id=8), - u'ed@bettyboop.com'), + Address(user_id=8,email_address=u'ed@wood.com',id=2), + Address(user_id=8,email_address=u'ed@bettyboop.com',id=3), + Address(user_id=8,email_address=u'ed@lala.com',id=4)],name=u'ed',id=8), + u'ed@bettyboop.com'), (User(addresses=[ - Address(user_id=8,email_address=u'ed@wood.com',id=2), - Address(user_id=8,email_address=u'ed@bettyboop.com',id=3), - Address(user_id=8,email_address=u'ed@lala.com',id=4)],name=u'ed',id=8), - u'ed@lala.com'), - (User(addresses=[Address(user_id=9,email_address=u'fred@fred.com',id=5)],name=u'fred',id=9), - u'fred@fred.com'), + Address(user_id=8,email_address=u'ed@wood.com',id=2), + Address(user_id=8,email_address=u'ed@bettyboop.com',id=3), + Address(user_id=8,email_address=u'ed@lala.com',id=4)],name=u'ed',id=8), + u'ed@lala.com'), + (User(addresses=[Address(user_id=9,email_address=u'fred@fred.com',id=5)],name=u'fred',id=9), + u'fred@fred.com'), (User(addresses=[],name=u'chuck',id=10), None)] ) @@ -1275,11 +1275,11 @@ class MixedEntitiesTest(QueryTest, AssertsCompiledSQL): eq_( q.all(), [ - 
(Order(address_id=1,description=u'order 3',isopen=1,user_id=7,id=3), - Order(address_id=1,description=u'order 1',isopen=0,user_id=7,id=1)), - (Order(address_id=None,description=u'order 5',isopen=0,user_id=7,id=5), - Order(address_id=1,description=u'order 1',isopen=0,user_id=7,id=1)), - (Order(address_id=None,description=u'order 5',isopen=0,user_id=7,id=5), + (Order(address_id=1,description=u'order 3',isopen=1,user_id=7,id=3), + Order(address_id=1,description=u'order 1',isopen=0,user_id=7,id=1)), + (Order(address_id=None,description=u'order 5',isopen=0,user_id=7,id=5), + Order(address_id=1,description=u'order 1',isopen=0,user_id=7,id=1)), + (Order(address_id=None,description=u'order 5',isopen=0,user_id=7,id=5), Order(address_id=1,description=u'order 3',isopen=1,user_id=7,id=3)) ] ) @@ -1489,9 +1489,9 @@ class MixedEntitiesTest(QueryTest, AssertsCompiledSQL): assert q.all() == expected # test with a straight statement - s = select([users, func.count(addresses.c.id).label('count'), - ("Name:" + users.c.name).label('concat')], - from_obj=[users.outerjoin(addresses)], + s = select([users, func.count(addresses.c.id).label('count'), + ("Name:" + users.c.name).label('concat')], + from_obj=[users.outerjoin(addresses)], group_by=[c for c in users.c], order_by=[users.c.id]) q = create_session().query(User) l = q.add_column("count").add_column("concat").from_statement(s).all() @@ -1556,15 +1556,15 @@ class MixedEntitiesTest(QueryTest, AssertsCompiledSQL): self.assert_compile(q, exp) for crit, j, exp in [ - (ua.id + Address.id, ua.addresses, + (ua.id + Address.id, ua.addresses, "SELECT users_1.id + addresses.id AS anon_1 " "FROM users AS users_1 JOIN addresses " "ON users_1.id = addresses.user_id"), - (ua.id + aa.id, (aa, ua.addresses), + (ua.id + aa.id, (aa, ua.addresses), "SELECT users_1.id + addresses_1.id AS anon_1 " "FROM users AS users_1 JOIN addresses AS " "addresses_1 ON users_1.id = addresses_1.user_id"), - (ua.id + aa.id, (ua, aa.user), + (ua.id + aa.id, (ua, aa.user), "SELECT users_1.id + addresses_1.id AS anon_1 " "FROM addresses AS addresses_1 JOIN " "users AS users_1 " @@ -1580,7 +1580,7 @@ class MixedEntitiesTest(QueryTest, AssertsCompiledSQL): User, Address = self.classes.User, self.classes.Address sess = Session() - agg_address = sess.query(Address.id, + agg_address = sess.query(Address.id, func.sum(func.length(Address.email_address)).label('email_address') ).group_by(Address.user_id) ag1 = aliased(Address, agg_address.subquery()) @@ -1803,7 +1803,7 @@ class SelectFromTest(QueryTest, AssertsCompiledSQL): eq_(sess.query(User).select_from(sel).\ join('orders', 'items', 'keywords').\ filter(Keyword.name.in_(['red', 'big', 'round'])).\ - all(), + all(), [ User(name=u'jack',id=7) ]) @@ -1811,7 +1811,7 @@ class SelectFromTest(QueryTest, AssertsCompiledSQL): eq_(sess.query(User).select_from(sel).\ join('orders', 'items', 'keywords', aliased=True).\ filter(Keyword.name.in_(['red', 'big', 'round'])).\ - all(), + all(), [ User(name=u'jack',id=7) ]) @@ -1822,13 +1822,13 @@ class SelectFromTest(QueryTest, AssertsCompiledSQL): options(joinedload_all('orders.items.keywords')). join('orders', 'items', 'keywords', aliased=True). 
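The "test with a straight statement" hunk above hands a Core select() to Query.from_statement() and picks up the extra labeled columns by their string keys via add_column(). A minimal sketch of that combination, assuming the 0.7-era behavior where from_statement() accepts a select() and add_column() accepts a label name:

from sqlalchemy import (Column, ForeignKey, Integer, MetaData, String,
                        Table, create_engine, func, select)
from sqlalchemy.orm import create_session, mapper

metadata = MetaData()
users = Table('users', metadata,
    Column('id', Integer, primary_key=True),
    Column('name', String(30)))
addresses = Table('addresses', metadata,
    Column('id', Integer, primary_key=True),
    Column('user_id', Integer, ForeignKey('users.id')))

class User(object):
    pass

mapper(User, users)
engine = create_engine('sqlite://')
metadata.create_all(engine)
sess = create_session(bind=engine)

# a plain Core SELECT carrying two extra labeled columns
s = select([users,
            func.count(addresses.c.id).label('count'),
            ("Name:" + users.c.name).label('concat')],
           from_obj=[users.outerjoin(addresses)],
           group_by=[c for c in users.c])

# rows come back as (User, count, concat); the strings name labels
# present in the statement
result = sess.query(User).add_column('count').add_column('concat').\
    from_statement(s).all()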
filter(Keyword.name.in_(['red', 'big', 'round'])).\ - all(), + all(), [ User(name=u'jack',orders=[ Order(description=u'order 1',items=[ Item(description=u'item 1', keywords=[ - Keyword(name=u'red'), + Keyword(name=u'red'), Keyword(name=u'big'), Keyword(name=u'round') ]), @@ -1987,7 +1987,7 @@ class ExternalColumnsTest(QueryTest): sess.query(Address).options(joinedload('user')).all() - eq_(sess.query(User).all(), + eq_(sess.query(User).all(), [ User(id=7, concat=14, count=1), User(id=8, concat=16, count=3), @@ -2011,13 +2011,13 @@ class ExternalColumnsTest(QueryTest): def go(): eq_(sess.query(Address).\ options(joinedload('user')).\ - order_by(Address.id).all(), + order_by(Address.id).all(), address_result) self.assert_sql_count(testing.db, go, 1) ualias = aliased(User) eq_( - sess.query(Address, ualias).join(ualias, 'user').all(), + sess.query(Address, ualias).join(ualias, 'user').all(), [(address, address.user) for address in address_result] ) @@ -2060,11 +2060,11 @@ class ExternalColumnsTest(QueryTest): ] ) - eq_(list(sess.query(Address).join('user').values(Address.id, User.id, User.concat, User.count)), + eq_(list(sess.query(Address).join('user').values(Address.id, User.id, User.concat, User.count)), [(1, 7, 14, 1), (2, 8, 16, 3), (3, 8, 16, 3), (4, 8, 16, 3), (5, 9, 18, 1)] ) - eq_(list(sess.query(Address, ua).select_from(join(Address,ua, 'user')).values(Address.id, ua.id, ua.concat, ua.count)), + eq_(list(sess.query(Address, ua).select_from(join(Address,ua, 'user')).values(Address.id, ua.id, ua.concat, ua.count)), [(1, 7, 14, 1), (2, 8, 16, 3), (3, 8, 16, 3), (4, 8, 16, 3), (5, 9, 18, 1)] ) @@ -2077,7 +2077,7 @@ class ExternalColumnsTest(QueryTest): self.tables.addresses) # in this test, we have a subquery on User that accesses "addresses", underneath - # an joinedload for "addresses". So the "addresses" alias adapter needs to *not* hit + # an joinedload for "addresses". So the "addresses" alias adapter needs to *not* hit # the "addresses" table within the "user" subquery, but "user" still needs to be adapted. # therefore the long standing practice of eager adapters being "chained" has been removed # since its unnecessary and breaks this exact condition. @@ -2131,17 +2131,17 @@ class ExternalColumnsTest(QueryTest): class TestOverlyEagerEquivalentCols(fixtures.MappedTest): @classmethod def define_tables(cls, metadata): - base = Table('base', metadata, + base = Table('base', metadata, Column('id', Integer, primary_key=True, test_needs_autoincrement=True), Column('data', String(50)) ) - sub1 = Table('sub1', metadata, + sub1 = Table('sub1', metadata, Column('id', Integer, ForeignKey('base.id'), primary_key=True), Column('data', String(50)) ) - sub2 = Table('sub2', metadata, + sub2 = Table('sub2', metadata, Column('id', Integer, ForeignKey('base.id'), ForeignKey('sub1.id'), primary_key=True), Column('data', String(50)) ) diff --git a/test/orm/test_hasparent.py b/test/orm/test_hasparent.py index 3940c03dab..73b9fb3b2f 100644 --- a/test/orm/test_hasparent.py +++ b/test/orm/test_hasparent.py @@ -19,7 +19,7 @@ class ParentRemovalTest(fixtures.MappedTest): """Test that the 'hasparent' flag gets flipped to False only if we're sure this object is the real parent. - In ambiguous cases a stale data exception is + In ambiguous cases a stale data exception is raised. """ @@ -120,8 +120,8 @@ class ParentRemovalTest(fixtures.MappedTest): u1 = s.query(User).first() - # primary key change. now we - # can't rely on state.key as the + # primary key change. 
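ExternalColumnsTest above maps 'concat' and 'count' onto User as SQL expressions, which is the job of column_property(): an inline expression or a correlated scalar select that is emitted with every row. A minimal sketch of such a mapping; the table shapes mirror the fixtures and the exact expressions are illustrative:

from sqlalchemy import (Column, ForeignKey, Integer, MetaData, String,
                        Table, create_engine, func, select)
from sqlalchemy.orm import column_property, create_session, mapper

metadata = MetaData()
users = Table('users', metadata,
    Column('id', Integer, primary_key=True),
    Column('name', String(30)))
addresses = Table('addresses', metadata,
    Column('id', Integer, primary_key=True),
    Column('user_id', Integer, ForeignKey('users.id')))

class User(object):
    pass

# 'concat' is an inline expression; 'count' is a correlated scalar
# subquery re-emitted with every User row
mapper(User, users, properties={
    'concat': column_property(users.c.id * 2),
    'count': column_property(
        select([func.count(addresses.c.id)],
               users.c.id == addresses.c.user_id).
        correlate(users).label('count')),
})

engine = create_engine('sqlite://')
metadata.create_all(engine)
sess = create_session(bind=engine)
u = sess.query(User).first()   # the SELECT includes both expressions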
now we + # can't rely on state.key as the # identifier. u1.id = 5 a1.user_id = 5 @@ -146,7 +146,7 @@ class ParentRemovalTest(fixtures.MappedTest): def test_stale_state_negative_child_expired(self): """illustrate the current behavior of expiration on the child. - + there's some uncertainty here in how this use case should work. diff --git a/test/orm/test_joins.py b/test/orm/test_joins.py index db7c78cdd6..31f7ed5e74 100644 --- a/test/orm/test_joins.py +++ b/test/orm/test_joins.py @@ -57,7 +57,7 @@ class QueryTest(_fixtures.FixtureTest): mapper(Keyword, keywords) mapper(Node, nodes, properties={ - 'children':relationship(Node, + 'children':relationship(Node, backref=backref('parent', remote_side=[nodes.c.id]) ) }) @@ -140,17 +140,17 @@ class InheritedJoinTest(fixtures.MappedTest, AssertsCompiledSQL): mapper(Machine, machines) - mapper(Person, people, - polymorphic_on=people.c.type, - polymorphic_identity='person', - order_by=people.c.person_id, + mapper(Person, people, + polymorphic_on=people.c.type, + polymorphic_identity='person', + order_by=people.c.person_id, properties={ 'paperwork':relationship(Paperwork, order_by=paperwork.c.paperwork_id) }) mapper(Engineer, engineers, inherits=Person, polymorphic_identity='engineer', properties={ 'machines':relationship(Machine, order_by=machines.c.machine_id) }) - mapper(Manager, managers, + mapper(Manager, managers, inherits=Person, polymorphic_identity='manager') mapper(Boss, boss, inherits=Manager, polymorphic_identity='boss') mapper(Paperwork, paperwork) @@ -405,7 +405,7 @@ class JoinTest(QueryTest, AssertsCompiledSQL): ) def test_multi_tuple_form(self): - """test the 'tuple' form of join, now superseded + """test the 'tuple' form of join, now superseded by the two-element join() form. Not deprecating this style as of yet. @@ -432,7 +432,7 @@ class JoinTest(QueryTest, AssertsCompiledSQL): self.assert_compile( sess.query(User).join( - (Order, User.id==Order.user_id), + (Order, User.id==Order.user_id), (Item, Order.items)), "SELECT users.id AS users_id, users.name AS users_name " "FROM users JOIN orders ON users.id = orders.user_id " @@ -617,8 +617,8 @@ class JoinTest(QueryTest, AssertsCompiledSQL): for oalias,ialias in [(True, True), (False, False), (True, False), (False, True)]: eq_( sess.query(User).join('orders', aliased=oalias).\ - join('items', - from_joinpoint=True, + join('items', + from_joinpoint=True, aliased=ialias).\ filter(Item.description == 'item 4').all(), [User(name='jack')] @@ -628,7 +628,7 @@ class JoinTest(QueryTest, AssertsCompiledSQL): eq_( sess.query(User).join('orders', aliased=oalias).\ filter(Order.user_id==9).\ - join('items', from_joinpoint=True, + join('items', from_joinpoint=True, aliased=ialias).\ filter(Item.description=='item 4').all(), [] @@ -637,7 +637,7 @@ class JoinTest(QueryTest, AssertsCompiledSQL): orderalias = aliased(Order) itemalias = aliased(Item) eq_( - sess.query(User).join(orderalias, 'orders'). + sess.query(User).join(orderalias, 'orders'). join(itemalias, 'items', from_joinpoint=True). filter(itemalias.description == 'item 4').all(), [User(name='jack')] @@ -692,7 +692,7 @@ class JoinTest(QueryTest, AssertsCompiledSQL): sess.query(User).join, Address, Address.user, ) - # but this one would silently fail + # but this one would silently fail adalias = aliased(Address) assert_raises( sa_exc.InvalidRequestError, @@ -848,7 +848,7 @@ class JoinTest(QueryTest, AssertsCompiledSQL): # be using the aliased flag in this way. self.assert_compile( sess.query(User).join(User.orders, aliased=True). 
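InheritedJoinTest above sets up classic joined-table inheritance: a discriminator column named by polymorphic_on on the base mapper, a polymorphic_identity per class, and inherits= on each subclass mapper. A minimal sketch of that shape, assuming classical mappers; the sample row is invented:

from sqlalchemy import (Column, ForeignKey, Integer, MetaData, String,
                        Table, create_engine)
from sqlalchemy.orm import create_session, mapper

metadata = MetaData()
people = Table('people', metadata,
    Column('person_id', Integer, primary_key=True),
    Column('name', String(50)),
    Column('type', String(30)))
engineers = Table('engineers', metadata,
    Column('person_id', Integer, ForeignKey('people.person_id'),
           primary_key=True),
    Column('primary_language', String(50)))

class Person(object):
    pass

class Engineer(Person):
    pass

# the 'type' column decides which class each row becomes
mapper(Person, people, polymorphic_on=people.c.type,
       polymorphic_identity='person')
mapper(Engineer, engineers, inherits=Person,
       polymorphic_identity='engineer')

engine = create_engine('sqlite://')
metadata.create_all(engine)
sess = create_session(bind=engine)
e = Engineer()
e.name, e.primary_language = 'dilbert', 'python'
sess.add(e)
sess.flush()   # one row in people (type='engineer'), one in engineers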
- join(Item, + join(Item, and_(Order.id==order_items.c.order_id, order_items.c.item_id==Item.id), from_joinpoint=True, aliased=True ), @@ -862,7 +862,7 @@ class JoinTest(QueryTest, AssertsCompiledSQL): oalias = orders.select() self.assert_compile( sess.query(User).join(oalias, User.orders). - join(Item, + join(Item, and_(Order.id==order_items.c.order_id, order_items.c.item_id==Item.id), from_joinpoint=True ), @@ -938,7 +938,7 @@ class JoinTest(QueryTest, AssertsCompiledSQL): ) eq_( - sess.query(User.name).join(Order, User.id==Order.user_id). + sess.query(User.name).join(Order, User.id==Order.user_id). join(order_items, Order.id==order_items.c.order_id). join(Item, order_items.c.item_id==Item.id). filter(Item.description == 'item 4').all(), @@ -1063,8 +1063,8 @@ class JoinTest(QueryTest, AssertsCompiledSQL): sess.query(OrderAlias).join('items').filter_by(description='item 3').\ order_by(OrderAlias.id).all(), [ - Order(address_id=1,description=u'order 1',isopen=0,user_id=7,id=1), - Order(address_id=4,description=u'order 2',isopen=0,user_id=9,id=2), + Order(address_id=1,description=u'order 1',isopen=0,user_id=7,id=1), + Order(address_id=4,description=u'order 2',isopen=0,user_id=9,id=2), Order(address_id=1,description=u'order 3',isopen=1,user_id=7,id=3) ] ) @@ -1076,8 +1076,8 @@ class JoinTest(QueryTest, AssertsCompiledSQL): filter_by(description='item 3').\ order_by(User.id, OrderAlias.id).all(), [ - (User(name=u'jack',id=7), Order(address_id=1,description=u'order 1',isopen=0,user_id=7,id=1), u'item 3'), - (User(name=u'jack',id=7), Order(address_id=1,description=u'order 3',isopen=1,user_id=7,id=3), u'item 3'), + (User(name=u'jack',id=7), Order(address_id=1,description=u'order 1',isopen=0,user_id=7,id=1), u'item 3'), + (User(name=u'jack',id=7), Order(address_id=1,description=u'order 3',isopen=1,user_id=7,id=3), u'item 3'), (User(name=u'fred',id=9), Order(address_id=4,description=u'order 2',isopen=0,user_id=9,id=2), u'item 3') ] ) @@ -1112,7 +1112,7 @@ class JoinTest(QueryTest, AssertsCompiledSQL): IAlias = aliased(Item) q = sess.query(Order, IAlias).select_from(join(Order, IAlias, 'items')).filter(IAlias.description=='item 3') l = q.all() - eq_(l, + eq_(l, [ (order1, item3), (order2, item3), @@ -1235,10 +1235,10 @@ class JoinTest(QueryTest, AssertsCompiledSQL): eq_( sess.query(User, ualias).filter(User.id > ualias.id).order_by(desc(ualias.id), User.name).all(), [ - (User(id=10,name=u'chuck'), User(id=9,name=u'fred')), - (User(id=10,name=u'chuck'), User(id=8,name=u'ed')), - (User(id=9,name=u'fred'), User(id=8,name=u'ed')), - (User(id=10,name=u'chuck'), User(id=7,name=u'jack')), + (User(id=10,name=u'chuck'), User(id=9,name=u'fred')), + (User(id=10,name=u'chuck'), User(id=8,name=u'ed')), + (User(id=9,name=u'fred'), User(id=8,name=u'ed')), + (User(id=10,name=u'chuck'), User(id=7,name=u'jack')), (User(id=8,name=u'ed'), User(id=7,name=u'jack')), (User(id=9,name=u'fred'), User(id=7,name=u'jack')) ] @@ -1338,7 +1338,7 @@ class JoinFromSelectableTest(fixtures.MappedTest, AssertsCompiledSQL): @classmethod def define_tables(cls, metadata): - Table('table1', metadata, + Table('table1', metadata, Column('id', Integer, primary_key=True) ) Table('table2', metadata, @@ -1563,9 +1563,9 @@ class SelfRefMixedTest(fixtures.MappedTest, AssertsCompiledSQL): backref=backref('parent', remote_side=[nodes.c.id]) ), 'subs' : relationship(Sub), - 'assoc':relationship(Node, - secondary=assoc_table, - primaryjoin=nodes.c.id==assoc_table.c.left_id, + 'assoc':relationship(Node, + secondary=assoc_table, + 
primaryjoin=nodes.c.id==assoc_table.c.left_id, secondaryjoin=nodes.c.id==assoc_table.c.right_id) }) mapper(Sub, sub_table) @@ -1618,13 +1618,13 @@ class CreateJoinsTest(fixtures.ORMTest, AssertsCompiledSQL): def _inherits_fixture(self): m = MetaData() base = Table('base', m, Column('id', Integer, primary_key=True)) - a = Table('a', m, + a = Table('a', m, Column('id', Integer, ForeignKey('base.id'), primary_key=True), Column('b_id', Integer, ForeignKey('b.id'))) - b = Table('b', m, + b = Table('b', m, Column('id', Integer, ForeignKey('base.id'), primary_key=True), Column('c_id', Integer, ForeignKey('c.id'))) - c = Table('c', m, + c = Table('c', m, Column('id', Integer, ForeignKey('base.id'), primary_key=True)) class Base(object): pass @@ -1801,7 +1801,7 @@ class SelfReferentialTest(fixtures.MappedTest, AssertsCompiledSQL): filter(Node.data=='n122').filter(parent.data=='n12').\ filter(grandparent.data=='n1').from_self().limit(1) - # parent, grandparent *are* inside the from_self(), so they + # parent, grandparent *are* inside the from_self(), so they # should get aliased to the outside. self.assert_compile( q, @@ -1975,7 +1975,7 @@ class SelfReferentialTest(fixtures.MappedTest, AssertsCompiledSQL): sess = create_session() eq_(sess.query(Node).filter(Node.children.any(Node.data=='n1')).all(), []) eq_(sess.query(Node).filter(Node.children.any(Node.data=='n12')).all(), [Node(data='n1')]) - eq_(sess.query(Node).filter(~Node.children.any()).order_by(Node.id).all(), + eq_(sess.query(Node).filter(~Node.children.any()).order_by(Node.id).all(), [Node(data='n11'), Node(data='n13'),Node(data='n121'),Node(data='n122'),Node(data='n123'),]) def test_has(self): @@ -1983,7 +1983,7 @@ class SelfReferentialTest(fixtures.MappedTest, AssertsCompiledSQL): sess = create_session() - eq_(sess.query(Node).filter(Node.parent.has(Node.data=='n12')).order_by(Node.id).all(), + eq_(sess.query(Node).filter(Node.parent.has(Node.data=='n12')).order_by(Node.id).all(), [Node(data='n121'),Node(data='n122'),Node(data='n123')]) eq_(sess.query(Node).filter(Node.parent.has(Node.data=='n122')).all(), []) eq_(sess.query(Node).filter(~Node.parent.has()).all(), [Node(data='n1')]) diff --git a/test/orm/test_lazy_relations.py b/test/orm/test_lazy_relations.py index dd50dfa3df..297d027f5d 100644 --- a/test/orm/test_lazy_relations.py +++ b/test/orm/test_lazy_relations.py @@ -264,7 +264,7 @@ class LazyTest(_fixtures.FixtureTest): User(id=8, address=Address(id=3)), User(id=9, address=None), User(id=10, address=None), - ], + ], list(q) ) @@ -397,7 +397,7 @@ class LazyTest(_fixtures.FixtureTest): SomeDBInteger, ]: m = sa.MetaData() - users = Table('users', m, + users = Table('users', m, Column('id', Integer, primary_key=True, test_needs_autoincrement=True), Column('name', String(30), nullable=False), ) @@ -486,7 +486,7 @@ class LazyTest(_fixtures.FixtureTest): self.assert_sql_count(testing.db, go, 1) class GetterStateTest(_fixtures.FixtureTest): - """test lazyloader on non-existent attribute returns + """test lazyloader on non-existent attribute returns expected attribute symbols, maintain expected state""" run_inserts = None @@ -516,8 +516,8 @@ class GetterStateTest(_fixtures.FixtureTest): User, Address, sess, a1 = self._u_ad_fixture(False) eq_( Address.user.impl.get( - attributes.instance_state(a1), - attributes.instance_dict(a1), + attributes.instance_state(a1), + attributes.instance_dict(a1), passive=attributes.PASSIVE_RETURN_NEVER_SET), attributes.NEVER_SET ) @@ -528,8 +528,8 @@ class GetterStateTest(_fixtures.FixtureTest): User, 
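test_any/test_has above use the relationship-level EXISTS operators: any() for collections, has() for scalar (many-to-one) references, with ~ negating to NOT EXISTS. A minimal sketch against the same adjacency-list Node shape as the fixtures:

from sqlalchemy import (Column, ForeignKey, Integer, MetaData, String,
                        Table, create_engine)
from sqlalchemy.orm import backref, create_session, mapper, relationship

metadata = MetaData()
nodes = Table('nodes', metadata,
    Column('id', Integer, primary_key=True),
    Column('parent_id', Integer, ForeignKey('nodes.id')),
    Column('data', String(30)))

class Node(object):
    pass

mapper(Node, nodes, properties={
    'children': relationship(Node,
        backref=backref('parent', remote_side=[nodes.c.id])),
})

engine = create_engine('sqlite://')
metadata.create_all(engine)
sess = create_session(bind=engine)

# collection side renders EXISTS against the child rows
sess.query(Node).filter(Node.children.any(Node.data == 'n12')).all()
# scalar side: has() for the many-to-one 'parent'
sess.query(Node).filter(Node.parent.has(Node.data == 'n12')).all()
# negation renders NOT EXISTS: nodes with no parent, i.e. roots
sess.query(Node).filter(~Node.parent.has()).all()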
Address, sess, a1 = self._u_ad_fixture(False) eq_( Address.user.impl.get_history( - attributes.instance_state(a1), - attributes.instance_dict(a1), + attributes.instance_state(a1), + attributes.instance_dict(a1), passive=attributes.PASSIVE_RETURN_NEVER_SET), ((), (), ()) ) @@ -540,8 +540,8 @@ class GetterStateTest(_fixtures.FixtureTest): User, Address, sess, a1 = self._u_ad_fixture(False) eq_( Address.user.impl.get( - attributes.instance_state(a1), - attributes.instance_dict(a1), + attributes.instance_state(a1), + attributes.instance_dict(a1), passive=attributes.PASSIVE_NO_INITIALIZE), attributes.PASSIVE_NO_RESULT ) @@ -552,8 +552,8 @@ class GetterStateTest(_fixtures.FixtureTest): User, Address, sess, a1 = self._u_ad_fixture(False) eq_( Address.user.impl.get_history( - attributes.instance_state(a1), - attributes.instance_dict(a1), + attributes.instance_state(a1), + attributes.instance_dict(a1), passive=attributes.PASSIVE_NO_INITIALIZE), attributes.HISTORY_BLANK ) @@ -564,8 +564,8 @@ class GetterStateTest(_fixtures.FixtureTest): User, Address, sess, a1 = self._u_ad_fixture(True) eq_( Address.user.impl.get( - attributes.instance_state(a1), - attributes.instance_dict(a1), + attributes.instance_state(a1), + attributes.instance_dict(a1), passive=attributes.PASSIVE_NO_INITIALIZE), attributes.PASSIVE_NO_RESULT ) @@ -576,8 +576,8 @@ class GetterStateTest(_fixtures.FixtureTest): User, Address, sess, a1 = self._u_ad_fixture(True) eq_( Address.user.impl.get_history( - attributes.instance_state(a1), - attributes.instance_dict(a1), + attributes.instance_state(a1), + attributes.instance_dict(a1), passive=attributes.PASSIVE_NO_INITIALIZE), attributes.HISTORY_BLANK ) @@ -588,8 +588,8 @@ class GetterStateTest(_fixtures.FixtureTest): User, Address, sess, a1 = self._u_ad_fixture(True) eq_( Address.user.impl.get( - attributes.instance_state(a1), - attributes.instance_dict(a1), + attributes.instance_state(a1), + attributes.instance_dict(a1), passive=attributes.PASSIVE_RETURN_NEVER_SET), User(name='ed') ) @@ -598,8 +598,8 @@ class GetterStateTest(_fixtures.FixtureTest): User, Address, sess, a1 = self._u_ad_fixture(True) eq_( Address.user.impl.get_history( - attributes.instance_state(a1), - attributes.instance_dict(a1), + attributes.instance_state(a1), + attributes.instance_dict(a1), passive=attributes.PASSIVE_RETURN_NEVER_SET), ((), [User(name='ed'), ], ()) ) diff --git a/test/orm/test_legacy_mutable.py b/test/orm/test_legacy_mutable.py index dc88033db4..46baebe2be 100644 --- a/test/orm/test_legacy_mutable.py +++ b/test/orm/test_legacy_mutable.py @@ -1,4 +1,4 @@ -"""Test the interaction of :class:`.MutableType` as well as the +"""Test the interaction of :class:`.MutableType` as well as the ``mutable=True`` flag with the ORM. For new mutablity functionality, see test.ext.test_mutable. 
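GetterStateTest above drives the attribute implementation directly with PASSIVE_* flags; the supported face of the same machinery is attributes.get_history(), which reports pending changes as (added, unchanged, deleted) tuples, the same shape the eq_() calls compare against. A minimal sketch, assuming a declarative stand-in:

from sqlalchemy import Column, Integer, String
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import attributes

Base = declarative_base()

class User(Base):
    __tablename__ = 'users'
    id = Column(Integer, primary_key=True)
    name = Column(String(30))

u = User(name='ed')
# History tuples are (added, unchanged, deleted); a never-flushed
# value shows up on the "added" side
print(attributes.get_history(u, 'name'))   # (['ed'], (), ())

u.name = 'jack'
print(attributes.get_history(u, 'name'))   # (['jack'], (), ())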
@@ -291,7 +291,7 @@ class PickledDictsTest(fixtures.MappedTest): Table('mutable_t', metadata, Column('id', Integer, primary_key=True, test_needs_autoincrement=True), - Column('data', + Column('data', sa.PickleType(comparator=operator.eq, mutable=True))) @classmethod diff --git a/test/orm/test_load_on_fks.py b/test/orm/test_load_on_fks.py index 031ac66054..c6f0d06468 100644 --- a/test/orm/test_load_on_fks.py +++ b/test/orm/test_load_on_fks.py @@ -230,7 +230,7 @@ class LoadOnFKsTest(AssertsExecutionResults, fixtures.TestBase): #if manualflush and (not loadrel or fake_autoexpire): # # a flush occurs, we get p2 # assert c1.parent is p2 - #elif not loadrel and not loadfk: + #elif not loadrel and not loadfk: # # problematically - we get None since committed state # # is empty when c1.parent_id was mutated, since we want # # to save on selects. this is diff --git a/test/orm/test_lockmode.py b/test/orm/test_lockmode.py index 9fc2ea0740..b2bc608659 100644 --- a/test/orm/test_lockmode.py +++ b/test/orm/test_lockmode.py @@ -43,8 +43,8 @@ class LockModeTest(_fixtures.FixtureTest, AssertsCompiledSQL): User = self.classes.User sess = Session() assert_raises_message( - Exception, "Unknown lockmode 'unknown_mode'", - self.assert_compile, + Exception, "Unknown lockmode 'unknown_mode'", + self.assert_compile, sess.query(User.id).with_lockmode('unknown_mode'), None, dialect=default.DefaultDialect() ) diff --git a/test/orm/test_manytomany.py b/test/orm/test_manytomany.py index db7f635658..ed9075833b 100644 --- a/test/orm/test_manytomany.py +++ b/test/orm/test_manytomany.py @@ -107,7 +107,7 @@ class M2MTest(fixtures.MappedTest): mapper(Place, place, properties={ 'places': relationship( Place, - secondary=place_place, + secondary=place_place, primaryjoin=place.c.place_id==place_place.c.pl1_id, secondaryjoin=place.c.place_id==place_place.c.pl2_id, order_by=place_place.c.pl2_id @@ -162,7 +162,7 @@ class M2MTest(fixtures.MappedTest): mapper(Place, place, properties={ 'child_places': relationship( Place, - secondary=place_place, + secondary=place_place, primaryjoin=place.c.place_id==place_place.c.pl1_id, secondaryjoin=place.c.place_id==place_place.c.pl2_id, order_by=place_place.c.pl2_id, @@ -268,7 +268,7 @@ class M2MTest(fixtures.MappedTest): self.tables.transition) mapper(Place, place, properties={ - 'transitions':relationship(Transition, secondary=place_input, + 'transitions':relationship(Transition, secondary=place_input, passive_updates=False) }) mapper(Transition, transition) diff --git a/test/orm/test_mapper.py b/test/orm/test_mapper.py index 79ae7ff590..5a88d937fd 100644 --- a/test/orm/test_mapper.py +++ b/test/orm/test_mapper.py @@ -104,13 +104,13 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL): 'addresses':relationship(boom) }) - # test that QueryableAttribute.__str__() doesn't + # test that QueryableAttribute.__str__() doesn't # cause a compile. eq_(str(User.addresses), "User.addresses") def test_exceptions_sticky(self): """test preservation of mapper compile errors raised during hasattr(), - as well as for redundant mapper compile calls. Test that + as well as for redundant mapper compile calls. Test that repeated calls don't stack up error messages. 
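M2MTest above maps a self-referential many-to-many: the same table sits on both sides of the association table, so primaryjoin and secondaryjoin must spell out which foreign key is the parent side and which is the child. A minimal sketch mirroring the place/place_place shape in the test:

from sqlalchemy import (Column, ForeignKey, Integer, MetaData, String,
                        Table, create_engine)
from sqlalchemy.orm import create_session, mapper, relationship

metadata = MetaData()
place = Table('place', metadata,
    Column('place_id', Integer, primary_key=True),
    Column('name', String(30)))
place_place = Table('place_place', metadata,
    Column('pl1_id', Integer, ForeignKey('place.place_id'),
           primary_key=True),
    Column('pl2_id', Integer, ForeignKey('place.place_id'),
           primary_key=True))

class Place(object):
    pass

# both association FKs point at place, so the two join conditions
# disambiguate parent (pl1_id) from child (pl2_id)
mapper(Place, place, properties={
    'places': relationship(Place,
        secondary=place_place,
        primaryjoin=place.c.place_id == place_place.c.pl1_id,
        secondaryjoin=place.c.place_id == place_place.c.pl2_id,
        order_by=place_place.c.pl2_id),
})

engine = create_engine('sqlite://')
metadata.create_all(engine)
sess = create_session(bind=engine)
p1, p2 = Place(), Place()
p1.places = [p2]     # one (p1, p2) row goes into place_place
sess.add(p1)
sess.flush()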
""" @@ -165,7 +165,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL): assert_raises(sa.exc.ArgumentError, mapper, User, s) def test_reconfigure_on_other_mapper(self): - """A configure trigger on an already-configured mapper + """A configure trigger on an already-configured mapper still triggers a check against all mappers.""" users, Address, addresses, User = (self.tables.users, @@ -219,7 +219,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL): : addresses.c.user_id}) def test_constructor_exc(self): - """TypeError is raised for illegal constructor args, + """TypeError is raised for illegal constructor args, whether or not explicit __init__ is present [ticket:908].""" users, addresses = self.tables.users, self.tables.addresses @@ -237,7 +237,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL): assert_raises(TypeError, Bar, x=5) def test_sort_states_comparisons(self): - """test that _sort_states() doesn't compare + """test that _sort_states() doesn't compare insert_order to state.key, for set of mixed persistent/pending. In particular Python 3 disallows this. @@ -247,7 +247,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL): def __init__(self, id): self.id = id m = MetaData() - foo_t = Table('foo', m, + foo_t = Table('foo', m, Column('id', String, primary_key=True) ) m = mapper(Foo, foo_t) @@ -508,7 +508,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL): ) def test_column_prop_deannotate(self): - """test that column property deannotates, + """test that column property deannotates, bringing expressions down to the original mapped columns. """ User, users = self.classes.User, self.tables.users @@ -593,7 +593,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL): class AddressUser(User): pass m1 = mapper(User, users, polymorphic_identity='user') - m2 = mapper(AddressUser, addresses, inherits=User, + m2 = mapper(AddressUser, addresses, inherits=User, polymorphic_identity='address') m3 = mapper(AddressUser, addresses, non_primary=True) assert m3._identity_class is m2._identity_class @@ -638,7 +638,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL): class Sub(Base): pass mapper(Base, users) - assert_raises_message(sa.exc.InvalidRequestError, + assert_raises_message(sa.exc.InvalidRequestError, "Configure a primary mapper first", mapper, Sub, addresses, non_primary=True ) @@ -666,7 +666,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL): pass class Empty(object):pass - empty = mapper(Empty, t, properties={'empty_id' : t.c.id}, + empty = mapper(Empty, t, properties={'empty_id' : t.c.id}, include_properties=[]) p_m = mapper(Person, t, polymorphic_on=t.c.type, include_properties=('id', 'type', 'name')) @@ -704,7 +704,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL): want = set(want) eq_(have, want) - assert_props(HasDef, ['h_boss_id', 'h_employee_number', 'h_id', + assert_props(HasDef, ['h_boss_id', 'h_employee_number', 'h_id', 'name', 'h_name', 'h_vendor_id', 'h_type']) assert_props(Person, ['id', 'name', 'type']) assert_instrumented(Person, ['id', 'name', 'type']) @@ -725,7 +725,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL): assert_props(Frob, ['f_id', 'f_type', 'f_name', ]) - # putting the discriminator column in exclude_properties, + # putting the discriminator column in exclude_properties, # very weird. As of 0.7.4 this re-maps it. 
class Foo(Person): pass @@ -841,8 +841,8 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL): self.tables.addresses, self.classes.Address) - m = mapper(Address, - addresses.join(email_bounces), + m = mapper(Address, + addresses.join(email_bounces), properties={'id':[addresses.c.id, email_bounces.c.id]} ) configure_mappers() @@ -1291,8 +1291,8 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL): eq_(User.uc_name.method2('x'), "method2") assert_raises_message( - AttributeError, - "Neither 'extendedproperty' object nor 'UCComparator' object has an attribute 'nonexistent'", + AttributeError, + "Neither 'extendedproperty' object nor 'UCComparator' object has an attribute 'nonexistent'", getattr, User.uc_name, 'nonexistent') # test compile @@ -1338,8 +1338,8 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL): }) assert_raises_message( - AttributeError, - "Neither 'InstrumentedAttribute' object nor 'MyComparator' object has an attribute 'nonexistent'", + AttributeError, + "Neither 'InstrumentedAttribute' object nor 'MyComparator' object has an attribute 'nonexistent'", getattr, User.name, "nonexistent") eq_(str((User.name == 'ed').compile(dialect=sa.engine.default.DefaultDialect())) , "lower(users.name) = lower(:lower_1)") @@ -1464,13 +1464,13 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL): }) assert_raises_message( - sa.orm.exc.UnmappedClassError, + sa.orm.exc.UnmappedClassError, "Class 'test.orm._fixtures.Address' is not mapped", sa.orm.configure_mappers) def test_unmapped_not_type_error(self): assert_raises_message( - sa.exc.ArgumentError, + sa.exc.ArgumentError, "Class object expected, got '5'.", class_mapper, 5 ) @@ -1567,8 +1567,8 @@ class DocumentTest(fixtures.TestBase): pass mapper(Foo, t1, properties={ - 'bars':relationship(Bar, - doc="bar relationship", + 'bars':relationship(Bar, + doc="bar relationship", backref=backref('foo',doc='foo relationship') ), 'foober':column_property(t1.c.col3, doc='alternate data col'), @@ -1705,7 +1705,7 @@ class OptionsTest(_fixtures.FixtureTest): self.sql_count_(1, go) def test_eager_degrade(self): - """An eager relationship automatically degrades to a lazy relationship + """An eager relationship automatically degrades to a lazy relationship if eager columns are not available""" Address, addresses, users, User = (self.classes.Address, @@ -1714,7 +1714,7 @@ class OptionsTest(_fixtures.FixtureTest): self.classes.User) mapper(User, users, properties=dict( - addresses = relationship(mapper(Address, addresses), + addresses = relationship(mapper(Address, addresses), lazy='joined', order_by=addresses.c.id))) sess = create_session() @@ -1996,7 +1996,7 @@ class ValidatorTest(_fixtures.FixtureTest): sess.flush() sess.expunge_all() eq_( - sess.query(User).filter_by(name='edward').one(), + sess.query(User).filter_by(name='edward').one(), User(name='edward', addresses=[Address(email_address='foo@bar.com')]) ) @@ -2027,7 +2027,7 @@ class ValidatorTest(_fixtures.FixtureTest): eq_( dict((k, v[0].__name__) for k, v in u_m.validators.items()), - {'name':'validate_name', + {'name':'validate_name', 'addresses':'validate_address'} ) @@ -2064,20 +2064,20 @@ class ValidatorTest(_fixtures.FixtureTest): u1.addresses = [a2, a3] eq_(canary, [ - ('name', 'ed', False), - ('name', 'mary', False), - ('name', 'mary', True), + ('name', 'ed', False), + ('name', 'mary', False), + ('name', 'mary', True), # append a1 - ('addresses', a1, False), + ('addresses', a1, False), # remove a1 - ('addresses', a1, True), + ('addresses', a1, True), # set 
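ValidatorTest above exercises the @validates decorator, which intercepts attribute sets and collection appends before they reach the instance and stores whatever the validator returns; the include_removes variant additionally receives removals, which is what the (value, is_remove) canary tuples record. A minimal sketch of a scalar validator, assuming a declarative stand-in:

from sqlalchemy import Column, Integer, String
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import validates

Base = declarative_base()

class User(Base):
    __tablename__ = 'users'
    id = Column(Integer, primary_key=True)
    name = Column(String(30))

    # runs on every assignment to .name; whatever it returns is stored
    @validates('name')
    def validate_name(self, key, value):
        assert '@' not in value
        return value.strip()

u = User()
u.name = '  ed '
print(u.name)   # 'ed'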
to [a1, a2] - this is two appends ('addresses', a1, False), ('addresses', a2, False), # set to [a2, a3] - this is a remove of a1, # append of a3. the appends are first. ('addresses', a3, False), - ('addresses', a1, True), - ] + ('addresses', a1, True), + ] ) class ComparatorFactoryTest(_fixtures.FixtureTest, AssertsCompiledSQL): @@ -2135,12 +2135,12 @@ class ComparatorFactoryTest(_fixtures.FixtureTest, AssertsCompiledSQL): comparator_factory=MyFactory) }) self.assert_compile( - User.name == 'ed', + User.name == 'ed', "foobar(users.name) = foobar(:foobar_1)", dialect=default.DefaultDialect()) self.assert_compile( - aliased(User).name == 'ed', + aliased(User).name == 'ed', "foobar(users_1.name) = foobar(:foobar_1)", dialect=default.DefaultDialect()) @@ -2164,7 +2164,7 @@ class ComparatorFactoryTest(_fixtures.FixtureTest, AssertsCompiledSQL): mapper(User, users) mapper(Address, addresses, properties={ - 'user':relationship(User, comparator_factory=MyFactory, + 'user':relationship(User, comparator_factory=MyFactory, backref=backref("addresses", comparator_factory=MyFactory2) ) } @@ -2472,9 +2472,9 @@ class DeferredTest(_fixtures.FixtureTest): order_select = sa.select([ - orders.c.id, - orders.c.user_id, - orders.c.address_id, + orders.c.id, + orders.c.user_id, + orders.c.address_id, orders.c.description, orders.c.isopen]).alias() mapper(Order, order_select, properties={ @@ -2529,7 +2529,7 @@ class SecondaryOptionsTest(fixtures.MappedTest): @classmethod def define_tables(cls, metadata): - Table("base", metadata, + Table("base", metadata, Column('id', Integer, primary_key=True), Column('type', String(50), nullable=False) ) @@ -2562,11 +2562,11 @@ class SecondaryOptionsTest(fixtures.MappedTest): mapper(Base, base, polymorphic_on=base.c.type, properties={ 'related':relationship(Related, uselist=False) }) - mapper(Child1, child1, inherits=Base, - polymorphic_identity='child1', + mapper(Child1, child1, inherits=Base, + polymorphic_identity='child1', properties={ - 'child2':relationship(Child2, - primaryjoin=child1.c.child2id==base.c.id, + 'child2':relationship(Child2, + primaryjoin=child1.c.child2id==base.c.id, foreign_keys=child1.c.child2id) }) mapper(Child2, child2, inherits=Base, polymorphic_identity='child2') @@ -2620,18 +2620,18 @@ class SecondaryOptionsTest(fixtures.MappedTest): eq_( child1s.all(), [ - Child1(id=1, related=Related(id=1)), - Child1(id=2, related=Related(id=2)), + Child1(id=1, related=Related(id=1)), + Child1(id=2, related=Related(id=2)), Child1(id=3, related=Related(id=3)) ] ) self.assert_sql_count(testing.db, go, 1) c1 = child1s[0] - + self.assert_sql_execution( - testing.db, - lambda: c1.child2, + testing.db, + lambda: c1.child2, CompiledSQL( "SELECT child2.id AS child2_id, base.id AS base_id, base.type AS base_type " "FROM base JOIN child2 ON base.id = child2.id " @@ -2657,8 +2657,8 @@ class SecondaryOptionsTest(fixtures.MappedTest): c1 = child1s[0] self.assert_sql_execution( - testing.db, - lambda: c1.child2, + testing.db, + lambda: c1.child2, CompiledSQL( "SELECT child2.id AS child2_id, base.id AS base_id, base.type AS base_type " "FROM base JOIN child2 ON base.id = child2.id WHERE base.id = :param_1", @@ -2691,8 +2691,8 @@ class SecondaryOptionsTest(fixtures.MappedTest): # this *does* joinedload self.assert_sql_execution( - testing.db, - lambda: c1.child2, + testing.db, + lambda: c1.child2, CompiledSQL( "SELECT child2.id AS child2_id, base.id AS base_id, base.type AS base_type, " "related_1.id AS related_1_id FROM base JOIN child2 ON base.id = child2.id " @@ -3033,8 
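ComparatorFactoryTest above swaps in a comparator_factory so that User.name == 'ed' compiles to foobar(users.name) = foobar(:foobar_1). A minimal sketch of a comparator that wraps both operands in a SQL function, following the pattern the assertions imply (foobar is an arbitrary function name):

from sqlalchemy import Column, Integer, MetaData, String, Table, func
from sqlalchemy.orm import column_property, mapper
from sqlalchemy.orm.properties import ColumnProperty

class MyFactory(ColumnProperty.Comparator):
    # wrap both operands of == in an arbitrary SQL function
    def __eq__(self, other):
        return func.foobar(self.__clause_element__()) == func.foobar(other)

metadata = MetaData()
users = Table('users', metadata,
    Column('id', Integer, primary_key=True),
    Column('name', String(30)))

class User(object):
    pass

mapper(User, users, properties={
    'name': column_property(users.c.name, comparator_factory=MyFactory),
})

print(User.name == 'ed')
# foobar(users.name) = foobar(:foobar_1)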
+3033,8 @@ class RequirementsTest(fixtures.MappedTest): ]) s.commit() eq_( - [(h1.value, h1.id, h2.value, h2.id) - for h1, h2 in + [(h1.value, h1.id, h2.value, h2.id) + for h1, h2 in s.query(H1, H2).join(H1.h2s).order_by(H1.id, H2.id)], [ ('abc', 1, 'abc', 1), diff --git a/test/orm/test_merge.py b/test/orm/test_merge.py index 5885a4bda7..8878152749 100644 --- a/test/orm/test_merge.py +++ b/test/orm/test_merge.py @@ -269,7 +269,7 @@ class MergeTest(_fixtures.FixtureTest): sess = create_session() # merge empty stuff. goes in as NULL. - # not sure what this was originally trying to + # not sure what this was originally trying to # test. u1 = sess.merge(User(id=1)) sess.flush() @@ -307,7 +307,7 @@ class MergeTest(_fixtures.FixtureTest): sess.flush() # blow it away from u5, but don't - # mark as expired. so it would just + # mark as expired. so it would just # be blank. del u5.data @@ -552,7 +552,7 @@ class MergeTest(_fixtures.FixtureTest): sess2 = create_session() a2 = sess2.merge(a1) eq_( - attributes.get_history(a2, 'user'), + attributes.get_history(a2, 'user'), ([u2], (), ()) ) assert a2 in sess2.dirty @@ -562,7 +562,7 @@ class MergeTest(_fixtures.FixtureTest): sess2 = create_session() a2 = sess2.merge(a1, load=False) eq_( - attributes.get_history(a2, 'user'), + attributes.get_history(a2, 'user'), ((), [u1], ()) ) assert a2 not in sess2.dirty @@ -1099,7 +1099,7 @@ class MergeTest(_fixtures.FixtureTest): class M2ONoUseGetLoadingTest(fixtures.MappedTest): - """Merge a one-to-many. The many-to-one on the other side is set up + """Merge a one-to-many. The many-to-one on the other side is set up so that use_get is False. See if skipping the "m2o" merge vs. doing it saves on SQL calls. @@ -1130,11 +1130,11 @@ class M2ONoUseGetLoadingTest(fixtures.MappedTest): user, address = cls.tables.user, cls.tables.address mapper(User, user, properties={ 'addresses':relationship(Address, backref= - backref('user', + backref('user', # needlessly complex primaryjoin so that the # use_get flag is False primaryjoin=and_( - user.c.id==address.c.user_id, + user.c.id==address.c.user_id, user.c.id==user.c.id ) ) @@ -1149,20 +1149,20 @@ class M2ONoUseGetLoadingTest(fixtures.MappedTest): User, Address = cls.classes.User, cls.classes.Address s = Session() s.add_all([ - User(id=1, name='u1', addresses=[Address(id=1, email='a1'), + User(id=1, name='u1', addresses=[Address(id=1, email='a1'), Address(id=2, email='a2')]) ]) s.commit() # "persistent" - we get at an Address that was already present. - # With the "skip bidirectional" check removed, the "set" emits SQL + # With the "skip bidirectional" check removed, the "set" emits SQL # for the "previous" version in any case, # address.user_id is 1, you get a load. 
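# A minimal standalone sketch, with assumed table and class names, of the
# mapping pattern this fixture builds: the redundant user.c.id==user.c.id
# clause makes the primaryjoin more complex than a plain foreign key
# equality, so the many-to-one lazy loader cannot satisfy it with a
# simple identity-map get() and the use_get flag stays False.
from sqlalchemy import MetaData, Table, Column, Integer, String, \
    ForeignKey, and_
from sqlalchemy.orm import mapper, relationship, backref

metadata = MetaData()
user = Table('user', metadata,
    Column('id', Integer, primary_key=True),
    Column('name', String(50)))
address = Table('address', metadata,
    Column('id', Integer, primary_key=True),
    Column('user_id', Integer, ForeignKey('user.id')),
    Column('email', String(50)))

class User(object): pass
class Address(object): pass

mapper(User, user, properties={
    'addresses': relationship(Address, backref=backref('user',
        # needlessly complex on purpose; a plain FK equality would
        # leave use_get True and skip the SQL round trip on merge
        primaryjoin=and_(user.c.id == address.c.user_id,
                         user.c.id == user.c.id)))
})
mapper(Address, address)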
def test_persistent_access_none(self): User, Address = self.classes.User, self.classes.Address s = Session() def go(): - u1 = User(id=1, + u1 = User(id=1, addresses =[Address(id=1), Address(id=2)] ) u2 = s.merge(u1) @@ -1172,7 +1172,7 @@ class M2ONoUseGetLoadingTest(fixtures.MappedTest): User, Address = self.classes.User, self.classes.Address s = Session() def go(): - u1 = User(id=1, + u1 = User(id=1, addresses =[Address(id=1), Address(id=2)] ) u2 = s.merge(u1) @@ -1184,7 +1184,7 @@ class M2ONoUseGetLoadingTest(fixtures.MappedTest): User, Address = self.classes.User, self.classes.Address s = Session() def go(): - u1 = User(id=1, + u1 = User(id=1, addresses =[Address(id=1), Address(id=2)] ) u2 = s.merge(u1) @@ -1203,8 +1203,8 @@ class M2ONoUseGetLoadingTest(fixtures.MappedTest): User, Address = self.classes.User, self.classes.Address s = Session() def go(): - u1 = User(id=1, - addresses =[Address(id=1), Address(id=2), + u1 = User(id=1, + addresses =[Address(id=1), Address(id=2), Address(id=3, email='a3')] ) u2 = s.merge(u1) @@ -1216,8 +1216,8 @@ class M2ONoUseGetLoadingTest(fixtures.MappedTest): User, Address = self.classes.User, self.classes.Address s = Session() def go(): - u1 = User(id=1, - addresses =[Address(id=1), Address(id=2), + u1 = User(id=1, + addresses =[Address(id=1), Address(id=2), Address(id=3, email='a3')] ) u2 = s.merge(u1) @@ -1230,7 +1230,7 @@ class M2ONoUseGetLoadingTest(fixtures.MappedTest): class MutableMergeTest(fixtures.MappedTest): @classmethod def define_tables(cls, metadata): - Table("data", metadata, + Table("data", metadata, Column('id', Integer, primary_key=True, test_needs_autoincrement=True), Column('data', PickleType(comparator=operator.eq)) ) @@ -1257,7 +1257,7 @@ class MutableMergeTest(fixtures.MappedTest): class CompositeNullPksTest(fixtures.MappedTest): @classmethod def define_tables(cls, metadata): - Table("data", metadata, + Table("data", metadata, Column('pk1', String(10), primary_key=True), Column('pk2', String(10), primary_key=True), ) @@ -1326,7 +1326,7 @@ class LoadOnPendingTest(fixtures.MappedTest): r = self.classes.Rock(id=0, description='moldy') r.bug = bug m = self.sess.merge(r) - # we've already passed ticket #2374 problem since merge() returned, + # we've already passed ticket #2374 problem since merge() returned, # but for good measure: assert m is not r eq_(m,r) @@ -1344,13 +1344,13 @@ class LoadOnPendingTest(fixtures.MappedTest): self._merge_delete_orphan_o2o_with(self.classes.Bug(id=1)) class PolymorphicOnTest(fixtures.MappedTest): - """Test merge() of polymorphic object when polymorphic_on + """Test merge() of polymorphic object when polymorphic_on isn't a Column""" @classmethod def define_tables(cls, metadata): Table('employees', metadata, - Column('employee_id', Integer, primary_key=True, + Column('employee_id', Integer, primary_key=True, test_needs_autoincrement=True), Column('type', String(1), nullable=False), Column('data', String(50)), @@ -1366,9 +1366,9 @@ class PolymorphicOnTest(fixtures.MappedTest): pass def _setup_polymorphic_on_mappers(self): - employee_mapper = mapper(self.classes.Employee, + employee_mapper = mapper(self.classes.Employee, self.tables.employees, - polymorphic_on=case(value=self.tables.employees.c.type, + polymorphic_on=case(value=self.tables.employees.c.type, whens={ 'E': 'employee', 'M': 'manager', @@ -1388,7 +1388,7 @@ class PolymorphicOnTest(fixtures.MappedTest): """ self._setup_polymorphic_on_mappers() - m = self.classes.Manager(employee_id=55, type='M', + m = self.classes.Manager(employee_id=55, 
type='M', data='original data') self.sess.add(m) self.sess.commit() @@ -1397,7 +1397,7 @@ class PolymorphicOnTest(fixtures.MappedTest): m = self.classes.Manager(employee_id=55, data='updated data') merged = self.sess.merge(m) - # we've already passed ticket #2449 problem since + # we've already passed ticket #2449 problem since # merge() returned, but for good measure: assert m is not merged eq_(m,merged) diff --git a/test/orm/test_naturalpks.py b/test/orm/test_naturalpks.py index f0f3ebb211..402cfb7337 100644 --- a/test/orm/test_naturalpks.py +++ b/test/orm/test_naturalpks.py @@ -33,20 +33,20 @@ class NaturalPKTest(fixtures.MappedTest): addresses = Table('addresses', metadata, Column('email', String(50), primary_key=True), - Column('username', String(50), + Column('username', String(50), ForeignKey('users.username', **fk_args)), test_needs_fk=True) items = Table('items', metadata, Column('itemname', String(50), primary_key=True), - Column('description', String(100)), + Column('description', String(100)), test_needs_fk=True) users_to_items = Table('users_to_items', metadata, - Column('username', String(50), + Column('username', String(50), ForeignKey('users.username', **fk_args), primary_key=True), - Column('itemname', String(50), + Column('itemname', String(50), ForeignKey('items.itemname', **fk_args), primary_key=True), test_needs_fk=True) @@ -168,15 +168,15 @@ class NaturalPKTest(fixtures.MappedTest): def go(): sess.flush() if not passive_updates: - # test passive_updates=False; + # test passive_updates=False; #load addresses, update user, update 2 addresses - self.assert_sql_count(testing.db, go, 4) + self.assert_sql_count(testing.db, go, 4) else: # test passive_updates=True; update user - self.assert_sql_count(testing.db, go, 1) + self.assert_sql_count(testing.db, go, 1) sess.expunge_all() assert User(username='jack', addresses=[ - Address(username='jack'), + Address(username='jack'), Address(username='jack')]) == \ sess.query(User).get('jack') @@ -349,9 +349,9 @@ class NaturalPKTest(fixtures.MappedTest): def test_manytomany_passive(self): self._test_manytomany(True) - # mysqldb executemany() of the association table fails to + # mysqldb executemany() of the association table fails to # report the correct row count - @testing.fails_if(lambda: testing.against('mysql') + @testing.fails_if(lambda: testing.against('mysql') and not testing.against('+zxjdbc')) def test_manytomany_nonpassive(self): self._test_manytomany(False) @@ -489,7 +489,7 @@ class ReversePKsTest(fixtures.MappedTest): session.add(a_editable) session.commit() - # do the switch in both directions - + # do the switch in both directions - # one or the other should raise the error # based on platform dictionary ordering a_published.status = ARCHIVED @@ -509,9 +509,9 @@ class ReversePKsTest(fixtures.MappedTest): class SelfReferentialTest(fixtures.MappedTest): - # mssql, mysql don't allow + # mssql, mysql don't allow # ON UPDATE on self-referential keys - __unsupported_on__ = ('mssql','mysql') + __unsupported_on__ = ('mssql','mysql') @classmethod def define_tables(cls, metadata): @@ -596,8 +596,8 @@ class SelfReferentialTest(fixtures.MappedTest): Node, nodes = self.classes.Node, self.tables.nodes mapper(Node, nodes, properties={ - 'parentnode':relationship(Node, - remote_side=nodes.c.name, + 'parentnode':relationship(Node, + remote_side=nodes.c.name, passive_updates=passive) } ) @@ -686,7 +686,7 @@ class NonPKCascadeTest(fixtures.MappedTest): u1.username = 'ed' sess.flush() assert u1.addresses[0].username == 'ed' - 
eq_(sa.select([addresses.c.username]).execute().fetchall(), + eq_(sa.select([addresses.c.username]).execute().fetchall(), [('ed',), ('ed',)]) sess.expunge_all() @@ -698,14 +698,14 @@ class NonPKCascadeTest(fixtures.MappedTest): def go(): sess.flush() if not passive_updates: - # test passive_updates=False; load addresses, + # test passive_updates=False; load addresses, # update user, update 2 addresses - self.assert_sql_count(testing.db, go, 4) + self.assert_sql_count(testing.db, go, 4) else: # test passive_updates=True; update user self.assert_sql_count(testing.db, go, 1) sess.expunge_all() - assert User(username='jack', + assert User(username='jack', addresses=[Address(username='jack'), Address(username='jack')]) == \ sess.query(User).get(u1.id) @@ -719,7 +719,7 @@ class NonPKCascadeTest(fixtures.MappedTest): a1 = sess.query(Address).get(a1.id) eq_(a1.username, None) - eq_(sa.select([addresses.c.username]).execute().fetchall(), + eq_(sa.select([addresses.c.username]).execute().fetchall(), [(None,), (None,)]) u1 = sess.query(User).get(u1.id) @@ -742,7 +742,7 @@ class CascadeToFKPKTest(fixtures.MappedTest, testing.AssertsCompiledSQL): test_needs_fk=True) Table('addresses', metadata, - Column('username', String(50), + Column('username', String(50), ForeignKey('users.username', **fk_args), primary_key=True ), @@ -777,7 +777,7 @@ class CascadeToFKPKTest(fixtures.MappedTest, testing.AssertsCompiledSQL): def _test_o2m_change(self, passive_updates): """Change the PK of a related entity to another. - "on update cascade" is not involved here, so the mapper has + "on update cascade" is not involved here, so the mapper has to do the UPDATE itself. """ @@ -945,7 +945,7 @@ class CascadeToFKPKTest(fixtures.MappedTest, testing.AssertsCompiledSQL): def _test_onetomany(self, passive_updates): """Change the PK of a related entity via foreign key cascade. - For databases that require "on update cascade", the mapper + For databases that require "on update cascade", the mapper has to identify the row by the new value, not the old, when it does the update. 
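# A short sketch, with assumed names, of the passive_updates switch these
# fixtures parametrize: passive_updates=True trusts the database's
# ON UPDATE CASCADE to rewrite referencing rows when a natural primary
# key changes, while passive_updates=False has the ORM load the related
# rows and emit the UPDATE statements itself.
from sqlalchemy import MetaData, Table, Column, String, ForeignKey
from sqlalchemy.orm import mapper, relationship

metadata = MetaData()
users = Table('users', metadata,
    Column('username', String(50), primary_key=True))
addresses = Table('addresses', metadata,
    Column('email', String(50), primary_key=True),
    Column('username', String(50),
           ForeignKey('users.username', onupdate='CASCADE')))

class User(object): pass
class Address(object): pass

mapper(User, users, properties={
    # set passive_updates=False on backends that lack
    # ON UPDATE CASCADE so the mapper does the cascading itself
    'addresses': relationship(Address, passive_updates=True)
})
mapper(Address, addresses)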
@@ -969,7 +969,7 @@ class CascadeToFKPKTest(fixtures.MappedTest, testing.AssertsCompiledSQL): sess.flush() eq_(a1.username, 'ed') eq_(a2.username, 'ed') - eq_(sa.select([addresses.c.username]).execute().fetchall(), + eq_(sa.select([addresses.c.username]).execute().fetchall(), [('ed',), ('ed',)]) u1.username = 'jack' @@ -986,7 +986,7 @@ class JoinedInheritanceTest(fixtures.MappedTest): """Test cascades of pk->pk/fk on joined table inh.""" # mssql doesn't allow ON UPDATE on self-referential keys - __unsupported_on__ = ('mssql',) + __unsupported_on__ = ('mssql',) __requires__ = 'skip_mysql_on_windows', @@ -1006,13 +1006,13 @@ class JoinedInheritanceTest(fixtures.MappedTest): Column('name', String(50), ForeignKey('person.name', **fk_args), primary_key=True), Column('primary_language', String(50)), - Column('boss_name', String(50), + Column('boss_name', String(50), ForeignKey('manager.name', **fk_args)), test_needs_fk=True ) Table('manager', metadata, - Column('name', String(50), + Column('name', String(50), ForeignKey('person.name', **fk_args), primary_key=True), Column('paperwork', String(50)), @@ -1057,12 +1057,12 @@ class JoinedInheritanceTest(fixtures.MappedTest): self.classes.Engineer, self.tables.engineer) - mapper(Person, person, polymorphic_on=person.c.type, + mapper(Person, person, polymorphic_on=person.c.type, polymorphic_identity='person', passive_updates=passive_updates) mapper(Engineer, engineer, inherits=Person, polymorphic_identity='engineer', properties={ - 'boss':relationship(Manager, + 'boss':relationship(Manager, primaryjoin=manager.c.name==engineer.c.boss_name, passive_updates=passive_updates ) @@ -1087,12 +1087,12 @@ class JoinedInheritanceTest(fixtures.MappedTest): self.classes.Engineer, self.tables.engineer) - mapper(Person, person, polymorphic_on=person.c.type, + mapper(Person, person, polymorphic_on=person.c.type, polymorphic_identity='person', passive_updates=passive_updates) mapper(Engineer, engineer, inherits=Person, polymorphic_identity='engineer', properties={ - 'boss':relationship(Manager, + 'boss':relationship(Manager, primaryjoin=manager.c.name==engineer.c.boss_name, passive_updates=passive_updates ) diff --git a/test/orm/test_pickled.py b/test/orm/test_pickled.py index f2d292832e..44e572e835 100644 --- a/test/orm/test_pickled.py +++ b/test/orm/test_pickled.py @@ -183,7 +183,7 @@ class PickleTest(fixtures.MappedTest): sess = Session() u1 = User(name='ed', addresses=[ Address( - email_address='ed@bar.com', + email_address='ed@bar.com', ) ]) @@ -217,7 +217,7 @@ class PickleTest(fixtures.MappedTest): sess.expunge_all() u1 = sess.query(User).\ - options(sa.orm.defer('name'), + options(sa.orm.defer('name'), sa.orm.defer('addresses.email_address')).\ get(u1.id) assert 'name' not in u1.__dict__ @@ -303,16 +303,16 @@ class PickleTest(fixtures.MappedTest): u2 = pickle.loads(pickle.dumps(u1)) def test_collection_setstate(self): - """test a particular cycle that requires CollectionAdapter + """test a particular cycle that requires CollectionAdapter to not rely upon InstanceState to deserialize.""" m = MetaData() - c1 = Table('c1', m, - Column('parent_id', String, + c1 = Table('c1', m, + Column('parent_id', String, ForeignKey('p.id'), primary_key=True) ) c2 = Table('c2', m, - Column('parent_id', String, + Column('parent_id', String, ForeignKey('p.id'), primary_key=True) ) p = Table('p', m, @@ -352,7 +352,7 @@ class PickleTest(fixtures.MappedTest): mapper(User, users, properties={ 'addresses':relationship( - Address, + Address, collection_class= 
attribute_mapped_collection('email_address') ) @@ -363,7 +363,7 @@ class PickleTest(fixtures.MappedTest): for loads, dumps in picklers(): repickled = loads(dumps(u1)) eq_(u1.addresses, repickled.addresses) - eq_(repickled.addresses['email1'], + eq_(repickled.addresses['email1'], Address(email_address="email1")) def test_column_mapped_collection(self): @@ -371,7 +371,7 @@ class PickleTest(fixtures.MappedTest): mapper(User, users, properties={ 'addresses':relationship( - Address, + Address, collection_class= column_mapped_collection( addresses.c.email_address) @@ -386,7 +386,7 @@ class PickleTest(fixtures.MappedTest): for loads, dumps in picklers(): repickled = loads(dumps(u1)) eq_(u1.addresses, repickled.addresses) - eq_(repickled.addresses['email1'], + eq_(repickled.addresses['email1'], Address(email_address="email1")) def test_composite_column_mapped_collection(self): @@ -394,7 +394,7 @@ class PickleTest(fixtures.MappedTest): mapper(User, users, properties={ 'addresses':relationship( - Address, + Address, collection_class= column_mapped_collection([ addresses.c.id, @@ -410,7 +410,7 @@ class PickleTest(fixtures.MappedTest): for loads, dumps in picklers(): repickled = loads(dumps(u1)) eq_(u1.addresses, repickled.addresses) - eq_(repickled.addresses[(1, 'email1')], + eq_(repickled.addresses[(1, 'email1')], Address(id=1, email_address="email1")) class PolymorphicDeferredTest(fixtures.MappedTest): @@ -534,7 +534,7 @@ class CustomSetupTeardownTest(fixtures.MappedTest): test_needs_fk=True ) def test_rebuild_state(self): - """not much of a 'test', but illustrate how to + """not much of a 'test', but illustrate how to remove instance-level state before pickling. """ diff --git a/test/orm/test_query.py b/test/orm/test_query.py index 3f1035599d..33bc1edb2f 100644 --- a/test/orm/test_query.py +++ b/test/orm/test_query.py @@ -60,7 +60,7 @@ class QueryTest(_fixtures.FixtureTest): mapper(Keyword, keywords) mapper(Node, nodes, properties={ - 'children':relationship(Node, + 'children':relationship(Node, backref=backref('parent', remote_side=[nodes.c.id]) ) }) @@ -213,7 +213,7 @@ class GetTest(QueryTest): assert_raises(sa_exc.InvalidRequestError, q.get, (5, )) def test_get_null_pk(self): - """test that a mapping which can have None in a + """test that a mapping which can have None in a PK (i.e. map to an outerjoin) works with get().""" users, addresses = self.tables.users, self.tables.addresses @@ -354,7 +354,7 @@ class GetTest(QueryTest): s = create_session() q = s.query(User).filter(User.id==1) eq_( - str(q).replace('\n',''), + str(q).replace('\n',''), 'SELECT users.id AS users_id, users.name AS users_name FROM users WHERE users.id = ?' 
) @@ -469,21 +469,21 @@ class InvalidGenerationsTest(QueryTest, AssertsCompiledSQL): s = create_session() q = s.query(User).order_by(User.id) - self.assert_compile(q, + self.assert_compile(q, "SELECT users.id AS users_id, users.name AS users_name FROM users ORDER BY users.id", use_default_dialect=True) assert_raises(sa_exc.InvalidRequestError, q._no_select_modifiers, "foo") q = q.order_by(None) - self.assert_compile(q, + self.assert_compile(q, "SELECT users.id AS users_id, users.name AS users_name FROM users", use_default_dialect=True) assert_raises(sa_exc.InvalidRequestError, q._no_select_modifiers, "foo") q = q.order_by(False) - self.assert_compile(q, + self.assert_compile(q, "SELECT users.id AS users_id, users.name AS users_name FROM users", use_default_dialect=True) @@ -535,7 +535,7 @@ class OperatorTest(QueryTest, AssertsCompiledSQL): create_session().query(User) for (py_op, sql_op) in ((operator.add, '+'), (operator.mul, '*'), - (operator.sub, '-'), + (operator.sub, '-'), # Py3k #(operator.truediv, '/'), # Py2K @@ -616,7 +616,7 @@ class OperatorTest(QueryTest, AssertsCompiledSQL): def test_relationship(self): User, Address = self.classes.User, self.classes.Address - self._test(User.addresses.any(Address.id==17), + self._test(User.addresses.any(Address.id==17), "EXISTS (SELECT 1 " "FROM addresses " "WHERE users.id = addresses.user_id AND addresses.id = :id_1)" @@ -640,14 +640,14 @@ class OperatorTest(QueryTest, AssertsCompiledSQL): # auto self-referential aliasing self._test( - Node.children.any(Node.data=='n1'), + Node.children.any(Node.data=='n1'), "EXISTS (SELECT 1 FROM nodes AS nodes_1 WHERE " "nodes.id = nodes_1.parent_id AND nodes_1.data = :data_1)" ) # needs autoaliasing self._test( - Node.children==None, + Node.children==None, "NOT (EXISTS (SELECT 1 FROM nodes AS nodes_1 WHERE nodes.id = nodes_1.parent_id))" ) @@ -662,44 +662,44 @@ class OperatorTest(QueryTest, AssertsCompiledSQL): ) self._test( - nalias.children==None, + nalias.children==None, "NOT (EXISTS (SELECT 1 FROM nodes WHERE nodes_1.id = nodes.parent_id))" ) self._test( - nalias.children.any(Node.data=='some data'), + nalias.children.any(Node.data=='some data'), "EXISTS (SELECT 1 FROM nodes WHERE " "nodes_1.id = nodes.parent_id AND nodes.data = :data_1)") # fails, but I think I want this to fail #self._test( - # Node.children.any(nalias.data=='some data'), + # Node.children.any(nalias.data=='some data'), # "EXISTS (SELECT 1 FROM nodes AS nodes_1 WHERE " # "nodes.id = nodes_1.parent_id AND nodes_1.data = :data_1)" # ) self._test( - nalias.parent.has(Node.data=='some data'), + nalias.parent.has(Node.data=='some data'), "EXISTS (SELECT 1 FROM nodes WHERE nodes.id = nodes_1.parent_id AND nodes.data = :data_1)" ) self._test( - Node.parent.has(Node.data=='some data'), + Node.parent.has(Node.data=='some data'), "EXISTS (SELECT 1 FROM nodes AS nodes_1 WHERE nodes_1.id = nodes.parent_id AND nodes_1.data = :data_1)" ) self._test( - Node.parent == Node(id=7), + Node.parent == Node(id=7), ":param_1 = nodes.parent_id" ) self._test( - nalias.parent == Node(id=7), + nalias.parent == Node(id=7), ":param_1 = nodes_1.parent_id" ) self._test( - nalias.parent != Node(id=7), + nalias.parent != Node(id=7), 'nodes_1.parent_id != :parent_id_1 OR nodes_1.parent_id IS NULL' ) @@ -774,7 +774,7 @@ class ExpressionTest(QueryTest, AssertsCompiledSQL): self.classes.Address) session = create_session() - s = session.query(User).filter(and_(addresses.c.email_address == bindparam('emailad'), + s = 
session.query(User).filter(and_(addresses.c.email_address == bindparam('emailad'), Address.user_id==User.id)).statement l = list(session.query(User).instances(s.execute(emailad = 'jack@bean.com'))) @@ -845,7 +845,7 @@ class ExpressionTest(QueryTest, AssertsCompiledSQL): q = session.query(User.id).filter(User.id==7).label('foo') self.assert_compile( - session.query(q), + session.query(q), "SELECT (SELECT users.id FROM users WHERE users.id = :id_1) AS foo" ) @@ -916,7 +916,7 @@ class ExpressionTest(QueryTest, AssertsCompiledSQL): s = create_session() - # TODO: do we want aliased() to detect a query and convert to subquery() + # TODO: do we want aliased() to detect a query and convert to subquery() # automatically ? q1 = s.query(Address).filter(Address.email_address=='jack@bean.com') adalias = aliased(Address, q1.subquery()) @@ -1209,9 +1209,9 @@ class FilterTest(QueryTest, AssertsCompiledSQL): ) # o2o - eq_([Address(id=1), Address(id=3), Address(id=4)], + eq_([Address(id=1), Address(id=3), Address(id=4)], sess.query(Address).filter(Address.dingaling==None).order_by(Address.id).all()) - eq_([Address(id=1), Address(id=3), Address(id=4)], + eq_([Address(id=1), Address(id=3), Address(id=4)], sess.query(Address).filter(Address.dingaling==null()).order_by(Address.id).all()) eq_([Address(id=2), Address(id=5)], sess.query(Address).filter(Address.dingaling != None).order_by(Address.id).all()) eq_([Address(id=2), Address(id=5)], sess.query(Address).filter(Address.dingaling != null()).order_by(Address.id).all()) @@ -1274,11 +1274,11 @@ class SetOpsTest(QueryTest, AssertsCompiledSQL): ed = s.query(User).filter(User.name=='ed') jack = s.query(User).filter(User.name=='jack') - eq_(fred.union(ed).order_by(User.name).all(), + eq_(fred.union(ed).order_by(User.name).all(), [User(name='ed'), User(name='fred')] ) - eq_(fred.union(ed, jack).order_by(User.name).all(), + eq_(fred.union(ed, jack).order_by(User.name).all(), [User(name='ed'), User(name='fred'), User(name='jack')] ) @@ -1304,7 +1304,7 @@ class SetOpsTest(QueryTest, AssertsCompiledSQL): ) def test_union_literal_expressions_compile(self): - """test that column expressions translate during + """test that column expressions translate during the _from_statement() portion of union(), others""" User = self.classes.User @@ -1344,13 +1344,13 @@ class SetOpsTest(QueryTest, AssertsCompiledSQL): for q in (q3.order_by(User.id, "anon_1_anon_2"), q6.order_by(User.id, "foo")): eq_(q.all(), [ - (User(id=7, name=u'jack'), u'x'), - (User(id=7, name=u'jack'), u'y'), - (User(id=8, name=u'ed'), u'x'), - (User(id=8, name=u'ed'), u'y'), - (User(id=9, name=u'fred'), u'x'), - (User(id=9, name=u'fred'), u'y'), - (User(id=10, name=u'chuck'), u'x'), + (User(id=7, name=u'jack'), u'x'), + (User(id=7, name=u'jack'), u'y'), + (User(id=8, name=u'ed'), u'x'), + (User(id=8, name=u'ed'), u'y'), + (User(id=9, name=u'fred'), u'x'), + (User(id=9, name=u'fred'), u'y'), + (User(id=10, name=u'chuck'), u'x'), (User(id=10, name=u'chuck'), u'y') ] ) @@ -1444,11 +1444,11 @@ class SetOpsTest(QueryTest, AssertsCompiledSQL): fred = s.query(User).filter(User.name=='fred') ed = s.query(User).filter(User.name=='ed') jack = s.query(User).filter(User.name=='jack') - eq_(fred.intersect(ed, jack).all(), + eq_(fred.intersect(ed, jack).all(), [] ) - eq_(fred.union(ed).intersect(ed.union(jack)).all(), + eq_(fred.union(ed).intersect(ed.union(jack)).all(), [User(name='ed')] ) @@ -1463,9 +1463,9 @@ class SetOpsTest(QueryTest, AssertsCompiledSQL): def go(): eq_( - 
fred.union(ed).order_by(User.name).options(joinedload(User.addresses)).all(), + fred.union(ed).order_by(User.name).options(joinedload(User.addresses)).all(), [ - User(name='ed', addresses=[Address(), Address(), Address()]), + User(name='ed', addresses=[Address(), Address(), Address()]), User(name='fred', addresses=[Address()]) ] ) @@ -1511,7 +1511,7 @@ class CountTest(QueryTest): s = create_session() # '*' is favored here as the most common character, # it is reported that Informix doesn't like count(1), - # rumors about Oracle preferring count(1) don't appear + # rumors about Oracle preferring count(1) don't appear # to be well founded. self.assert_sql_execution( testing.db, @@ -1520,7 +1520,7 @@ class CountTest(QueryTest): "SELECT count(*) AS count_1 FROM " "(SELECT users.id AS users_id, users.name " "AS users_name FROM users) AS anon_1", - {} + {} ) ) @@ -1581,9 +1581,9 @@ class DistinctTest(QueryTest): create_session().query(User).order_by(User.id).distinct().all() ) eq_( - [User(id=7), User(id=9), User(id=8),User(id=10)], + [User(id=7), User(id=9), User(id=8),User(id=10)], create_session().query(User).distinct().order_by(desc(User.name)).all() - ) + ) def test_joined(self): """test that orderbys from a joined table get placed into the columns clause when DISTINCT is used""" @@ -1927,8 +1927,8 @@ class SynonymTest(QueryTest): options(joinedload(User.orders_syn)).all() eq_(result, [ User(id=7, name='jack', orders=[ - Order(description=u'order 1'), - Order(description=u'order 3'), + Order(description=u'order 1'), + Order(description=u'order 3'), Order(description=u'order 5') ]) ]) @@ -1943,8 +1943,8 @@ class SynonymTest(QueryTest): options(joinedload(User.orders_syn_2)).all() eq_(result, [ User(id=7, name='jack', orders=[ - Order(description=u'order 1'), - Order(description=u'order 3'), + Order(description=u'order 1'), + Order(description=u'order 3'), Order(description=u'order 5') ]) ]) @@ -1959,8 +1959,8 @@ class SynonymTest(QueryTest): options(joinedload('orders_syn_2')).all() eq_(result, [ User(id=7, name='jack', orders=[ - Order(description=u'order 1'), - Order(description=u'order 3'), + Order(description=u'order 1'), + Order(description=u'order 3'), Order(description=u'order 5') ]) ]) @@ -1999,7 +1999,7 @@ class SynonymTest(QueryTest): u1 = q.filter_by(**{nameprop:'jack'}).one() o = sess.query(Order).with_parent(u1, property=orderprop).all() - assert [Order(description="order 1"), + assert [Order(description="order 1"), Order(description="order 3"), Order(description="order 5")] == o @@ -2056,7 +2056,7 @@ class ImmediateTest(_fixtures.FixtureTest): sess.query(User, Address).join(User.addresses).one) # this result returns multiple rows, the first - # two rows being the same. but uniquing is + # two rows being the same. but uniquing is # not applied for a column based result. assert_raises(sa.orm.exc.MultipleResultsFound, sess.query(User.id). @@ -2065,10 +2065,10 @@ class ImmediateTest(_fixtures.FixtureTest): order_by(User.id). one) - # test that a join which ultimately returns - # multiple identities across many rows still - # raises, even though the first two rows are of - # the same identity and unique filtering + # test that a join which ultimately returns + # multiple identities across many rows still + # raises, even though the first two rows are of + # the same identity and unique filtering # is applied ([ticket:1688]) assert_raises(sa.orm.exc.MultipleResultsFound, sess.query(User). 
@@ -2170,7 +2170,7 @@ class OptionsTest(QueryTest): def _assert_path_result(self, opt, q, paths, mappers): eq_( opt._get_paths(q, False), - ([self._make_path(p) for p in paths], + ([self._make_path(p) for p in paths], [class_mapper(c) for c in mappers]) ) @@ -2222,10 +2222,10 @@ class OptionsTest(QueryTest): opt = self._option_fixture("orders.items.keywords") self._assert_path_result(opt, q, [ - (User, 'orders'), + (User, 'orders'), (User, 'orders', Order, 'items'), (User, 'orders', Order, 'items', Item, 'keywords') - ], + ], [User, Order, Item]) def test_path_multilevel_attribute(self): @@ -2238,10 +2238,10 @@ class OptionsTest(QueryTest): opt = self._option_fixture(User.orders, Order.items, Item.keywords) self._assert_path_result(opt, q, [ - (User, 'orders'), + (User, 'orders'), (User, 'orders', Order, 'items'), (User, 'orders', Order, 'items', Item, 'keywords') - ], + ], [User, Order, Item]) def test_with_current_matching_string(self): @@ -2562,7 +2562,7 @@ class OptionsNoPropTest(_fixtures.FixtureTest): r"Mapper\|Keyword\|keywords in this Query." ) - @testing.fails_if(lambda:True, + @testing.fails_if(lambda:True, "PropertyOption doesn't yet check for relation/column on end result") def test_option_against_non_relation_basestring(self): Item = self.classes.Item @@ -2574,7 +2574,7 @@ class OptionsNoPropTest(_fixtures.FixtureTest): "does not refer to a mapped entity" ) - @testing.fails_if(lambda:True, + @testing.fails_if(lambda:True, "PropertyOption doesn't yet check for relation/column on end result") def test_option_against_multi_non_relation_basestring(self): Item = self.classes.Item @@ -2676,7 +2676,7 @@ class OptionsNoPropTest(_fixtures.FixtureTest): @classmethod def setup_mappers(cls): users, User, addresses, Address, orders, Order = ( - cls.tables.users, cls.classes.User, + cls.tables.users, cls.classes.User, cls.tables.addresses, cls.classes.Address, cls.tables.orders, cls.classes.Order) mapper(User, users, properties={ @@ -2705,9 +2705,9 @@ class OptionsNoPropTest(_fixtures.FixtureTest): key = ('loaderstrategy', (class_mapper(Item), 'keywords')) assert key in q._attributes - def _assert_eager_with_entity_exception(self, entity_list, options, + def _assert_eager_with_entity_exception(self, entity_list, options, message): - assert_raises_message(sa.exc.ArgumentError, + assert_raises_message(sa.exc.ArgumentError, message, create_session().query(*entity_list).options, *options) diff --git a/test/orm/test_relationships.py b/test/orm/test_relationships.py index d718c9d2d5..53944cd670 100644 --- a/test/orm/test_relationships.py +++ b/test/orm/test_relationships.py @@ -23,23 +23,23 @@ class DependencyTwoParentTest(fixtures.MappedTest): @classmethod def define_tables(cls, metadata): Table("tbl_a", metadata, - Column("id", Integer, primary_key=True, + Column("id", Integer, primary_key=True, test_needs_autoincrement=True), Column("name", String(128))) Table("tbl_b", metadata, - Column("id", Integer, primary_key=True, + Column("id", Integer, primary_key=True, test_needs_autoincrement=True), Column("name", String(128))) Table("tbl_c", metadata, - Column("id", Integer, primary_key=True, + Column("id", Integer, primary_key=True, test_needs_autoincrement=True), - Column("tbl_a_id", Integer, ForeignKey("tbl_a.id"), + Column("tbl_a_id", Integer, ForeignKey("tbl_a.id"), nullable=False), Column("name", String(128))) Table("tbl_d", metadata, - Column("id", Integer, primary_key=True, + Column("id", Integer, primary_key=True, test_needs_autoincrement=True), - Column("tbl_c_id", Integer, 
ForeignKey("tbl_c.id"), + Column("tbl_c_id", Integer, ForeignKey("tbl_c.id"), nullable=False), Column("tbl_b_id", Integer, ForeignKey("tbl_b.id")), Column("name", String(128))) @@ -67,11 +67,11 @@ class DependencyTwoParentTest(fixtures.MappedTest): cls.tables.tbl_d) mapper(A, tbl_a, properties=dict( - c_rows=relationship(C, cascade="all, delete-orphan", + c_rows=relationship(C, cascade="all, delete-orphan", backref="a_row"))) mapper(B, tbl_b) mapper(C, tbl_c, properties=dict( - d_rows=relationship(D, cascade="all, delete-orphan", + d_rows=relationship(D, cascade="all, delete-orphan", backref="c_row"))) mapper(D, tbl_d, properties=dict( b_row=relationship(B))) @@ -116,7 +116,7 @@ class DependencyTwoParentTest(fixtures.MappedTest): class CompositeSelfRefFKTest(fixtures.MappedTest): """Tests a composite FK where, in - the relationship(), one col points + the relationship(), one col points to itself in the same table. this is a very unusual case:: @@ -139,7 +139,7 @@ class CompositeSelfRefFKTest(fixtures.MappedTest): @classmethod def define_tables(cls, metadata): Table('company_t', metadata, - Column('company_id', Integer, primary_key=True, + Column('company_id', Integer, primary_key=True, test_needs_autoincrement=True), Column('name', String(30))) @@ -175,9 +175,9 @@ class CompositeSelfRefFKTest(fixtures.MappedTest): mapper(Company, company_t) mapper(Employee, employee_t, properties= { - 'company':relationship(Company, + 'company':relationship(Company, primaryjoin=employee_t.c.company_id== - company_t.c.company_id, + company_t.c.company_id, backref='employees'), 'reports_to':relationship(Employee, primaryjoin= sa.and_( @@ -295,7 +295,7 @@ class FKsAsPksTest(fixtures.MappedTest): @classmethod def define_tables(cls, metadata): Table("tableA", metadata, - Column("id",Integer,primary_key=True, + Column("id",Integer,primary_key=True, test_needs_autoincrement=True), Column("foo",Integer,), test_needs_fk=True) @@ -313,7 +313,7 @@ class FKsAsPksTest(fixtures.MappedTest): pass def test_onetoone_switch(self): - """test that active history is enabled on a + """test that active history is enabled on a one-to-many/one that has use_get==True""" tableB, A, B, tableA = (self.tables.tableB, @@ -418,7 +418,7 @@ class FKsAsPksTest(fixtures.MappedTest): sess.flush() def test_delete_cascade_BtoA(self): - """No 'blank the PK' error when the child is to + """No 'blank the PK' error when the child is to be deleted as part of a cascade""" tableB, A, B, tableA = (self.tables.tableB, @@ -449,7 +449,7 @@ class FKsAsPksTest(fixtures.MappedTest): sa.orm.clear_mappers() def test_delete_cascade_AtoB(self): - """No 'blank the PK' error when the child is to + """No 'blank the PK' error when the child is to be deleted as part of a cascade""" tableB, A, B, tableA = (self.tables.tableB, @@ -529,23 +529,23 @@ class FKsAsPksTest(fixtures.MappedTest): assert b1 not in sess class UniqueColReferenceSwitchTest(fixtures.MappedTest): - """test a relationship based on a primary + """test a relationship based on a primary join against a unique non-pk column""" @classmethod def define_tables(cls, metadata): Table("table_a", metadata, - Column("id", Integer, primary_key=True, + Column("id", Integer, primary_key=True, test_needs_autoincrement=True), - Column("ident", String(10), nullable=False, + Column("ident", String(10), nullable=False, unique=True), ) Table("table_b", metadata, - Column("id", Integer, primary_key=True, + Column("id", Integer, primary_key=True, test_needs_autoincrement=True), - Column("a_ident", String(10), - 
ForeignKey('table_a.ident'), + Column("a_ident", String(10), + ForeignKey('table_a.ident'), nullable=False), ) @@ -648,21 +648,21 @@ class RelationshipToSelectableTest(fixtures.MappedTest): eq_(old.id, new.id) class FKEquatedToConstantTest(fixtures.MappedTest): - """test a relationship with a non-column entity in the primary join, - is not viewonly, and also has the non-column's clause mentioned in the + """test a relationship with a non-column entity in the primary join, + is not viewonly, and also has the non-column's clause mentioned in the foreign keys list. """ @classmethod def define_tables(cls, metadata): - Table('tags', metadata, Column("id", Integer, primary_key=True, + Table('tags', metadata, Column("id", Integer, primary_key=True, test_needs_autoincrement=True), Column("data", String(50)), ) - Table('tag_foo', metadata, - Column("id", Integer, primary_key=True, + Table('tag_foo', metadata, + Column("id", Integer, primary_key=True, test_needs_autoincrement=True), Column('tagid', Integer), Column("data", String(50)), @@ -677,7 +677,7 @@ class FKEquatedToConstantTest(fixtures.MappedTest): pass mapper(Tag, tags, properties={ - 'foo':relationship(TagInstance, + 'foo':relationship(TagInstance, primaryjoin=sa.and_(tag_foo.c.data=='iplc_case', tag_foo.c.tagid==tags.c.id), foreign_keys=[tag_foo.c.tagid, tag_foo.c.data], @@ -696,13 +696,13 @@ class FKEquatedToConstantTest(fixtures.MappedTest): # relationship works eq_( - sess.query(Tag).all(), + sess.query(Tag).all(), [Tag(data='some tag', foo=[TagInstance(data='iplc_case')])] ) # both TagInstances were persisted eq_( - sess.query(TagInstance).order_by(TagInstance.data).all(), + sess.query(TagInstance).order_by(TagInstance.data).all(), [TagInstance(data='iplc_case'), TagInstance(data='not_iplc_case')] ) @@ -710,13 +710,13 @@ class BackrefPropagatesForwardsArgs(fixtures.MappedTest): @classmethod def define_tables(cls, metadata): - Table('users', metadata, - Column('id', Integer, primary_key=True, + Table('users', metadata, + Column('id', Integer, primary_key=True, test_needs_autoincrement=True), Column('name', String(50)) ) - Table('addresses', metadata, - Column('id', Integer, primary_key=True, + Table('addresses', metadata, + Column('id', Integer, primary_key=True, test_needs_autoincrement=True), Column('user_id', Integer), Column('email', String(50)) @@ -737,8 +737,8 @@ class BackrefPropagatesForwardsArgs(fixtures.MappedTest): mapper(User, users, properties={ - 'addresses':relationship(Address, - primaryjoin=addresses.c.user_id==users.c.id, + 'addresses':relationship(Address, + primaryjoin=addresses.c.user_id==users.c.id, foreign_keys=addresses.c.user_id, backref='user') }) @@ -766,14 +766,14 @@ class AmbiguousJoinInterpretedAsSelfRef(fixtures.MappedTest): @classmethod def define_tables(cls, metadata): subscriber_table = Table('subscriber', metadata, - Column('id', Integer, primary_key=True, + Column('id', Integer, primary_key=True, test_needs_autoincrement=True), Column('dummy', String(10)) # to appease older sqlite version ) address_table = Table('address', metadata, - Column('subscriber_id', Integer, + Column('subscriber_id', Integer, ForeignKey('subscriber.id'), primary_key=True), Column('type', String(1), primary_key=True), ) @@ -782,8 +782,8 @@ class AmbiguousJoinInterpretedAsSelfRef(fixtures.MappedTest): def setup_mappers(cls): subscriber, address = cls.tables.subscriber, cls.tables.address - subscriber_and_address = subscriber.join(address, - and_(address.c.subscriber_id==subscriber.c.id, + subscriber_and_address = 
subscriber.join(address, + and_(address.c.subscriber_id==subscriber.c.id, address.c.type.in_(['A', 'B', 'C']))) class Address(cls.Comparable): @@ -796,7 +796,7 @@ class AmbiguousJoinInterpretedAsSelfRef(fixtures.MappedTest): mapper(Subscriber, subscriber_and_address, properties={ 'id':[subscriber.c.id, address.c.subscriber_id], - 'addresses' : relationship(Address, + 'addresses' : relationship(Address, backref=backref("customer")) }) @@ -827,8 +827,8 @@ class AmbiguousJoinInterpretedAsSelfRef(fixtures.MappedTest): eq_( sess.query(Subscriber).order_by(Subscriber.type).all(), [ - Subscriber(id=1, type=u'A'), - Subscriber(id=2, type=u'B'), + Subscriber(id=1, type=u'A'), + Subscriber(id=2, type=u'B'), Subscriber(id=2, type=u'C') ] ) @@ -900,11 +900,11 @@ class ManualBackrefTest(_fixtures.FixtureTest): 'dingaling':relationship(Dingaling) }) - assert_raises_message(sa.exc.ArgumentError, + assert_raises_message(sa.exc.ArgumentError, r"reverse_property 'dingaling' on relationship " "User.addresses references " "relationship Address.dingaling, which does not " - "reference mapper Mapper\|User\|users", + "reference mapper Mapper\|User\|users", configure_mappers) class JoinConditionErrorTest(fixtures.TestBase): @@ -939,7 +939,7 @@ class JoinConditionErrorTest(fixtures.TestBase): def test_only_column_elements(self): m = MetaData() - t1 = Table('t1', m, + t1 = Table('t1', m, Column('id', Integer, primary_key=True), Column('foo_id', Integer, ForeignKey('t2.id')), ) @@ -981,16 +981,16 @@ class JoinConditionErrorTest(fixtures.TestBase): c2 = relationship(C1, **kw) assert_raises_message( - sa.exc.ArgumentError, + sa.exc.ArgumentError, "Column-based expression object expected " - "for argument '%s'; got: '%s', type %r" % + "for argument '%s'; got: '%s', type %r" % (argname, arg[0], type(arg[0])), configure_mappers) def test_fk_error_not_raised_unrelated(self): m = MetaData() - t1 = Table('t1', m, + t1 = Table('t1', m, Column('id', Integer, primary_key=True), Column('foo_id', Integer, ForeignKey('t2.nonexistent_id')), ) @@ -1014,7 +1014,7 @@ class JoinConditionErrorTest(fixtures.TestBase): def test_join_error_raised(self): m = MetaData() - t1 = Table('t1', m, + t1 = Table('t1', m, Column('id', Integer, primary_key=True), ) t2 = Table('t2', m, @@ -1040,27 +1040,27 @@ class JoinConditionErrorTest(fixtures.TestBase): clear_mappers() class TypeMatchTest(fixtures.MappedTest): - """test errors raised when trying to add items + """test errors raised when trying to add items whose type is not handled by a relationship""" @classmethod def define_tables(cls, metadata): Table("a", metadata, - Column('aid', Integer, primary_key=True, + Column('aid', Integer, primary_key=True, test_needs_autoincrement=True), Column('data', String(30))) Table("b", metadata, - Column('bid', Integer, primary_key=True, + Column('bid', Integer, primary_key=True, test_needs_autoincrement=True), Column("a_id", Integer, ForeignKey("a.aid")), Column('data', String(30))) Table("c", metadata, - Column('cid', Integer, primary_key=True, + Column('cid', Integer, primary_key=True, test_needs_autoincrement=True), Column("b_id", Integer, ForeignKey("b.bid")), Column('data', String(30))) Table("d", metadata, - Column('did', Integer, primary_key=True, + Column('did', Integer, primary_key=True, test_needs_autoincrement=True), Column("a_id", Integer, ForeignKey("a.aid")), Column('data', String(30))) @@ -1113,7 +1113,7 @@ class TypeMatchTest(fixtures.MappedTest): sess.add(b1) sess.add(c1) assert_raises_message(sa.orm.exc.FlushError, - "Attempting to flush an 
item", + "Attempting to flush an item", sess.flush) def test_o2m_nopoly_onflush(self): @@ -1138,7 +1138,7 @@ class TypeMatchTest(fixtures.MappedTest): sess.add(b1) sess.add(c1) assert_raises_message(sa.orm.exc.FlushError, - "Attempting to flush an item", + "Attempting to flush an item", sess.flush) def test_m2o_nopoly_onflush(self): @@ -1159,7 +1159,7 @@ class TypeMatchTest(fixtures.MappedTest): sess.add(b1) sess.add(d1) assert_raises_message(sa.orm.exc.FlushError, - "Attempting to flush an item", + "Attempting to flush an item", sess.flush) def test_m2o_oncascade(self): @@ -1178,7 +1178,7 @@ class TypeMatchTest(fixtures.MappedTest): d1.a = b1 sess = create_session() assert_raises_message(AssertionError, - "doesn't handle objects of type", + "doesn't handle objects of type", sess.add, d1) class TypedAssociationTable(fixtures.MappedTest): @@ -1239,11 +1239,11 @@ class ViewOnlyM2MBackrefTest(fixtures.MappedTest): @classmethod def define_tables(cls, metadata): Table("t1", metadata, - Column('id', Integer, primary_key=True, + Column('id', Integer, primary_key=True, test_needs_autoincrement=True), Column('data', String(40))) Table("t2", metadata, - Column('id', Integer, primary_key=True, + Column('id', Integer, primary_key=True, test_needs_autoincrement=True), Column('data', String(40)), ) @@ -1261,7 +1261,7 @@ class ViewOnlyM2MBackrefTest(fixtures.MappedTest): class B(fixtures.ComparableEntity):pass mapper(A, t1, properties={ - 'bs':relationship(B, secondary=t1t2, + 'bs':relationship(B, secondary=t1t2, backref=backref('as_', viewonly=True)) }) mapper(B, t2) @@ -1285,16 +1285,16 @@ class ViewOnlyOverlappingNames(fixtures.MappedTest): @classmethod def define_tables(cls, metadata): Table("t1", metadata, - Column('id', Integer, primary_key=True, + Column('id', Integer, primary_key=True, test_needs_autoincrement=True), Column('data', String(40))) Table("t2", metadata, - Column('id', Integer, primary_key=True, + Column('id', Integer, primary_key=True, test_needs_autoincrement=True), Column('data', String(40)), Column('t1id', Integer, ForeignKey('t1.id'))) Table("t3", metadata, - Column('id', Integer, primary_key=True, + Column('id', Integer, primary_key=True, test_needs_autoincrement=True), Column('data', String(40)), Column('t2id', Integer, ForeignKey('t2.id'))) @@ -1352,16 +1352,16 @@ class ViewOnlyUniqueNames(fixtures.MappedTest): @classmethod def define_tables(cls, metadata): Table("t1", metadata, - Column('t1id', Integer, primary_key=True, + Column('t1id', Integer, primary_key=True, test_needs_autoincrement=True), Column('data', String(40))) Table("t2", metadata, - Column('t2id', Integer, primary_key=True, + Column('t2id', Integer, primary_key=True, test_needs_autoincrement=True), Column('data', String(40)), Column('t1id_ref', Integer, ForeignKey('t1.t1id'))) Table("t3", metadata, - Column('t3id', Integer, primary_key=True, + Column('t3id', Integer, primary_key=True, test_needs_autoincrement=True), Column('data', String(40)), Column('t2id_ref', Integer, ForeignKey('t2.t2id'))) @@ -1542,11 +1542,11 @@ class ViewOnlyRepeatedLocalColumn(fixtures.MappedTest): @classmethod def define_tables(cls, metadata): Table('foos', metadata, - Column('id', Integer, primary_key=True, + Column('id', Integer, primary_key=True, test_needs_autoincrement=True), Column('data', String(50))) - Table('bars', metadata, Column('id', Integer, primary_key=True, + Table('bars', metadata, Column('id', Integer, primary_key=True, test_needs_autoincrement=True), Column('fid1', Integer, ForeignKey('foos.id')), Column('fid2', 
Integer, ForeignKey('foos.id')), @@ -1593,16 +1593,16 @@ class ViewOnlyComplexJoin(fixtures.MappedTest): @classmethod def define_tables(cls, metadata): Table('t1', metadata, - Column('id', Integer, primary_key=True, + Column('id', Integer, primary_key=True, test_needs_autoincrement=True), Column('data', String(50))) Table('t2', metadata, - Column('id', Integer, primary_key=True, + Column('id', Integer, primary_key=True, test_needs_autoincrement=True), Column('data', String(50)), Column('t1id', Integer, ForeignKey('t1.id'))) Table('t3', metadata, - Column('id', Integer, primary_key=True, + Column('id', Integer, primary_key=True, test_needs_autoincrement=True), Column('data', String(50))) Table('t2tot3', metadata, @@ -1681,11 +1681,11 @@ class ExplicitLocalRemoteTest(fixtures.MappedTest): @classmethod def define_tables(cls, metadata): Table('t1', metadata, - Column('id', String(50), primary_key=True, + Column('id', String(50), primary_key=True, test_needs_autoincrement=True), Column('data', String(50))) Table('t2', metadata, - Column('id', Integer, primary_key=True, + Column('id', Integer, primary_key=True, test_needs_autoincrement=True), Column('data', String(50)), Column('t1id', String(50))) @@ -1859,25 +1859,25 @@ class InvalidRemoteSideTest(fixtures.MappedTest): 't1s':relationship(T1, backref='parent') }) - assert_raises_message(sa.exc.ArgumentError, + assert_raises_message(sa.exc.ArgumentError, "T1.t1s and back-reference T1.parent are " "both of the same direction . Did you " - "mean to set remote_side on the many-to-one side ?", + "mean to set remote_side on the many-to-one side ?", configure_mappers) def test_m2o_backref(self): T1, t1 = self.classes.T1, self.tables.t1 mapper(T1, t1, properties={ - 't1s':relationship(T1, - backref=backref('parent', remote_side=t1.c.id), + 't1s':relationship(T1, + backref=backref('parent', remote_side=t1.c.id), remote_side=t1.c.id) }) - assert_raises_message(sa.exc.ArgumentError, + assert_raises_message(sa.exc.ArgumentError, "T1.t1s and back-reference T1.parent are " "both of the same direction . Did you " - "mean to set remote_side on the many-to-one side ?", + "mean to set remote_side on the many-to-one side ?", configure_mappers) def test_o2m_explicit(self): @@ -1889,25 +1889,25 @@ class InvalidRemoteSideTest(fixtures.MappedTest): }) # can't be sure of ordering here - assert_raises_message(sa.exc.ArgumentError, + assert_raises_message(sa.exc.ArgumentError, "both of the same direction . Did you " - "mean to set remote_side on the many-to-one side ?", + "mean to set remote_side on the many-to-one side ?", configure_mappers) def test_m2o_explicit(self): T1, t1 = self.classes.T1, self.tables.t1 mapper(T1, t1, properties={ - 't1s':relationship(T1, back_populates='parent', + 't1s':relationship(T1, back_populates='parent', remote_side=t1.c.id), - 'parent':relationship(T1, back_populates='t1s', + 'parent':relationship(T1, back_populates='t1s', remote_side=t1.c.id) }) # can't be sure of ordering here - assert_raises_message(sa.exc.ArgumentError, + assert_raises_message(sa.exc.ArgumentError, "both of the same direction . 
Did you " - "mean to set remote_side on the many-to-one side ?", + "mean to set remote_side on the many-to-one side ?", configure_mappers) @@ -2011,7 +2011,7 @@ class InvalidRelationshipEscalationTest(fixtures.MappedTest): self.tables.bars_with_fks, self.tables.foos) - # very unique - the join between parent/child + # very unique - the join between parent/child # has no fks, but there is an fk join between two other # tables in the join condition, for those users that try creating # these big-long-string-of-joining-many-tables primaryjoins. @@ -2037,7 +2037,7 @@ class InvalidRelationshipEscalationTest(fixtures.MappedTest): "ForeignKeyConstraint on their parent Table, or specify " "the foreign_keys parameter to this relationship. For " "more relaxed rules on join conditions, the relationship " - "may be marked as viewonly=True.", + "may be marked as viewonly=True.", sa.orm.configure_mappers) def test_ambiguous_fks(self): @@ -2083,7 +2083,7 @@ class InvalidRelationshipEscalationTest(fixtures.MappedTest): mapper(Bar, bars) assert_raises_message( - sa.exc.ArgumentError, + sa.exc.ArgumentError, "could not determine any local/remote column pairs", sa.orm.configure_mappers) @@ -2103,7 +2103,7 @@ class InvalidRelationshipEscalationTest(fixtures.MappedTest): mapper(Bar, bars) assert_raises_message( - sa.exc.ArgumentError, + sa.exc.ArgumentError, "could not determine any local/remote column pairs", sa.orm.configure_mappers) @@ -2286,16 +2286,16 @@ class InvalidRelationshipEscalationTestM2M(fixtures.MappedTest): Column('id', Integer, primary_key=True)) Table('foobars_with_fks', metadata, - Column('fid', Integer, ForeignKey('foos.id')), + Column('fid', Integer, ForeignKey('foos.id')), Column('bid', Integer, ForeignKey('bars.id')) ) Table('foobars_with_many_columns', metadata, - Column('fid', Integer), + Column('fid', Integer), Column('bid', Integer), - Column('fid1', Integer), + Column('fid1', Integer), Column('bid1', Integer), - Column('fid2', Integer), + Column('fid2', Integer), Column('bid2', Integer), ) @@ -2350,7 +2350,7 @@ class InvalidRelationshipEscalationTestM2M(fixtures.MappedTest): self.tables.foos) mapper(Foo, foos, properties={ - 'bars': relationship(Bar, secondary=foobars, + 'bars': relationship(Bar, secondary=foobars, primaryjoin=foos.c.id==foobars.c.fid, secondaryjoin=foobars.c.bid==bars.c.id)}) mapper(Bar, bars) @@ -2366,8 +2366,8 @@ class InvalidRelationshipEscalationTestM2M(fixtures.MappedTest): sa.orm.clear_mappers() mapper(Foo, foos, properties={ - 'bars': relationship(Bar, - secondary=foobars_with_many_columns, + 'bars': relationship(Bar, + secondary=foobars_with_many_columns, primaryjoin=foos.c.id== foobars_with_many_columns.c.fid, secondaryjoin=foobars_with_many_columns.c.bid== @@ -2399,7 +2399,7 @@ class InvalidRelationshipEscalationTestM2M(fixtures.MappedTest): self.tables.foos) mapper(Foo, foos, properties={ - 'bars': relationship(Bar, secondary=foobars, + 'bars': relationship(Bar, secondary=foobars, primaryjoin=foos.c.id==foobars.c.fid, secondaryjoin=foobars.c.bid==bars.c.id)}) mapper(Bar, bars) @@ -2415,8 +2415,8 @@ class InvalidRelationshipEscalationTestM2M(fixtures.MappedTest): sa.orm.clear_mappers() mapper(Foo, foos, properties={ - 'bars': relationship(Bar, - secondary=foobars_with_many_columns, + 'bars': relationship(Bar, + secondary=foobars_with_many_columns, primaryjoin=foos.c.id== foobars_with_many_columns.c.fid, secondaryjoin=foobars_with_many_columns.c.bid== @@ -2471,7 +2471,7 @@ class InvalidRelationshipEscalationTestM2M(fixtures.MappedTest): "on their parent 
Table, or specify the foreign_keys " "parameter to this relationship. For more relaxed " "rules on join conditions, the relationship may be marked " - "as viewonly=True.", + "as viewonly=True.", configure_mappers) sa.orm.clear_mappers() @@ -2557,7 +2557,7 @@ class ActiveHistoryFlagTest(_fixtures.FixtureTest): User, users = self.classes.User, self.tables.users mapper(User, users, properties={ - 'name':column_property(users.c.name, + 'name':column_property(users.c.name, active_history=True) }) u1 = User(name='jack') @@ -2592,8 +2592,8 @@ class ActiveHistoryFlagTest(_fixtures.FixtureTest): other.description == self.description mapper(Order, orders, properties={ 'composite':composite( - MyComposite, - orders.c.description, + MyComposite, + orders.c.description, orders.c.isopen, active_history=True) }) diff --git a/test/orm/test_selectable.py b/test/orm/test_selectable.py index 97849f845b..1a46e3b6d7 100644 --- a/test/orm/test_selectable.py +++ b/test/orm/test_selectable.py @@ -44,7 +44,7 @@ class SelectableNoFromsTest(fixtures.MappedTest, AssertsCompiledSQL): selectable = select(["x", "y", "z"]).alias() assert_raises_message( - sa.exc.ArgumentError, + sa.exc.ArgumentError, "could not assemble any primary key columns", mapper, Subset, selectable ) diff --git a/test/orm/test_session.py b/test/orm/test_session.py index f6521c9f29..c4f03c4310 100644 --- a/test/orm/test_session.py +++ b/test/orm/test_session.py @@ -1068,7 +1068,7 @@ class IsModifiedTest(_fixtures.FixtureTest): s.expire_all() u.name = 'newname' - # can't predict result here + # can't predict result here # deterministically, depending on if # 'name' or 'addresses' is tested first mod = s.is_modified(u) diff --git a/test/orm/test_subquery_relations.py b/test/orm/test_subquery_relations.py index 90df17609d..73e8b62181 100644 --- a/test/orm/test_subquery_relations.py +++ b/test/orm/test_subquery_relations.py @@ -811,7 +811,7 @@ class LoadOnExistingTest(_fixtures.FixtureTest): User, Order, Item = self.classes.User, \ self.classes.Order, self.classes.Item mapper(User, self.tables.users, properties={ - 'orders':relationship(Order), + 'orders':relationship(Order), }) mapper(Order, self.tables.orders, properties={ 'items':relationship(Item, secondary=self.tables.order_items), @@ -1199,7 +1199,7 @@ class SelfReferentialTest(fixtures.MappedTest): class InheritanceToRelatedTest(fixtures.MappedTest): @classmethod def define_tables(cls, metadata): - Table('foo', metadata, + Table('foo', metadata, Column("id", Integer, primary_key=True), Column("type", String(50)), Column("related_id", Integer, ForeignKey("related.id")) @@ -1256,9 +1256,9 @@ class InheritanceToRelatedTest(fixtures.MappedTest): mapper(cls.classes.Foo, cls.tables.foo, properties={ 'related':relationship(cls.classes.Related) }, polymorphic_on=cls.tables.foo.c.type) - mapper(cls.classes.Bar, cls.tables.bar, polymorphic_identity='bar', + mapper(cls.classes.Bar, cls.tables.bar, polymorphic_identity='bar', inherits=cls.classes.Foo) - mapper(cls.classes.Baz, cls.tables.baz, polymorphic_identity='baz', + mapper(cls.classes.Baz, cls.tables.baz, polymorphic_identity='baz', inherits=cls.classes.Foo) mapper(cls.classes.Related, cls.tables.related) diff --git a/test/orm/test_sync.py b/test/orm/test_sync.py index 178d396b94..66c46f7084 100644 --- a/test/orm/test_sync.py +++ b/test/orm/test_sync.py @@ -21,7 +21,7 @@ class AssertsUOW(object): uow.register_object(d, isdelete=True) return uow -class SyncTest(fixtures.MappedTest, +class SyncTest(fixtures.MappedTest, testing.AssertsExecutionResults, 
                AssertsUOW):
     @classmethod
@@ -89,11 +89,11 @@ class SyncTest(fixtures.MappedTest,
             "Can't execute sync rule for source column 't2.id'; "
             r"mapper 'Mapper\|A\|t1' does not map this column.",
             sync.populate,
-            a1, 
-            a_mapper, 
-            b1, 
-            b_mapper, 
-            pairs, 
+            a1,
+            a_mapper,
+            b1,
+            b_mapper,
+            pairs,
             uowcommit, False
         )
@@ -105,11 +105,11 @@ class SyncTest(fixtures.MappedTest,
             "Can't execute sync rule for destination "
             r"column 't1.id'; mapper 'Mapper\|B\|t2' does not map this column.",
             sync.populate,
-            a1, 
-            a_mapper, 
-            b1, 
-            b_mapper, 
-            pairs, 
+            a1,
+            a_mapper,
+            b1,
+            b_mapper,
+            pairs,
             uowcommit, False
         )
@@ -190,14 +190,14 @@ class SyncTest(fixtures.MappedTest,
         a1.obj().id = 10
         pairs = [(a_mapper.c.id, b_mapper.c.id,)]
         eq_(
-            sync.source_modified(uowcommit, a1, a_mapper, pairs), 
+            sync.source_modified(uowcommit, a1, a_mapper, pairs),
             False
         )
 
     def test_source_modified_no_pairs(self):
         uowcommit, a1, b1, a_mapper, b_mapper = self._fixture()
         eq_(
-            sync.source_modified(uowcommit, a1, a_mapper, []), 
+            sync.source_modified(uowcommit, a1, a_mapper, []),
             False
         )
@@ -208,7 +208,7 @@ class SyncTest(fixtures.MappedTest,
         a1.obj().id = 12
         pairs = [(a_mapper.c.id, b_mapper.c.id,)]
         eq_(
-            sync.source_modified(uowcommit, a1, a_mapper, pairs), 
+            sync.source_modified(uowcommit, a1, a_mapper, pairs),
             True
         )
diff --git a/test/orm/test_transaction.py b/test/orm/test_transaction.py
index 516cb69f8b..1fa5835b94 100644
--- a/test/orm/test_transaction.py
+++ b/test/orm/test_transaction.py
@@ -65,7 +65,7 @@ class SessionTransactionTest(FixtureTest):
         try:
             conn = testing.db.connect()
             trans = conn.begin()
-            sess = create_session(bind=conn, autocommit=False, 
+            sess = create_session(bind=conn, autocommit=False,
                                   autoflush=True)
             u1 = User(name='u1')
             sess.add(u1)
@@ -154,7 +154,7 @@ class SessionTransactionTest(FixtureTest):
         mapper(Address, addresses)
         engine2 = engines.testing_engine()
-        sess = create_session(autocommit=True, autoflush=False, 
+        sess = create_session(autocommit=True, autoflush=False,
                               twophase=True)
         sess.bind_mapper(User, testing.db)
         sess.bind_mapper(Address, engine2)
@@ -359,7 +359,7 @@ class SessionTransactionTest(FixtureTest):
         sess.add(u2)
         def go():
             sess.rollback()
-        assert_warnings(go, 
+        assert_warnings(go,
             ["Session's state has been changed on a "
             "non-active transaction - this state "
             "will be discarded."],
@@ -372,7 +372,7 @@ class SessionTransactionTest(FixtureTest):
         u1.name = 'newname'
         def go():
             sess.rollback()
-        assert_warnings(go, 
+        assert_warnings(go,
             ["Session's state has been changed on a "
             "non-active transaction - this state "
             "will be discarded."],
@@ -385,7 +385,7 @@ class SessionTransactionTest(FixtureTest):
         sess.delete(u1)
         def go():
             sess.rollback()
-        assert_warnings(go, 
+        assert_warnings(go,
             ["Session's state has been changed on a "
             "non-active transaction - this state "
             "will be discarded."],
@@ -433,7 +433,7 @@ class _LocalFixture(FixtureTest):
         users, addresses = cls.tables.users, cls.tables.addresses
         mapper(User, users, properties={
             'addresses':relationship(Address, backref='user',
-                cascade="all, delete-orphan", 
+                cascade="all, delete-orphan",
                 order_by=addresses.c.id),
         })
         mapper(Address, addresses)
@@ -585,7 +585,7 @@ class AutoExpireTest(_LocalFixture):
         u1.addresses.remove(a1)
         s.flush()
-        eq_(s.query(Address).filter(Address.email_address=='foo').all(), 
+        eq_(s.query(Address).filter(Address.email_address=='foo').all(),
             [])
         s.rollback()
         assert a1 not in s.deleted
@@ -659,7 +659,7 @@ class RollbackRecoverTest(_LocalFixture):
         s.commit()
         eq_(
             s.query(User).all(),
-            [User(id=1, name='edward', 
+            [User(id=1, name='edward',
                 addresses=[Address(email_address='foober')])]
         )
@@ -690,7 +690,7 @@ class RollbackRecoverTest(_LocalFixture):
         s.commit()
         eq_(
             s.query(User).all(),
-            [User(id=1, name='edward', 
+            [User(id=1, name='edward',
                 addresses=[Address(email_address='foober')])]
         )
@@ -711,17 +711,17 @@ class SavepointTest(_LocalFixture):
         u1.name = 'edward'
         u2.name = 'jackward'
         s.add_all([u3, u4])
-        eq_(s.query(User.name).order_by(User.id).all(), 
+        eq_(s.query(User.name).order_by(User.id).all(),
             [('edward',), ('jackward',), ('wendy',), ('foo',)])
         s.rollback()
         assert u1.name == 'ed'
         assert u2.name == 'jack'
-        eq_(s.query(User.name).order_by(User.id).all(), 
+        eq_(s.query(User.name).order_by(User.id).all(),
             [('ed',), ('jack',)])
         s.commit()
         assert u1.name == 'ed'
         assert u2.name == 'jack'
-        eq_(s.query(User.name).order_by(User.id).all(), 
+        eq_(s.query(User.name).order_by(User.id).all(),
             [('ed',), ('jack',)])
 
     @testing.requires.savepoints
@@ -752,18 +752,18 @@ class SavepointTest(_LocalFixture):
         u1.name = 'edward'
         u2.name = 'jackward'
         s.add_all([u3, u4])
-        eq_(s.query(User.name).order_by(User.id).all(), 
+        eq_(s.query(User.name).order_by(User.id).all(),
             [('edward',), ('jackward',), ('wendy',), ('foo',)])
         s.commit()
         def go():
             assert u1.name == 'edward'
             assert u2.name == 'jackward'
-            eq_(s.query(User.name).order_by(User.id).all(), 
+            eq_(s.query(User.name).order_by(User.id).all(),
                 [('edward',), ('jackward',), ('wendy',), ('foo',)])
         self.assert_sql_count(testing.db, go, 1)
 
         s.commit()
-        eq_(s.query(User.name).order_by(User.id).all(), 
+        eq_(s.query(User.name).order_by(User.id).all(),
             [('edward',), ('jackward',), ('wendy',), ('foo',)])
 
     @testing.requires.savepoints
@@ -781,7 +781,7 @@ class SavepointTest(_LocalFixture):
         s.add(u2)
         eq_(s.query(User).order_by(User.id).all(),
             [
-                User(name='edward', addresses=[Address(email_address='foo'), 
+                User(name='edward', addresses=[Address(email_address='foo'),
                                 Address(email_address='bar')]),
                 User(name='jack', addresses=[Address(email_address='bat')])
             ]
         )
@@ -789,14 +789,14 @@ class SavepointTest(_LocalFixture):
         s.rollback()
         eq_(s.query(User).order_by(User.id).all(),
             [
-                User(name='edward', addresses=[Address(email_address='foo'), 
+                User(name='edward', addresses=[Address(email_address='foo'),
                                 Address(email_address='bar')]),
             ]
         )
         s.commit()
         eq_(s.query(User).order_by(User.id).all(),
             [
-                User(name='edward', addresses=[Address(email_address='foo'), 
+                User(name='edward', addresses=[Address(email_address='foo'),
                                 Address(email_address='bar')]),
             ]
         )
@@ -920,7 +920,7 @@ class AccountingFlagsTest(_LocalFixture):
     def test_preflush_no_accounting(self):
         User, users = self.classes.User, self.tables.users
-        sess = Session(_enable_transaction_accounting=False, 
+        sess = Session(_enable_transaction_accounting=False,
                 autocommit=True, autoflush=False)
         u1 = User(name='ed')
         sess.add(u1)
diff --git a/test/orm/test_unitofwork.py b/test/orm/test_unitofwork.py
index baf7754b3e..725e0c543e 100644
--- a/test/orm/test_unitofwork.py
+++ b/test/orm/test_unitofwork.py
@@ -943,13 +943,13 @@ class DefaultTest(fixtures.MappedTest):
 class ColumnPropertyTest(fixtures.MappedTest):
     @classmethod
     def define_tables(cls, metadata):
-        Table('data', metadata, 
+        Table('data', metadata,
             Column('id', Integer, primary_key=True,
                             test_needs_autoincrement=True),
             Column('a', String(50)),
             Column('b', String(50))
             )
-        Table('subdata', metadata, 
+        Table('subdata', metadata,
             Column('id', Integer, ForeignKey('data.id'), primary_key=True),
             Column('c', String(50)),
             )
@@ -971,7 +971,7 @@ class ColumnPropertyTest(fixtures.MappedTest):
         Data, data = self.classes.Data, self.tables.data
         mapper(Data, data, properties={
-            'aplusb':column_property(data.c.a + literal_column("' '") + data.c.b, 
+            'aplusb':column_property(data.c.a + literal_column("' '") + data.c.b,
                             expire_on_flush=False)
         })
         self._test(False)
@@ -1851,8 +1851,8 @@ class ManyToManyTest(_fixtures.FixtureTest):
         k.name = 'yellow'
         objects[5].keywords.append(k)
         self.assert_sql_execution(
-            testing.db, 
-            session.flush, 
+            testing.db,
+            session.flush,
             AllOf(
                 CompiledSQL("UPDATE items SET description=:description "
                     "WHERE items.id = :items_id",
@@ -1874,8 +1874,8 @@ class ManyToManyTest(_fixtures.FixtureTest):
         dkid = objects[5].keywords[1].id
         del objects[5].keywords[1]
         self.assert_sql_execution(
-            testing.db, 
-            session.flush, 
+            testing.db,
+            session.flush,
             CompiledSQL("DELETE FROM item_keywords "
                 "WHERE item_keywords.item_id = :item_id AND "
                 "item_keywords.keyword_id = :keyword_id",
@@ -2061,8 +2061,8 @@ class SaveTest2(_fixtures.FixtureTest):
         session.add_all(fixture())
         self.assert_sql_execution(
-            testing.db, 
-            session.flush, 
+            testing.db,
+            session.flush,
             CompiledSQL("INSERT INTO users (name) VALUES (:name)",
                 {'name': 'u1'}),
             CompiledSQL("INSERT INTO users (name) VALUES (:name)",
diff --git a/test/orm/test_unitofworkv2.py b/test/orm/test_unitofworkv2.py
index 30557edefa..0dbe509102 100644
--- a/test/orm/test_unitofworkv2.py
+++ b/test/orm/test_unitofworkv2.py
@@ -28,7 +28,7 @@ class AssertsUOW(object):
             print postsort_actions
         eq_(len(postsort_actions), expected, postsort_actions)
 
-class UOWTest(_fixtures.FixtureTest, 
+class UOWTest(_fixtures.FixtureTest,
             testing.AssertsExecutionResults, AssertsUOW):
     run_inserts = None
 
@@ -55,17 +55,17 @@ class RudimentaryFlushTest(UOWTest):
             sess.flush,
             CompiledSQL(
                 "INSERT INTO users (name) VALUES (:name)",
-                {'name': 'u1'} 
+                {'name': 'u1'}
             ),
             CompiledSQL(
                 "INSERT INTO addresses (user_id, email_address) "
                 "VALUES (:user_id, :email_address)",
-                lambda ctx: {'email_address': 'a1', 'user_id':u1.id} 
+                lambda ctx: {'email_address': 'a1', 'user_id':u1.id}
             ),
             CompiledSQL(
                 "INSERT INTO addresses (user_id, email_address) "
                 "VALUES (:user_id, :email_address)",
-                lambda ctx: {'email_address': 'a2', 'user_id':u1.id} 
+                lambda ctx: {'email_address': 'a2', 'user_id':u1.id}
            ),
         )
@@ -160,17 +160,17 @@ class RudimentaryFlushTest(UOWTest):
             sess.flush,
             CompiledSQL(
                 "INSERT INTO users (name) VALUES (:name)",
-                {'name': 'u1'} 
+                {'name': 'u1'}
             ),
             CompiledSQL(
                 "INSERT INTO addresses (user_id, email_address) "
                 "VALUES (:user_id, :email_address)",
-                lambda ctx: {'email_address': 'a1', 'user_id':u1.id} 
+                lambda ctx: {'email_address': 'a1', 'user_id':u1.id}
             ),
             CompiledSQL(
                 "INSERT INTO addresses (user_id, email_address) "
                 "VALUES (:user_id, :email_address)",
-                lambda ctx: {'email_address': 'a2', 'user_id':u1.id} 
+                lambda ctx: {'email_address': 'a2', 'user_id':u1.id}
             ),
         )
@@ -280,8 +280,8 @@ class RudimentaryFlushTest(UOWTest):
         session.delete(c2)
         session.delete(parent)
 
-        # testing that relationships 
-        # are loaded even if all ids/references are 
+        # testing that relationships
+        # are loaded even if all ids/references are
         # expired
         self.assert_sql_execution(
             testing.db,
@@ -462,7 +462,7 @@ class RudimentaryFlushTest(UOWTest):
             testing.db,
             sess.flush,
             CompiledSQL(
-                "INSERT INTO users (id, name) VALUES (:id, :name)", 
+                "INSERT INTO users (id, name) VALUES (:id, :name)",
                 {'id':1, 'name':'u1'}),
             CompiledSQL(
                 "INSERT INTO addresses (id, user_id, email_address) "
@@ -511,9 +511,9 @@ class RudimentaryFlushTest(UOWTest):
             sess.flush,
             CompiledSQL(
                 "INSERT INTO nodes (id, parent_id, data) VALUES "
-                "(:id, :parent_id, :data)", 
-                [{'parent_id': None, 'data': None, 'id': 1}, 
-                {'parent_id': 1, 'data': None, 'id': 2}, 
+                "(:id, :parent_id, :data)",
+                [{'parent_id': None, 'data': None, 'id': 1},
+                {'parent_id': 1, 'data': None, 'id': 2},
                 {'parent_id': 2, 'data': None, 'id': 3}]
             ),
         )
@@ -561,7 +561,7 @@ class RudimentaryFlushTest(UOWTest):
             testing.db,
             sess.flush,
             CompiledSQL("UPDATE items SET description=:description "
-                "WHERE items.id = :items_id", 
+                "WHERE items.id = :items_id",
                 lambda ctx:{'description':'i2', 'items_id':i1.id})
         )
@@ -689,9 +689,9 @@ class SingleCycleTest(UOWTest):
         self.assert_sql_execution(
             testing.db,
             sess.flush,
-            CompiledSQL("DELETE FROM nodes WHERE nodes.id = :id", 
+            CompiledSQL("DELETE FROM nodes WHERE nodes.id = :id",
                 lambda ctx:[{'id':n2.id}, {'id':n3.id}]),
-            CompiledSQL("DELETE FROM nodes WHERE nodes.id = :id", 
+            CompiledSQL("DELETE FROM nodes WHERE nodes.id = :id",
                 lambda ctx: {'id':n1.id})
         )
@@ -715,13 +715,13 @@ class SingleCycleTest(UOWTest):
             sess.flush,
             AllOf(
                 CompiledSQL("UPDATE nodes SET parent_id=:parent_id "
-                    "WHERE nodes.id = :nodes_id", 
+                    "WHERE nodes.id = :nodes_id",
                     lambda ctx: {'nodes_id':n3.id, 'parent_id':None}),
                 CompiledSQL("UPDATE nodes SET parent_id=:parent_id "
-                    "WHERE nodes.id = :nodes_id", 
+                    "WHERE nodes.id = :nodes_id",
                     lambda ctx: {'nodes_id':n2.id, 'parent_id':None}),
             ),
-            CompiledSQL("DELETE FROM nodes WHERE nodes.id = :id", 
+            CompiledSQL("DELETE FROM nodes WHERE nodes.id = :id",
                 lambda ctx:{'id':n1.id})
         )
@@ -781,9 +781,9 @@ class SingleCycleTest(UOWTest):
         self.assert_sql_execution(
             testing.db,
             sess.flush,
-            CompiledSQL("DELETE FROM nodes WHERE nodes.id = :id", 
+            CompiledSQL("DELETE FROM nodes WHERE nodes.id = :id",
                 lambda ctx:[{'id':n2.id},{'id':n3.id}]),
-            CompiledSQL("DELETE FROM nodes WHERE nodes.id = :id", 
+            CompiledSQL("DELETE FROM nodes WHERE nodes.id = :id",
                 lambda ctx: {'id':n1.id})
         )
@@ -834,7 +834,7 @@ class SingleCycleTest(UOWTest):
         Node, nodes = self.classes.Node, self.tables.nodes
 
         mapper(Node, nodes, properties={
-            'children':relationship(Node, 
+            'children':relationship(Node,
                 backref=backref('parent', remote_side=nodes.c.id))
         })
@@ -856,7 +856,7 @@ class SingleCycleTest(UOWTest):
         Node, nodes = self.classes.Node, self.tables.nodes
 
         mapper(Node, nodes, properties={
-            'children':relationship(Node, 
+            'children':relationship(Node,
                 backref=backref('parent', remote_side=nodes.c.id)
             )
         })
@@ -875,37 +875,37 @@ class SingleCycleTest(UOWTest):
             sess.flush,
             CompiledSQL(
                 "INSERT INTO nodes (parent_id, data) VALUES "
-                "(:parent_id, :data)", 
+                "(:parent_id, :data)",
                 lambda ctx:{'parent_id':None, 'data':'n1'}
             ),
             CompiledSQL(
                 "INSERT INTO nodes (parent_id, data) VALUES "
-                "(:parent_id, :data)", 
+                "(:parent_id, :data)",
                 lambda ctx:{'parent_id':n1.id, 'data':'n11'}
             ),
             CompiledSQL(
                 "INSERT INTO nodes (parent_id, data) VALUES "
-                "(:parent_id, :data)", 
+                "(:parent_id, :data)",
                 lambda ctx:{'parent_id':n1.id, 'data':'n12'}
             ),
             CompiledSQL(
                 "INSERT INTO nodes (parent_id, data) VALUES "
-                "(:parent_id, :data)", 
+                "(:parent_id, :data)",
                 lambda ctx:{'parent_id':n1.id, 'data':'n13'}
            ),
             CompiledSQL(
                 "INSERT INTO nodes (parent_id, data) VALUES "
-                "(:parent_id, :data)", 
+                "(:parent_id, :data)",
                 lambda ctx:{'parent_id':n12.id, 'data':'n121'}
             ),
             CompiledSQL(
                 "INSERT INTO nodes (parent_id, data) VALUES "
-                "(:parent_id, :data)", 
+                "(:parent_id, :data)",
                 lambda ctx:{'parent_id':n12.id, 'data':'n122'}
             ),
             CompiledSQL(
                 "INSERT INTO nodes (parent_id, data) VALUES "
-                "(:parent_id, :data)", 
+                "(:parent_id, :data)",
                 lambda ctx:{'parent_id':n12.id, 'data':'n123'}
             ),
         )
@@ -975,8 +975,8 @@ class SingleCycleTest(UOWTest):
         session.delete(c2)
         session.delete(parent)
 
-        # testing that relationships 
-        # are loaded even if all ids/references are 
+        # testing that relationships
+        # are loaded even if all ids/references are
         # expired
         self.assert_sql_execution(
             testing.db,
@@ -1060,29 +1060,29 @@ class SingleCyclePlusAttributeTest(fixtures.MappedTest,
         n1.foobars.append(FooBar())
         # saveupdateall/deleteall for FooBar added here,
-        # plus processstate node.foobars 
+        # plus processstate node.foobars
         # currently the "all" procs stay in pairs
         self._assert_uow_size(sess, 6)
 
         sess.flush()
 
-class SingleCycleM2MTest(fixtures.MappedTest, 
+class SingleCycleM2MTest(fixtures.MappedTest,
     testing.AssertsExecutionResults, AssertsUOW):
 
     @classmethod
     def define_tables(cls, metadata):
         nodes = Table('nodes', metadata,
-            Column('id', Integer, 
-                primary_key=True, 
+            Column('id', Integer,
+                primary_key=True,
                 test_needs_autoincrement=True),
             Column('data', String(30)),
             Column('favorite_node_id', Integer, ForeignKey('nodes.id'))
         )
 
         node_to_nodes =Table('node_to_nodes', metadata,
-            Column('left_node_id', Integer, 
+            Column('left_node_id', Integer,
                     ForeignKey('nodes.id'),primary_key=True),
-            Column('right_node_id', Integer, 
+            Column('right_node_id', Integer,
                     ForeignKey('nodes.id'),primary_key=True),
             )
@@ -1127,10 +1127,10 @@ class SingleCycleM2MTest(fixtures.MappedTest,
                     node_to_nodes.c.right_node_id).\
                     order_by(node_to_nodes.c.left_node_id,
                     node_to_nodes.c.right_node_id).\
-                    all(), 
+                    all(),
             sorted([
-                    (n1.id, n2.id), (n1.id, n3.id), (n1.id, n4.id), 
-                    (n2.id, n3.id), (n2.id, n5.id), 
+                    (n1.id, n2.id), (n1.id, n3.id), (n1.id, n4.id),
+                    (n2.id, n3.id), (n2.id, n5.id),
                     (n3.id, n5.id), (n3.id, n4.id)
                 ])
         )
@@ -1155,8 +1155,8 @@ class SingleCycleM2MTest(fixtures.MappedTest,
                 "node_to_nodes.left_node_id = :left_node_id AND "
                 "node_to_nodes.right_node_id = :right_node_id",
                 lambda ctx:[
-                    {'right_node_id': n2.id, 'left_node_id': n1.id}, 
-                    {'right_node_id': n3.id, 'left_node_id': n1.id}, 
+                    {'right_node_id': n2.id, 'left_node_id': n1.id},
+                    {'right_node_id': n3.id, 'left_node_id': n1.id},
                     {'right_node_id': n4.id, 'left_node_id': n1.id}
                 ]
             ),
@@ -1182,9 +1182,9 @@ class SingleCycleM2MTest(fixtures.MappedTest,
                 "= :left_node_id AND node_to_nodes.right_node_id = "
                 ":right_node_id",
                 lambda ctx:[
-                    {'right_node_id': n5.id, 'left_node_id': n3.id}, 
-                    {'right_node_id': n4.id, 'left_node_id': n3.id}, 
-                    {'right_node_id': n3.id, 'left_node_id': n2.id}, 
+                    {'right_node_id': n5.id, 'left_node_id': n3.id},
+                    {'right_node_id': n4.id, 'left_node_id': n3.id},
+                    {'right_node_id': n3.id, 'left_node_id': n2.id},
                     {'right_node_id': n5.id, 'left_node_id': n2.id}
                 ]
             ),
@@ -1204,7 +1204,7 @@ class RowswitchAccountingTest(fixtures.MappedTest):
         Table('parent', metadata,
             Column('id', Integer, primary_key=True)
         )
-        Table('child', metadata, 
+        Table('child', metadata,
             Column('id', Integer, ForeignKey('parent.id'), primary_key=True)
         )
@@ -1219,7 +1219,7 @@ class RowswitchAccountingTest(fixtures.MappedTest):
             pass
 
         mapper(Parent, parent, properties={
-            'child':relationship(Child, uselist=False, 
+            'child':relationship(Child, uselist=False,
                                 cascade="all, delete-orphan",
                                 backref="parent")
         })
@@ -1255,14 +1255,14 @@ class BatchInsertsTest(fixtures.MappedTest, testing.AssertsExecutionResults):
     @classmethod
     def define_tables(cls, metadata):
         Table('t', metadata,
-            Column('id', Integer, primary_key=True, 
+            Column('id', Integer, primary_key=True,
                     test_needs_autoincrement=True),
            Column('data', String(50)),
            Column('def_', String(50), server_default='def1')
        )
 
     def test_batch_interaction(self):
-        """test batching groups same-structured, primary 
+        """test batching groups same-structured, primary
         key present statements together.
 
         """
@@ -1299,8 +1299,8 @@ class BatchInsertsTest(fixtures.MappedTest, testing.AssertsExecutionResults):
             ),
             CompiledSQL(
                 "INSERT INTO t (id, data) VALUES (:id, :data)",
-                [{'data': 't3', 'id': 3}, 
-                    {'data': 't4', 'id': 4}, 
+                [{'data': 't3', 'id': 3},
+                    {'data': 't4', 'id': 4},
                     {'data': 't5', 'id': 5}]
             ),
             CompiledSQL(
@@ -1313,7 +1313,7 @@ class BatchInsertsTest(fixtures.MappedTest, testing.AssertsExecutionResults):
             ),
             CompiledSQL(
                 "INSERT INTO t (id, data, def_) VALUES (:id, :data, :def_)",
-                [{'data': 't9', 'id': 9, 'def_':'def2'}, 
+                [{'data': 't9', 'id': 9, 'def_':'def2'},
                     {'data': 't10', 'id': 10, 'def_':'def3'}]
             ),
             CompiledSQL(
diff --git a/test/orm/test_update_delete.py b/test/orm/test_update_delete.py
index 351733708c..afc8c5cd7b 100644
--- a/test/orm/test_update_delete.py
+++ b/test/orm/test_update_delete.py
@@ -12,7 +12,7 @@ class UpdateDeleteTest(fixtures.MappedTest):
     @classmethod
     def define_tables(cls, metadata):
         Table('users', metadata,
-              Column('id', Integer, primary_key=True, 
+              Column('id', Integer, primary_key=True,
                             test_needs_autoincrement=True),
               Column('name', String(32)),
               Column('age', Integer))
@@ -54,13 +54,13 @@ class UpdateDeleteTest(fixtures.MappedTest):
             (s.query(User).distinct(), "distinct")
         ):
             assert_raises_message(
-                exc.InvalidRequestError, 
-                r"Can't call Query.update\(\) when %s\(\) has been called" % mname, 
-                q.update, 
+                exc.InvalidRequestError,
+                r"Can't call Query.update\(\) when %s\(\) has been called" % mname,
+                q.update,
                 {'name':'ed'})
             assert_raises_message(
-                exc.InvalidRequestError, 
-                r"Can't call Query.delete\(\) when %s\(\) has been called" % mname, 
+                exc.InvalidRequestError,
+                r"Can't call Query.delete\(\) when %s\(\) has been called" % mname,
                 q.delete)
 
@@ -145,7 +145,7 @@ class UpdateDeleteTest(fixtures.MappedTest):
 
         assert_raises(exc.InvalidRequestError,
                         sess.query(User).
-                            filter(User.name == select([func.max(User.name)])).delete, 
+                            filter(User.name == select([func.max(User.name)])).delete,
                             synchronize_session='evaluate'
                      )
@@ -316,7 +316,7 @@ class UpdateDeleteTest(fixtures.MappedTest):
         john.name = 'j2'
 
         sess.query(User).filter_by(name='j2').\
-                update({'age':42}, 
+                update({'age':42},
                             synchronize_session='evaluate')
         eq_(john.age, 42)
@@ -328,7 +328,7 @@ class UpdateDeleteTest(fixtures.MappedTest):
         john.name = 'j2'
 
         sess.query(User).filter_by(name='j2').\
-                update({'age':42}, 
+                update({'age':42},
                             synchronize_session='fetch')
         eq_(john.age, 42)
@@ -364,10 +364,10 @@ class UpdateDeleteTest(fixtures.MappedTest):
         sess.expire(john, ['age'])
 
         # eval must be before the update. otherwise
-        # we eval john, age has been expired and doesn't 
+        # we eval john, age has been expired and doesn't
         # match the new value coming in
         sess.query(User).filter_by(name='john').filter_by(age=25).\
-                update({'name':'j2', 'age':40}, 
+                update({'name':'j2', 'age':40},
                             synchronize_session='evaluate')
         eq_(john.name, 'j2')
         eq_(john.age, 40)
@@ -380,7 +380,7 @@ class UpdateDeleteTest(fixtures.MappedTest):
         sess.expire(john, ['age'])
 
         sess.query(User).filter_by(name='john').filter_by(age=25).\
-                update({'name':'j2', 'age':40}, 
+                update({'name':'j2', 'age':40},
                             synchronize_session='fetch')
         eq_(john.name, 'j2')
         eq_(john.age, 40)
@@ -415,13 +415,13 @@ class UpdateDeleteRelatedTest(fixtures.MappedTest):
     @classmethod
     def define_tables(cls, metadata):
         Table('users', metadata,
-              Column('id', Integer, primary_key=True, 
+              Column('id', Integer, primary_key=True,
                             test_needs_autoincrement=True),
               Column('name', String(32)),
               Column('age', Integer))
 
         Table('documents', metadata,
-              Column('id', Integer, primary_key=True, 
+              Column('id', Integer, primary_key=True,
                             test_needs_autoincrement=True),
               Column('user_id', None, ForeignKey('users.id')),
               Column('title', String(32)))
@@ -462,7 +462,7 @@ class UpdateDeleteRelatedTest(fixtures.MappedTest):
         mapper(User, users)
         mapper(Document, documents, properties={
-            'user': relationship(User, lazy='joined', 
+            'user': relationship(User, lazy='joined',
                         backref=backref('documents', lazy='select'))
         })
@@ -476,7 +476,7 @@ class UpdateDeleteRelatedTest(fixtures.MappedTest):
                 update({'title': Document.title+Document.title},
                         synchronize_session='fetch')
 
         eq_([foo.title, bar.title, baz.title], ['foofoo','barbar', 'baz'])
-        eq_(sess.query(Document.title).order_by(Document.id).all(), 
+        eq_(sess.query(Document.title).order_by(Document.id).all(),
             zip(['foofoo','barbar', 'baz']))
 
     def test_update_with_explicit_joinedload(self):
@@ -505,7 +505,7 @@ class ExpressionUpdateTest(fixtures.MappedTest):
     @classmethod
     def define_tables(cls, metadata):
         data = Table('data', metadata,
-            Column('id', Integer, primary_key=True, 
+            Column('id', Integer, primary_key=True,
                         test_needs_autoincrement=True),
            Column('counter', Integer, nullable=False, default=0)
        )
diff --git a/test/orm/test_versioning.py b/test/orm/test_versioning.py
index accce0372f..721bd1286d 100644
--- a/test/orm/test_versioning.py
+++ b/test/orm/test_versioning.py
@@ -51,7 +51,7 @@ class VersioningTest(fixtures.MappedTest):
     def _fixture(self):
         Foo, version_table = self.classes.Foo, self.tables.version_table
 
-        mapper(Foo, version_table, 
+        mapper(Foo, version_table,
                 version_id_col=version_table.c.version_id)
         s1 = Session()
         return s1
@@ -97,7 +97,7 @@ class VersioningTest(fixtures.MappedTest):
         # Only dialects with a sane rowcount can detect the
         # StaleDataError
         if testing.db.dialect.supports_sane_rowcount:
-            assert_raises_message(sa.orm.exc.StaleDataError, 
+            assert_raises_message(sa.orm.exc.StaleDataError,
                     r"UPDATE statement on table 'version_table' expected "
                     r"to update 1 row\(s\); 0 were matched.",
                     s1.commit),
@@ -117,7 +117,7 @@ class VersioningTest(fixtures.MappedTest):
 
         if testing.db.dialect.supports_sane_rowcount:
             assert_raises_message(
-                sa.orm.exc.StaleDataError, 
+                sa.orm.exc.StaleDataError,
                 r"DELETE statement on table 'version_table' expected "
                 r"to delete 2 row\(s\); 1 were matched.",
                 s1.commit)
@@ -128,8 +128,8 @@ class VersioningTest(fixtures.MappedTest):
     def test_bump_version(self):
         """test that version number can be bumped.
 
-        Ensures that the UPDATE or DELETE is against the 
-        last committed version of version_id_col, not the modified 
+        Ensures that the UPDATE or DELETE is against the
+        last committed version of version_id_col, not the modified
         state.
 
         """
@@ -177,7 +177,7 @@ class VersioningTest(fixtures.MappedTest):
 
         # load, version is wrong
         assert_raises_message(
-                sa.orm.exc.StaleDataError, 
+                sa.orm.exc.StaleDataError,
                 r"Instance .* has version id '\d+' which does not "
                 r"match database-loaded version id '\d+'",
                 s1.query(Foo).with_lockmode('read').get, f1s1.id
        )
@@ -351,7 +351,7 @@ class RowSwitchTest(fixtures.MappedTest):
                 cls.classes.C,
                 cls.classes.P)
 
-        mapper(P, p, version_id_col=p.c.version_id, 
+        mapper(P, p, version_id_col=p.c.version_id,
             properties={
             'c':relationship(C, uselist=False, cascade='all, delete-orphan')
         })
@@ -418,7 +418,7 @@ class AlternateGeneratorTest(fixtures.MappedTest):
                 cls.classes.C,
                 cls.classes.P)
 
-        mapper(P, p, version_id_col=p.c.version_id, 
+        mapper(P, p, version_id_col=p.c.version_id,
                 version_id_generator=lambda x:make_uuid(),
                 properties={
             'c':relationship(C, uselist=False, cascade='all, delete-orphan')
@@ -466,7 +466,7 @@ class AlternateGeneratorTest(fixtures.MappedTest):
 
         Session = sessionmaker()
 
-        # TODO: not sure this test is 
+        # TODO: not sure this test is
         # testing exactly what its looking for
 
         sess1 = Session()
@@ -528,7 +528,7 @@ class InheritanceTwoVersionIdsTest(fixtures.MappedTest):
                 self.tables.base,
                 self.classes.Sub)
 
-        mapper(Base, base, 
+        mapper(Base, base,
                 version_id_col=base.c.version_id)
         mapper(Sub, sub, inherits=Base)
 
@@ -546,7 +546,7 @@ class InheritanceTwoVersionIdsTest(fixtures.MappedTest):
                 self.tables.base,
                 self.classes.Sub)
 
-        mapper(Base, base, 
+        mapper(Base, base,
                 version_id_col=base.c.version_id)
         mapper(Sub, sub, inherits=Base)
 
@@ -568,7 +568,7 @@ class InheritanceTwoVersionIdsTest(fixtures.MappedTest):
                 self.classes.Sub)
 
         mapper(Base, base)
-        mapper(Sub, sub, inherits=Base, 
+        mapper(Sub, sub, inherits=Base,
                 version_id_col=sub.c.version_id)
 
         session = Session()
@@ -588,7 +588,7 @@ class InheritanceTwoVersionIdsTest(fixtures.MappedTest):
                 self.tables.base,
                 self.classes.Sub)
 
-        mapper(Base, base, 
+        mapper(Base, base,
                 version_id_col=base.c.version_id)
 
         assert_raises_message(
@@ -599,5 +599,5 @@ class InheritanceTwoVersionIdsTest(fixtures.MappedTest):
             "version_id_col should only be specified on "
             "the base-most mapper that includes versioning.",
             mapper,
-            Sub, sub, inherits=Base, 
+            Sub, sub, inherits=Base,
                 version_id_col=sub.c.version_id)
diff --git a/test/perf/orm2010.py b/test/perf/orm2010.py
index d24376cc9c..4ae5da30c4 100644
--- a/test/perf/orm2010.py
+++ b/test/perf/orm2010.py
@@ -51,36 +51,36 @@ class Grunt(Employee):
 
     employer_id = Column(Integer, ForeignKey('boss.id'))
 
-    # Configure an 'employer' relationship, where Grunt references 
-    # Boss.  This is a joined-table subclass to subclass relationship, 
+    # Configure an 'employer' relationship, where Grunt references
+    # Boss.  This is a joined-table subclass to subclass relationship,
     # which is a less typical case.
 
     # In 0.7, "Boss.id" is the "id" column of "boss", as would be expected.
     if __version__ >= "0.7":
-        employer = relationship("Boss", backref="employees", 
+        employer = relationship("Boss", backref="employees",
                             primaryjoin=Boss.id==employer_id)
 
     # Prior to 0.7, "Boss.id" is the "id" column of "employee".
     # Long story.  So we hardwire the relationship against the "id"
     # column of Boss' table.
     elif __version__ >= "0.6":
-        employer = relationship("Boss", backref="employees", 
+        employer = relationship("Boss", backref="employees",
                             primaryjoin=Boss.__table__.c.id==employer_id)
 
-    # In 0.5, the many-to-one loader wouldn't recognize the above as a 
+    # In 0.5, the many-to-one loader wouldn't recognize the above as a
     # simple "identity map" fetch.  So to give 0.5 a chance to emit
     # the same amount of SQL as 0.6, we hardwire the relationship against
     # "employee.id" to work around the bug.
     else:
-        employer = relationship("Boss", backref="employees", 
-                            primaryjoin=Employee.__table__.c.id==employer_id, 
+        employer = relationship("Boss", backref="employees",
+                            primaryjoin=Employee.__table__.c.id==employer_id,
                             foreign_keys=employer_id)
 
     __mapper_args__ = {'polymorphic_identity':'grunt'}
 
 if os.path.exists('orm2010.db'):
     os.remove('orm2010.db')
-# use a file based database so that cursor.execute() has some 
+# use a file based database so that cursor.execute() has some
 # palpable overhead.
 engine = create_engine('sqlite:///orm2010.db')
 
@@ -92,7 +92,7 @@ def runit():
     # create 1000 Boss objects.
     bosses = [
         Boss(
-            name="Boss %d" % i, 
+            name="Boss %d" % i,
             golf_average=Decimal(random.randint(40, 150))
         )
         for i in xrange(1000)
     ]
@@ -111,9 +111,9 @@ def runit():
     ]
 
     # Assign each Grunt a Boss.  Look them up in the DB
-    # to simulate a little bit of two-way activity with the 
+    # to simulate a little bit of two-way activity with the
     # DB while we populate.  Autoflush occurs on each query.
-    # In 0.7 executemany() is used for all the "boss" and "grunt" 
+    # In 0.7 executemany() is used for all the "boss" and "grunt"
     # tables since priamry key fetching is not needed.
     while grunts:
         boss = sess.query(Boss).\
@@ -131,13 +131,13 @@ def runit():
     # load all the Grunts, print a report with their name, stats,
     # and their bosses' stats.
     for grunt in sess.query(Grunt):
-        # here, the overhead of a many-to-one fetch of 
-        # "grunt.employer" directly from the identity map 
+        # here, the overhead of a many-to-one fetch of
+        # "grunt.employer" directly from the identity map
         # is less than half of that of 0.6.
         report.append((
-                grunt.name, 
-                grunt.savings, 
-                grunt.employer.name, 
+                grunt.name,
+                grunt.savings,
+                grunt.employer.name,
                 grunt.employer.golf_average
         ))
diff --git a/test/perf/stress_all.py b/test/perf/stress_all.py
index a19be95795..b5d210eefe 100644
--- a/test/perf/stress_all.py
+++ b/test/perf/stress_all.py
@@ -137,7 +137,7 @@ unicodetest = (Unicode(20, assert_unicode=False), genunicodevalue,
 if test_types:
     tests = [booleantest, datetimetest, decimaltest, intervaltest,
              pickletypetest, typedecoratortest, unicodetest]
-    for engineurl in ('postgresql://scott:tiger@localhost/test', 
+    for engineurl in ('postgresql://scott:tiger@localhost/test',
                       'sqlite://', 'mysql://scott:tiger@localhost/test'):
         print "\n%s\n" % engineurl
         for datatype, genvalue, kwargs in tests:
@@ -156,7 +156,7 @@ if test_methods:
                     getitem_str_results, getitem_fallback_results,
                     getitem_int_results, getitem_long_results,
                     getitem_obj_results, slice_results]
-    for engineurl in ('postgresql://scott:tiger@localhost/test', 
+    for engineurl in ('postgresql://scott:tiger@localhost/test',
                       'sqlite://', 'mysql://scott:tiger@localhost/test'):
         print "\n%s\n" % engineurl
         test_table = prepare(Unicode(20, assert_unicode=False),
diff --git a/test/sql/test_compiler.py b/test/sql/test_compiler.py
index 6980c7974c..49de52d899 100644
--- a/test/sql/test_compiler.py
+++ b/test/sql/test_compiler.py
@@ -102,11 +102,11 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL):
         assert not hasattr(select([table1.c.myid]).as_scalar(), 'columns')
 
     def test_table_select(self):
-        self.assert_compile(table1.select(), 
+        self.assert_compile(table1.select(),
                             "SELECT mytable.myid, mytable.name, "
                             "mytable.description FROM mytable")
 
-        self.assert_compile(select([table1, table2]), 
+        self.assert_compile(select([table1, table2]),
                             "SELECT mytable.myid, mytable.name, mytable.description, "
                             "myothertable.otherid, myothertable.othername FROM mytable, "
                             "myothertable")
@@ -135,11 +135,11 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL):
     def test_limit_offset(self):
         for lim, offset, exp, params in [
-            (5, 10, "LIMIT :param_1 OFFSET :param_2", 
+            (5, 10, "LIMIT :param_1 OFFSET :param_2",
                 {'param_1':5, 'param_2':10}),
             (None, 10, "LIMIT -1 OFFSET :param_1", {'param_1':10}),
             (5, None, "LIMIT :param_1", {'param_1':5}),
-            (0, 0, "LIMIT :param_1 OFFSET :param_2", 
+            (0, 0, "LIMIT :param_1 OFFSET :param_2",
                 {'param_1':0, 'param_2':0}),
         ]:
             self.assert_compile(
@@ -195,7 +195,7 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL):
                     "myothertable.otherid = mytable.myid"
 
         self.assert_compile(
-                    sq.select(), 
+                    sq.select(),
                     "SELECT sq.mytable_myid, sq.mytable_name, "
                     "sq.mytable_description, sq.myothertable_otherid, "
                     "sq.myothertable_othername FROM (%s) AS sq" % sqstring)
@@ -206,7 +206,7 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL):
         ).alias('sq2')
 
         self.assert_compile(
-                    sq2.select(), 
+                    sq2.select(),
                     "SELECT sq2.sq_mytable_myid, sq2.sq_mytable_name, "
                     "sq2.sq_mytable_description, sq2.sq_myothertable_otherid, "
                     "sq2.sq_myothertable_othername FROM (SELECT sq.mytable_myid AS "
@@ -218,7 +218,7 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL):
 
     def test_select_from_clauselist(self):
         self.assert_compile(
-            select([ClauseList(column('a'), column('b'))]).select_from('sometable'), 
+            select([ClauseList(column('a'), column('b'))]).select_from('sometable'),
             'SELECT a, b FROM sometable'
         )
@@ -292,7 +292,7 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL):
         )
 
     def test_dupe_columns(self):
-        """test that deduping is performed against clause 
+        """test that deduping is performed against clause
         element identity, not rendered result."""
 
         self.assert_compile(
@@ -424,11 +424,11 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL):
     def test_exists(self):
         s = select([table1.c.myid]).where(table1.c.myid==5)
 
-        self.assert_compile(exists(s), 
+        self.assert_compile(exists(s),
                     "EXISTS (SELECT mytable.myid FROM mytable WHERE mytable.myid = :myid_1)"
                 )
 
-        self.assert_compile(exists(s.as_scalar()), 
+        self.assert_compile(exists(s.as_scalar()),
                     "EXISTS (SELECT mytable.myid FROM mytable WHERE mytable.myid = :myid_1)"
                 )
@@ -724,7 +724,7 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL):
             ':param_1')
 
         self.assert_compile(
-                label('bar', column('foo', type_=String))+ 'foo', 
+                label('bar', column('foo', type_=String))+ 'foo',
                 'foo || :param_1')
 
@@ -739,7 +739,7 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL):
         )
 
         self.assert_compile(
-            and_(table1.c.myid == 12, table1.c.name=='asdf', 
+            and_(table1.c.myid == 12, table1.c.name=='asdf',
                 table2.c.othername == 'foo', "sysdate() = today()"),
             "mytable.myid = :myid_1 AND mytable.name = :name_1 "\
             "AND myothertable.othername = :othername_1 AND sysdate() = today()"
        )
@@ -748,14 +748,14 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL):
         self.assert_compile(
             and_(
                 table1.c.myid == 12,
-                or_(table2.c.othername=='asdf', 
+                or_(table2.c.othername=='asdf',
                     table2.c.othername == 'foo', table2.c.otherid == 9),
                 "sysdate() = today()",
             ),
             'mytable.myid = :myid_1 AND (myothertable.othername = '
             ':othername_1 OR myothertable.othername = :othername_2 OR '
             'myothertable.otherid = :otherid_1) AND sysdate() = '
-            'today()', 
+            'today()',
             checkparams = {'othername_1': 'asdf', 'othername_2':'foo', 'otherid_1': 9, 'myid_1': 12}
         )
@@ -766,50 +766,50 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL):
         t = table('t', column('x'))
 
         self.assert_compile(
-            select([t]).where(and_(t.c.x==5, 
+            select([t]).where(and_(t.c.x==5,
                 or_(and_(or_(t.c.x==7))))),
             "SELECT t.x FROM t WHERE t.x = :x_1 AND t.x = :x_2"
         )
         self.assert_compile(
-            select([t]).where(and_(or_(t.c.x==12, 
+            select([t]).where(and_(or_(t.c.x==12,
                 and_(or_(t.c.x==8))))),
             "SELECT t.x FROM t WHERE t.x = :x_1 OR t.x = :x_2"
         )
         self.assert_compile(
-            select([t]).where(and_(or_(or_(t.c.x==12), 
+            select([t]).where(and_(or_(or_(t.c.x==12),
                 and_(or_(), or_(and_(t.c.x==8)), and_())))),
             "SELECT t.x FROM t WHERE t.x = :x_1 OR t.x = :x_2"
         )
 
     def test_distinct(self):
         self.assert_compile(
-            select([table1.c.myid.distinct()]), 
+            select([table1.c.myid.distinct()]),
             "SELECT DISTINCT mytable.myid FROM mytable"
         )
 
         self.assert_compile(
-            select([distinct(table1.c.myid)]), 
+            select([distinct(table1.c.myid)]),
             "SELECT DISTINCT mytable.myid FROM mytable"
         )
 
         self.assert_compile(
-            select([table1.c.myid]).distinct(), 
+            select([table1.c.myid]).distinct(),
             "SELECT DISTINCT mytable.myid FROM mytable"
         )
 
         self.assert_compile(
-            select([func.count(table1.c.myid.distinct())]), 
+            select([func.count(table1.c.myid.distinct())]),
             "SELECT count(DISTINCT mytable.myid) AS count_1 FROM mytable"
         )
 
         self.assert_compile(
-            select([func.count(distinct(table1.c.myid))]), 
+            select([func.count(distinct(table1.c.myid))]),
             "SELECT count(DISTINCT mytable.myid) AS count_1 FROM mytable"
         )
 
     def test_operators(self):
         for (py_op, sql_op) in ((operator.add, '+'), (operator.mul, '*'),
-                                (operator.sub, '-'), 
+                                (operator.sub, '-'),
                                 # Py3K
                                 #(operator.truediv, '/'),
                                 # Py2K
@@ -879,7 +879,7 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL):
         )
 
         self.assert_compile(
-            table1.select((table1.c.myid != 12) & 
+            table1.select((table1.c.myid != 12) &
                 ~(table1.c.name.between('jack','john'))),
             "SELECT mytable.myid, mytable.name, mytable.description FROM "
             "mytable WHERE mytable.myid != :myid_1 AND "\
@@ -887,7 +887,7 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL):
         )
 
         self.assert_compile(
-            table1.select((table1.c.myid != 12) & 
+            table1.select((table1.c.myid != 12) &
                 ~and_(table1.c.name=='john', table1.c.name=='ed', table1.c.name=='fred')),
             "SELECT mytable.myid, mytable.name, mytable.description FROM "
             "mytable WHERE mytable.myid != :myid_1 AND "\
@@ -921,124 +921,124 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL):
     def test_like(self):
         for expr, check, dialect in [
             (
-                table1.c.myid.like('somstr'), 
+                table1.c.myid.like('somstr'),
                 "mytable.myid LIKE :myid_1", None),
             (
-                ~table1.c.myid.like('somstr'), 
+                ~table1.c.myid.like('somstr'),
                 "mytable.myid NOT LIKE :myid_1", None),
             (
-                table1.c.myid.like('somstr', escape='\\'), 
-                "mytable.myid LIKE :myid_1 ESCAPE '\\'", 
+                table1.c.myid.like('somstr', escape='\\'),
+                "mytable.myid LIKE :myid_1 ESCAPE '\\'",
                 None),
             (
-                ~table1.c.myid.like('somstr', escape='\\'), 
-                "mytable.myid NOT LIKE :myid_1 ESCAPE '\\'", 
+                ~table1.c.myid.like('somstr', escape='\\'),
+                "mytable.myid NOT LIKE :myid_1 ESCAPE '\\'",
                 None),
             (
-                table1.c.myid.ilike('somstr', escape='\\'), 
-                "lower(mytable.myid) LIKE lower(:myid_1) ESCAPE '\\'", 
+                table1.c.myid.ilike('somstr', escape='\\'),
+                "lower(mytable.myid) LIKE lower(:myid_1) ESCAPE '\\'",
                 None),
             (
-                ~table1.c.myid.ilike('somstr', escape='\\'), 
-                "lower(mytable.myid) NOT LIKE lower(:myid_1) ESCAPE '\\'", 
+                ~table1.c.myid.ilike('somstr', escape='\\'),
+                "lower(mytable.myid) NOT LIKE lower(:myid_1) ESCAPE '\\'",
                 None),
             (
-                table1.c.myid.ilike('somstr', escape='\\'), 
-                "mytable.myid ILIKE %(myid_1)s ESCAPE '\\\\'", 
+                table1.c.myid.ilike('somstr', escape='\\'),
+                "mytable.myid ILIKE %(myid_1)s ESCAPE '\\\\'",
                 postgresql.PGDialect()),
             (
-                ~table1.c.myid.ilike('somstr', escape='\\'), 
-                "mytable.myid NOT ILIKE %(myid_1)s ESCAPE '\\\\'", 
+                ~table1.c.myid.ilike('somstr', escape='\\'),
+                "mytable.myid NOT ILIKE %(myid_1)s ESCAPE '\\\\'",
                 postgresql.PGDialect()),
             (
-                table1.c.name.ilike('%something%'), 
+                table1.c.name.ilike('%something%'),
                 "lower(mytable.name) LIKE lower(:name_1)", None),
             (
-                table1.c.name.ilike('%something%'), 
+                table1.c.name.ilike('%something%'),
                 "mytable.name ILIKE %(name_1)s", postgresql.PGDialect()),
             (
-                ~table1.c.name.ilike('%something%'), 
+                ~table1.c.name.ilike('%something%'),
                 "lower(mytable.name) NOT LIKE lower(:name_1)", None),
             (
-                ~table1.c.name.ilike('%something%'), 
-                "mytable.name NOT ILIKE %(name_1)s", 
+                ~table1.c.name.ilike('%something%'),
+                "mytable.name NOT ILIKE %(name_1)s",
                 postgresql.PGDialect()),
         ]:
             self.assert_compile(expr, check, dialect=dialect)
 
     def test_match(self):
         for expr, check, dialect in [
-            (table1.c.myid.match('somstr'), 
+            (table1.c.myid.match('somstr'),
                 "mytable.myid MATCH ?", sqlite.SQLiteDialect()),
-            (table1.c.myid.match('somstr'), 
-                "MATCH (mytable.myid) AGAINST (%s IN BOOLEAN MODE)", 
+            (table1.c.myid.match('somstr'),
+                "MATCH (mytable.myid) AGAINST (%s IN BOOLEAN MODE)",
                 mysql.dialect()),
-            (table1.c.myid.match('somstr'), 
-                "CONTAINS (mytable.myid, :myid_1)", 
+            (table1.c.myid.match('somstr'),
+                "CONTAINS (mytable.myid, :myid_1)",
                 mssql.dialect()),
-            (table1.c.myid.match('somstr'), 
-                "mytable.myid @@ to_tsquery(%(myid_1)s)", 
+            (table1.c.myid.match('somstr'),
+                "mytable.myid @@ to_tsquery(%(myid_1)s)",
                 postgresql.dialect()),
-            (table1.c.myid.match('somstr'), 
-                "CONTAINS (mytable.myid, :myid_1)", 
+            (table1.c.myid.match('somstr'),
+                "CONTAINS (mytable.myid, :myid_1)",
                 oracle.dialect()),
         ]:
             self.assert_compile(expr, check, dialect=dialect)
 
     def test_composed_string_comparators(self):
         self.assert_compile(
-            table1.c.name.contains('jo'), 
-            "mytable.name LIKE '%%' || :name_1 || '%%'" , 
+            table1.c.name.contains('jo'),
+            "mytable.name LIKE '%%' || :name_1 || '%%'" ,
            checkparams = {'name_1': u'jo'},
        )
         self.assert_compile(
-            table1.c.name.contains('jo'), 
-            "mytable.name LIKE concat(concat('%%', %s), '%%')" , 
+            table1.c.name.contains('jo'),
+            "mytable.name LIKE concat(concat('%%', %s), '%%')" ,
            checkparams = {'name_1': u'jo'},
            dialect=mysql.dialect()
        )
         self.assert_compile(
-            table1.c.name.contains('jo', escape='\\'), 
-            "mytable.name LIKE '%%' || :name_1 || '%%' ESCAPE '\\'" , 
+            table1.c.name.contains('jo', escape='\\'),
+            "mytable.name LIKE '%%' || :name_1 || '%%' ESCAPE '\\'" ,
            checkparams = {'name_1': u'jo'},
        )
         self.assert_compile(
-            table1.c.name.startswith('jo', escape='\\'), 
+            table1.c.name.startswith('jo', escape='\\'),
             "mytable.name LIKE :name_1 || '%%' ESCAPE '\\'" )
         self.assert_compile(
-            table1.c.name.endswith('jo', escape='\\'), 
+            table1.c.name.endswith('jo', escape='\\'),
             "mytable.name LIKE '%%' || :name_1 ESCAPE '\\'" )
         self.assert_compile(
-            table1.c.name.endswith('hn'), 
-            "mytable.name LIKE '%%' || :name_1", 
+            table1.c.name.endswith('hn'),
+            "mytable.name LIKE '%%' || :name_1",
            checkparams = {'name_1': u'hn'},
        )
         self.assert_compile(
-            table1.c.name.endswith('hn'), 
+            table1.c.name.endswith('hn'),
             "mytable.name LIKE concat('%%', %s)",
            checkparams = {'name_1': u'hn'},
            dialect=mysql.dialect()
        )
         self.assert_compile(
-            table1.c.name.startswith(u"hi \xf6 \xf5"), 
+            table1.c.name.startswith(u"hi \xf6 \xf5"),
             "mytable.name LIKE :name_1 || '%%'",
            checkparams = {'name_1': u'hi \xf6 \xf5'},
        )
         self.assert_compile(
-            column('name').endswith(text("'foo'")), 
+            column('name').endswith(text("'foo'")),
             "name LIKE '%%' || 'foo'" )
         self.assert_compile(
-            column('name').endswith(literal_column("'foo'")), 
+            column('name').endswith(literal_column("'foo'")),
             "name LIKE '%%' || 'foo'" )
         self.assert_compile(
-            column('name').startswith(text("'foo'")), 
+            column('name').startswith(text("'foo'")),
             "name LIKE 'foo' || '%%'" )
         self.assert_compile(
             column('name').startswith(text("'foo'")),
             "name LIKE concat('foo', '%%')", dialect=mysql.dialect())
         self.assert_compile(
-            column('name').startswith(literal_column("'foo'")), 
+            column('name').startswith(literal_column("'foo'")),
             "name LIKE 'foo' || '%%'" )
         self.assert_compile(
-            column('name').startswith(literal_column("'foo'")), 
+            column('name').startswith(literal_column("'foo'")),
             "name LIKE concat('foo', '%%')", dialect=mysql.dialect())
 
     def test_multiple_col_binds(self):
@@ -1108,7 +1108,7 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL):
 
         self.assert_compile(
             select(
-                [table2.c.othername, func.count(table2.c.otherid)], 
+                [table2.c.othername, func.count(table2.c.otherid)],
                 group_by = [table2.c.othername]),
             "SELECT myothertable.othername, count(myothertable.otherid) AS count_1 "
             "FROM myothertable GROUP BY myothertable.othername"
        )
@@ -1130,8 +1130,8 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL):
        )
 
         self.assert_compile(
-            select([table2.c.othername, func.count(table2.c.otherid)], 
-                group_by = [table2.c.othername], 
+            select([table2.c.othername, func.count(table2.c.otherid)],
+                group_by = [table2.c.othername],
                 order_by = [table2.c.othername]),
            "SELECT myothertable.othername, count(myothertable.otherid) AS count_1 "
myothertable GROUP BY myothertable.othername ORDER BY myothertable.othername" @@ -1231,17 +1231,17 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL): "SELECT mytable_1.myid, mytable_1.name, mytable_1.description " "FROM mytable AS mytable_1") - # create a select for a join of two tables. use_labels - # means the column names will have labels tablename_columnname, + # create a select for a join of two tables. use_labels + # means the column names will have labels tablename_columnname, # which become the column keys accessible off the Selectable object. - # also, only use one column from the second table and all columns + # also, only use one column from the second table and all columns # from the first table1. q = select( - [table1, table2.c.otherid], + [table1, table2.c.otherid], table1.c.myid == table2.c.otherid, use_labels = True ) - # make an alias of the "selectable". column names + # make an alias of the "selectable". column names # stay the same (i.e. the labels), table name "changes" to "t2view". a = alias(q, 't2view') @@ -1288,7 +1288,7 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL): [u"foobar(a)", u"pk_foo_bar(syslaal)"], u"a = 12", from_obj = [u"foobar left outer join lala on foobar.foo = lala.foo"] - ), + ), "SELECT foobar(a), pk_foo_bar(syslaal) FROM foobar " "left outer join lala on foobar.foo = lala.foo WHERE a = 12" ) @@ -1338,7 +1338,7 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL): def test_binds_in_text(self): self.assert_compile( - text("select * from foo where lala=:bar and hoho=:whee", + text("select * from foo where lala=:bar and hoho=:whee", bindparams=[bindparam('bar', 4), bindparam('whee', 7)]), "select * from foo where lala=:bar and hoho=:whee", checkparams={'bar':4, 'whee': 7}, @@ -1353,7 +1353,7 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL): dialect = postgresql.dialect() self.assert_compile( - text("select * from foo where lala=:bar and hoho=:whee", + text("select * from foo where lala=:bar and hoho=:whee", bindparams=[bindparam('bar',4), bindparam('whee',7)]), "select * from foo where lala=%(bar)s and hoho=%(whee)s", checkparams={'bar':4, 'whee': 7}, @@ -1371,7 +1371,7 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL): dialect = sqlite.dialect() self.assert_compile( - text("select * from foo where lala=:bar and hoho=:whee", + text("select * from foo where lala=:bar and hoho=:whee", bindparams=[bindparam('bar',4), bindparam('whee',7)]), "select * from foo where lala=? 
and hoho=?", checkparams={'bar':4, 'whee':7}, @@ -1401,7 +1401,7 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL): # test Text embedded within select_from(), using binds generate_series = text( - "generate_series(:x, :y, :z) as s(a)", + "generate_series(:x, :y, :z) as s(a)", bindparams=[bindparam('x'), bindparam('y'), bindparam('z')] ) @@ -1409,20 +1409,20 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL): (func.current_date() + literal_column("s.a")).label("dates") ]).select_from(generate_series) self.assert_compile( - s, - "SELECT CURRENT_DATE + s.a AS dates FROM generate_series(:x, :y, :z) as s(a)", + s, + "SELECT CURRENT_DATE + s.a AS dates FROM generate_series(:x, :y, :z) as s(a)", checkparams={'y': None, 'x': None, 'z': None} ) self.assert_compile( - s.params(x=5, y=6, z=7), - "SELECT CURRENT_DATE + s.a AS dates FROM generate_series(:x, :y, :z) as s(a)", + s.params(x=5, y=6, z=7), + "SELECT CURRENT_DATE + s.a AS dates FROM generate_series(:x, :y, :z) as s(a)", checkparams={'y': 6, 'x': 5, 'z': 7} ) @testing.emits_warning('.*empty sequence.*') def test_render_binds_as_literal(self): - """test a compiler that renders binds inline into + """test a compiler that renders binds inline into SQL in the columns clause.""" dialect = default.DefaultDialect() @@ -1537,7 +1537,7 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL): expr = select([table1.c.name]).\ order_by(table1.c.name.collate('latin1_german2_ci')) - self.assert_compile(expr, + self.assert_compile(expr, "SELECT mytable.name FROM mytable ORDER BY " "mytable.name COLLATE latin1_german2_ci") @@ -1574,7 +1574,7 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL): self.assert_compile( select( - [join(join(table1, table2, table1.c.myid == table2.c.otherid), + [join(join(table1, table2, table1.c.myid == table2.c.otherid), table3, table1.c.myid == table3.c.userid)] ), "SELECT mytable.myid, mytable.name, mytable.description, " @@ -1605,7 +1605,7 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL): ) self.assert_compile( select([table1, table2, table3], - from_obj = [outerjoin(table1, + from_obj = [outerjoin(table1, join(table2, table3, table2.c.otherid == table3.c.userid), table1.c.myid==table2.c.otherid)] ) @@ -1661,7 +1661,7 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL): ) x = union(x, select([table1])) self.assert_compile(x, "(SELECT mytable.myid, mytable.name, mytable.description " - "FROM mytable UNION SELECT mytable.myid, mytable.name, " + "FROM mytable UNION SELECT mytable.myid, mytable.name, " "mytable.description FROM mytable) UNION SELECT mytable.myid," " mytable.name, mytable.description FROM mytable") @@ -1695,7 +1695,7 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL): self.assert_compile( union( select([table1.c.myid, table1.c.name, func.max(table1.c.description)], - table1.c.name=='name2', + table1.c.name=='name2', group_by=[table1.c.myid, table1.c.name]), table1.select(table1.c.name=='name1') ), @@ -1731,12 +1731,12 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL): s = select([column('foo'), column('bar')]) # ORDER BY's even though not supported by all DB's, are rendered if requested - self.assert_compile(union(s.order_by("foo"), s.order_by("bar")), + self.assert_compile(union(s.order_by("foo"), s.order_by("bar")), "SELECT foo, bar ORDER BY foo UNION SELECT foo, bar ORDER BY bar" ) # self_group() is honored self.assert_compile( - union(s.order_by("foo").self_group(), s.order_by("bar").limit(10).self_group()), + union(s.order_by("foo").self_group(), 
s.order_by("bar").limit(10).self_group()), "(SELECT foo, bar ORDER BY foo) UNION (SELECT foo, bar ORDER BY bar LIMIT :param_1)", {'param_1':10} @@ -1866,7 +1866,7 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL): {'mytablename':5}, {'mytablename':5}, [5] ), ( - select([table1], or_(table1.c.myid==bindparam('myid'), + select([table1], or_(table1.c.myid==bindparam('myid'), table2.c.otherid==bindparam('myid'))), "SELECT mytable.myid, mytable.name, mytable.description " "FROM mytable, myothertable WHERE mytable.myid = :myid " @@ -1891,7 +1891,7 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL): {'myid':5}, {'myid':5}, [5,5] ), ( - select([table1], or_(table1.c.myid==bindparam('myid', unique=True), + select([table1], or_(table1.c.myid==bindparam('myid', unique=True), table2.c.otherid==bindparam('myid', unique=True))), "SELECT mytable.myid, mytable.name, mytable.description FROM " "mytable, myothertable WHERE mytable.myid = " @@ -1910,7 +1910,7 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL): {}, {'test':None}, [None] ), ( - select([table1], or_(table1.c.myid==bindparam('myid'), + select([table1], or_(table1.c.myid==bindparam('myid'), table2.c.otherid==bindparam('myotherid'))).\ params({'myid':8, 'myotherid':7}), "SELECT mytable.myid, mytable.name, mytable.description FROM " @@ -1923,7 +1923,7 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL): {'myid':5}, {'myid':5, 'myotherid':7}, [5,7] ), ( - select([table1], or_(table1.c.myid==bindparam('myid', value=7, unique=True), + select([table1], or_(table1.c.myid==bindparam('myid', value=7, unique=True), table2.c.otherid==bindparam('myid', value=8, unique=True))), "SELECT mytable.myid, mytable.name, mytable.description FROM " "mytable, myothertable WHERE mytable.myid = " @@ -1949,7 +1949,7 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL): assert [pp[k] for k in positional.positiontup] == expected_test_params_list # check that params() doesnt modify original statement - s = select([table1], or_(table1.c.myid==bindparam('myid'), + s = select([table1], or_(table1.c.myid==bindparam('myid'), table2.c.otherid==bindparam('myotherid'))) s2 = s.params({'myid':8, 'myotherid':7}) s3 = s2.params({'myid':9}) @@ -1971,18 +1971,18 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL): assert [pp[k] for k in positional.positiontup] == [12, 12] # check that conflicts with "unique" params are caught - s = select([table1], or_(table1.c.myid==7, + s = select([table1], or_(table1.c.myid==7, table1.c.myid==bindparam('myid_1'))) - assert_raises_message(exc.CompileError, + assert_raises_message(exc.CompileError, "conflicts with unique bind parameter " - "of the same name", + "of the same name", str, s) - s = select([table1], or_(table1.c.myid==7, table1.c.myid==8, + s = select([table1], or_(table1.c.myid==7, table1.c.myid==8, table1.c.myid==bindparam('myid_1'))) - assert_raises_message(exc.CompileError, + assert_raises_message(exc.CompileError, "conflicts with unique bind parameter " - "of the same name", + "of the same name", str, s) def test_binds_no_hash_collision(self): @@ -2018,30 +2018,30 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL): ) def test_bind_params_missing(self): - assert_raises_message(exc.InvalidRequestError, + assert_raises_message(exc.InvalidRequestError, r"A value is required for bind parameter 'x'", select([table1]).where( and_( - table1.c.myid==bindparam("x", required=True), + table1.c.myid==bindparam("x", required=True), table1.c.name==bindparam("y", required=True) ) 
).compile().construct_params, params=dict(y=5) ) - assert_raises_message(exc.InvalidRequestError, + assert_raises_message(exc.InvalidRequestError, r"A value is required for bind parameter 'x'", select([table1]).where( table1.c.myid==bindparam("x", required=True) ).compile().construct_params ) - assert_raises_message(exc.InvalidRequestError, + assert_raises_message(exc.InvalidRequestError, r"A value is required for bind parameter 'x', " "in parameter group 2", select([table1]).where( and_( - table1.c.myid==bindparam("x", required=True), + table1.c.myid==bindparam("x", required=True), table1.c.name==bindparam("y", required=True) ) ).compile().construct_params, @@ -2049,7 +2049,7 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL): _group_number=2 ) - assert_raises_message(exc.InvalidRequestError, + assert_raises_message(exc.InvalidRequestError, r"A value is required for bind parameter 'x', " "in parameter group 2", select([table1]).where( @@ -2200,28 +2200,28 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL): ) def check_results(dialect, expected_results, literal): - eq_(len(expected_results), 5, + eq_(len(expected_results), 5, 'Incorrect number of expected results') - eq_(str(cast(tbl.c.v1, Numeric).compile(dialect=dialect)), + eq_(str(cast(tbl.c.v1, Numeric).compile(dialect=dialect)), 'CAST(casttest.v1 AS %s)' % expected_results[0]) - eq_(str(cast(tbl.c.v1, Numeric(12, 9)).compile(dialect=dialect)), + eq_(str(cast(tbl.c.v1, Numeric(12, 9)).compile(dialect=dialect)), 'CAST(casttest.v1 AS %s)' % expected_results[1]) - eq_(str(cast(tbl.c.ts, Date).compile(dialect=dialect)), + eq_(str(cast(tbl.c.ts, Date).compile(dialect=dialect)), 'CAST(casttest.ts AS %s)' % expected_results[2]) - eq_(str(cast(1234, Text).compile(dialect=dialect)), + eq_(str(cast(1234, Text).compile(dialect=dialect)), 'CAST(%s AS %s)' % (literal, expected_results[3])) - eq_(str(cast('test', String(20)).compile(dialect=dialect)), + eq_(str(cast('test', String(20)).compile(dialect=dialect)), 'CAST(%s AS %s)' %(literal, expected_results[4])) # fixme: shoving all of this dialect-specific stuff in one test # is now officialy completely ridiculous AND non-obviously omits # coverage on other dialects. 
sel = select([tbl, cast(tbl.c.v1, Numeric)]).compile(dialect=dialect) if isinstance(dialect, type(mysql.dialect())): - eq_(str(sel), + eq_(str(sel), "SELECT casttest.id, casttest.v1, casttest.v2, casttest.ts, " "CAST(casttest.v1 AS DECIMAL) AS anon_1 \nFROM casttest") else: - eq_(str(sel), + eq_(str(sel), "SELECT casttest.id, casttest.v1, casttest.v2, " "casttest.ts, CAST(casttest.v1 AS NUMERIC) AS " "anon_1 \nFROM casttest") @@ -2332,7 +2332,7 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL): "AS anon_1 FROM mytable" ) - # this tests that _from_objects + # this tests that _from_objects # concantenates OK self.assert_compile( select([column("x") + over(func.foo())]), @@ -2345,17 +2345,17 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL): table = Table('dt', metadata, Column('date', Date)) self.assert_compile( - table.select(table.c.date.between(datetime.date(2006,6,1), + table.select(table.c.date.between(datetime.date(2006,6,1), datetime.date(2006,6,5))), - "SELECT dt.date FROM dt WHERE dt.date BETWEEN :date_1 AND :date_2", - checkparams={'date_1':datetime.date(2006,6,1), + "SELECT dt.date FROM dt WHERE dt.date BETWEEN :date_1 AND :date_2", + checkparams={'date_1':datetime.date(2006,6,1), 'date_2':datetime.date(2006,6,5)}) self.assert_compile( table.select(sql.between(table.c.date, datetime.date(2006,6,1), datetime.date(2006,6,5))), - "SELECT dt.date FROM dt WHERE dt.date BETWEEN :date_1 AND :date_2", - checkparams={'date_1':datetime.date(2006,6,1), + "SELECT dt.date FROM dt WHERE dt.date BETWEEN :date_1 AND :date_2", + checkparams={'date_1':datetime.date(2006,6,1), 'date_2':datetime.date(2006,6,5)}) def test_operator_precedence(self): @@ -2508,17 +2508,17 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL): s1 = select([s1]) if label: - self.assert_compile(s1, - "SELECT %s FROM (SELECT %s AS %s FROM mytable)" % + self.assert_compile(s1, + "SELECT %s FROM (SELECT %s AS %s FROM mytable)" % (label, expr, label)) elif col.table is not None: # sqlite rule labels subquery columns - self.assert_compile(s1, - "SELECT %s FROM (SELECT %s AS %s FROM mytable)" % + self.assert_compile(s1, + "SELECT %s FROM (SELECT %s AS %s FROM mytable)" % (key,expr, key)) else: - self.assert_compile(s1, - "SELECT %s FROM (SELECT %s FROM mytable)" % + self.assert_compile(s1, + "SELECT %s FROM (SELECT %s FROM mytable)" % (expr,expr)) def test_hints(self): @@ -2538,7 +2538,7 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL): s4 = select([table3]).select_from( table3.join( - subs4, + subs4, subs4.c.othername==table3.c.otherstuff ) ).\ @@ -2549,7 +2549,7 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL): ]).select_from(table1.join(table2, table1.c.myid==table2.c.otherid)) s5 = select([table3]).select_from( table3.join( - subs5, + subs5, subs5.c.othername==table3.c.otherstuff ) ).\ @@ -2567,42 +2567,42 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL): sybase.dialect() for stmt, dialect, expected in [ - (s, mysql_d, + (s, mysql_d, "SELECT mytable.myid FROM mytable test hint mytable"), - (s, oracle_d, + (s, oracle_d, "SELECT /*+ test hint mytable */ mytable.myid FROM mytable"), - (s, sybase_d, + (s, sybase_d, "SELECT mytable.myid FROM mytable test hint mytable"), - (s2, mysql_d, + (s2, mysql_d, "SELECT mytable.myid FROM mytable"), - (s2, oracle_d, + (s2, oracle_d, "SELECT /*+ index(mytable idx) */ mytable.myid FROM mytable"), - (s2, sybase_d, + (s2, sybase_d, "SELECT mytable.myid FROM mytable WITH HINT INDEX idx"), - (s3, mysql_d, + (s3, mysql_d, "SELECT mytable_1.myid 
FROM mytable AS mytable_1 " "index(mytable_1 hint)"), - (s3, oracle_d, + (s3, oracle_d, "SELECT /*+ index(mytable_1 hint) */ mytable_1.myid FROM " "mytable mytable_1"), - (s3, sybase_d, + (s3, sybase_d, "SELECT mytable_1.myid FROM mytable AS mytable_1 " "index(mytable_1 hint)"), - (s4, mysql_d, + (s4, mysql_d, "SELECT thirdtable.userid, thirdtable.otherstuff FROM thirdtable " "hint3 INNER JOIN (SELECT mytable.myid, mytable.name, " "mytable.description, myothertable.otherid, " "myothertable.othername FROM mytable hint1 INNER " "JOIN myothertable ON mytable.myid = myothertable.otherid) " "ON othername = thirdtable.otherstuff"), - (s4, sybase_d, + (s4, sybase_d, "SELECT thirdtable.userid, thirdtable.otherstuff FROM thirdtable " "hint3 JOIN (SELECT mytable.myid, mytable.name, " "mytable.description, myothertable.otherid, " "myothertable.othername FROM mytable hint1 " "JOIN myothertable ON mytable.myid = myothertable.otherid) " "ON othername = thirdtable.otherstuff"), - (s4, oracle_d, + (s4, oracle_d, "SELECT /*+ hint3 */ thirdtable.userid, thirdtable.otherstuff " "FROM thirdtable JOIN (SELECT /*+ hint1 */ mytable.myid," " mytable.name, mytable.description, myothertable.otherid," @@ -2610,7 +2610,7 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL): " mytable.myid = myothertable.otherid) ON othername =" " thirdtable.otherstuff"), # TODO: figure out dictionary ordering solution here -# (s5, oracle_d, +# (s5, oracle_d, # "SELECT /*+ hint3 */ /*+ hint1 */ thirdtable.userid, " # "thirdtable.otherstuff " # "FROM thirdtable JOIN (SELECT mytable.myid," @@ -2618,10 +2618,10 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL): # " myothertable.othername FROM mytable JOIN myothertable ON" # " mytable.myid = myothertable.otherid) ON othername =" # " thirdtable.otherstuff"), - (s6, oracle_d, + (s6, oracle_d, """SELECT /*+ "QuotedName" idx1 */ "QuotedName".col1 """ """FROM "QuotedName" WHERE "QuotedName".col1 > :col1_1"""), - (s7, oracle_d, + (s7, oracle_d, """SELECT /*+ SomeName idx1 */ "SomeName".col1 FROM """ """"QuotedName" "SomeName" WHERE "SomeName".col1 > :col1_1"""), ]: @@ -2648,7 +2648,7 @@ class CRUDTest(fixtures.TestBase, AssertsCompiledSQL): def test_insert(self): # generic insert, will create bind params for all columns - self.assert_compile(insert(table1), + self.assert_compile(insert(table1), "INSERT INTO mytable (myid, name, description) " "VALUES (:myid, :name, :description)") @@ -2656,7 +2656,7 @@ class CRUDTest(fixtures.TestBase, AssertsCompiledSQL): # cols provided literally self.assert_compile( insert(table1, { - table1.c.myid : bindparam('userid'), + table1.c.myid : bindparam('userid'), table1.c.name : bindparam('username')}), "INSERT INTO mytable (myid, name) VALUES (:userid, :username)") @@ -2682,7 +2682,7 @@ class CRUDTest(fixtures.TestBase, AssertsCompiledSQL): ) self.assert_compile( - insert(table1, values=dict(myid=func.lala())), + insert(table1, values=dict(myid=func.lala())), "INSERT INTO mytable (myid) VALUES (lala())") def test_inline_insert(self): @@ -2691,61 +2691,61 @@ class CRUDTest(fixtures.TestBase, AssertsCompiledSQL): Column('id', Integer, primary_key=True), Column('foo', Integer, default=func.foobar())) self.assert_compile( - table.insert(values={}, inline=True), + table.insert(values={}, inline=True), "INSERT INTO sometable (foo) VALUES (foobar())") self.assert_compile( - table.insert(inline=True), + table.insert(inline=True), "INSERT INTO sometable (foo) VALUES (foobar())", params={}) def test_update(self): self.assert_compile( - update(table1, 
table1.c.myid == 7), - "UPDATE mytable SET name=:name WHERE mytable.myid = :myid_1", + update(table1, table1.c.myid == 7), + "UPDATE mytable SET name=:name WHERE mytable.myid = :myid_1", params = {table1.c.name:'fred'}) self.assert_compile( table1.update().where(table1.c.myid==7). - values({table1.c.myid:5}), - "UPDATE mytable SET myid=:myid WHERE mytable.myid = :myid_1", + values({table1.c.myid:5}), + "UPDATE mytable SET myid=:myid WHERE mytable.myid = :myid_1", checkparams={'myid':5, 'myid_1':7}) self.assert_compile( - update(table1, table1.c.myid == 7), - "UPDATE mytable SET name=:name WHERE mytable.myid = :myid_1", + update(table1, table1.c.myid == 7), + "UPDATE mytable SET name=:name WHERE mytable.myid = :myid_1", params = {'name':'fred'}) self.assert_compile( - update(table1, values = {table1.c.name : table1.c.myid}), + update(table1, values = {table1.c.name : table1.c.myid}), "UPDATE mytable SET name=mytable.myid") self.assert_compile( - update(table1, - whereclause = table1.c.name == bindparam('crit'), - values = {table1.c.name : 'hi'}), - "UPDATE mytable SET name=:name WHERE mytable.name = :crit", - params = {'crit' : 'notthere'}, + update(table1, + whereclause = table1.c.name == bindparam('crit'), + values = {table1.c.name : 'hi'}), + "UPDATE mytable SET name=:name WHERE mytable.name = :crit", + params = {'crit' : 'notthere'}, checkparams={'crit':'notthere', 'name':'hi'}) self.assert_compile( - update(table1, table1.c.myid == 12, - values = {table1.c.name : table1.c.myid}), + update(table1, table1.c.myid == 12, + values = {table1.c.name : table1.c.myid}), "UPDATE mytable SET name=mytable.myid, description=" - ":description WHERE mytable.myid = :myid_1", - params = {'description':'test'}, + ":description WHERE mytable.myid = :myid_1", + params = {'description':'test'}, checkparams={'description':'test', 'myid_1':12}) self.assert_compile( - update(table1, table1.c.myid == 12, - values = {table1.c.myid : 9}), + update(table1, table1.c.myid == 12, + values = {table1.c.myid : 9}), "UPDATE mytable SET myid=:myid, description=:description " - "WHERE mytable.myid = :myid_1", + "WHERE mytable.myid = :myid_1", params = {'myid_1': 12, 'myid': 9, 'description': 'test'}) self.assert_compile( - update(table1, table1.c.myid ==12), - "UPDATE mytable SET myid=:myid WHERE mytable.myid = :myid_1", + update(table1, table1.c.myid ==12), + "UPDATE mytable SET myid=:myid WHERE mytable.myid = :myid_1", params={'myid':18}, checkparams={'myid':18, 'myid_1':12}) s = table1.update(table1.c.myid == 12, values = {table1.c.name : 'lala'}) c = s.compile(column_keys=['id', 'name']) self.assert_compile( - update(table1, table1.c.myid == 12, + update(table1, table1.c.myid == 12, values = {table1.c.name : table1.c.myid} - ).values({table1.c.name:table1.c.name + 'foo'}), + ).values({table1.c.name:table1.c.name + 'foo'}), "UPDATE mytable SET name=(mytable.name || :name_1), " - "description=:description WHERE mytable.myid = :myid_1", + "description=:description WHERE mytable.myid = :myid_1", params = {'description':'test'}) eq_(str(s), str(c)) @@ -2763,25 +2763,25 @@ class CRUDTest(fixtures.TestBase, AssertsCompiledSQL): def test_correlated_update(self): # test against a straight text subquery u = update(table1, values = { - table1.c.name : + table1.c.name : text("(select name from mytable where id=mytable.id)")}) - self.assert_compile(u, + self.assert_compile(u, "UPDATE mytable SET name=(select name from mytable " "where id=mytable.id)") mt = table1.alias() u = update(table1, values = { - table1.c.name : + 
table1.c.name : select([mt.c.name], mt.c.myid==table1.c.myid) }) - self.assert_compile(u, + self.assert_compile(u, "UPDATE mytable SET name=(SELECT mytable_1.name FROM " "mytable AS mytable_1 WHERE mytable_1.myid = mytable.myid)") # test against a regular constructed subquery s = select([table2], table2.c.otherid == table1.c.myid) u = update(table1, table1.c.name == 'jack', values = {table1.c.name : s}) - self.assert_compile(u, + self.assert_compile(u, "UPDATE mytable SET name=(SELECT myothertable.otherid, " "myothertable.othername FROM myothertable WHERE " "myothertable.otherid = mytable.myid) WHERE mytable.name = :name_1") @@ -2789,7 +2789,7 @@ class CRUDTest(fixtures.TestBase, AssertsCompiledSQL): # test a non-correlated WHERE clause s = select([table2.c.othername], table2.c.otherid == 7) u = update(table1, table1.c.name==s) - self.assert_compile(u, + self.assert_compile(u, "UPDATE mytable SET myid=:myid, name=:name, " "description=:description WHERE mytable.name = " "(SELECT myothertable.othername FROM myothertable " @@ -2798,7 +2798,7 @@ class CRUDTest(fixtures.TestBase, AssertsCompiledSQL): # test one that is actually correlated... s = select([table2.c.othername], table2.c.otherid == table1.c.myid) u = table1.update(table1.c.name==s) - self.assert_compile(u, + self.assert_compile(u, "UPDATE mytable SET myid=:myid, name=:name, " "description=:description WHERE mytable.name = " "(SELECT myothertable.othername FROM myothertable " @@ -2831,14 +2831,14 @@ class CRUDTest(fixtures.TestBase, AssertsCompiledSQL): def test_delete(self): self.assert_compile( - delete(table1, table1.c.myid == 7), + delete(table1, table1.c.myid == 7), "DELETE FROM mytable WHERE mytable.myid = :myid_1") self.assert_compile( - table1.delete().where(table1.c.myid == 7), + table1.delete().where(table1.c.myid == 7), "DELETE FROM mytable WHERE mytable.myid = :myid_1") self.assert_compile( table1.delete().where(table1.c.myid == 7).\ - where(table1.c.name=='somename'), + where(table1.c.name=='somename'), "DELETE FROM mytable WHERE mytable.myid = :myid_1 " "AND mytable.name = :name_1") @@ -2852,13 +2852,13 @@ class CRUDTest(fixtures.TestBase, AssertsCompiledSQL): # test one that is actually correlated... 
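
The correlated UPDATE cases above embed a SELECT that refers back to the UPDATE's own target table. A minimal standalone sketch of the same pattern, using hypothetical accounts and ledger tables invented for illustration rather than the fixtures in this suite:

    from sqlalchemy import MetaData, Table, Column, Integer, select, update

    m = MetaData()
    # throwaway tables, not part of the test fixtures
    accounts = Table('accounts', m,
        Column('id', Integer, primary_key=True),
        Column('balance', Integer))
    ledger = Table('ledger', m,
        Column('account_id', Integer),
        Column('amount', Integer))

    # the inner SELECT references accounts.c.id, so the compiler
    # correlates it to the UPDATE target and renders it as a
    # parenthesized scalar subquery inside the SET clause
    stmt = update(accounts, values={
        accounts.c.balance:
            select([ledger.c.amount],
                   ledger.c.account_id == accounts.c.id)
    })
    print(stmt)
    # renders along the lines of:
    # UPDATE accounts SET balance=(SELECT ledger.amount FROM ledger
    # WHERE ledger.account_id = accounts.id)
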
s = select([table2.c.othername], table2.c.otherid == table1.c.myid) u = table1.delete(table1.c.name==s) - self.assert_compile(u, + self.assert_compile(u, "DELETE FROM mytable WHERE mytable.name = (SELECT " "myothertable.othername FROM myothertable WHERE " "myothertable.otherid = mytable.myid)") def test_binds_that_match_columns(self): - """test bind params named after column names + """test bind params named after column names replace the normal SET/VALUES generation.""" t = table('foo', column('x'), column('y')) @@ -2876,20 +2876,20 @@ class CRUDTest(fixtures.TestBase, AssertsCompiledSQL): assert_raises(exc.CompileError, u.values(x=7).compile, column_keys=['x', 'y']) assert_raises(exc.CompileError, u.compile, column_keys=['x', 'y']) - self.assert_compile(u.values(x=3 + bindparam('x')), + self.assert_compile(u.values(x=3 + bindparam('x')), "UPDATE foo SET x=(:param_1 + :x) WHERE foo.x = :x") - self.assert_compile(u.values(x=3 + bindparam('x')), + self.assert_compile(u.values(x=3 + bindparam('x')), "UPDATE foo SET x=(:param_1 + :x) WHERE foo.x = :x", params={'x':1}) - self.assert_compile(u.values(x=3 + bindparam('x')), + self.assert_compile(u.values(x=3 + bindparam('x')), "UPDATE foo SET x=(:param_1 + :x), y=:y WHERE foo.x = :x", params={'x':1, 'y':2}) i = t.insert().values(x=3 + bindparam('x')) self.assert_compile(i, "INSERT INTO foo (x) VALUES ((:param_1 + :x))") - self.assert_compile(i, + self.assert_compile(i, "INSERT INTO foo (x, y) VALUES ((:param_1 + :x), :y)", params={'x':1, 'y':2}) @@ -3019,7 +3019,7 @@ class InlineDefaultTest(fixtures.TestBase, AssertsCompiledSQL): Column('col2', Integer, default=select([func.coalesce(func.max(foo.c.id))])), ) - self.assert_compile(t.insert(inline=True, values={}), + self.assert_compile(t.insert(inline=True, values={}), "INSERT INTO test (col1, col2) VALUES (foo(:foo_1), " "(SELECT coalesce(max(foo.id)) AS coalesce_1 FROM " "foo))") @@ -3035,7 +3035,7 @@ class InlineDefaultTest(fixtures.TestBase, AssertsCompiledSQL): Column('col3', String(30)) ) - self.assert_compile(t.update(inline=True, values={'col3':'foo'}), + self.assert_compile(t.update(inline=True, values={'col3':'foo'}), "UPDATE test SET col1=foo(:foo_1), col2=(SELECT " "coalesce(max(foo.id)) AS coalesce_1 FROM foo), " "col3=:col3") @@ -3044,7 +3044,7 @@ class SchemaTest(fixtures.TestBase, AssertsCompiledSQL): __dialect__ = 'default' def test_select(self): - self.assert_compile(table4.select(), + self.assert_compile(table4.select(), "SELECT remote_owner.remotetable.rem_id, remote_owner.remotetable.datatype_id," " remote_owner.remotetable.value FROM remote_owner.remotetable") @@ -3063,14 +3063,14 @@ class SchemaTest(fixtures.TestBase, AssertsCompiledSQL): "remote_owner.remotetable.value = :value_1") # multi-part schema name - self.assert_compile(table5.select(), + self.assert_compile(table5.select(), 'SELECT "dbo.remote_owner".remotetable.rem_id, ' '"dbo.remote_owner".remotetable.datatype_id, "dbo.remote_owner".remotetable.value ' 'FROM "dbo.remote_owner".remotetable' ) # multi-part schema name labels - convert '.' 
to '_' - self.assert_compile(table5.select(use_labels=True), + self.assert_compile(table5.select(use_labels=True), 'SELECT "dbo.remote_owner".remotetable.rem_id AS' ' dbo_remote_owner_remotetable_rem_id, "dbo.remote_owner".remotetable.datatype_id' ' AS dbo_remote_owner_remotetable_datatype_id,' @@ -3080,19 +3080,19 @@ class SchemaTest(fixtures.TestBase, AssertsCompiledSQL): def test_alias(self): a = alias(table4, 'remtable') - self.assert_compile(a.select(a.c.datatype_id==7), + self.assert_compile(a.select(a.c.datatype_id==7), "SELECT remtable.rem_id, remtable.datatype_id, remtable.value FROM" " remote_owner.remotetable AS remtable " "WHERE remtable.datatype_id = :datatype_id_1") def test_update(self): self.assert_compile( - table4.update(table4.c.value=='test', values={table4.c.datatype_id:12}), + table4.update(table4.c.value=='test', values={table4.c.datatype_id:12}), "UPDATE remote_owner.remotetable SET datatype_id=:datatype_id " "WHERE remote_owner.remotetable.value = :value_1") def test_insert(self): - self.assert_compile(table4.insert(values=(2, 5, 'test')), + self.assert_compile(table4.insert(values=(2, 5, 'test')), "INSERT INTO remote_owner.remotetable (rem_id, datatype_id, value) VALUES " "(:rem_id, :datatype_id, :value)") diff --git a/test/sql/test_constraints.py b/test/sql/test_constraints.py index 5ea5a7edaa..8a82381399 100644 --- a/test/sql/test_constraints.py +++ b/test/sql/test_constraints.py @@ -194,7 +194,7 @@ class ConstraintTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiled ('sometable', 'this_name_alsois_long', 'ix_sometable_t_3cf1'), ]: - t1 = Table(tname, MetaData(), + t1 = Table(tname, MetaData(), Column(cname, Integer, index=True), ) ix1 = list(t1.indexes)[0] @@ -213,24 +213,24 @@ class ConstraintTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiled assert_raises( exc.IdentifierError, schema.CreateIndex(Index( - "this_other_name_is_too_long_for_what_were_doing", + "this_other_name_is_too_long_for_what_were_doing", t1.c.c)).compile, dialect=dialect ) def test_index_declartion_inline(self): - t1 = Table('t1', metadata, + t1 = Table('t1', metadata, Column('x', Integer), Column('y', Integer), Index('foo', 'x', 'y') ) self.assert_compile( - schema.CreateIndex(list(t1.indexes)[0]), + schema.CreateIndex(list(t1.indexes)[0]), "CREATE INDEX foo ON t1 (x, y)" ) def test_index_asserts_cols_standalone(self): - t1 = Table('t1', metadata, + t1 = Table('t1', metadata, Column('x', Integer) ) t2 = Table('t2', metadata, @@ -244,7 +244,7 @@ class ConstraintTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiled ) def test_index_asserts_cols_inline(self): - t1 = Table('t1', metadata, + t1 = Table('t1', metadata, Column('x', Integer) ) assert_raises_message( @@ -365,7 +365,7 @@ class ConstraintCompilationTest(fixtures.TestBase, AssertsCompiledSQL): def test_multiple(self): m = MetaData() - foo = Table("foo", m, + foo = Table("foo", m, Column('id', Integer, primary_key=True), Column('bar', Integer, primary_key=True) ) @@ -414,11 +414,11 @@ class ConstraintCompilationTest(fixtures.TestBase, AssertsCompiledSQL): m.drop_all(e) e.assert_sql([ - 'CREATE TABLE t (a INTEGER)', - 'CREATE TABLE t2 (a INTEGER, b INTEGER, CONSTRAINT fk_tb FOREIGN KEY(b) REFERENCES t (a))', - 'ALTER TABLE t2 ADD CONSTRAINT fk_ta FOREIGN KEY(a) REFERENCES t (a)', - 'ALTER TABLE t2 DROP CONSTRAINT fk_ta', - 'DROP TABLE t2', + 'CREATE TABLE t (a INTEGER)', + 'CREATE TABLE t2 (a INTEGER, b INTEGER, CONSTRAINT fk_tb FOREIGN KEY(b) REFERENCES t (a))', + 'ALTER TABLE t2 ADD CONSTRAINT 
fk_ta FOREIGN KEY(a) REFERENCES t (a)', + 'ALTER TABLE t2 DROP CONSTRAINT fk_ta', + 'DROP TABLE t2', 'DROP TABLE t' ]) diff --git a/test/sql/test_cte.py b/test/sql/test_cte.py index 49a53a3ec7..59b347ccd2 100644 --- a/test/sql/test_cte.py +++ b/test/sql/test_cte.py @@ -10,7 +10,7 @@ class CTETest(fixtures.TestBase, AssertsCompiledSQL): __dialect__ = 'default' def test_nonrecursive(self): - orders = table('orders', + orders = table('orders', column('region'), column('amount'), column('product'), @@ -18,22 +18,22 @@ class CTETest(fixtures.TestBase, AssertsCompiledSQL): ) regional_sales = select([ - orders.c.region, + orders.c.region, func.sum(orders.c.amount).label('total_sales') ]).group_by(orders.c.region).cte("regional_sales") top_regions = select([regional_sales.c.region]).\ where( - regional_sales.c.total_sales > + regional_sales.c.total_sales > select([ func.sum(regional_sales.c.total_sales)/10 ]) ).cte("top_regions") s = select([ - orders.c.region, - orders.c.product, - func.sum(orders.c.quantity).label("product_units"), + orders.c.region, + orders.c.product, + func.sum(orders.c.quantity).label("product_units"), func.sum(orders.c.amount).label("product_sales") ]).where(orders.c.region.in_( select([top_regions.c.region]) @@ -60,15 +60,15 @@ class CTETest(fixtures.TestBase, AssertsCompiledSQL): ) def test_recursive(self): - parts = table('parts', + parts = table('parts', column('part'), column('sub_part'), column('quantity'), ) included_parts = select([ - parts.c.sub_part, - parts.c.part, + parts.c.sub_part, + parts.c.part, parts.c.quantity]).\ where(parts.c.part=='our part').\ cte(recursive=True) @@ -77,19 +77,19 @@ class CTETest(fixtures.TestBase, AssertsCompiledSQL): parts_alias = parts.alias() included_parts = included_parts.union( select([ - parts_alias.c.part, - parts_alias.c.sub_part, + parts_alias.c.part, + parts_alias.c.sub_part, parts_alias.c.quantity]).\ where(parts_alias.c.part==incl_alias.c.sub_part) ) s = select([ - included_parts.c.sub_part, + included_parts.c.sub_part, func.sum(included_parts.c.quantity).label('total_quantity')]).\ select_from(included_parts.join( parts,included_parts.c.part==parts.c.part)).\ group_by(included_parts.c.sub_part) - self.assert_compile(s, + self.assert_compile(s, "WITH RECURSIVE anon_1(sub_part, part, quantity) " "AS (SELECT parts.sub_part AS sub_part, parts.part " "AS part, parts.quantity AS quantity FROM parts " @@ -105,7 +105,7 @@ class CTETest(fixtures.TestBase, AssertsCompiledSQL): # quick check that the "WITH RECURSIVE" varies per # dialect - self.assert_compile(s, + self.assert_compile(s, "WITH anon_1(sub_part, part, quantity) " "AS (SELECT parts.sub_part AS sub_part, parts.part " "AS part, parts.quantity AS quantity FROM parts " @@ -259,7 +259,7 @@ class CTETest(fixtures.TestBase, AssertsCompiledSQL): def test_union(self): - orders = table('orders', + orders = table('orders', column('region'), column('amount'), ) @@ -274,7 +274,7 @@ class CTETest(fixtures.TestBase, AssertsCompiledSQL): regional_sales.c.amount > 500 ) - self.assert_compile(s, + self.assert_compile(s, "WITH regional_sales AS " "(SELECT orders.region AS region, " "orders.amount AS amount FROM orders) " @@ -288,7 +288,7 @@ class CTETest(fixtures.TestBase, AssertsCompiledSQL): regional_sales.c.amount < 300 ) ) - self.assert_compile(s, + self.assert_compile(s, "WITH regional_sales AS " "(SELECT orders.region AS region, " "orders.amount AS amount FROM orders) " @@ -299,7 +299,7 @@ class CTETest(fixtures.TestBase, AssertsCompiledSQL): "regional_sales.amount < :amount_2") 
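
The recursive form above is the classic parts-explosion query. Sketched on its own terms, mirroring the fixture but with the CTE name chosen here for illustration, the anchor member is built first and then union()ed with a member that selects from an alias of the CTE itself:

    from sqlalchemy.sql import table, column, select, func

    parts = table('parts',
        column('part'), column('sub_part'), column('quantity'))

    # anchor member: the rows that seed the recursion
    included = select([parts.c.sub_part, parts.c.part, parts.c.quantity]).\
        where(parts.c.part == 'our part').\
        cte(name='included_parts', recursive=True)

    # recursive member: each pass joins parts back against the CTE itself
    incl_alias = included.alias()
    parts_alias = parts.alias()
    included = included.union(
        select([parts_alias.c.sub_part,
                parts_alias.c.part,
                parts_alias.c.quantity])
        .where(parts_alias.c.part == incl_alias.c.sub_part))

    stmt = select([included.c.sub_part,
                   func.sum(included.c.quantity).label('total_quantity')]).\
        group_by(included.c.sub_part)
    print(stmt)
    # WITH RECURSIVE included_parts(sub_part, part, quantity) AS (...)
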
def test_reserved_quote(self): - orders = table('orders', + orders = table('orders', column('order'), ) s = select([orders.c.order]).cte("regional_sales", recursive=True) @@ -313,7 +313,7 @@ class CTETest(fixtures.TestBase, AssertsCompiledSQL): ) def test_positional_binds(self): - orders = table('orders', + orders = table('orders', column('order'), ) s = select([orders.c.order, literal("x")]).cte("regional_sales") diff --git a/test/sql/test_defaults.py b/test/sql/test_defaults.py index ec08cd28e4..7a6c6d0097 100644 --- a/test/sql/test_defaults.py +++ b/test/sql/test_defaults.py @@ -305,7 +305,7 @@ class DefaultTest(fixtures.TestBase): def test_no_embed_in_sql(self): """Using a DefaultGenerator, Sequence, DefaultClause - in the columns, where clause of a select, or in the values + in the columns, where clause of a select, or in the values clause of insert, update, raises an informative error""" for const in ( @@ -330,7 +330,7 @@ class DefaultTest(fixtures.TestBase): ) def test_missing_many_param(self): - assert_raises_message(exc.StatementError, + assert_raises_message(exc.StatementError, "A value is required for bind parameter 'col7', in parameter group 1", t.insert().execute, {'col4':7, 'col7':12, 'col8':19}, @@ -558,8 +558,8 @@ class AutoIncrementTest(fixtures.TablesTest): Column('id', Integer(), primary_key=True) ) x = Table('x', m, - Column('id', Integer(), - ForeignKey('y.id'), + Column('id', Integer(), + ForeignKey('y.id'), autoincrement="ignore_fk", primary_key=True) ) assert x._autoincrement_column is x.c.id @@ -570,8 +570,8 @@ class AutoIncrementTest(fixtures.TablesTest): Column('id', Integer(), primary_key=True) ) x = Table('x', m, - Column('id', Integer(), - ForeignKey('y.id'), + Column('id', Integer(), + ForeignKey('y.id'), primary_key=True) ) assert x._autoincrement_column is None @@ -652,7 +652,7 @@ class SequenceExecTest(fixtures.TestBase): self._assert_seq_result(s.execute(testing.db)) def test_explicit_optional(self): - """test dialect executes a Sequence, returns nextval, whether + """test dialect executes a Sequence, returns nextval, whether or not "optional" is set """ s = Sequence("my_sequence", optional=True) @@ -721,7 +721,7 @@ class SequenceExecTest(fixtures.TestBase): @testing.provide_metadata def test_inserted_pk_no_returning(self): - """test inserted_primary_key contains [None] when + """test inserted_primary_key contains [None] when pk_col=next_value(), implicit returning is not used.""" metadata = self.metadata @@ -740,7 +740,7 @@ class SequenceExecTest(fixtures.TestBase): @testing.requires.returning @testing.provide_metadata def test_inserted_pk_implicit_returning(self): - """test inserted_primary_key contains the result when + """test inserted_primary_key contains the result when pk_col=next_value(), when implicit returning is used.""" metadata = self.metadata @@ -762,8 +762,8 @@ class SequenceTest(fixtures.TestBase, testing.AssertsCompiledSQL): @testing.fails_on('firebird', 'no FB support for start/increment') def test_start_increment(self): for seq in ( - Sequence('foo_seq'), - Sequence('foo_seq', start=8), + Sequence('foo_seq'), + Sequence('foo_seq', start=8), Sequence('foo_seq', increment=5)): seq.create(testing.db) try: @@ -782,11 +782,11 @@ class SequenceTest(fixtures.TestBase, testing.AssertsCompiledSQL): return testing.db.dialect.has_sequence(testing.db, name) def test_nextval_render(self): - """test dialect renders the "nextval" construct, + """test dialect renders the "nextval" construct, whether or not "optional" is set """ for s in ( - 
Sequence("my_seq"), + Sequence("my_seq"), Sequence("my_seq", optional=True)): assert str(s.next_value(). compile(dialect=testing.db.dialect)) in ( @@ -796,7 +796,7 @@ class SequenceTest(fixtures.TestBase, testing.AssertsCompiledSQL): ) def test_nextval_unsupported(self): - """test next_value() used on non-sequence platform + """test next_value() used on non-sequence platform raises NotImplementedError.""" s = Sequence("my_seq") @@ -844,7 +844,7 @@ class SequenceTest(fixtures.TestBase, testing.AssertsCompiledSQL): s1 = Sequence("s1", metadata=metadata) s2 = Sequence("s2", metadata=metadata) s3 = Sequence("s3") - t = Table('t', metadata, + t = Table('t', metadata, Column('c', Integer, s3, primary_key=True)) assert s3.metadata is metadata @@ -1017,7 +1017,7 @@ class SpecialTypePKTest(fixtures.TestBase): class ServerDefaultsOnPKTest(fixtures.TestBase): @testing.provide_metadata def test_string_default_none_on_insert(self): - """Test that without implicit returning, we return None for + """Test that without implicit returning, we return None for a string server default. That is, we don't want to attempt to pre-execute "server_default" @@ -1027,7 +1027,7 @@ class ServerDefaultsOnPKTest(fixtures.TestBase): """ metadata = self.metadata - t = Table('x', metadata, + t = Table('x', metadata, Column('y', String(10), server_default='key_one', primary_key=True), Column('data', String(10)), implicit_returning=False @@ -1046,7 +1046,7 @@ class ServerDefaultsOnPKTest(fixtures.TestBase): """With implicit_returning, we get a string PK default back no problem.""" metadata = self.metadata - t = Table('x', metadata, + t = Table('x', metadata, Column('y', String(10), server_default='key_one', primary_key=True), Column('data', String(10)) ) @@ -1061,8 +1061,8 @@ class ServerDefaultsOnPKTest(fixtures.TestBase): @testing.provide_metadata def test_int_default_none_on_insert(self): metadata = self.metadata - t = Table('x', metadata, - Column('y', Integer, + t = Table('x', metadata, + Column('y', Integer, server_default='5', primary_key=True), Column('data', String(10)), implicit_returning=False @@ -1084,8 +1084,8 @@ class ServerDefaultsOnPKTest(fixtures.TestBase): @testing.provide_metadata def test_autoincrement_reflected_from_server_default(self): metadata = self.metadata - t = Table('x', metadata, - Column('y', Integer, + t = Table('x', metadata, + Column('y', Integer, server_default='5', primary_key=True), Column('data', String(10)), implicit_returning=False @@ -1100,8 +1100,8 @@ class ServerDefaultsOnPKTest(fixtures.TestBase): @testing.provide_metadata def test_int_default_none_on_insert_reflected(self): metadata = self.metadata - t = Table('x', metadata, - Column('y', Integer, + t = Table('x', metadata, + Column('y', Integer, server_default='5', primary_key=True), Column('data', String(10)), implicit_returning=False @@ -1128,8 +1128,8 @@ class ServerDefaultsOnPKTest(fixtures.TestBase): @testing.provide_metadata def test_int_default_on_insert_with_returning(self): metadata = self.metadata - t = Table('x', metadata, - Column('y', Integer, + t = Table('x', metadata, + Column('y', Integer, server_default='5', primary_key=True), Column('data', String(10)) ) diff --git a/test/sql/test_functions.py b/test/sql/test_functions.py index 961845bac7..2f9c6f9086 100644 --- a/test/sql/test_functions.py +++ b/test/sql/test_functions.py @@ -36,13 +36,13 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL): GenericFunction.__init__(self, args=[arg], **kwargs) self.assert_compile( - fake_func('foo'), - "fake_func(%s)" % - 
bindtemplate % {'name':'param_1', 'position':1}, + fake_func('foo'), + "fake_func(%s)" % + bindtemplate % {'name':'param_1', 'position':1}, dialect=dialect) def test_use_labels(self): - self.assert_compile(select([func.foo()], use_labels=True), + self.assert_compile(select([func.foo()], use_labels=True), "SELECT foo() AS foo_1" ) def test_underscores(self): @@ -105,12 +105,12 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL): for fn in [func.coalesce, func.max, func.min, func.sum]: for args, type_ in [ - ((datetime.date(2007, 10, 5), + ((datetime.date(2007, 10, 5), datetime.date(2005, 10, 15)), sqltypes.Date), ((3, 5), sqltypes.Integer), ((decimal.Decimal(3), decimal.Decimal(5)), sqltypes.Numeric), (("foo", "bar"), sqltypes.String), - ((datetime.datetime(2007, 10, 5, 8, 3, 34), + ((datetime.datetime(2007, 10, 5, 8, 3, 34), datetime.datetime(2005, 10, 15, 14, 45, 33)), sqltypes.DateTime) ]: assert isinstance(fn(*args).type, type_), "%s / %s" % (fn(), type_) @@ -149,7 +149,7 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL): self.assert_compile(func.lala.hoho(7), "lala.hoho(:hoho_1)") # test None becomes NULL - self.assert_compile(func.my_func(1,2,None,3), + self.assert_compile(func.my_func(1,2,None,3), "my_func(:my_func_1, :my_func_2, NULL, :my_func_3)") # test pickling diff --git a/test/sql/test_generative.py b/test/sql/test_generative.py index 98e783ede9..0b30ae8f3d 100644 --- a/test/sql/test_generative.py +++ b/test/sql/test_generative.py @@ -8,7 +8,7 @@ from sqlalchemy.sql import util as sql_util from test.lib.testing import eq_, ne_, assert_raises class TraversalTest(fixtures.TestBase, AssertsExecutionResults): - """test ClauseVisitor's traversal, particularly its + """test ClauseVisitor's traversal, particularly its ability to copy and modify a ClauseElement in place.""" @classmethod def setup_class(cls): global A, B # establish two fictitious ClauseElements. # define deep equality semantics as well as deep # identity semantics.
class A(ClauseElement): __visit_name__ = 'a' @@ -79,7 +79,7 @@ class TraversalTest(fixtures.TestBase, AssertsExecutionResults): a1 = A("expr1") struct = B(a1, A("expr2"), B(A("expr1b"), A("expr2b")), A("expr3")) struct2 = B(a1, A("expr2"), B(A("expr1b"), A("expr2b")), A("expr3")) - struct3 = B(a1, A("expr2"), B(A("expr1b"), + struct3 = B(a1, A("expr2"), B(A("expr1b"), A("expr2bmodified")), A("expr3")) assert a1.is_other(a1) @@ -90,7 +90,7 @@ class TraversalTest(fixtures.TestBase, AssertsExecutionResults): assert not struct.is_other(struct3) def test_clone(self): - struct = B(A("expr1"), A("expr2"), B(A("expr1b"), + struct = B(A("expr1"), A("expr2"), B(A("expr1b"), A("expr2b")), A("expr3")) class Vis(CloningVisitor): @@ -105,7 +105,7 @@ class TraversalTest(fixtures.TestBase, AssertsExecutionResults): assert not struct.is_other(s2) def test_no_clone(self): - struct = B(A("expr1"), A("expr2"), B(A("expr1b"), + struct = B(A("expr1"), A("expr2"), B(A("expr1b"), A("expr2b")), A("expr3")) class Vis(ClauseVisitor): @@ -120,11 +120,11 @@ class TraversalTest(fixtures.TestBase, AssertsExecutionResults): assert struct.is_other(s2) def test_change_in_place(self): - struct = B(A("expr1"), A("expr2"), B(A("expr1b"), + struct = B(A("expr1"), A("expr2"), B(A("expr1b"), A("expr2b")), A("expr3")) - struct2 = B(A("expr1"), A("expr2modified"), B(A("expr1b"), + struct2 = B(A("expr1"), A("expr2modified"), B(A("expr1b"), A("expr2b")), A("expr3")) - struct3 = B(A("expr1"), A("expr2"), B(A("expr1b"), + struct3 = B(A("expr1"), A("expr2"), B(A("expr1b"), A("expr2bmodified")), A("expr3")) class Vis(CloningVisitor): @@ -184,7 +184,7 @@ class ClauseTest(fixtures.TestBase, AssertsCompiledSQL): column("col2"), column("col3"), ) - t3 = Table('table3', MetaData(), + t3 = Table('table3', MetaData(), Column('col1', Integer), Column('col2', Integer) ) @@ -198,7 +198,7 @@ class ClauseTest(fixtures.TestBase, AssertsCompiledSQL): f = t.c.col1 * 5 - self.assert_compile(select([f]), + self.assert_compile(select([f]), "SELECT t1.col1 * :col1_1 AS anon_1 FROM t1") f.anon_label @@ -206,7 +206,7 @@ class ClauseTest(fixtures.TestBase, AssertsCompiledSQL): a = t.alias() f = sql_util.ClauseAdapter(a).traverse(f) - self.assert_compile(select([f]), + self.assert_compile(select([f]), "SELECT t1_1.col1 * :col1_1 AS anon_1 FROM t1 AS t1_1") def test_join(self): @@ -227,7 +227,7 @@ class ClauseTest(fixtures.TestBase, AssertsCompiledSQL): aliased = t1.select().alias() aliased2 = t1.alias() - + adapter = sql_util.ColumnAdapter(aliased) f = select([ @@ -237,7 +237,7 @@ class ClauseTest(fixtures.TestBase, AssertsCompiledSQL): s = select([aliased2]).select_from(aliased) eq_(str(s), str(f)) - + f = select([ adapter.columns[func.count(aliased2.c.col1)] ]).select_from(aliased) @@ -256,7 +256,7 @@ class ClauseTest(fixtures.TestBase, AssertsCompiledSQL): # fixed by [ticket:2419]. the inside columns # on aliased3 have _is_clone_of pointers to those of - # aliased2. corresponding_column checks these + # aliased2. corresponding_column checks these # now. adapter = sql_util.ColumnAdapter(aliased1) f1 = select([ @@ -280,7 +280,7 @@ class ClauseTest(fixtures.TestBase, AssertsCompiledSQL): aliased3 = cloned_traverse(aliased2, {}, {}) # also fixed by [ticket:2419]. When we look at the - # *outside* columns of aliased3, they previously did not + # *outside* columns of aliased3, they previously did not # have an _is_clone_of pointer. But we now modified _make_proxy # to assign this. 
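
These adapter tests all revolve around sql_util.ClauseAdapter, which copies an expression while rewriting its table references to point at an alias. A minimal sketch of that mechanic in isolation, with a throwaway table standing in for the module fixtures:

    from sqlalchemy import MetaData, Table, Column, Integer, select
    from sqlalchemy.sql import util as sql_util

    t = Table('t1', MetaData(), Column('col1', Integer))

    expr = t.c.col1 * 5
    print(select([expr]))
    # SELECT t1.col1 * :col1_1 AS anon_1 FROM t1

    # traverse() returns a copy of the expression in which every
    # reference to t1 has been swapped for the alias
    a = t.alias()
    adapted = sql_util.ClauseAdapter(a).traverse(expr)
    print(select([adapted]))
    # SELECT t1_1.col1 * :col1_1 AS anon_1 FROM t1 AS t1_1
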
adapter = sql_util.ColumnAdapter(aliased1) @@ -305,7 +305,7 @@ class ClauseTest(fixtures.TestBase, AssertsCompiledSQL): aliased3 = cloned_traverse(aliased2, {}, {}) # also fixed by [ticket:2419]. When we look at the - # *outside* columns of aliased3, they previously did not + # *outside* columns of aliased3, they previously did not # have an _is_clone_of pointer. But we now modified _make_proxy # to assign this. adapter = sql_util.ColumnAdapter(aliased1) @@ -421,14 +421,14 @@ class ClauseTest(fixtures.TestBase, AssertsCompiledSQL): assert sql_util.ClauseAdapter(u).traverse(t1) is u def test_binds(self): - """test that unique bindparams change their name upon clone() + """test that unique bindparams change their name upon clone() to prevent conflicts""" s = select([t1], t1.c.col1==bindparam(None, unique=True)).alias() s2 = CloningVisitor().traverse(s).alias() s3 = select([s], s.c.col2==s2.c.col2) - self.assert_compile(s3, + self.assert_compile(s3, "SELECT anon_1.col1, anon_1.col2, anon_1.col3 FROM " "(SELECT table1.col1 AS col1, table1.col2 AS col2, " "table1.col3 AS col3 FROM table1 WHERE table1.col1 = :param_1) " @@ -440,7 +440,7 @@ class ClauseTest(fixtures.TestBase, AssertsCompiledSQL): s = select([t1], t1.c.col1==4).alias() s2 = CloningVisitor().traverse(s).alias() s3 = select([s], s.c.col2==s2.c.col2) - self.assert_compile(s3, + self.assert_compile(s3, "SELECT anon_1.col1, anon_1.col2, anon_1.col3 FROM " "(SELECT table1.col1 AS col1, table1.col2 AS col2, " "table1.col3 AS col3 FROM table1 WHERE table1.col1 = :col1_1) " @@ -451,14 +451,14 @@ class ClauseTest(fixtures.TestBase, AssertsCompiledSQL): def test_extract(self): s = select([extract('foo', t1.c.col1).label('col1')]) - self.assert_compile(s, + self.assert_compile(s, "SELECT EXTRACT(foo FROM table1.col1) AS col1 FROM table1") s2 = CloningVisitor().traverse(s).alias() s3 = select([s2.c.col1]) - self.assert_compile(s, + self.assert_compile(s, "SELECT EXTRACT(foo FROM table1.col1) AS col1 FROM table1") - self.assert_compile(s3, + self.assert_compile(s3, "SELECT anon_1.col1 FROM (SELECT EXTRACT(foo FROM " "table1.col1) AS col1 FROM table1) AS anon_1") @@ -466,8 +466,8 @@ class ClauseTest(fixtures.TestBase, AssertsCompiledSQL): @testing.emits_warning('.*replaced by another column with the same key') def test_alias(self): subq = t2.select().alias('subq') - s = select([t1.c.col1, subq.c.col1], - from_obj=[t1, subq, + s = select([t1.c.col1, subq.c.col1], + from_obj=[t1, subq, t1.join(subq, t1.c.col1==subq.c.col2)] ) orig = str(s) @@ -484,21 +484,21 @@ class ClauseTest(fixtures.TestBase, AssertsCompiledSQL): assert orig == str(s) == str(s3) == str(s4) subq = subq.alias('subq') - s = select([t1.c.col1, subq.c.col1], - from_obj=[t1, subq, + s = select([t1.c.col1, subq.c.col1], + from_obj=[t1, subq, t1.join(subq, t1.c.col1==subq.c.col2)] ) s5 = CloningVisitor().traverse(s) assert orig == str(s) == str(s5) def test_correlated_select(self): - s = select(['*'], t1.c.col1==t2.c.col1, + s = select(['*'], t1.c.col1==t2.c.col1, from_obj=[t1, t2]).correlate(t2) class Vis(CloningVisitor): def visit_select(self, select): select.append_whereclause(t1.c.col2==7) - self.assert_compile(Vis().traverse(s), + self.assert_compile(Vis().traverse(s), "SELECT * FROM table1 WHERE table1.col1 = table2.col1 " "AND table1.col2 = :col2_1") @@ -755,8 +755,8 @@ class ClauseAdapterTest(fixtures.TestBase, AssertsCompiledSQL): m = MetaData() a=Table( 'a',m, Column( 'id', Integer, primary_key=True), - Column( 'xxx_id', Integer, - ForeignKey( 'a.id', 
name='adf',use_alter=True ) + Column( 'xxx_id', Integer, + ForeignKey( 'a.id', name='adf',use_alter=True ) ) ) @@ -791,7 +791,7 @@ class ClauseAdapterTest(fixtures.TestBase, AssertsCompiledSQL): alias = select([a]).select_from(a.join(b, a.c.x==b.c.x)).alias() - # two levels of indirection from c.x->b.x->a.x, requires recursive + # two levels of indirection from c.x->b.x->a.x, requires recursive # corresponding_column call adapt = sql_util.ClauseAdapter(alias, equivalents={b.c.x: set([a.c.x]), c.c.x: set([b.c.x])}) @@ -1171,7 +1171,7 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL): assert_raises( exc.ArgumentError, - select().execution_options, + select().execution_options, isolation_level='READ_COMMITTED' ) diff --git a/test/sql/test_labels.py b/test/sql/test_labels.py index 7c65a1d013..c814a01308 100644 --- a/test/sql/test_labels.py +++ b/test/sql/test_labels.py @@ -27,14 +27,14 @@ class LongLabelsTest(fixtures.TablesTest, AssertsCompiledSQL): def define_tables(cls, metadata): table1 = Table("some_large_named_table", metadata, Column("this_is_the_primarykey_column", Integer, - primary_key=True, + primary_key=True, test_needs_autoincrement=True), Column("this_is_the_data_column", String(30)) ) table2 = Table("table_with_exactly_29_characs", metadata, Column("this_is_the_primarykey_column", Integer, - primary_key=True, + primary_key=True, test_needs_autoincrement=True), Column("this_is_the_data_column", String(30)) ) @@ -46,13 +46,13 @@ class LongLabelsTest(fixtures.TablesTest, AssertsCompiledSQL): table1 = cls.tables.table1 table2 = cls.tables.table2 for data in [ - {"this_is_the_primarykey_column":1, + {"this_is_the_primarykey_column":1, "this_is_the_data_column":"data1"}, - {"this_is_the_primarykey_column":2, + {"this_is_the_primarykey_column":2, "this_is_the_data_column":"data2"}, - {"this_is_the_primarykey_column":3, + {"this_is_the_primarykey_column":3, "this_is_the_data_column":"data3"}, - {"this_is_the_primarykey_column":4, + {"this_is_the_primarykey_column":4, "this_is_the_data_column":"data4"} ]: testing.db.execute( @@ -61,7 +61,7 @@ class LongLabelsTest(fixtures.TablesTest, AssertsCompiledSQL): ) testing.db.execute( table2.insert(), - {"this_is_the_primary_key_column":1, + {"this_is_the_primary_key_column":1, "this_is_the_data_column":"data"} ) @@ -78,7 +78,7 @@ class LongLabelsTest(fixtures.TablesTest, AssertsCompiledSQL): def test_too_long_name_disallowed(self): m = MetaData(testing.db) - t1 = Table("this_name_is_too_long_for_what_were_doing_in_this_test", + t1 = Table("this_name_is_too_long_for_what_were_doing_in_this_test", m, Column('foo', Integer)) assert_raises(exceptions.IdentifierError, m.create_all) assert_raises(exceptions.IdentifierError, m.drop_all) @@ -87,11 +87,11 @@ class LongLabelsTest(fixtures.TablesTest, AssertsCompiledSQL): def test_basic_result(self): table1 = self.tables.table1 - s = table1.select(use_labels=True, + s = table1.select(use_labels=True, order_by=[table1.c.this_is_the_primarykey_column]) result = [ - (row[table1.c.this_is_the_primarykey_column], + (row[table1.c.this_is_the_primarykey_column], row[table1.c.this_is_the_data_column]) for row in testing.db.execute(s) ] @@ -104,18 +104,18 @@ class LongLabelsTest(fixtures.TablesTest, AssertsCompiledSQL): def test_result_limit(self): table1 = self.tables.table1 - # some dialects such as oracle (and possibly ms-sql + # some dialects such as oracle (and possibly ms-sql # in a future version) # generate a subquery for limits/offsets. 
- # ensure that the generated result map corresponds + # ensure that the generated result map corresponds # to the selected table, not # the select query - s = table1.select(use_labels=True, + s = table1.select(use_labels=True, order_by=[table1.c.this_is_the_primarykey_column]).\ limit(2) result = [ - (row[table1.c.this_is_the_primarykey_column], + (row[table1.c.this_is_the_primarykey_column], row[table1.c.this_is_the_data_column]) for row in testing.db.execute(s) ] @@ -127,12 +127,12 @@ class LongLabelsTest(fixtures.TablesTest, AssertsCompiledSQL): @testing.requires.offset def test_result_limit_offset(self): table1 = self.tables.table1 - s = table1.select(use_labels=True, + s = table1.select(use_labels=True, order_by=[table1.c.this_is_the_primarykey_column]).\ limit(2).offset(1) result = [ - (row[table1.c.this_is_the_primarykey_column], + (row[table1.c.this_is_the_primarykey_column], row[table1.c.this_is_the_data_column]) for row in testing.db.execute(s) ] @@ -170,7 +170,7 @@ class LongLabelsTest(fixtures.TablesTest, AssertsCompiledSQL): dialect.max_identifier_length = IDENT_LENGTH self.assert_compile( select([table1, ta]).select_from( - table1.join(ta, + table1.join(ta, table1.c.this_is_the_data_column== ta.c.this_is_the_data_column)).\ where(ta.c.this_is_the_data_column=='data3'), @@ -210,13 +210,13 @@ class LongLabelsTest(fixtures.TablesTest, AssertsCompiledSQL): @testing.provide_metadata def test_insert_no_pk(self): t = Table("some_other_large_named_table", self.metadata, - Column("this_is_the_primarykey_column", Integer, - Sequence("this_is_some_large_seq"), + Column("this_is_the_primarykey_column", Integer, + Sequence("this_is_some_large_seq"), primary_key=True), Column("this_is_the_data_column", String(30)) ) t.create(testing.db, checkfirst=True) - testing.db.execute(t.insert(), + testing.db.execute(t.insert(), **{"this_is_the_data_column":"data1"}) @testing.requires.subqueries @@ -238,7 +238,7 @@ class LongLabelsTest(fixtures.TablesTest, AssertsCompiledSQL): q = table1.select(table1.c.this_is_the_primarykey_column == 4).alias() x = select([q], use_labels=True) - self.assert_compile(x, + self.assert_compile(x, "SELECT anon_1.this_is_the_primarykey_column AS " "anon_1_this_is_the_prim_1, anon_1.this_is_the_data_column " "AS anon_1_this_is_the_data_2 " @@ -249,7 +249,7 @@ class LongLabelsTest(fixtures.TablesTest, AssertsCompiledSQL): "AS this_is_the_data_column " "FROM some_large_named_table " "WHERE some_large_named_table.this_is_the_primarykey_column " - "= :this_is_the_primarykey__1) AS anon_1", + "= :this_is_the_primarykey__1) AS anon_1", dialect=compile_dialect) eq_( @@ -264,13 +264,13 @@ class LongLabelsTest(fixtures.TablesTest, AssertsCompiledSQL): x = select([q]) compile_dialect = default.DefaultDialect(label_length=10) - self.assert_compile(x, + self.assert_compile(x, "SELECT foo.this_1, foo.this_2 FROM " "(SELECT some_large_named_table." 
"this_is_the_primarykey_column AS this_1, " "some_large_named_table.this_is_the_data_column AS this_2 " "FROM some_large_named_table WHERE " - "some_large_named_table.this_is_the_primarykey_column = :this_1) AS foo", + "some_large_named_table.this_is_the_primarykey_column = :this_1) AS foo", dialect=compile_dialect) compile_dialect = default.DefaultDialect(label_length=4) @@ -278,19 +278,19 @@ class LongLabelsTest(fixtures.TablesTest, AssertsCompiledSQL): "(SELECT some_large_named_table.this_is_the_primarykey_column " "AS _1, some_large_named_table.this_is_the_data_column AS _2 " "FROM some_large_named_table WHERE " - "some_large_named_table.this_is_the_primarykey_column = :_1) AS foo", + "some_large_named_table.this_is_the_primarykey_column = :_1) AS foo", dialect=compile_dialect) q = table1.select(table1.c.this_is_the_primarykey_column == 4).alias() x = select([q], use_labels=True) compile_dialect = default.DefaultDialect(label_length=10) - self.assert_compile(x, + self.assert_compile(x, "SELECT anon_1.this_2 AS anon_1, anon_1.this_4 AS anon_3 FROM " "(SELECT some_large_named_table.this_is_the_primarykey_column " "AS this_2, some_large_named_table.this_is_the_data_column AS this_4 " "FROM some_large_named_table WHERE " - "some_large_named_table.this_is_the_primarykey_column = :this_1) AS anon_1", + "some_large_named_table.this_is_the_primarykey_column = :this_1) AS anon_1", dialect=compile_dialect) compile_dialect = default.DefaultDialect(label_length=4) @@ -298,7 +298,7 @@ class LongLabelsTest(fixtures.TablesTest, AssertsCompiledSQL): "(SELECT some_large_named_table.this_is_the_primarykey_column " "AS _2, some_large_named_table.this_is_the_data_column AS _4 " "FROM some_large_named_table WHERE " - "some_large_named_table.this_is_the_primarykey_column = :_1) AS _1", + "some_large_named_table.this_is_the_primarykey_column = :_1) AS _1", dialect=compile_dialect) def test_adjustable_result_schema_column(self): @@ -321,7 +321,7 @@ class LongLabelsTest(fixtures.TablesTest, AssertsCompiledSQL): def test_adjustable_result_lightweight_column(self): - table1 = table("some_large_named_table", + table1 = table("some_large_named_table", column("this_is_the_primarykey_column"), column("this_is_the_data_column") ) diff --git a/test/sql/test_metadata.py b/test/sql/test_metadata.py index ecbf8ad754..3e9f87fe70 100644 --- a/test/sql/test_metadata.py +++ b/test/sql/test_metadata.py @@ -19,7 +19,7 @@ from test.lib.testing import eq_ class MetaDataTest(fixtures.TestBase, ComparesTables): def test_metadata_connect(self): metadata = MetaData() - t1 = Table('table1', metadata, + t1 = Table('table1', metadata, Column('col1', Integer, primary_key=True), Column('col2', String(20))) metadata.bind = testing.db @@ -58,7 +58,7 @@ class MetaDataTest(fixtures.TestBase, ComparesTables): Column('bar', Integer(), info={'foo':'bar'}), ]: c2 = col.copy() - for attr in ('name', 'type', 'nullable', + for attr in ('name', 'type', 'nullable', 'primary_key', 'key', 'unique', 'info', 'doc'): eq_(getattr(col, attr), getattr(c2, attr)) @@ -148,14 +148,14 @@ class MetaDataTest(fixtures.TestBase, ComparesTables): @testing.provide_metadata def test_dupe_tables(self): metadata = self.metadata - t1 = Table('table1', metadata, + t1 = Table('table1', metadata, Column('col1', Integer, primary_key=True), Column('col2', String(20))) metadata.create_all() t1 = Table('table1', metadata, autoload=True) def go(): - t2 = Table('table1', metadata, + t2 = Table('table1', metadata, Column('col1', Integer, primary_key=True), Column('col2', 
String(20))) assert_raises_message( @@ -173,11 +173,11 @@ class MetaDataTest(fixtures.TestBase, ComparesTables): m = MetaData() t1 = Table('t', m, c1, c2) - kw = dict(onupdate="X", + kw = dict(onupdate="X", ondelete="Y", use_alter=True, name='f1', deferrable="Z", initially="Q", link_to_name=True) - fk1 = ForeignKey(c1, **kw) + fk1 = ForeignKey(c1, **kw) fk2 = ForeignKeyConstraint((c1,), (c2,), **kw) t1.append_constraint(fk2) @@ -190,10 +190,10 @@ class MetaDataTest(fixtures.TestBase, ComparesTables): def test_check_constraint_copy(self): r = lambda x: x - c = CheckConstraint("foo bar", - name='name', - initially=True, - deferrable=True, + c = CheckConstraint("foo bar", + name='name', + initially=True, + deferrable=True, _create_rule = r) c2 = c.copy() eq_(c2.name, 'name') @@ -273,7 +273,7 @@ class MetaDataTest(fixtures.TestBase, ComparesTables): table2 = Table('othertable', meta, Column('id', Integer, Sequence('foo_seq'), primary_key=True), - Column('myid', Integer, + Column('myid', Integer, ForeignKey('mytable.myid'), ), test_needs_fk=True, @@ -359,7 +359,7 @@ class MetaDataTest(fixtures.TestBase, ComparesTables): m2 = pickle.loads(pickle.dumps(m1)) s2 = Sequence("x_seq") - t2 = Table('a', m2, + t2 = Table('a', m2, Column('id',Integer,primary_key=True), Column('x', Integer, s2), extend_existing=True) @@ -373,7 +373,7 @@ class MetaDataTest(fixtures.TestBase, ComparesTables): m1 = MetaData() s1 = Sequence("x_seq") - t = Table('a', m1, + t = Table('a', m1, Column('x', Integer, s1) ) assert m1._sequences['x_seq'] is s1 @@ -601,9 +601,9 @@ class MetaDataTest(fixtures.TestBase, ComparesTables): table_c = table.tometadata(meta2, schema=None) table2_c = table2.tometadata(meta2, schema=None) - eq_(str(table_c.join(table2_c).onclause), + eq_(str(table_c.join(table2_c).onclause), str(table_c.c.myid == table2_c.c.myid)) - eq_(str(table_c.join(table2_c).onclause), + eq_(str(table_c.join(table2_c).onclause), "someschema.mytable.myid = someschema.othertable.myid") def test_tometadata_strip_schema(self): @@ -644,21 +644,21 @@ class MetaDataTest(fixtures.TestBase, ComparesTables): ck = schema.CheckConstraint("x > y", name="someconstraint") for const, exp in ( - (Sequence("my_seq"), + (Sequence("my_seq"), "Sequence('my_seq')"), - (Sequence("my_seq", start=5), + (Sequence("my_seq", start=5), "Sequence('my_seq', start=5)"), - (Column("foo", Integer), + (Column("foo", Integer), "Column('foo', Integer(), table=None)"), - (Table("bar", MetaData(), Column("x", String)), + (Table("bar", MetaData(), Column("x", String)), "Table('bar', MetaData(bind=None), " "Column('x', String(), table=), schema=None)"), - (schema.DefaultGenerator(for_update=True), + (schema.DefaultGenerator(for_update=True), "DefaultGenerator(for_update=True)"), (schema.Index("bar", "c"), "Index('bar')"), (i1, "Index('bar', Column('x', Integer(), table=))"), (schema.FetchedValue(), "FetchedValue()"), - (ck, + (ck, "CheckConstraint(" "%s" ", name='someconstraint')" % repr(ck.sqltext)), @@ -675,7 +675,7 @@ class TableTest(fixtures.TestBase, AssertsCompiledSQL): prefixes = ["TEMPORARY"]) self.assert_compile( - schema.CreateTable(table1), + schema.CreateTable(table1), "CREATE TEMPORARY TABLE temporary_table_1 (col1 INTEGER)" ) @@ -683,7 +683,7 @@ class TableTest(fixtures.TestBase, AssertsCompiledSQL): Column("col1", Integer), prefixes = ["VIRTUAL"]) self.assert_compile( - schema.CreateTable(table2), + schema.CreateTable(table2), "CREATE VIRTUAL TABLE temporary_table_2 (col1 INTEGER)" ) @@ -741,7 +741,7 @@ class SchemaTest(fixtures.TestBase, 
AssertsCompiledSQL): def test_default_schema_metadata_fk_alt_remote(self): m = MetaData(schema="foo") t1 = Table('t1', m, Column('x', Integer)) - t2 = Table('t2', m, Column('x', Integer, ForeignKey('t1.x')), + t2 = Table('t2', m, Column('x', Integer, ForeignKey('t1.x')), schema="bar") assert t2.c.x.references(t1.c.x) @@ -797,8 +797,8 @@ class SchemaTest(fixtures.TestBase, AssertsCompiledSQL): class UseExistingTest(fixtures.TablesTest): @classmethod def define_tables(cls, metadata): - Table('users', metadata, - Column('id', Integer, primary_key=True), + Table('users', metadata, + Column('id', Integer, primary_key=True), Column('name', String(30))) def _useexisting_fixture(self): @@ -836,7 +836,7 @@ class UseExistingTest(fixtures.TablesTest): meta2 = self._useexisting_fixture() assert_raises( exc.ArgumentError, - Table, 'users', meta2, keep_existing=True, + Table, 'users', meta2, keep_existing=True, extend_existing=True ) @@ -845,13 +845,13 @@ class UseExistingTest(fixtures.TablesTest): meta2 = self._useexisting_fixture() assert_raises( exc.ArgumentError, - Table, 'users', meta2, useexisting=True, + Table, 'users', meta2, useexisting=True, extend_existing=True ) def test_keep_existing_no_dupe_constraints(self): meta2 = self._notexisting_fixture() - users = Table('users', meta2, + users = Table('users', meta2, Column('id', Integer), Column('name', Unicode), UniqueConstraint('name'), @@ -861,7 +861,7 @@ class UseExistingTest(fixtures.TablesTest): assert 'id' in users.c eq_(len(users.constraints), 2) - u2 = Table('users', meta2, + u2 = Table('users', meta2, Column('id', Integer), Column('name', Unicode), UniqueConstraint('name'), @@ -871,7 +871,7 @@ class UseExistingTest(fixtures.TablesTest): def test_extend_existing_dupes_constraints(self): meta2 = self._notexisting_fixture() - users = Table('users', meta2, + users = Table('users', meta2, Column('id', Integer), Column('name', Unicode), UniqueConstraint('name'), @@ -881,7 +881,7 @@ class UseExistingTest(fixtures.TablesTest): assert 'id' in users.c eq_(len(users.constraints), 2) - u2 = Table('users', meta2, + u2 = Table('users', meta2, Column('id', Integer), Column('name', Unicode), UniqueConstraint('name'), @@ -904,7 +904,7 @@ class UseExistingTest(fixtures.TablesTest): def test_keep_existing_add_column(self): meta2 = self._useexisting_fixture() - users = Table('users', meta2, + users = Table('users', meta2, Column('foo', Integer), autoload=True, keep_existing=True) @@ -918,14 +918,14 @@ class UseExistingTest(fixtures.TablesTest): def test_keep_existing_quote_no_orig(self): meta2 = self._notexisting_fixture() - users = Table('users', meta2, quote=True, + users = Table('users', meta2, quote=True, autoload=True, keep_existing=True) assert users.quote def test_keep_existing_add_column_no_orig(self): meta2 = self._notexisting_fixture() - users = Table('users', meta2, + users = Table('users', meta2, Column('foo', Integer), autoload=True, keep_existing=True) @@ -939,13 +939,13 @@ class UseExistingTest(fixtures.TablesTest): def test_keep_existing_quote_no_reflection(self): meta2 = self._useexisting_fixture() - users = Table('users', meta2, quote=True, + users = Table('users', meta2, quote=True, keep_existing=True) assert not users.quote def test_keep_existing_add_column_no_reflection(self): meta2 = self._useexisting_fixture() - users = Table('users', meta2, + users = Table('users', meta2, Column('foo', Integer), keep_existing=True) assert "foo" not in users.c @@ -964,7 +964,7 @@ class UseExistingTest(fixtures.TablesTest): def 
test_extend_existing_add_column(self): meta2 = self._useexisting_fixture() - users = Table('users', meta2, + users = Table('users', meta2, Column('foo', Integer), autoload=True, extend_existing=True) @@ -978,14 +978,14 @@ class UseExistingTest(fixtures.TablesTest): def test_extend_existing_quote_no_orig(self): meta2 = self._notexisting_fixture() - users = Table('users', meta2, quote=True, + users = Table('users', meta2, quote=True, autoload=True, extend_existing=True) assert users.quote def test_extend_existing_add_column_no_orig(self): meta2 = self._notexisting_fixture() - users = Table('users', meta2, + users = Table('users', meta2, Column('foo', Integer), autoload=True, extend_existing=True) @@ -999,13 +999,13 @@ class UseExistingTest(fixtures.TablesTest): def test_extend_existing_quote_no_reflection(self): meta2 = self._useexisting_fixture() - users = Table('users', meta2, quote=True, + users = Table('users', meta2, quote=True, extend_existing=True) assert users.quote def test_extend_existing_add_column_no_reflection(self): meta2 = self._useexisting_fixture() - users = Table('users', meta2, + users = Table('users', meta2, Column('foo', Integer), extend_existing=True) assert "foo" in users.c @@ -1014,16 +1014,16 @@ class ConstraintTest(fixtures.TestBase): def _single_fixture(self): m = MetaData() - t1 = Table('t1', m, + t1 = Table('t1', m, Column('a', Integer), Column('b', Integer) ) - t2 = Table('t2', m, + t2 = Table('t2', m, Column('a', Integer, ForeignKey('t1.a')) ) - t3 = Table('t3', m, + t3 = Table('t3', m, Column('a', Integer) ) return t1, t2, t3 @@ -1090,7 +1090,7 @@ class ColumnDefinitionTest(AssertsCompiledSQL, fixtures.TestBase): c = Column(Integer) assert_raises_message( - exc.ArgumentError, + exc.ArgumentError, "Column must be constructed with a non-blank name or assign a " "non-blank .name ", Table, 't', MetaData(), c) @@ -1099,7 +1099,7 @@ class ColumnDefinitionTest(AssertsCompiledSQL, fixtures.TestBase): c = Column('', Integer) assert_raises_message( - exc.ArgumentError, + exc.ArgumentError, "Column must be constructed with a non-blank name or assign a " "non-blank .name ", Table, 't', MetaData(), c) @@ -1109,7 +1109,7 @@ class ColumnDefinitionTest(AssertsCompiledSQL, fixtures.TestBase): t = Table('t', MetaData(), c) assert_raises_message( - exc.ArgumentError, + exc.ArgumentError, "Column object already assigned to Table 't'", Table, 'q', MetaData(), c) @@ -1335,7 +1335,7 @@ class CatchAllEventsTest(fixtures.TestBase): event.listen(schema.SchemaItem, "after_parent_attach", after_attach) m = MetaData() - t1 = Table('t1', m, + t1 = Table('t1', m, Column('id', Integer, Sequence('foo_id'), primary_key=True), Column('bar', String, ForeignKey('t2.id')) ) @@ -1375,7 +1375,7 @@ class CatchAllEventsTest(fixtures.TestBase): evt(target) m = MetaData() - t1 = Table('t1', m, + t1 = Table('t1', m, Column('id', Integer, Sequence('foo_id'), primary_key=True), Column('bar', String, ForeignKey('t2.id')), Column('bat', Integer, unique=True), @@ -1390,10 +1390,10 @@ class CatchAllEventsTest(fixtures.TestBase): eq_( canary, [ - 'PrimaryKeyConstraint->Table', 'PrimaryKeyConstraint->t1', + 'PrimaryKeyConstraint->Table', 'PrimaryKeyConstraint->t1', 'ForeignKeyConstraint->Table', 'ForeignKeyConstraint->t1', 'UniqueConstraint->Table', 'UniqueConstraint->t1', - 'PrimaryKeyConstraint->Table', 'PrimaryKeyConstraint->t2', + 'PrimaryKeyConstraint->Table', 'PrimaryKeyConstraint->t2', 'CheckConstraint->Table', 'CheckConstraint->t2', 'UniqueConstraint->Table', 'UniqueConstraint->t2' ] diff --git 
a/test/sql/test_query.py b/test/sql/test_query.py index b7f28e58d9..aba7288d26 100644 --- a/test/sql/test_query.py +++ b/test/sql/test_query.py @@ -49,7 +49,7 @@ class QueryTest(fixtures.TestBase): def test_insert_heterogeneous_params(self): """test that executemany parameters are asserted to match the parameter set of the first.""" - assert_raises_message(exc.StatementError, + assert_raises_message(exc.StatementError, r"A value is required for bind parameter 'user_name', in " "parameter group 2 \(original cause: (sqlalchemy.exc.)?InvalidRequestError: A " "value is required for bind parameter 'user_name', in " @@ -60,7 +60,7 @@ class QueryTest(fixtures.TestBase): {'user_id':9} ) - # this succeeds however. We aren't yet doing + # this succeeds however. We aren't yet doing # a length check on all subsequent parameters. users.insert().execute( {'user_id':7}, @@ -99,7 +99,7 @@ class QueryTest(fixtures.TestBase): ret[col.key] = id if result.lastrow_has_defaults(): - criterion = and_(*[col==id for col, id in + criterion = and_(*[col==id for col, id in zip(table.primary_key, result.inserted_primary_key)]) row = engine.execute(table.select(criterion)).first() for c in table.c: @@ -217,7 +217,7 @@ class QueryTest(fixtures.TestBase): for engine in test_engines: - r = engine.execute(users.insert(), + r = engine.execute(users.insert(), {'user_name':'jack'}, ) assert r.closed @@ -312,7 +312,7 @@ class QueryTest(fixtures.TestBase): content = Table('content', self.metadata, Column('type', String(30)), ) - bar = Table('bar', self.metadata, + bar = Table('bar', self.metadata, Column('content_type', String(30)) ) self.metadata.create_all(testing.db) @@ -348,7 +348,7 @@ class QueryTest(fixtures.TestBase): result = util.pickle.loads(util.pickle.dumps(result)) eq_( - result, + result, [(7, "jack"), (8, "ed"), (9, "fred")] ) if use_labels: @@ -365,7 +365,7 @@ class QueryTest(fixtures.TestBase): if not pickle or use_labels: assert_raises(exc.NoSuchColumnError, lambda: result[0][addresses.c.user_id]) else: - # test with a different table. name resolution is + # test with a different table. name resolution is # causing 'user_id' to match when use_labels wasn't used. 
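
The row-targeting assertions above are easier to see against a live connection. A throwaway sketch, assuming an in-memory SQLite engine and an invented table, of the three equivalent access styles:

    from sqlalchemy import create_engine, MetaData, Table, Column, Integer, String

    engine = create_engine('sqlite://')
    m = MetaData()
    users = Table('query_users', m,
        Column('user_id', Integer, primary_key=True),
        Column('user_name', String(30)))
    m.create_all(engine)

    engine.execute(users.insert(), user_id=7, user_name='jack')
    row = engine.execute(users.select()).first()

    # a result row answers to string keys, positional indexes, and
    # Column objects; an unrelated Column raises NoSuchColumnError
    assert row['user_name'] == row[1] == row[users.c.user_name] == 'jack'
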
eq_(result[0][addresses.c.user_id], 7) @@ -387,7 +387,7 @@ class QueryTest(fixtures.TestBase): (unprintable(), "unprintable element.*"), ]: assert_raises_message( - exc.NoSuchColumnError, + exc.NoSuchColumnError, msg % repl, lambda: row[accessor] ) @@ -740,7 +740,7 @@ class QueryTest(fixtures.TestBase): dict(user_id=1, user_name='john'), ) - # test a little sqlite weirdness - with the UNION, + # test a little sqlite weirdness - with the UNION, # cols come back as "query_users.user_id" in cursor.description r = text("select query_users.user_id, query_users.user_name from query_users " "UNION select query_users.user_id, query_users.user_name from query_users", @@ -785,7 +785,7 @@ class QueryTest(fixtures.TestBase): ) # test using literal tablename.colname r = text('select query_users.user_id AS "query_users.user_id", ' - 'query_users.user_name AS "query_users.user_name" from query_users', + 'query_users.user_name AS "query_users.user_name" from query_users', bind=testing.db).execution_options(sqlite_raw_colnames=True).execute().first() eq_(r['query_users.user_id'], 1) eq_(r['query_users.user_name'], "john") @@ -1030,9 +1030,9 @@ class QueryTest(fixtures.TestBase): ) shadowed.create(checkfirst=True) try: - shadowed.insert().execute(shadow_id=1, shadow_name='The Shadow', parent='The Light', - row='Without light there is no shadow', - _parent='Hidden parent', + shadowed.insert().execute(shadow_id=1, shadow_name='The Shadow', parent='The Light', + row='Without light there is no shadow', + _parent='Hidden parent', _row='Hidden row') r = shadowed.select(shadowed.c.shadow_id==1).execute().first() self.assert_(r.shadow_id == r['shadow_id'] == r[shadowed.c.shadow_id] == 1) @@ -1080,7 +1080,7 @@ class QueryTest(fixtures.TestBase): @testing.fails_on('firebird', "uses sql-92 rules") @testing.fails_on('sybase', "uses sql-92 rules") @testing.fails_on('mssql+mxodbc', "uses sql-92 rules") - @testing.fails_if(lambda: + @testing.fails_if(lambda: testing.against('mssql+pyodbc') and not testing.db.dialect.freetds, "uses sql-92 rules") def test_bind_in(self): @@ -1120,7 +1120,7 @@ class QueryTest(fixtures.TestBase): @testing.emits_warning('.*empty sequence.*') @testing.requires.boolean_col_expressions def test_in_filtering_advanced(self): - """test the behavior of the in_() function when + """test the behavior of the in_() function when comparing against an empty collection, specifically that a proper boolean value is generated. @@ -1143,7 +1143,7 @@ class QueryTest(fixtures.TestBase): class PercentSchemaNamesTest(fixtures.TestBase): """tests using percent signs, spaces in table and column names. - Doesn't pass for mysql, postgresql, but this is really a + Doesn't pass for mysql, postgresql, but this is really a SQLAlchemy bug - we should be escaping out %% signs for this operation the same way we do for text() and column labels. 
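
One edge case pinned down a few hunks up is worth seeing in isolation: in_() against an empty collection cannot render as "IN ()" on most backends, so the compiler substitutes a static boolean-valued contradiction and emits the "empty sequence" warning those tests expect. A self-contained sketch, assuming only a throwaway table:

    import warnings
    from sqlalchemy.sql import table, column, select

    stuff = table('stuff', column('data'))

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter('always')
        # constructing the empty IN emits the warning immediately
        stmt = select([stuff]).where(stuff.c.data.in_([]))

    print(stmt)
    # renders a contradiction along the lines of:
    # SELECT stuff.data FROM stuff WHERE stuff.data != stuff.data
    assert any('empty sequence' in str(w.message) for w in caught)
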
@@ -1157,7 +1157,7 @@ class PercentSchemaNamesTest(fixtures.TestBase): Column("percent%", Integer), Column("spaces % more spaces", Integer), ) - lightweight_percent_table = sql.table('percent%table', + lightweight_percent_table = sql.table('percent%table', sql.column("percent%"), sql.column("spaces % more spaces"), ) @@ -1170,7 +1170,7 @@ class PercentSchemaNamesTest(fixtures.TestBase): def teardown_class(cls): metadata.drop_all() - @testing.skip_if(lambda: testing.against('postgresql'), + @testing.skip_if(lambda: testing.against('postgresql'), "psycopg2 2.4 no longer accepts % in bind placeholders") def test_single_roundtrip(self): percent_table.insert().execute( @@ -1187,7 +1187,7 @@ class PercentSchemaNamesTest(fixtures.TestBase): ) self._assert_table() - @testing.skip_if(lambda: testing.against('postgresql'), + @testing.skip_if(lambda: testing.against('postgresql'), "psycopg2 2.4 no longer accepts % in bind placeholders") @testing.crashes('mysql+mysqldb', "MySQLdb handles executemany() " "inconsistently vs. execute()") @@ -1204,9 +1204,9 @@ class PercentSchemaNamesTest(fixtures.TestBase): def _assert_table(self): for table in ( - percent_table, - percent_table.alias(), - lightweight_percent_table, + percent_table, + percent_table.alias(), + lightweight_percent_table, lightweight_percent_table.alias()): eq_( list( @@ -1290,7 +1290,7 @@ class KeyTargetingTest(fixtures.TablesTest): content = Table('content', metadata, Column('t', String(30), key="type"), ) - bar = Table('bar', metadata, + bar = Table('bar', metadata, Column('ctype', String(30), key="content_type") ) @@ -1623,7 +1623,7 @@ class CompoundTest(fixtures.TestBase): eq_(found2, wanted) def test_union_all_lightweight(self): - """like test_union_all, but breaks the sub-union into + """like test_union_all, but breaks the sub-union into a subquery with an explicit column reference on the outside, more palatable to a wider variety of engines. diff --git a/test/sql/test_quote.py b/test/sql/test_quote.py index 952b147638..a714002b1a 100644 --- a/test/sql/test_quote.py +++ b/test/sql/test_quote.py @@ -94,7 +94,7 @@ class QuoteTest(fixtures.TestBase, AssertsCompiledSQL): self.assert_compile(t1.select().apply_labels(), '''SELECT "foo"."t1"."col1" AS "foo_t1_col1" FROM "foo"."t1"''') a = t1.select().alias('anon') b = select([1], a.c.col1==2, from_obj=a) - self.assert_compile(b, + self.assert_compile(b, '''SELECT 1 FROM (SELECT "foo"."t1"."col1" AS "col1" FROM '''\ '''"foo"."t1") AS anon WHERE anon."col1" = :col1_1''' ) @@ -104,15 +104,15 @@ class QuoteTest(fixtures.TestBase, AssertsCompiledSQL): Column('ColumnOne', Integer, quote=False), quote=False, schema="FooBar", quote_schema=False) self.assert_compile(t1.select(), "SELECT FooBar.TableOne.ColumnOne FROM FooBar.TableOne") - self.assert_compile(t1.select().apply_labels(), + self.assert_compile(t1.select().apply_labels(), "SELECT FooBar.TableOne.ColumnOne AS "\ - "FooBar_TableOne_ColumnOne FROM FooBar.TableOne" # TODO: is this what we really want here ? what if table/schema + "FooBar_TableOne_ColumnOne FROM FooBar.TableOne" # TODO: is this what we really want here ? what if table/schema # *are* quoted? 
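# [editor's note: an illustrative sketch, not part of this patch, of the
# quote=False behavior asserted above; table and column names are made up.
# Mixed-case identifiers normally force quoting in the default dialect, and
# quote=False suppresses it even then - which is why the TODO above asks
# whether unquoted labels are the right outcome.]
from sqlalchemy import MetaData, Table, Column, Integer, select

m = MetaData()
regular = Table('TableOne', m, Column('ColumnOne', Integer))
unquoted = Table('TableTwo', m, Column('ColumnTwo', Integer, quote=False),
                 quote=False)

print select([regular])    # SELECT "TableOne"."ColumnOne" FROM "TableOne"
print select([unquoted])   # SELECT TableTwo.ColumnTwo FROM TableTwo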
) a = t1.select().alias('anon') b = select([1], a.c.ColumnOne==2, from_obj=a) - self.assert_compile(b, + self.assert_compile(b, "SELECT 1 FROM (SELECT FooBar.TableOne.ColumnOne AS "\ "ColumnOne FROM FooBar.TableOne) AS anon WHERE anon.ColumnOne = :ColumnOne_1" ) @@ -142,8 +142,8 @@ class QuoteTest(fixtures.TestBase, AssertsCompiledSQL): if labels aren't quoted, a query in postgresql in particular will fail since it produces: SELECT LaLa.lowercase, LaLa."UPPERCASE", LaLa."MixedCase", LaLa."ASC" - FROM (SELECT DISTINCT "WorstCase1".lowercase AS lowercase, - "WorstCase1"."UPPERCASE" AS UPPERCASE, + FROM (SELECT DISTINCT "WorstCase1".lowercase AS lowercase, + "WorstCase1"."UPPERCASE" AS UPPERCASE, "WorstCase1"."MixedCase" AS MixedCase, "WorstCase1"."ASC" AS ASC \nFROM "WorstCase1") AS LaLa where the "UPPERCASE" column of "LaLa" doesn't exist. @@ -179,7 +179,7 @@ class QuoteTest(fixtures.TestBase, AssertsCompiledSQL): Column("order", Integer)) x = select([table.c.col1, table.c['from'], table.c.louisville, table.c.order]) - self.assert_compile(x, + self.assert_compile(x, '''SELECT "ImATable".col1, "ImATable"."from", "ImATable".louisville, "ImATable"."order" FROM "ImATable"''') diff --git a/test/sql/test_returning.py b/test/sql/test_returning.py index 3ef6dc9d93..4be2d74f79 100644 --- a/test/sql/test_returning.py +++ b/test/sql/test_returning.py @@ -215,9 +215,9 @@ class ImplicitReturningFlag(fixtures.TestBase): # starts as False. This is because all of Firebird, # Postgresql, Oracle, SQL Server started supporting RETURNING # as of a certain version, and the flag is not set until - # version detection occurs. If some DB comes along that has + # version detection occurs. If some DB comes along that has # RETURNING in all cases, this test can be adjusted. - assert e.dialect.implicit_returning is False + assert e.dialect.implicit_returning is False # version detection on connect sets it c = e.connect() diff --git a/test/sql/test_rowcount.py b/test/sql/test_rowcount.py index 5d95baa81c..c14fa22a13 100644 --- a/test/sql/test_rowcount.py +++ b/test/sql/test_rowcount.py @@ -13,8 +13,8 @@ class FoundRowsTest(fixtures.TestBase, AssertsExecutionResults): metadata = MetaData(testing.db) employees_table = Table('employees', metadata, - Column('employee_id', Integer, - Sequence('employee_id_seq', optional=True), + Column('employee_id', Integer, + Sequence('employee_id_seq', optional=True), primary_key=True), Column('name', String(50)), Column('department', String(1)), diff --git a/test/sql/test_selectable.py b/test/sql/test_selectable.py index 7cafdf9ea9..b13edddaec 100644 --- a/test/sql/test_selectable.py +++ b/test/sql/test_selectable.py @@ -44,8 +44,8 @@ class SelectableTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiled s = select([table1.c.col1.label('c2'), table1.c.col1, table1.c.col1.label('c1')]) - # this tests the same thing as - # test_direct_correspondence_on_labels below - + # this tests the same thing as + # test_direct_correspondence_on_labels below - # that the presence of label() affects the 'distance' assert s.corresponding_column(table1.c.col1) is s.c.col1 @@ -100,7 +100,7 @@ class SelectableTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiled s = select([keyed]) eq_(s.c.colx.key, 'colx') - # this would change to 'colx' + # this would change to 'colx' # with #2397 eq_(s.c.colx.name, 'x') @@ -139,11 +139,11 @@ class SelectableTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiled is j2.c.table1_col1 def test_clone_append_column(self): - sel =
select([literal_column('1').label('a')]) + sel = select([literal_column('1').label('a')]) cloned = visitors.ReplacingCloningVisitor().traverse(sel) - cloned.append_column(literal_column('2').label('b')) - cloned.append_column(func.foo()) - eq_(cloned.c.keys(), ['a', 'b', 'foo()']) + cloned.append_column(literal_column('2').label('b')) + cloned.append_column(func.foo()) + eq_(cloned.c.keys(), ['a', 'b', 'foo()']) def test_append_column_after_replace_selectable(self): basesel = select([literal_column('1').label('a')]) @@ -164,7 +164,7 @@ class SelectableTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiled "JOIN (SELECT 1 AS a, 2 AS b) AS joinfrom " "ON basefrom.a = joinfrom.a" ) - replaced.append_column(joinfrom.c.b) + replaced.append_column(joinfrom.c.b) self.assert_compile( replaced, "SELECT basefrom.a, joinfrom.b FROM (SELECT 1 AS a) AS basefrom " @@ -244,7 +244,7 @@ class SelectableTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiled assert u.corresponding_column(s2.c.table2_col2) is u.c.col2 def test_union_precedence(self): - # conflicting column correspondence should be resolved based on + # conflicting column correspondence should be resolved based on # the order of the select()s in the union s1 = select([table1.c.col1, table1.c.col2]) @@ -451,9 +451,9 @@ class SelectableTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiled eq_(c1._from_objects, [t]) eq_(c2._from_objects, [t]) - self.assert_compile(select([c1]), + self.assert_compile(select([c1]), "SELECT t.c1 FROM t") - self.assert_compile(select([c2]), + self.assert_compile(select([c2]), "SELECT t.c2 FROM t") def test_from_list_deferred_whereclause(self): @@ -467,9 +467,9 @@ class SelectableTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiled eq_(c1._from_objects, [t]) eq_(c2._from_objects, [t]) - self.assert_compile(select([c1]), + self.assert_compile(select([c1]), "SELECT t.c1 FROM t") - self.assert_compile(select([c2]), + self.assert_compile(select([c2]), "SELECT t.c2 FROM t") def test_from_list_deferred_fromlist(self): @@ -483,7 +483,7 @@ class SelectableTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiled eq_(c1._from_objects, [t2]) - self.assert_compile(select([c1]), + self.assert_compile(select([c1]), "SELECT t2.c1 FROM t2") def test_from_list_deferred_cloning(self): @@ -505,20 +505,20 @@ class SelectableTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiled table1 = table('t1', column('a')) table2 = table('t2', column('b')) s1 = select([table1.c.a, table2.c.b]) - self.assert_compile(s1, + self.assert_compile(s1, "SELECT t1.a, t2.b FROM t1, t2" ) s2 = s1.with_only_columns([table2.c.b]) - self.assert_compile(s2, + self.assert_compile(s2, "SELECT t2.b FROM t2" ) s3 = sql_util.ClauseAdapter(table1).traverse(s1) - self.assert_compile(s3, + self.assert_compile(s3, "SELECT t1.a, t2.b FROM t1, t2" ) s4 = s3.with_only_columns([table2.c.b]) - self.assert_compile(s4, + self.assert_compile(s4, "SELECT t2.b FROM t2" ) @@ -675,13 +675,13 @@ class JoinConditionTest(fixtures.TestBase, AssertsExecutionResults): def test_join_cond_no_such_unrelated_table(self): m = MetaData() - # bounding the "good" column with two "bad" ones is so to + # bounding the "good" column with two "bad" ones is so to # try to get coverage to get the "continue" statements # in the loop... 
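# [editor's note: an illustrative sketch, not part of this patch, distilling
# what the test below drives join_condition() to do: scan ForeignKey objects
# for a usable link between the two tables, skipping foreign keys that
# resolve to tables not involved in the join (the 't22.id' decoys).]
from sqlalchemy import MetaData, Table, Column, Integer, ForeignKey
from sqlalchemy.sql import util as sql_util

m = MetaData()
t2 = Table('t2', m, Column('id', Integer))
t1 = Table('t1', m,
           Column('y', Integer, ForeignKey('t22.id')),   # no such table - skipped
           Column('x', Integer, ForeignKey('t2.id')))    # usable link

print sql_util.join_condition(t1, t2)   # t1.x = t2.id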
- t1 = Table('t1', m, + t1 = Table('t1', m, Column('y', Integer, ForeignKey('t22.id')), - Column('x', Integer, ForeignKey('t2.id')), - Column('q', Integer, ForeignKey('t22.id')), + Column('x', Integer, ForeignKey('t2.id')), + Column('q', Integer, ForeignKey('t22.id')), ) t2 = Table('t2', m, Column('id', Integer)) assert sql_util.join_condition(t1, t2).compare(t1.c.x==t2.c.id) @@ -689,7 +689,7 @@ class JoinConditionTest(fixtures.TestBase, AssertsExecutionResults): def test_join_cond_no_such_unrelated_column(self): m = MetaData() - t1 = Table('t1', m, Column('x', Integer, ForeignKey('t2.id')), + t1 = Table('t1', m, Column('x', Integer, ForeignKey('t2.id')), Column('y', Integer, ForeignKey('t3.q'))) t2 = Table('t2', m, Column('id', Integer)) t3 = Table('t3', m, Column('id', Integer)) @@ -790,11 +790,11 @@ class PrimaryKeyTest(fixtures.TestBase, AssertsExecutionResults): def test_init_doesnt_blowitaway(self): meta = MetaData() - a = Table('a', meta, - Column('id', Integer, primary_key=True), + a = Table('a', meta, + Column('id', Integer, primary_key=True), Column('x', Integer)) - b = Table('b', meta, - Column('id', Integer, ForeignKey('a.id'), primary_key=True), + b = Table('b', meta, + Column('id', Integer, ForeignKey('a.id'), primary_key=True), Column('x', Integer)) j = a.join(b) @@ -805,11 +805,11 @@ class PrimaryKeyTest(fixtures.TestBase, AssertsExecutionResults): def test_non_column_clause(self): meta = MetaData() - a = Table('a', meta, - Column('id', Integer, primary_key=True), + a = Table('a', meta, + Column('id', Integer, primary_key=True), Column('x', Integer)) - b = Table('b', meta, - Column('id', Integer, ForeignKey('a.id'), primary_key=True), + b = Table('b', meta, + Column('id', Integer, ForeignKey('a.id'), primary_key=True), Column('x', Integer, primary_key=True)) j = a.join(b, and_(a.c.id==b.c.id, b.c.x==5)) @@ -824,7 +824,7 @@ class PrimaryKeyTest(fixtures.TestBase, AssertsExecutionResults): Column('id', Integer, primary_key= True), ) - engineer = Table('Engineer', metadata, + engineer = Table('Engineer', metadata, Column('id', Integer, ForeignKey('Employee.id'), primary_key=True)) @@ -921,8 +921,8 @@ class ReduceTest(fixtures.TestBase, AssertsExecutionResults): 'BaseItem': base_item_table.select( base_item_table.c.child_name - == 'BaseItem'), - 'Item': base_item_table.join(item_table)}, + == 'BaseItem'), + 'Item': base_item_table.join(item_table)}, None, 'item_join') eq_(util.column_set(sql_util.reduce_columns([item_join.c.id, item_join.c.dummy, item_join.c.child_name])), @@ -961,7 +961,7 @@ class ReduceTest(fixtures.TestBase, AssertsExecutionResults): select([ page_table.c.id, - magazine_page_table.c.page_id, + magazine_page_table.c.page_id, cast(null(), Integer).label('magazine_page_id') ]). select_from(page_table.join(magazine_page_table)) @@ -979,7 +979,7 @@ class ReduceTest(fixtures.TestBase, AssertsExecutionResults): pjoin = union(select([ page_table.c.id, - magazine_page_table.c.page_id, + magazine_page_table.c.page_id, cast(null(), Integer).label('magazine_page_id') ]). 
select_from(page_table.join(magazine_page_table)), @@ -1089,7 +1089,7 @@ class AnnotationsTest(fixtures.TestBase): assert t1.c is t2.c assert t1.c.col1 is t2.c.col1 - inner = select([s1]) + inner = select([s1]) assert inner.corresponding_column(t2.c.col1, require_embedded=False) \ @@ -1163,7 +1163,7 @@ class AnnotationsTest(fixtures.TestBase): b4._annotations, b4.left._annotations: assert elem == {} - assert b2.left is not bin.left + assert b2.left is not bin.left assert b3.left is not b2.left is not bin.left assert b4.left is bin.left # since column is immutable assert b4.right is not bin.right is not b2.right is not b3.right diff --git a/test/sql/test_types.py b/test/sql/test_types.py index 659c7f2ae3..91bf17175f 100644 --- a/test/sql/test_types.py +++ b/test/sql/test_types.py @@ -26,7 +26,7 @@ class AdaptTest(fixtures.TestBase): ] def _all_dialects(self): - return [d.base.dialect() for d in + return [d.base.dialect() for d in self._all_dialect_modules()] def _types_for_mod(self, mod): @@ -327,11 +327,11 @@ class UserDefinedTest(fixtures.TablesTest, AssertsCompiledSQL): self.assert_compile(t, "VARCHAR(50)", dialect=sl) self.assert_compile(t, "FLOAT", dialect=pg) eq_( - t.dialect_impl(dialect=sl).impl.__class__, + t.dialect_impl(dialect=sl).impl.__class__, String().dialect_impl(dialect=sl).__class__ ) eq_( - t.dialect_impl(dialect=pg).impl.__class__, + t.dialect_impl(dialect=pg).impl.__class__, Float().dialect_impl(pg).__class__ ) @@ -695,8 +695,8 @@ class UnicodeTest(fixtures.TestBase, AssertsExecutionResults): ('oracle','cx_oracle'), )), \ "name: %s driver %s returns_unicode_strings=%s" % \ - (testing.db.name, - testing.db.driver, + (testing.db.name, + testing.db.driver, testing.db.dialect.returns_unicode_strings) def test_round_trip(self): @@ -787,7 +787,7 @@ class UnicodeTest(fixtures.TestBase, AssertsExecutionResults): eq_(uni(unicodedata), unicodedata.encode('utf-8')) - # using convert unicode at engine level - + # using convert unicode at engine level - # this should not be raising a warning unicode_engine = engines.utf8_engine(options={'convert_unicode':True,}) unicode_engine.dialect.supports_unicode_binds = False @@ -836,7 +836,7 @@ class UnicodeTest(fixtures.TestBase, AssertsExecutionResults): engine = engines.testing_engine(options={'encoding':'ascii'}) m.create_all(engine) try: - # insert a row that should be ascii and + # insert a row that should be ascii and # coerce from unicode with ignore on the bind side engine.execute( table.insert(), @@ -871,7 +871,7 @@ class UnicodeTest(fixtures.TestBase, AssertsExecutionResults): # one row will be ascii with ignores, # the other will be either ascii with the ignores - # or just the straight unicode+ utf8 value if the + # or just the straight unicode+ utf8 value if the # dialect just returns unicode result = engine.execute(table.select().order_by(table.c.sort)) ascii_row = result.fetchone() @@ -929,10 +929,10 @@ class EnumTest(fixtures.TestBase): def teardown_class(cls): metadata.drop_all() - @testing.fails_on('postgresql+zxjdbc', + @testing.fails_on('postgresql+zxjdbc', 'zxjdbc fails on ENUM: column "XXX" is of type XXX ' 'but expression is of type character varying') - @testing.fails_on('postgresql+pg8000', + @testing.fails_on('postgresql+pg8000', 'zxjdbc fails on ENUM: column "XXX" is of type XXX ' 'but expression is of type text') def test_round_trip(self): @@ -943,7 +943,7 @@ class EnumTest(fixtures.TestBase): ]) eq_( - enum_table.select().order_by(enum_table.c.id).execute().fetchall(), + 
enum_table.select().order_by(enum_table.c.id).execute().fetchall(), [ (1, 'two'), (2, 'two'), @@ -960,7 +960,7 @@ class EnumTest(fixtures.TestBase): eq_( non_native_enum_table.select(). - order_by(non_native_enum_table.c.id).execute().fetchall(), + order_by(non_native_enum_table.c.id).execute().fetchall(), [ (1, 'two'), (2, 'two'), @@ -978,19 +978,19 @@ class EnumTest(fixtures.TestBase): eq_(e1.adapt(ENUM).name, 'foo') eq_(e1.adapt(ENUM).schema, 'bar') - @testing.crashes('mysql', + @testing.crashes('mysql', 'Inconsistent behavior across various OS/drivers' ) def test_constraint(self): - assert_raises(exc.DBAPIError, + assert_raises(exc.DBAPIError, enum_table.insert().execute, {'id':4, 'someenum':'four'} ) - @testing.fails_on('mysql', + @testing.fails_on('mysql', "the CHECK constraint doesn't raise an exception for unknown reason") def test_non_native_constraint(self): - assert_raises(exc.DBAPIError, + assert_raises(exc.DBAPIError, non_native_enum_table.insert().execute, {'id':4, 'someenum':'four'} ) @@ -1004,7 +1004,7 @@ class EnumTest(fixtures.TestBase): Column('x', Enum("x", "y", name="pge")) ) t.create(e, checkfirst=False) - # basically looking for the start of + # basically looking for the start of # the constraint, or the ENUM def itself, # depending on backend. assert "('x'," in e.print_sql() @@ -1058,32 +1058,32 @@ class BinaryTest(fixtures.TestBase, AssertsExecutionResults): stream1 =self.load_stream('binary_data_one.dat') stream2 =self.load_stream('binary_data_two.dat') binary_table.insert().execute( - primary_id=1, - misc='binary_data_one.dat', - data=stream1, - data_slice=stream1[0:100], - pickled=testobj1, + primary_id=1, + misc='binary_data_one.dat', + data=stream1, + data_slice=stream1[0:100], + pickled=testobj1, mypickle=testobj3) binary_table.insert().execute( - primary_id=2, - misc='binary_data_two.dat', - data=stream2, - data_slice=stream2[0:99], + primary_id=2, + misc='binary_data_two.dat', + data=stream2, + data_slice=stream2[0:99], pickled=testobj2) binary_table.insert().execute( - primary_id=3, - misc='binary_data_two.dat', - data=None, - data_slice=stream2[0:99], + primary_id=3, + misc='binary_data_two.dat', + data=None, + data_slice=stream2[0:99], pickled=None) for stmt in ( binary_table.select(order_by=binary_table.c.primary_id), text( - "select * from binary_table order by binary_table.primary_id", - typemap={'pickled':PickleType, - 'mypickle':MyPickleType, - 'data':LargeBinary, 'data_slice':LargeBinary}, + "select * from binary_table order by binary_table.primary_id", + typemap={'pickled':PickleType, + 'mypickle':MyPickleType, + 'data':LargeBinary, 'data_slice':LargeBinary}, bind=testing.db) ): l = stmt.execute().fetchall() @@ -1154,9 +1154,9 @@ class ExpressionTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiled meta.create_all() test_table.insert().execute({ - 'id':1, - 'data':'somedata', - 'atimestamp':datetime.date(2007, 10, 15), + 'id':1, + 'data':'somedata', + 'atimestamp':datetime.date(2007, 10, 15), 'avalue':25, 'bvalue':'foo'}) @classmethod @@ -1180,7 +1180,7 @@ class ExpressionTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiled eq_( testing.db.execute( select([test_table.c.id, test_table.c.data, test_table.c.atimestamp]) - .where(expr), + .where(expr), {"thedate":datetime.date(2007, 10, 15)}).fetchall(), [(1, 'somedata', datetime.date(2007, 10, 15))] ) @@ -1199,9 +1199,9 @@ class ExpressionTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiled eq_(expr.right.type._type_affinity, String) eq_( - 
testing.db.execute(test_table.select().where(expr), + testing.db.execute(test_table.select().where(expr), {"somevalue":"foo"}).fetchall(), - [(1, 'somedata', + [(1, 'somedata', datetime.date(2007, 10, 15), 25, 'BIND_INfooBIND_OUT')] ) @@ -1358,7 +1358,7 @@ class ExpressionTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiled def test_null_comparison(self): eq_( - str(column('a', types.NullType()) + column('b', types.NullType())), + str(column('a', types.NullType()) + column('b', types.NullType())), "a + b" ) @@ -1577,7 +1577,7 @@ class NumericTest(fixtures.TestBase): self._do_test( Numeric(precision=8, scale=4), [15.7563, decimal.Decimal("15.7563"), None], - [decimal.Decimal("15.7563"), None], + [decimal.Decimal("15.7563"), None], ) def test_numeric_as_float(self): @@ -1597,7 +1597,7 @@ class NumericTest(fixtures.TestBase): self._do_test( Float(precision=8, asdecimal=True), [15.7563, decimal.Decimal("15.7563"), None], - [decimal.Decimal("15.7563"), None], + [decimal.Decimal("15.7563"), None], filter_ = lambda n:n is not None and round(n, 5) or None ) @@ -1613,8 +1613,8 @@ class NumericTest(fixtures.TestBase): def test_precision_decimal(self): numbers = set([ decimal.Decimal("54.234246451650"), - decimal.Decimal("0.004354"), - decimal.Decimal("900.0"), + decimal.Decimal("0.004354"), + decimal.Decimal("900.0"), ]) self._do_test( @@ -1627,7 +1627,7 @@ class NumericTest(fixtures.TestBase): def test_enotation_decimal(self): """test exceedingly small decimals. - Decimal reports values with E notation when the exponent + Decimal reports values with E notation when the exponent is greater than 6. """ @@ -1652,7 +1652,7 @@ class NumericTest(fixtures.TestBase): numbers ) - @testing.fails_on("sybase+pyodbc", + @testing.fails_on("sybase+pyodbc", "Don't know how do get these values through FreeTDS + Sybase") @testing.fails_on("firebird", "Precision must be from 1 to 18") def test_enotation_decimal_large(self): @@ -1692,7 +1692,7 @@ class NumericTest(fixtures.TestBase): "this may be a bug due to the difficulty in handling " "oracle precision numerics" ) - @testing.fails_on('postgresql+pg8000', + @testing.fails_on('postgresql+pg8000', "pg-8000 does native decimal but truncates the decimals.") def test_numeric_no_decimal(self): numbers = set([ @@ -1794,8 +1794,8 @@ class IntervalTest(fixtures.TestBase, AssertsExecutionResults): small_delta = datetime.timedelta(days=15, seconds=5874) delta = datetime.timedelta(414) interval_table.insert().execute( - native_interval=small_delta, - native_interval_args=delta, + native_interval=small_delta, + native_interval_args=delta, non_native_interval=delta ) row = interval_table.select().execute().first() @@ -1851,22 +1851,22 @@ class BooleanTest(fixtures.TestBase, AssertsExecutionResults): res3 = select([bool_table.c.id, bool_table.c.value]).\ order_by(bool_table.c.id).\ execute().fetchall() - eq_(res3, [(1, True), (2, False), - (3, True), (4, True), + eq_(res3, [(1, True), (2, False), + (3, True), (4, True), (5, True), (6, None)]) # ensure we're getting True/False, not just ints assert res3[0][1] is True assert res3[1][1] is False - @testing.fails_on('mysql', + @testing.fails_on('mysql', "The CHECK clause is parsed but ignored by all storage engines.") - @testing.fails_on('mssql', + @testing.fails_on('mssql', "FIXME: MS-SQL 2005 doesn't honor CHECK ?!?") @testing.skip_if(lambda: testing.db.dialect.supports_native_boolean) def test_constraint(self): assert_raises((exc.IntegrityError, exc.ProgrammingError), - testing.db.execute, + testing.db.execute, "insert 
into booltest (id, value) values(1, 5)") @testing.skip_if(lambda: testing.db.dialect.supports_native_boolean) @@ -1885,7 +1885,7 @@ class PickleTest(fixtures.TestBase): ): assert p1.compare_values(p1.copy_value(obj), obj) - assert_raises(NotImplementedError, + assert_raises(NotImplementedError, p1.compare_values, pickleable.BrokenComparable('foo'), pickleable.BrokenComparable('foo')) diff --git a/test/sql/test_unicode.py b/test/sql/test_unicode.py index 19f672f77e..e3fa0a4b33 100644 --- a/test/sql/test_unicode.py +++ b/test/sql/test_unicode.py @@ -130,7 +130,7 @@ class EscapesDefaultsTest(fixtures.TestBase): select([column(u'special_col')]).select_from(t1).execute().close() assert isinstance(engine.dialect.identifier_preparer.format_sequence(Sequence('special_col')), unicode) - # now execute, run the sequence. it should run in u"Special_col.nextid" or similar as + # now execute, run the sequence. it should run in u"Special_col.nextid" or similar as # a unicode object; cx_oracle asserts that this is None or a String (postgresql lets it pass thru). # ensure that executioncontext._exec_default() is encoding. t1.insert().execute(data='foo') diff --git a/test/sql/test_update.py b/test/sql/test_update.py index f900a164cf..79079e5127 100644 --- a/test/sql/test_update.py +++ b/test/sql/test_update.py @@ -11,13 +11,13 @@ class _UpdateFromTestBase(object): @classmethod def define_tables(cls, metadata): Table('users', metadata, - Column('id', Integer, primary_key=True, + Column('id', Integer, primary_key=True, test_needs_autoincrement=True), Column('name', String(30), nullable=False), ) Table('addresses', metadata, - Column('id', Integer, primary_key=True, + Column('id', Integer, primary_key=True, test_needs_autoincrement=True), Column('user_id', None, ForeignKey('users.id')), Column('name', String(30), nullable=False), @@ -25,7 +25,7 @@ class _UpdateFromTestBase(object): ) Table("dingalings", metadata, - Column('id', Integer, primary_key=True, + Column('id', Integer, primary_key=True, test_needs_autoincrement=True), Column('address_id', None, ForeignKey('addresses.id')), Column('data', String(30)), @@ -93,7 +93,7 @@ class UpdateFromCompileTest(_UpdateFromTestBase, fixtures.TablesTest, AssertsCom "AND addresses.email_address = :email_address_1 " "AND addresses.id = dingalings.address_id AND " "dingalings.id = :id_1", - checkparams={u'email_address_1': 'e1', u'id_1': 2, + checkparams={u'email_address_1': 'e1', u'id_1': 2, 'name': 'newname'} ) @@ -113,8 +113,8 @@ class UpdateFromCompileTest(_UpdateFromTestBase, fixtures.TablesTest, AssertsCom def test_render_subquery(self): users, addresses = self.tables.users, self.tables.addresses - subq = select([addresses.c.id, - addresses.c.user_id, + subq = select([addresses.c.id, + addresses.c.user_id, addresses.c.email_address]).\ where(addresses.c.id==7).alias() self.assert_compile( @@ -128,7 +128,7 @@ class UpdateFromCompileTest(_UpdateFromTestBase, fixtures.TablesTest, AssertsCom "email_address FROM addresses WHERE addresses.id = " ":id_1) AS anon_1 WHERE users.id = anon_1.user_id " "AND anon_1.email_address = :email_address_1", - checkparams={u'email_address_1': 'e1', + checkparams={u'email_address_1': 'e1', u'id_1': 7, 'name': 'newname'} ) @@ -214,7 +214,7 @@ class UpdateFromRoundTripTest(_UpdateFromTestBase, fixtures.TablesTest): testing.db.execute( addresses.update().\ values({ - addresses.c.email_address:users.c.name, + addresses.c.email_address:users.c.name, users.c.name:'ed2' }).\ where(users.c.id==addresses.c.user_id).\ @@ -246,14 +246,14 @@ class 
UpdateFromMultiTableUpdateDefaultsTest(_UpdateFromTestBase, fixtures.Table @classmethod def define_tables(cls, metadata): Table('users', metadata, - Column('id', Integer, primary_key=True, + Column('id', Integer, primary_key=True, test_needs_autoincrement=True), Column('name', String(30), nullable=False), Column('some_update', String(30), onupdate="im the update") ) Table('addresses', metadata, - Column('id', Integer, primary_key=True, + Column('id', Integer, primary_key=True, test_needs_autoincrement=True), Column('user_id', None, ForeignKey('users.id')), Column('email_address', String(50), nullable=False), @@ -282,7 +282,7 @@ class UpdateFromMultiTableUpdateDefaultsTest(_UpdateFromTestBase, fixtures.Table ret = testing.db.execute( addresses.update().\ values({ - addresses.c.email_address:users.c.name, + addresses.c.email_address:users.c.name, users.c.name:'ed2' }).\ where(users.c.id==addresses.c.user_id).\ @@ -316,7 +316,7 @@ class UpdateFromMultiTableUpdateDefaultsTest(_UpdateFromTestBase, fixtures.Table ret = testing.db.execute( addresses.update().\ values({ - 'email_address':users.c.name, + 'email_address':users.c.name, }).\ where(users.c.id==addresses.c.user_id).\ where(users.c.name=='ed') @@ -333,7 +333,7 @@ class UpdateFromMultiTableUpdateDefaultsTest(_UpdateFromTestBase, fixtures.Table (4, 9, "fred@fred.com") ] ) - # users table not actually updated, + # users table not actually updated, # so no onupdate eq_( testing.db.execute(