From: Diana Clarke
Date: Mon, 19 Nov 2012 19:16:39 +0000 (-0500)
Subject: just a pep8 pass of lib/sqlalchemy/ext
X-Git-Tag: rel_0_8_0b2~33^2~14
X-Git-Url: http://git.ipfire.org/cgi-bin/gitweb.cgi?a=commitdiff_plain;h=03687b36b78be86c0f2a01eeb658c5b52fbd6c76;p=thirdparty%2Fsqlalchemy%2Fsqlalchemy.git

just a pep8 pass of lib/sqlalchemy/ext
---
diff --git a/lib/sqlalchemy/ext/__init__.py b/lib/sqlalchemy/ext/__init__.py
index 7558ac268b..bb99e60fc1 100644
--- a/lib/sqlalchemy/ext/__init__.py
+++ b/lib/sqlalchemy/ext/__init__.py
@@ -3,4 +3,3 @@
 #
 # This module is part of SQLAlchemy and is released under
 # the MIT License: http://www.opensource.org/licenses/mit-license.php
-
diff --git a/lib/sqlalchemy/ext/associationproxy.py b/lib/sqlalchemy/ext/associationproxy.py
index 27c76eb6bf..f6c0764e4b 100644
--- a/lib/sqlalchemy/ext/associationproxy.py
+++ b/lib/sqlalchemy/ext/associationproxy.py
@@ -27,24 +27,25 @@ def association_proxy(target_collection, attr, **kw):

    The returned value is an instance of :class:`.AssociationProxy`.

-    Implements a Python property representing a relationship as a collection of
-    simpler values, or a scalar value. The proxied property will mimic the collection type of
-    the target (list, dict or set), or, in the case of a one to one relationship,
-    a simple scalar value.
+    Implements a Python property representing a relationship as a collection
+    of simpler values, or a scalar value. The proxied property will mimic
+    the collection type of the target (list, dict or set), or, in the case of
+    a one to one relationship, a simple scalar value.

    :param target_collection: Name of the attribute we'll proxy to.
      This attribute is typically mapped by
      :func:`~sqlalchemy.orm.relationship` to link to a target collection, but
      can also be a many-to-one or non-scalar relationship.

-    :param attr: Attribute on the associated instance or instances we'll proxy for.
+    :param attr: Attribute on the associated instance or instances we'll
+      proxy for.

      For example, given a target collection of [obj1, obj2], a list created
      by this proxy property would look like [getattr(obj1, *attr*),
      getattr(obj2, *attr*)]

-      If the relationship is one-to-one or otherwise uselist=False, then simply:
-      getattr(obj, *attr*)
+      If the relationship is one-to-one or otherwise uselist=False, then
+      simply: getattr(obj, *attr*)

    :param creator: optional.

@@ -89,34 +90,36 @@ class AssociationProxy(object):

        :param target_collection: Name of the collection we'll proxy to,
          usually created with :func:`.relationship`.

-        :param attr: Attribute on the collected instances we'll proxy for. For example,
-          given a target collection of [obj1, obj2], a list created by this
-          proxy property would look like [getattr(obj1, attr), getattr(obj2,
-          attr)]
+        :param attr: Attribute on the collected instances we'll proxy
+          for. For example, given a target collection of [obj1, obj2], a
+          list created by this proxy property would look like
+          [getattr(obj1, attr), getattr(obj2, attr)]

-        :param creator: Optional. When new items are added to this proxied collection, new
-          instances of the class collected by the target collection will be
-          created. For list and set collections, the target class constructor
-          will be called with the 'value' for the new instance. For dict
-          types, two arguments are passed: key and value.
+        :param creator: Optional. When new items are added to this proxied
+          collection, new instances of the class collected by the target
+          collection will be created.
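For illustration (a hedged sketch using hypothetical ``User`` and ``Keyword`` declarative classes, not drawn from this patch), a one-argument ``creator`` might be wired up as follows::

    from sqlalchemy import Column, ForeignKey, Integer, String
    from sqlalchemy.ext.associationproxy import association_proxy
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import relationship

    Base = declarative_base()

    class User(Base):
        __tablename__ = 'user'
        id = Column(Integer, primary_key=True)

        # the underlying collection of Keyword objects
        kw = relationship("Keyword")

        # list-like view of the 'keyword' string on each Keyword object;
        # 'creator' turns each appended string into a Keyword instance
        keywords = association_proxy('kw', 'keyword',
                                     creator=lambda k: Keyword(keyword=k))

    class Keyword(Base):
        __tablename__ = 'keyword'
        id = Column(Integer, primary_key=True)
        user_id = Column(Integer, ForeignKey('user.id'))
        keyword = Column(String(50))

    user = User()
    user.keywords.append('cheese inspector')   # creator builds the Keyword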
For list and set collections, the
+          target class constructor will be called with the 'value' for the
+          new instance. For dict types, two arguments are passed:
+          key and value.

          If you want to construct instances differently, supply a 'creator'
          function that takes arguments as above and returns instances.

-        :param getset_factory: Optional. Proxied attribute access is automatically handled by
-          routines that get and set values based on the `attr` argument for
-          this proxy.
+        :param getset_factory: Optional. Proxied attribute access is
+          automatically handled by routines that get and set values based on
+          the `attr` argument for this proxy.

          If you would like to customize this behavior, you may supply a
          `getset_factory` callable that produces a tuple of `getter` and
          `setter` functions. The factory is called with two arguments, the
          abstract type of the underlying collection and this proxy instance.

-        :param proxy_factory: Optional. The type of collection to emulate is determined by
-          sniffing the target collection. If your collection type can't be
-          determined by duck typing or you'd like to use a different
-          collection implementation, you may supply a factory function to
-          produce those collections. Only applicable to non-scalar relationships.
+        :param proxy_factory: Optional. The type of collection to emulate is
+          determined by sniffing the target collection. If your collection
+          type can't be determined by duck typing or you'd like to use a
+          different collection implementation, you may supply a factory
+          function to produce those collections. Only applicable to
+          non-scalar relationships.

        :param proxy_bulk_set: Optional, use with proxy_factory. See
          the _set() method for details.

@@ -279,7 +282,8 @@ class AssociationProxy(object):
            self.collection_class = util.duck_type_collection(lazy_collection())

        if self.proxy_factory:
-            return self.proxy_factory(lazy_collection, creator, self.value_attr, self)
+            return self.proxy_factory(
+                lazy_collection, creator, self.value_attr, self)

        if self.getset_factory:
            getter, setter = self.getset_factory(self.collection_class, self)
@@ -287,11 +291,14 @@ class AssociationProxy(object):
            getter, setter = self._default_getset(self.collection_class)

        if self.collection_class is list:
-            return _AssociationList(lazy_collection, creator, getter, setter, self)
+            return _AssociationList(
+                lazy_collection, creator, getter, setter, self)
        elif self.collection_class is dict:
-            return _AssociationDict(lazy_collection, creator, getter, setter, self)
+            return _AssociationDict(
+                lazy_collection, creator, getter, setter, self)
        elif self.collection_class is set:
-            return _AssociationSet(lazy_collection, creator, getter, setter, self)
+            return _AssociationSet(
+                lazy_collection, creator, getter, setter, self)
        else:
            raise exc.ArgumentError(
                'could not guess which interface to use for '
@@ -340,9 +347,11 @@ class AssociationProxy(object):

        """
        if self._value_is_scalar:
-            value_expr = getattr(self.target_class, self.value_attr).has(criterion, **kwargs)
+            value_expr = getattr(
+                self.target_class, self.value_attr).has(criterion, **kwargs)
        else:
-            value_expr = getattr(self.target_class, self.value_attr).any(criterion, **kwargs)
+            value_expr = getattr(
+                self.target_class, self.value_attr).any(criterion, **kwargs)

        # check _value_is_scalar here, otherwise
        # we're scalar->scalar - call .any() so that
@@ -409,12 +418,13 @@ class _lazy_collection(object):
        return getattr(obj, self.target)

    def __getstate__(self):
-        return {'obj':self.ref(), 'target':self.target}
+        return {'obj': self.ref(),
+                'target': self.target}

    def __setstate__(self, state):
        self.ref = weakref.ref(state['obj'])
        self.target = state['target']

+
class _AssociationCollection(object):
    def __init__(self, lazy_collection, creator, getter, setter, parent):
        """Constructs an _AssociationCollection.

@@ -456,13 +466,14 @@ class _AssociationCollection(object):
        return bool(self.col)

    def __getstate__(self):
-        return {'parent':self.parent, 'lazy_collection':self.lazy_collection}
+        return {'parent': self.parent, 'lazy_collection': self.lazy_collection}

    def __setstate__(self, state):
        self.parent = state['parent']
        self.lazy_collection = state['lazy_collection']
        self.parent._inflate(self)

+
class _AssociationList(_AssociationCollection):
    """Generic, converting, list-to-list proxy."""

@@ -652,6 +663,8 @@ class _AssociationList(_AssociationCollection):

_NotProvided = util.symbol('_NotProvided')

+
+
class _AssociationDict(_AssociationCollection):
    """Generic, converting, dict-to-dict proxy."""

@@ -734,7 +747,7 @@ class _AssociationDict(_AssociationCollection):
        return self.col.iterkeys()

    def values(self):
-        return [ self._get(member) for member in self.col.values() ]
+        return [self._get(member) for member in self.col.values()]

    def itervalues(self):
        for key in self.col:
@@ -766,8 +779,8 @@ class _AssociationDict(_AssociationCollection):
                    len(a))
        elif len(a) == 1:
            seq_or_map = a[0]
-            # discern dict from sequence - took the advice
-            # from http://www.voidspace.org.uk/python/articles/duck_typing.shtml
+            # discern dict from sequence - took the advice from
+            # http://www.voidspace.org.uk/python/articles/duck_typing.shtml
            # still not perfect :(
            if hasattr(seq_or_map, 'keys'):
                for item in seq_or_map:
diff --git a/lib/sqlalchemy/ext/compiler.py b/lib/sqlalchemy/ext/compiler.py
index e3e6683648..93984d0d1e 100644
--- a/lib/sqlalchemy/ext/compiler.py
+++ b/lib/sqlalchemy/ext/compiler.py
@@ -9,8 +9,9 @@
Synopsis
========

-Usage involves the creation of one or more :class:`~sqlalchemy.sql.expression.ClauseElement`
-subclasses and one or more callables defining its compilation::
+Usage involves the creation of one or more
+:class:`~sqlalchemy.sql.expression.ClauseElement` subclasses and one or
+more callables defining its compilation::

    from sqlalchemy.ext.compiler import compiles
    from sqlalchemy.sql.expression import ColumnClause
@@ -58,7 +59,8 @@ invoked for the dialect in use::
    def visit_alter_column(element, compiler, **kw):
        return "ALTER TABLE %s ALTER COLUMN %s ..." % (element.table.name,
                                                       element.column.name)

-The second ``visit_alter_column`` will be invoked when any ``postgresql`` dialect is used.
+The second ``visit_alter_column`` will be invoked when any ``postgresql``
+dialect is used.

Compiling sub-elements of a custom expression construct
=======================================================
@@ -99,10 +101,11 @@ Produces::

Cross Compiling between SQL and DDL compilers
---------------------------------------------

-SQL and DDL constructs are each compiled using different base compilers - ``SQLCompiler``
-and ``DDLCompiler``. A common need is to access the compilation rules of SQL expressions
-from within a DDL expression. The ``DDLCompiler`` includes an accessor ``sql_compiler`` for this reason, such as below where we generate a CHECK
-constraint that embeds a SQL expression::
+SQL and DDL constructs are each compiled using different base compilers -
+``SQLCompiler`` and ``DDLCompiler``. A common need is to access the
+compilation rules of SQL expressions from within a DDL expression. The
+``DDLCompiler`` includes an accessor ``sql_compiler`` for this reason, such as
+below where we generate a CHECK constraint that embeds a SQL expression::

    @compiles(MyConstraint)
    def compile_my_constraint(constraint, ddlcompiler, **kw):
        return 'CONSTRAINT %s CHECK (%s)' % (
            constraint.name,
            ddlcompiler.sql_compiler.process(constraint.expression)
        )

Enabling Autocommit on a Construct
==================================

-Recall from the section :ref:`autocommit` that the :class:`.Engine`, when asked to execute
-a construct in the absence of a user-defined transaction, detects if the given
-construct represents DML or DDL, that is, a data modification or data definition statement, which
-requires (or may require, in the case of DDL) that the transaction generated by the DBAPI be committed
-(recall that DBAPI always has a transaction going on regardless of what SQLAlchemy does). Checking
-for this is actually accomplished
-by checking for the "autocommit" execution option on the construct. When building a construct like
-an INSERT derivation, a new DDL type, or perhaps a stored procedure that alters data, the "autocommit"
-option needs to be set in order for the statement to function with "connectionless" execution
+Recall from the section :ref:`autocommit` that the :class:`.Engine`, when
+asked to execute a construct in the absence of a user-defined transaction,
+detects if the given construct represents DML or DDL, that is, a data
+modification or data definition statement, which requires (or may require,
+in the case of DDL) that the transaction generated by the DBAPI be committed
+(recall that DBAPI always has a transaction going on regardless of what
+SQLAlchemy does). Checking for this is actually accomplished by checking for
+the "autocommit" execution option on the construct. When building a
+construct like an INSERT derivation, a new DDL type, or perhaps a stored
+procedure that alters data, the "autocommit" option needs to be set in order
+for the statement to function with "connectionless" execution
(as described in :ref:`dbengine_implicit`).

-Currently a quick way to do this is to subclass :class:`.Executable`, then add the "autocommit" flag
-to the ``_execution_options`` dictionary (note this is a "frozen" dictionary which supplies a generative
-``union()`` method)::
+Currently a quick way to do this is to subclass :class:`.Executable`, then
+add the "autocommit" flag to the ``_execution_options`` dictionary (note this
+is a "frozen" dictionary which supplies a generative ``union()`` method)::

    from sqlalchemy.sql.expression import Executable, ClauseElement

@@ -137,8 +142,9 @@ to the ``_execution_options`` dictionary (note this is a "frozen" dictionary whi
        _execution_options = \\
            Executable._execution_options.union({'autocommit': True})

-More succinctly, if the construct is truly similar to an INSERT, UPDATE, or DELETE, :class:`.UpdateBase`
-can be used, which already is a subclass of :class:`.Executable`, :class:`.ClauseElement` and includes the
+More succinctly, if the construct is truly similar to an INSERT, UPDATE, or
+DELETE, :class:`.UpdateBase` can be used, which already is a subclass
+of :class:`.Executable`, :class:`.ClauseElement` and includes the
``autocommit`` flag::

    from sqlalchemy.sql.expression import UpdateBase

@@ -150,7 +156,8 @@ can be used, which already is a subclass of :class:`.Executable`, :class:`.Claus

-DDL elements that subclass :class:`.DDLElement` already have the "autocommit" flag turned on.
+DDL elements that subclass :class:`.DDLElement` already have the
+"autocommit" flag turned on.
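To make the above concrete, here is a hedged sketch of a complete construct with the flag set; the ``TruncateTable`` construct and its rendering are illustrative assumptions, not part of this changeset::

    from sqlalchemy.ext.compiler import compiles
    from sqlalchemy.sql.expression import ClauseElement, Executable

    class TruncateTable(Executable, ClauseElement):
        """Illustrative TRUNCATE statement construct."""

        # mark the construct as autocommitting for connectionless execution
        _execution_options = \\
            Executable._execution_options.union({'autocommit': True})

        def __init__(self, table):
            self.table = table

    @compiles(TruncateTable)
    def visit_truncate_table(element, compiler, **kw):
        # asfrom=True renders the table's name rather than an empty string
        return "TRUNCATE TABLE %s" % compiler.process(element.table,
                                                      asfrom=True, **kw)

With the flag in place, ``engine.execute(TruncateTable(some_table))`` commits the implicit DBAPI transaction just as a plain ``table.delete()`` would.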
@@ -158,13 +165,16 @@ DDL elements that subclass :class:`.DDLElement` already have the "autocommit" fl
Changing the default compilation of existing constructs
=======================================================

-The compiler extension applies just as well to the existing constructs. When overriding
-the compilation of a built in SQL construct, the @compiles decorator is invoked upon
-the appropriate class (be sure to use the class, i.e. ``Insert`` or ``Select``, instead of the creation function such as ``insert()`` or ``select()``).
+The compiler extension applies just as well to the existing constructs. When
+overriding the compilation of a built in SQL construct, the @compiles
+decorator is invoked upon the appropriate class (be sure to use the class,
+i.e. ``Insert`` or ``Select``, instead of the creation function such
+as ``insert()`` or ``select()``).

-Within the new compilation function, to get at the "original" compilation routine,
-use the appropriate visit_XXX method - this is because compiler.process() will call upon the
-overriding routine and cause an endless loop. Such as, to add "prefix" to all insert statements::
+Within the new compilation function, to get at the "original" compilation
+routine, use the appropriate visit_XXX method - this is
+because compiler.process() will call upon the overriding routine and cause
+an endless loop. Such as, to add "prefix" to all insert statements::

    from sqlalchemy.sql.expression import Insert

@@ -172,14 +182,16 @@ overriding routine and cause an endless loop. Such as, to add "prefix" to all
    def prefix_inserts(insert, compiler, **kw):
        return compiler.visit_insert(insert.prefix_with("some prefix"), **kw)

-The above compiler will prefix all INSERT statements with "some prefix" when compiled.
+The above compiler will prefix all INSERT statements with "some prefix" when
+compiled.

.. _type_compilation_extension:

Changing Compilation of Types
=============================

-``compiler`` works for types, too, such as below where we implement the MS-SQL specific 'max' keyword for ``String``/``VARCHAR``::
+``compiler`` works for types, too, such as below where we implement the
+MS-SQL specific 'max' keyword for ``String``/``VARCHAR``::

    @compiles(String, 'mssql')
    @compiles(VARCHAR, 'mssql')
@@ -248,10 +260,10 @@ A synopsis is as follows:
  ``execute_at()`` method, allowing the construct to be invoked during CREATE
  TABLE and DROP TABLE sequences.

-* :class:`~sqlalchemy.sql.expression.Executable` - This is a mixin which should be
-  used with any expression class that represents a "standalone" SQL statement that
-  can be passed directly to an ``execute()`` method. It is already implicit
-  within ``DDLElement`` and ``FunctionElement``.
+* :class:`~sqlalchemy.sql.expression.Executable` - This is a mixin which
+  should be used with any expression class that represents a "standalone"
+  SQL statement that can be passed directly to an ``execute()`` method. It
+  is already implicit within ``DDLElement`` and ``FunctionElement``.

Further Examples
================
@@ -259,12 +271,13 @@ Further Examples
"UTC timestamp" function
-------------------------

-A function that works like "CURRENT_TIMESTAMP" except applies the appropriate conversions
-so that the time is in UTC time. Timestamps are best stored in relational databases
-as UTC, without time zones. UTC so that your database doesn't think time has gone
-backwards in the hour when daylight savings ends, without timezones because timezones
-are like character encodings - they're best applied only at the endpoints of an
-application (i.e. convert to UTC upon user input, re-apply desired timezone upon display).
+A function that works like "CURRENT_TIMESTAMP" except applies the
+appropriate conversions so that the time is in UTC time. Timestamps are best
+stored in relational databases as UTC, without time zones. UTC so that your
+database doesn't think time has gone backwards in the hour when daylight
+savings ends, without timezones because timezones are like character
+encodings - they're best applied only at the endpoints of an application
+(i.e. convert to UTC upon user input, re-apply desired timezone upon display).

For Postgresql and Microsoft SQL Server::

@@ -298,10 +311,10 @@ Example usage::

"GREATEST" function
-------------------

-The "GREATEST" function is given any number of arguments and returns the one that is
-of the highest value - it's equivalent to Python's ``max`` function. A SQL
-standard version versus a CASE based version which only accommodates two
-arguments::
+The "GREATEST" function is given any number of arguments and returns the one
+that is of the highest value - it's equivalent to Python's ``max``
+function. A SQL standard version versus a CASE based version which only
+accommodates two arguments::

    from sqlalchemy.sql import expression
    from sqlalchemy.ext.compiler import compiles
@@ -339,7 +352,8 @@ Example usage::

"false" expression
------------------

-Render a "false" constant expression, rendering as "0" on platforms that don't have a "false" constant::
+Render a "false" constant expression, rendering as "0" on platforms that
+don't have a "false" constant::

    from sqlalchemy.sql import expression
    from sqlalchemy.ext.compiler import compiles
@@ -370,6 +384,7 @@ Example usage::

from .. import exc
from ..sql import visitors

+
def compiles(class_, *specs):
    """Register a function as a compiler for a given
    :class:`.ClauseElement` type."""
@@ -384,7 +399,8 @@ def compiles(class_, *specs):
            existing.specs['default'] = existing_dispatch

            # TODO: why is the lambda needed ?
-            setattr(class_, '_compiler_dispatch', lambda *arg, **kw: existing(*arg, **kw))
+            setattr(class_, '_compiler_dispatch',
+                    lambda *arg, **kw: existing(*arg, **kw))
            setattr(class_, '_compiler_dispatcher', existing)

        if specs:
@@ -396,6 +412,7 @@ def compiles(class_, *specs):
        return fn
    return decorate

+
def deregister(class_):
    """Remove all custom compilers associated with a given
    :class:`.ClauseElement` type."""
@@ -422,4 +439,3 @@ class _dispatcher(object):
                "%s construct has no default "
                "compilation handler." % type(element))
        return fn(element, compiler, **kw)
-
diff --git a/lib/sqlalchemy/ext/horizontal_shard.py b/lib/sqlalchemy/ext/horizontal_shard.py
index f55cfae0a4..1edc4d4c28 100644
--- a/lib/sqlalchemy/ext/horizontal_shard.py
+++ b/lib/sqlalchemy/ext/horizontal_shard.py
@@ -14,13 +14,13 @@ the source distribution.

"""

-from .. import exc as sa_exc
from .. import util
from ..orm.session import Session
from ..orm.query import Query

__all__ = ['ShardedSession', 'ShardedQuery']

+
class ShardedQuery(Query):
    def __init__(self, *args, **kwargs):
        super(ShardedQuery, self).__init__(*args, **kwargs)
@@ -72,28 +72,29 @@ class ShardedQuery(Query):
        else:
            return None

+
class ShardedSession(Session):
    def __init__(self, shard_chooser, id_chooser, query_chooser, shards=None,
                 query_cls=ShardedQuery, **kwargs):
        """Construct a ShardedSession.

-        :param shard_chooser: A callable which, passed a Mapper, a mapped instance, and possibly a
-          SQL clause, returns a shard ID. This id may be based off of the
-          attributes present within the object, or on some round-robin
-          scheme. If the scheme is based on a selection, it should set
-          whatever state on the instance to mark it in the future as
+        :param shard_chooser: A callable which, passed a Mapper, a mapped
+          instance, and possibly a SQL clause, returns a shard ID. This id
+          may be based off of the attributes present within the object, or on
+          some round-robin scheme. If the scheme is based on a selection, it
+          should set whatever state on the instance to mark it in the future as
          participating in that shard.

-        :param id_chooser: A callable, passed a query and a tuple of identity values, which
-          should return a list of shard ids where the ID might reside. The
-          databases will be queried in the order of this listing.
+        :param id_chooser: A callable, passed a query and a tuple of identity
+          values, which should return a list of shard ids where the ID might
+          reside. The databases will be queried in the order of this listing.

-        :param query_chooser: For a given Query, returns the list of shard_ids where the query
-          should be issued. Results from all shards returned will be combined
-          together into a single listing.
+        :param query_chooser: For a given Query, returns the list of shard_ids
+          where the query should be issued. Results from all shards returned
+          will be combined together into a single listing.

-        :param shards: A dictionary of string shard names to :class:`~sqlalchemy.engine.Engine`
-          objects.
+        :param shards: A dictionary of string shard names
+          to :class:`~sqlalchemy.engine.Engine` objects.

        """
        super(ShardedSession, self).__init__(query_cls=query_cls, **kwargs)
@@ -117,12 +118,11 @@ class ShardedSession(Session):
                shard_id=shard_id,
                instance=instance).contextual_connect(**kwargs)

-    def get_bind(self, mapper, shard_id=None, instance=None, clause=None, **kw):
+    def get_bind(self, mapper, shard_id=None,
+                 instance=None, clause=None, **kw):
        if shard_id is None:
            shard_id = self.shard_chooser(mapper, instance, clause=clause)
        return self.__binds[shard_id]

    def bind_shard(self, shard_id, bind):
        self.__binds[shard_id] = bind
-
-
diff --git a/lib/sqlalchemy/ext/hybrid.py b/lib/sqlalchemy/ext/hybrid.py
index 57d39866cb..047b2ff95c 100644
--- a/lib/sqlalchemy/ext/hybrid.py
+++ b/lib/sqlalchemy/ext/hybrid.py
@@ -240,8 +240,8 @@ The above hybrid property ``balance`` works with the first
in-Python getter/setter methods can treat ``accounts`` as a Python list
available on ``self``.
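At the instance level the hybrid then behaves like an ordinary Python property; a sketch, assuming the ``User``/``SavingsAccount`` mapping from this documentation::

    user = User(name='dilbert')
    user.balance = 5000     # setter: creates/updates the first SavingsAccount
    print user.balance      # getter: reads self.accounts[0].balance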
-However, at the expression level, it's expected that the ``User`` class will be used
-in an appropriate context such that an appropriate join to
+However, at the expression level, it's expected that the ``User`` class will
+be used in an appropriate context such that an appropriate join to
``SavingsAccount`` will be present::

    >>> print Session().query(User, User.balance).\\
@@ -268,11 +268,10 @@ Correlated Subquery Relationship Hybrid
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

We can, of course, forego being dependent on the enclosing query's usage
-of joins in favor of the correlated
-subquery, which can portably be packed into a single column expression.
-A correlated subquery is more portable, but often performs more poorly
-at the SQL level.
-Using the same technique illustrated at :ref:`mapper_column_property_sql_expressions`,
+of joins in favor of the correlated subquery, which can portably be packed
+into a single column expression. A correlated subquery is more portable, but
+often performs more poorly at the SQL level. Using the same technique
+illustrated at :ref:`mapper_column_property_sql_expressions`,
we can adjust our ``SavingsAccount`` example to aggregate the balances for
*all* accounts, and use a correlated subquery for the column expression::

@@ -629,6 +628,7 @@ there's probably a whole lot of amazing things it can be used for.

from .. import util
from ..orm import attributes, interfaces

+
class hybrid_method(object):
    """A decorator which allows definition of a Python object method
    with both instance-level and class-level behavior.

@@ -668,6 +668,7 @@ class hybrid_method(object):
        self.expr = expr
        return self

+
class hybrid_property(object):
    """A decorator which allows definition of a Python descriptor
    with both instance-level and class-level behavior.

@@ -750,6 +751,7 @@ class hybrid_property(object):
        self.expr = expr
        return self

+
class Comparator(interfaces.PropComparator):
    """A helper class that allows easy construction of custom
    :class:`~.orm.interfaces.PropComparator`
diff --git a/lib/sqlalchemy/ext/instrumentation.py b/lib/sqlalchemy/ext/instrumentation.py
index f840ad066f..bb44a492ca 100644
--- a/lib/sqlalchemy/ext/instrumentation.py
+++ b/lib/sqlalchemy/ext/instrumentation.py
@@ -61,6 +61,7 @@ attribute.

"""

+
def find_native_user_instrumentation_hook(cls):
    """Find user-specified instrumentation management for a class."""
    return getattr(cls, INSTRUMENTATION_MANAGER, None)
@@ -81,6 +82,7 @@ ClassManager instrumentation is used.
""" + class ExtendedInstrumentationRegistry(InstrumentationFactory): """Extends :class:`.InstrumentationFactory` with additional bookkeeping, to accommodate multiple types of @@ -169,17 +171,21 @@ class ExtendedInstrumentationRegistry(InstrumentationFactory): def state_of(self, instance): if instance is None: raise AttributeError("None has no persistent state.") - return self._state_finders.get(instance.__class__, _default_state_getter)(instance) + return self._state_finders.get( + instance.__class__, _default_state_getter)(instance) def dict_of(self, instance): if instance is None: raise AttributeError("None has no persistent state.") - return self._dict_finders.get(instance.__class__, _default_dict_getter)(instance) + return self._dict_finders.get( + instance.__class__, _default_dict_getter)(instance) + orm_instrumentation._instrumentation_factory = \ _instrumentation_factory = ExtendedInstrumentationRegistry() orm_instrumentation.instrumentation_finders = instrumentation_finders + class InstrumentationManager(object): """User-defined class instrumentation extension. @@ -259,6 +265,7 @@ class InstrumentationManager(object): def dict_getter(self, class_): return lambda inst: self.get_instance_dict(class_, inst) + class _ClassInstrumentationAdapter(ClassManager): """Adapts a user-defined InstrumentationManager to a ClassManager.""" @@ -353,6 +360,7 @@ class _ClassInstrumentationAdapter(ClassManager): def dict_getter(self): return self._get_dict + def _install_instrumented_lookups(): """Replace global class/object management functions with ExtendedInstrumentationRegistry implementations, which @@ -368,22 +376,24 @@ def _install_instrumented_lookups(): """ _install_lookups( dict( - instance_state = _instrumentation_factory.state_of, - instance_dict = _instrumentation_factory.dict_of, - manager_of_class = _instrumentation_factory.manager_of_class + instance_state=_instrumentation_factory.state_of, + instance_dict=_instrumentation_factory.dict_of, + manager_of_class=_instrumentation_factory.manager_of_class ) ) + def _reinstall_default_lookups(): """Restore simplified lookups.""" _install_lookups( dict( - instance_state = _default_state_getter, - instance_dict = _default_dict_getter, - manager_of_class = _default_manager_getter + instance_state=_default_state_getter, + instance_dict=_default_dict_getter, + manager_of_class=_default_manager_getter ) ) + def _install_lookups(lookups): global instance_state, instance_dict, manager_of_class instance_state = lookups['instance_state'] diff --git a/lib/sqlalchemy/ext/mutable.py b/lib/sqlalchemy/ext/mutable.py index db05a82b48..36d60d6d51 100644 --- a/lib/sqlalchemy/ext/mutable.py +++ b/lib/sqlalchemy/ext/mutable.py @@ -7,13 +7,13 @@ """Provide support for tracking of in-place changes to scalar values, which are propagated into ORM change events on owning parent objects. -The :mod:`sqlalchemy.ext.mutable` extension replaces SQLAlchemy's legacy approach to in-place -mutations of scalar values, established by the :class:`.types.MutableType` -class as well as the ``mutable=True`` type flag, with a system that allows -change events to be propagated from the value to the owning parent, thereby -removing the need for the ORM to maintain copies of values as well as the very -expensive requirement of scanning through all "mutable" values on each flush -call, looking for changes. 
+The :mod:`sqlalchemy.ext.mutable` extension replaces SQLAlchemy's legacy
+approach to in-place mutations of scalar values, established by the
+:class:`.types.MutableType` class as well as the ``mutable=True`` type flag,
+with a system that allows change events to be propagated from the value to
+the owning parent, thereby removing the need for the ORM to maintain copies
+of values as well as the very expensive requirement of scanning through all
+"mutable" values on each flush call, looking for changes.

.. _mutable_scalars:

@@ -43,8 +43,8 @@ JSON strings before being persisted::
            value = json.loads(value)
        return value

-The usage of ``json`` is only for the purposes of example. The :mod:`sqlalchemy.ext.mutable`
-extension can be used
+The usage of ``json`` is only for the purposes of example. The
+:mod:`sqlalchemy.ext.mutable` extension can be used
with any type whose target Python type may be mutable, including
:class:`.PickleType`, :class:`.postgresql.ARRAY`, etc.

@@ -86,19 +86,19 @@ The above dictionary class takes the approach of subclassing the Python
built-in ``dict`` to produce a dict
subclass which routes all mutation events through ``__setitem__``. There are
variants on this approach, such as subclassing ``UserDict.UserDict`` or
-``collections.MutableMapping``; the part that's important to this
-example is that the :meth:`.Mutable.changed` method is called whenever an in-place change to the
-datastructure takes place.
+``collections.MutableMapping``; the part that's important to this example is
+that the :meth:`.Mutable.changed` method is called whenever an in-place
+change to the datastructure takes place.

We also redefine the :meth:`.Mutable.coerce` method which will be used to
convert any values that are not instances of ``MutableDict``, such
as the plain dictionaries returned by the ``json`` module, into the
-appropriate type. Defining this method is optional; we could just as well have created our
-``JSONEncodedDict`` such that it always returns an instance of ``MutableDict``,
-and additionally ensured that all calling code uses ``MutableDict``
-explicitly. When :meth:`.Mutable.coerce` is not overridden, any values
-applied to a parent object which are not instances of the mutable type
-will raise a ``ValueError``.
+appropriate type. Defining this method is optional; we could just as well have
+created our ``JSONEncodedDict`` such that it always returns an instance
+of ``MutableDict``, and additionally ensured that all calling code
+uses ``MutableDict`` explicitly. When :meth:`.Mutable.coerce` is not
+overridden, any values applied to a parent object which are not instances
+of the mutable type will raise a ``ValueError``.

Our new ``MutableDict`` type offers a class method
:meth:`~.Mutable.as_mutable` which we can use within column metadata
@@ -156,9 +156,10 @@ will flag the attribute as "dirty" on the parent object::
    True

The ``MutableDict`` can be associated with all future instances
-of ``JSONEncodedDict`` in one step, using :meth:`~.Mutable.associate_with`. This
-is similar to :meth:`~.Mutable.as_mutable` except it will intercept
-all occurrences of ``MutableDict`` in all mappings unconditionally, without
+of ``JSONEncodedDict`` in one step, using
+:meth:`~.Mutable.associate_with`. This is similar to
+:meth:`~.Mutable.as_mutable` except it will intercept all occurrences
+of ``MutableDict`` in all mappings unconditionally, without
the need to declare it individually::

    MutableDict.associate_with(JSONEncodedDict)

@@ -330,11 +331,14 @@ from ..orm.attributes import flag_modified
from .. import event, types
from ..orm import mapper, object_mapper
from ..util import memoized_property
-from .. import exc
import weakref

+
class MutableBase(object):
-    """Common base class to :class:`.Mutable` and :class:`.MutableComposite`."""
+    """Common base class to :class:`.Mutable`
+    and :class:`.MutableComposite`.
+
+    """

    @memoized_property
    def _parents(self):
@@ -356,7 +360,8 @@ class MutableBase(object):
        """
        if value is None:
            return None
-        raise ValueError("Attribute '%s' does not accept objects of type %s" % (key, type(value)))
+        msg = "Attribute '%s' does not accept objects of type %s"
+        raise ValueError(msg % (key, type(value)))

    @classmethod
    def _listen_on_attribute(cls, attribute, coerce, parent_cls):
@@ -414,12 +419,17 @@ class MutableBase(object):
            for val in state_dict['ext.mutable.values']:
                val._parents[state.obj()] = key

+        event.listen(parent_cls, 'load', load,
+                     raw=True, propagate=True)
+        event.listen(parent_cls, 'refresh', load,
+                     raw=True, propagate=True)
+        event.listen(attribute, 'set', set,
+                     raw=True, retval=True, propagate=True)
+        event.listen(parent_cls, 'pickle', pickle,
+                     raw=True, propagate=True)
+        event.listen(parent_cls, 'unpickle', unpickle,
+                     raw=True, propagate=True)
-        event.listen(parent_cls, 'load', load, raw=True, propagate=True)
-        event.listen(parent_cls, 'refresh', load, raw=True, propagate=True)
-        event.listen(attribute, 'set', set, raw=True, retval=True, propagate=True)
-        event.listen(parent_cls, 'pickle', pickle, raw=True, propagate=True)
-        event.listen(parent_cls, 'unpickle', unpickle, raw=True, propagate=True)


class Mutable(MutableBase):
    """Mixin that defines transparent propagation of change
@@ -448,15 +458,16 @@ class Mutable(MutableBase):
        """Associate this wrapper with all future mapped columns
        of the given type.

-        This is a convenience method that calls ``associate_with_attribute`` automatically.
+        This is a convenience method that calls
+        ``associate_with_attribute`` automatically.

        .. warning::

           The listeners established by this method are *global*
           to all mappers, and are *not* garbage collected. Only use
-           :meth:`.associate_with` for types that are permanent to an application,
-           not with ad-hoc types else this will cause unbounded growth
-           in memory usage.
+           :meth:`.associate_with` for types that are permanent to an
+           application, not with ad-hoc types else this will cause unbounded
+           growth in memory usage.

        """

@@ -483,8 +494,8 @@ class Mutable(MutableBase):
            )

        Note that the returned type is always an instance, even if a class
-        is given, and that only columns which are declared specifically with that
-        type instance receive additional instrumentation.
+        is given, and that only columns which are declared specifically with
+        that type instance receive additional instrumentation.

        To associate a particular mutable type with all occurrences of a
        particular type, use the :meth:`.Mutable.associate_with` classmethod
@@ -511,11 +522,13 @@ class Mutable(MutableBase):

        return sqltype

+
class _MutableCompositeMeta(type):
    def __init__(cls, classname, bases, dict_):
        cls._setup_listeners()
        return type.__init__(cls, classname, bases, dict_)

+
class MutableComposite(MutableBase):
    """Mixin that defines transparent propagation of change
    events on a SQLAlchemy "composite" object to its
@@ -526,10 +539,10 @@ class MutableComposite(MutableBase):

    .. warning::

       The listeners established by the :class:`.MutableComposite`
-       class are *global* to all mappers, and are *not* garbage collected. Only use
-       :class:`.MutableComposite` for types that are permanent to an application,
-       not with ad-hoc types else this will cause unbounded growth
-       in memory usage.
+       class are *global* to all mappers, and are *not* garbage
+       collected. Only use :class:`.MutableComposite` for types that are
+       permanent to an application, not with ad-hoc types else this will
+       cause unbounded growth in memory usage.

    """
    __metaclass__ = _MutableCompositeMeta

@@ -550,19 +563,21 @@ class MutableComposite(MutableBase):
        """Associate this wrapper with all future mapped composites
        of the given type.

-        This is a convenience method that calls ``associate_with_attribute`` automatically.
+        This is a convenience method that calls ``associate_with_attribute``
+        automatically.

        """

        def listen_for_type(mapper, class_):
            for prop in mapper.iterate_properties:
-                if hasattr(prop, 'composite_class') and issubclass(prop.composite_class, cls):
-                    cls._listen_on_attribute(getattr(class_, prop.key), False, class_)
+                if (hasattr(prop, 'composite_class') and
+                        issubclass(prop.composite_class, cls)):
+                    cls._listen_on_attribute(
+                        getattr(class_, prop.key), False, class_)

        event.listen(mapper, 'mapper_configured', listen_for_type)

-
class MutableDict(Mutable, dict):
    """A dictionary type that implements :class:`.Mutable`.

diff --git a/lib/sqlalchemy/ext/orderinglist.py b/lib/sqlalchemy/ext/orderinglist.py
index 968c0a4a9b..a2604c379a 100644
--- a/lib/sqlalchemy/ext/orderinglist.py
+++ b/lib/sqlalchemy/ext/orderinglist.py
@@ -8,10 +8,11 @@

:author: Jason Kirtland

-``orderinglist`` is a helper for mutable ordered relationships. It will intercept
-list operations performed on a relationship collection and automatically
-synchronize changes in list position with an attribute on the related objects.
-(See :ref:`advdatamapping_entitycollections` for more information on the general pattern.)
+``orderinglist`` is a helper for mutable ordered relationships. It will
+intercept list operations performed on a relationship collection and
+automatically synchronize changes in list position with an attribute on the
+related objects. (See :ref:`advdatamapping_entitycollections` for more
+information on the general pattern.)

Example: Two tables that store slides in a presentation. Each slide
has a number of bullet points, displayed in order by the 'position'
@@ -41,15 +42,15 @@ affected rows when changes are made.
    })
    mapper(Bullet, bullets_table)

-The standard relationship mapping will produce a list-like attribute on each Slide
-containing all related Bullets, but coping with changes in ordering is totally
-your responsibility. If you insert a Bullet into that list, there is no
-magic- it won't have a position attribute unless you assign it one, and
+The standard relationship mapping will produce a list-like attribute on each
+Slide containing all related Bullets, but coping with changes in ordering is
+totally your responsibility. If you insert a Bullet into that list, there is
+no magic - it won't have a position attribute unless you assign it one, and
you'll need to manually renumber all the subsequent Bullets in the list to
accommodate the insert.

-An ``orderinglist`` can automate this and manage the 'position' attribute on all
-related bullets for you.
+An ``orderinglist`` can automate this and manage the 'position' attribute on
+all related bullets for you.

.. sourcecode:: python+sql

@@ -69,18 +70,20 @@ related bullets for you.
    s.bullets[2].position
    >>> 2

-Use the ``ordering_list`` function to set up the ``collection_class`` on relationships
-(as in the mapper example above). This implementation depends on the list
-starting in the proper order, so be SURE to put an order_by on your relationship.
+Use the ``ordering_list`` function to set up the ``collection_class`` on
+relationships (as in the mapper example above). This implementation depends
+on the list starting in the proper order, so be SURE to put an order_by on
+your relationship.

.. warning::

  ``ordering_list`` only provides limited functionality when a primary
-  key column or unique column is the target of the sort. Since changing the order of
-  entries often means that two rows must trade values, this is not possible when
-  the value is constrained by a primary key or unique constraint, since one of the rows
-  would temporarily have to point to a third available value so that the other row
-  could take its old value. ``ordering_list`` doesn't do any of this for you,
+  key column or unique column is the target of the sort. Since changing the
+  order of entries often means that two rows must trade values, this is not
+  possible when the value is constrained by a primary key or unique
+  constraint, since one of the rows would temporarily have to point to a
+  third available value so that the other row could take its old
+  value. ``ordering_list`` doesn't do any of this for you,
  nor does SQLAlchemy itself.

``ordering_list`` takes the name of the related object's ordering attribute as
@@ -100,14 +103,14 @@ index to any value you require.

from ..orm.collections import collection
from .. import util

-__all__ = [ 'ordering_list' ]
+__all__ = ['ordering_list']


def ordering_list(attr, count_from=None, **kw):
    """Prepares an OrderingList factory for use in mapper definitions.

-    Returns an object suitable for use as an argument to a Mapper relationship's
-    ``collection_class`` option. Arguments are:
+    Returns an object suitable for use as an argument to a Mapper
+    relationship's ``collection_class`` option. Arguments are:

    attr
      Name of the mapped attribute to use for storage and retrieval of
      ordering information

@@ -125,17 +128,22 @@ def ordering_list(attr, count_from=None, **kw):
    kw = _unsugar_count_from(count_from=count_from, **kw)
    return lambda: OrderingList(attr, **kw)

+
# Ordering utility functions
+
+
def count_from_0(index, collection):
    """Numbering function: consecutive integers starting at 0."""

    return index

+
def count_from_1(index, collection):
    """Numbering function: consecutive integers starting at 1."""

    return index + 1

+
def count_from_n_factory(start):
    """Numbering function: consecutive integers starting at arbitrary start."""

@@ -147,6 +155,7 @@ def count_from_n_factory(start):
        pass
    return f

+
def _unsugar_count_from(**kw):
    """Builds counting functions from keyword arguments.

@@ -164,6 +173,7 @@ def _unsugar_count_from(**kw):
        kw['ordering_func'] = count_from_n_factory(count_from)
    return kw

+
class OrderingList(list):
    """A custom list that manages position information for its children.

@@ -188,9 +198,10 @@ class OrderingList(list):
          Name of the attribute that stores the object's order in the
          relationship.

-        :param ordering_func: Optional. A function that maps the position in the Python list to a
-          value to store in the ``ordering_attr``. Values returned are
-          usually (but need not be!) integers.
+        :param ordering_func: Optional. A function that maps the position in
+          the Python list to a value to store in the
+          ``ordering_attr``. Values returned are usually (but need not be!)
+          integers.

          An ``ordering_func`` is called with two positional parameters: the
          index of the element in the list, and the list itself.

@@ -323,6 +334,7 @@ class OrderingList(list):
        func.__doc__ = getattr(list, func_name).__doc__
    del func_name, func

+
def _reconstitute(cls, dict_, items):
    """ Reconstitute an ``OrderingList``.

diff --git a/lib/sqlalchemy/ext/serializer.py b/lib/sqlalchemy/ext/serializer.py
index 8a58821076..3ed41f48a3 100644
--- a/lib/sqlalchemy/ext/serializer.py
+++ b/lib/sqlalchemy/ext/serializer.py
@@ -39,18 +39,19 @@ The serializer module is only appropriate for query structures. It is not
needed for:

* instances of user-defined classes. These contain no references to engines,
-  sessions or expression constructs in the typical case and can be serialized directly.
+  sessions or expression constructs in the typical case and can be serialized
+  directly.

-* Table metadata that is to be loaded entirely from the serialized structure (i.e. is
-  not already declared in the application). Regular pickle.loads()/dumps() can
-  be used to fully dump any ``MetaData`` object, typically one which was reflected
-  from an existing database at some previous point in time. The serializer module
-  is specifically for the opposite case, where the Table metadata is already present
-  in memory.
+* Table metadata that is to be loaded entirely from the serialized structure
+  (i.e. is not already declared in the application). Regular
+  pickle.loads()/dumps() can be used to fully dump any ``MetaData`` object,
+  typically one which was reflected from an existing database at some previous
+  point in time. The serializer module is specifically for the opposite case,
+  where the Table metadata is already present in memory.

"""

-from ..orm import class_mapper, Query
+from ..orm import class_mapper
from ..orm.session import Session
from ..orm.mapper import Mapper
from ..orm.attributes import QueryableAttribute
@@ -78,7 +79,6 @@ b64decode = base64.b64decode

__all__ = ['Serializer', 'Deserializer', 'dumps', 'loads']


def Serializer(*args, **kw):
    pickler = pickle.Pickler(*args, **kw)

@@ -107,6 +107,7 @@ def Serializer(*args, **kw):

our_ids = re.compile(r'(mapper|table|column|session|attribute|engine):(.*)')

+
def Deserializer(file, metadata=None, scoped_session=None, engine=None):
    unpickler = pickle.Unpickler(file)

@@ -147,15 +148,15 @@ def Deserializer(file, metadata=None, scoped_session=None, engine=None):
    unpickler.persistent_load = persistent_load
    return unpickler

+
def dumps(obj, protocol=0):
    buf = byte_buffer()
    pickler = Serializer(buf, protocol)
    pickler.dump(obj)
    return buf.getvalue()

+
def loads(data, metadata=None, scoped_session=None, engine=None):
    buf = byte_buffer(data)
    unpickler = Deserializer(buf, metadata, scoped_session, engine)
    return unpickler.load()
-
-
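As a closing illustration of the ``orderinglist`` API documented above, a sketch using the ``Slide``/``Bullet`` mapping from the module docs, with numbering started at 1 via ``count_from`` (names assumed from that example)::

    from sqlalchemy.orm import mapper, relationship
    from sqlalchemy.ext.orderinglist import ordering_list

    mapper(Slide, slides_table, properties={
        'bullets': relationship(Bullet,
                                collection_class=ordering_list('position',
                                                               count_from=1),
                                order_by=[bullets_table.c.position])
    })
    mapper(Bullet, bullets_table)

    s = Slide()
    s.bullets.append(Bullet())       # assigned position 1
    s.bullets.append(Bullet())       # assigned position 2
    s.bullets.insert(1, Bullet())    # takes position 2; later bullets renumber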
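And a minimal round trip with the serializer module itself, assuming a configured ``scoped_session`` named ``Session``, a ``MetaData`` already present in memory, and a mapped ``User`` class::

    from sqlalchemy.ext.serializer import dumps, loads

    query = Session.query(User).filter(User.name == 'ed')

    # pickle the query structure; engines and sessions are replaced by
    # symbolic persistent ids rather than being serialized
    serialized = dumps(query)

    # later: rebind against the live metadata and session
    query2 = loads(serialized, metadata, Session)
    print query2.all()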