#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
The returned value is an instance of :class:`.AssociationProxy`.
- Implements a Python property representing a relationship as a collection of
- simpler values, or a scalar value. The proxied property will mimic the collection type of
- the target (list, dict or set), or, in the case of a one to one relationship,
- a simple scalar value.
+ Implements a Python property representing a relationship as a collection
+ of simpler values, or a scalar value. The proxied property will mimic
+ the collection type of the target (list, dict or set), or, in the case of
+ a one to one relationship, a simple scalar value.
:param target_collection: Name of the attribute we'll proxy to.
This attribute is typically mapped by
:func:`~sqlalchemy.orm.relationship` to link to a target collection, but
can also be a many-to-one or non-scalar relationship.
- :param attr: Attribute on the associated instance or instances we'll proxy for.
+ :param attr: Attribute on the associated instance or instances we'll
+ proxy for.
For example, given a target collection of [obj1, obj2], a list created
by this proxy property would look like [getattr(obj1, *attr*),
getattr(obj2, *attr*)]
- If the relationship is one-to-one or otherwise uselist=False, then simply:
- getattr(obj, *attr*)
+ If the relationship is one-to-one or otherwise uselist=False, then
+ simply: getattr(obj, *attr*)
:param creator: optional.
:param target_collection: Name of the collection we'll proxy to,
usually created with :func:`.relationship`.
- :param attr: Attribute on the collected instances we'll proxy for. For example,
- given a target collection of [obj1, obj2], a list created by this
- proxy property would look like [getattr(obj1, attr), getattr(obj2,
- attr)]
+ :param attr: Attribute on the collected instances we'll proxy
+ for. For example, given a target collection of [obj1, obj2], a
+ list created by this proxy property would look like
+ [getattr(obj1, attr), getattr(obj2, attr)]
- :param creator: Optional. When new items are added to this proxied collection, new
- instances of the class collected by the target collection will be
- created. For list and set collections, the target class constructor
- will be called with the 'value' for the new instance. For dict
- types, two arguments are passed: key and value.
+ :param creator: Optional. When new items are added to this proxied
+ collection, new instances of the class collected by the target
+ collection will be created. For list and set collections, the
+ target class constructor will be called with the 'value' for the
+ new instance. For dict types, two arguments are passed:
+ key and value.
If you want to construct instances differently, supply a 'creator'
function that takes arguments as above and returns instances.
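
For example, a minimal sketch, assuming hypothetical ``User`` and
``Keyword`` mapped classes (with a ``userkeywords_table`` secondary table)
where ``User.kw`` is a relationship to ``Keyword``::

    class User(Base):
        # ... mapping elided ...
        kw = relationship("Keyword", secondary=userkeywords_table)

        # each string appended to User.keywords is passed through
        # the creator to build a Keyword association
        keywords = association_proxy(
            'kw', 'keyword',
            creator=lambda kw: Keyword(keyword=kw))
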
- :param getset_factory: Optional. Proxied attribute access is automatically handled by
- routines that get and set values based on the `attr` argument for
- this proxy.
+ :param getset_factory: Optional. Proxied attribute access is
+ automatically handled by routines that get and set values based on
+ the `attr` argument for this proxy.
If you would like to customize this behavior, you may supply a
`getset_factory` callable that produces a tuple of `getter` and
`setter` functions. The factory is called with two arguments, the
abstract type of the underlying collection and this proxy instance.
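
As a sketch, a factory that merely reproduces the default behavior for
list and set collections (the names here are illustrative) might look
like::

    def getset_factory(collection_type, proxy):
        def getter(obj):
            return getattr(obj, proxy.value_attr)

        def setter(obj, value):
            setattr(obj, proxy.value_attr, value)

        return getter, setter
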
- :param proxy_factory: Optional. The type of collection to emulate is determined by
- sniffing the target collection. If your collection type can't be
- determined by duck typing or you'd like to use a different
- collection implementation, you may supply a factory function to
- produce those collections. Only applicable to non-scalar relationships.
+ :param proxy_factory: Optional. The type of collection to emulate is
+ determined by sniffing the target collection. If your collection
+ type can't be determined by duck typing or you'd like to use a
+ different collection implementation, you may supply a factory
+ function to produce those collections. Only applicable to
+ non-scalar relationships.
:param proxy_bulk_set: Optional, use with proxy_factory. See
the _set() method for details.
self.collection_class = util.duck_type_collection(lazy_collection())
if self.proxy_factory:
- return self.proxy_factory(lazy_collection, creator, self.value_attr, self)
+ return self.proxy_factory(
+ lazy_collection, creator, self.value_attr, self)
if self.getset_factory:
            getter, setter = self.getset_factory(self.collection_class, self)
        else:
            getter, setter = self._default_getset(self.collection_class)
if self.collection_class is list:
- return _AssociationList(lazy_collection, creator, getter, setter, self)
+ return _AssociationList(
+ lazy_collection, creator, getter, setter, self)
elif self.collection_class is dict:
- return _AssociationDict(lazy_collection, creator, getter, setter, self)
+ return _AssociationDict(
+ lazy_collection, creator, getter, setter, self)
elif self.collection_class is set:
- return _AssociationSet(lazy_collection, creator, getter, setter, self)
+ return _AssociationSet(
+ lazy_collection, creator, getter, setter, self)
else:
raise exc.ArgumentError(
'could not guess which interface to use for '
"""
if self._value_is_scalar:
- value_expr = getattr(self.target_class, self.value_attr).has(criterion, **kwargs)
+ value_expr = getattr(
+ self.target_class, self.value_attr).has(criterion, **kwargs)
else:
- value_expr = getattr(self.target_class, self.value_attr).any(criterion, **kwargs)
+ value_expr = getattr(
+ self.target_class, self.value_attr).any(criterion, **kwargs)
# check _value_is_scalar here, otherwise
# we're scalar->scalar - call .any() so that
return getattr(obj, self.target)
def __getstate__(self):
- return {'obj':self.ref(), 'target':self.target}
+ return {'obj': self.ref(), 'target': self.target}
def __setstate__(self, state):
self.ref = weakref.ref(state['obj'])
self.target = state['target']
+
class _AssociationCollection(object):
def __init__(self, lazy_collection, creator, getter, setter, parent):
"""Constructs an _AssociationCollection.
return bool(self.col)
def __getstate__(self):
- return {'parent':self.parent, 'lazy_collection':self.lazy_collection}
+ return {'parent': self.parent, 'lazy_collection': self.lazy_collection}
def __setstate__(self, state):
self.parent = state['parent']
self.lazy_collection = state['lazy_collection']
self.parent._inflate(self)
+
class _AssociationList(_AssociationCollection):
"""Generic, converting, list-to-list proxy."""
_NotProvided = util.symbol('_NotProvided')
+
+
class _AssociationDict(_AssociationCollection):
"""Generic, converting, dict-to-dict proxy."""
return self.col.iterkeys()
def values(self):
- return [ self._get(member) for member in self.col.values() ]
+ return [self._get(member) for member in self.col.values()]
def itervalues(self):
for key in self.col:
len(a))
elif len(a) == 1:
seq_or_map = a[0]
- # discern dict from sequence - took the advice
- # from http://www.voidspace.org.uk/python/articles/duck_typing.shtml
+ # discern dict from sequence - took the advice from
+ # http://www.voidspace.org.uk/python/articles/duck_typing.shtml
# still not perfect :(
if hasattr(seq_or_map, 'keys'):
for item in seq_or_map:
Synopsis
========
-Usage involves the creation of one or more :class:`~sqlalchemy.sql.expression.ClauseElement`
-subclasses and one or more callables defining its compilation::
+Usage involves the creation of one or more
+:class:`~sqlalchemy.sql.expression.ClauseElement` subclasses and one or
+more callables defining its compilation::
from sqlalchemy.ext.compiler import compiles
from sqlalchemy.sql.expression import ColumnClause
def visit_alter_column(element, compiler, **kw):
return "ALTER TABLE %s ALTER COLUMN %s ..." % (element.table.name, element.column.name)
-The second ``visit_alter_table`` will be invoked when any ``postgresql`` dialect is used.
+The second ``visit_alter_column`` will be invoked when any ``postgresql``
+dialect is used.
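
For example, a sketch of the paired registrations, assuming an
``AlterColumn`` ``DDLElement`` subclass as in the example above::

    @compiles(AlterColumn)
    def visit_alter_column(element, compiler, **kw):
        # default rule, used by all other dialects
        return "ALTER COLUMN %s ..." % element.column.name

    @compiles(AlterColumn, 'postgresql')
    def visit_alter_column(element, compiler, **kw):
        # invoked only when a postgresql dialect is in use
        return "ALTER TABLE %s ALTER COLUMN %s ..." % (
            element.table.name, element.column.name)
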
Compiling sub-elements of a custom expression construct
=======================================================
Cross Compiling between SQL and DDL compilers
---------------------------------------------
-SQL and DDL constructs are each compiled using different base compilers - ``SQLCompiler``
-and ``DDLCompiler``. A common need is to access the compilation rules of SQL expressions
-from within a DDL expression. The ``DDLCompiler`` includes an accessor ``sql_compiler`` for this reason, such as below where we generate a CHECK
-constraint that embeds a SQL expression::
+SQL and DDL constructs are each compiled using different base compilers -
+``SQLCompiler`` and ``DDLCompiler``. A common need is to access the
+compilation rules of SQL expressions from within a DDL expression. The
+``DDLCompiler`` includes an accessor ``sql_compiler`` for this reason, such as
+below where we generate a CHECK constraint that embeds a SQL expression::
@compiles(MyConstraint)
def compile_my_constraint(constraint, ddlcompiler, **kw):
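    # a sketch of the body: the constraint object is assumed to carry
    # ``name`` and ``expression`` attributes; sql_compiler renders the
    # embedded SQL expression
    return "CONSTRAINT %s CHECK (%s)" % (
        constraint.name,
        ddlcompiler.sql_compiler.process(constraint.expression))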
Enabling Autocommit on a Construct
==================================
-Recall from the section :ref:`autocommit` that the :class:`.Engine`, when asked to execute
-a construct in the absence of a user-defined transaction, detects if the given
-construct represents DML or DDL, that is, a data modification or data definition statement, which
-requires (or may require, in the case of DDL) that the transaction generated by the DBAPI be committed
-(recall that DBAPI always has a transaction going on regardless of what SQLAlchemy does). Checking
-for this is actually accomplished
-by checking for the "autocommit" execution option on the construct. When building a construct like
-an INSERT derivation, a new DDL type, or perhaps a stored procedure that alters data, the "autocommit"
-option needs to be set in order for the statement to function with "connectionless" execution
+Recall from the section :ref:`autocommit` that the :class:`.Engine`, when
+asked to execute a construct in the absence of a user-defined transaction,
+detects if the given construct represents DML or DDL, that is, a data
+modification or data definition statement, which requires (or may require,
+in the case of DDL) that the transaction generated by the DBAPI be committed
+(recall that DBAPI always has a transaction going on regardless of what
+SQLAlchemy does). Checking for this is actually accomplished by checking for
+the "autocommit" execution option on the construct. When building a
+construct like an INSERT derivation, a new DDL type, or perhaps a stored
+procedure that alters data, the "autocommit" option needs to be set in order
+for the statement to function with "connectionless" execution
(as described in :ref:`dbengine_implicit`).
-Currently a quick way to do this is to subclass :class:`.Executable`, then add the "autocommit" flag
-to the ``_execution_options`` dictionary (note this is a "frozen" dictionary which supplies a generative
-``union()`` method)::
+Currently a quick way to do this is to subclass :class:`.Executable`, then
+add the "autocommit" flag to the ``_execution_options`` dictionary (note this
+is a "frozen" dictionary which supplies a generative ``union()`` method)::
from sqlalchemy.sql.expression import Executable, ClauseElement
_execution_options = \\
Executable._execution_options.union({'autocommit': True})
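
Pulled together, a minimal sketch (``MyInsertThing`` is a hypothetical
name)::

    class MyInsertThing(Executable, ClauseElement):
        _execution_options = \\
            Executable._execution_options.union({'autocommit': True})
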
-More succinctly, if the construct is truly similar to an INSERT, UPDATE, or DELETE, :class:`.UpdateBase`
-can be used, which already is a subclass of :class:`.Executable`, :class:`.ClauseElement` and includes the
+More succinctly, if the construct is truly similar to an INSERT, UPDATE, or
+DELETE, :class:`.UpdateBase` can be used, which already is a subclass
+of :class:`.Executable`, :class:`.ClauseElement` and includes the
``autocommit`` flag::
from sqlalchemy.sql.expression import UpdateBase
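
A minimal sketch, again with a hypothetical name::

    class MyInsertThing(UpdateBase):
        # ClauseElement, Executable and the "autocommit" flag all
        # come along via UpdateBase
        def __init__(self, table):
            self.table = table
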
-DDL elements that subclass :class:`.DDLElement` already have the "autocommit" flag turned on.
+DDL elements that subclass :class:`.DDLElement` already have the
+"autocommit" flag turned on.
Changing the default compilation of existing constructs
=======================================================
-The compiler extension applies just as well to the existing constructs. When overriding
-the compilation of a built in SQL construct, the @compiles decorator is invoked upon
-the appropriate class (be sure to use the class, i.e. ``Insert`` or ``Select``, instead of the creation function such as ``insert()`` or ``select()``).
+The compiler extension applies just as well to existing constructs. When
+overriding the compilation of a built-in SQL construct, the @compiles
+decorator is invoked upon the appropriate class (be sure to use the class,
+e.g. ``Insert`` or ``Select``, instead of the creation function such
+as ``insert()`` or ``select()``).
-Within the new compilation function, to get at the "original" compilation routine,
-use the appropriate visit_XXX method - this because compiler.process() will call upon the
-overriding routine and cause an endless loop. Such as, to add "prefix" to all insert statements::
+Within the new compilation function, to get at the "original" compilation
+routine, use the appropriate visit_XXX method - this is
+because compiler.process() will call upon the overriding routine and cause
+an endless loop. For example, to add a "prefix" to all insert statements::
from sqlalchemy.sql.expression import Insert
def prefix_inserts(insert, compiler, **kw):
return compiler.visit_insert(insert.prefix_with("some prefix"), **kw)
-The above compiler will prefix all INSERT statements with "some prefix" when compiled.
+The above compiler will prefix all INSERT statements with "some prefix" when
+compiled.
.. _type_compilation_extension:
Changing Compilation of Types
=============================
-``compiler`` works for types, too, such as below where we implement the MS-SQL specific 'max' keyword for ``String``/``VARCHAR``::
+``compiler`` works for types, too, such as below where we implement the
+MS-SQL specific 'max' keyword for ``String``/``VARCHAR``::
@compiles(String, 'mssql')
@compiles(VARCHAR, 'mssql')
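def compile_varchar(element, compiler, **kw):
    # a sketch: emit the MS-SQL 'max' keyword when requested,
    # otherwise fall back to the standard VARCHAR rendering
    if element.length == 'max':
        return "VARCHAR('max')"
    else:
        return compiler.visit_VARCHAR(element, **kw)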
``execute_at()`` method, allowing the construct to be invoked during CREATE
TABLE and DROP TABLE sequences.
-* :class:`~sqlalchemy.sql.expression.Executable` - This is a mixin which should be
- used with any expression class that represents a "standalone" SQL statement that
- can be passed directly to an ``execute()`` method. It is already implicit
- within ``DDLElement`` and ``FunctionElement``.
+* :class:`~sqlalchemy.sql.expression.Executable` - This is a mixin which
+ should be used with any expression class that represents a "standalone"
+ SQL statement that can be passed directly to an ``execute()`` method. It
+ is already implicit within ``DDLElement`` and ``FunctionElement``.
Further Examples
================
"UTC timestamp" function
-------------------------
-A function that works like "CURRENT_TIMESTAMP" except applies the appropriate conversions
-so that the time is in UTC time. Timestamps are best stored in relational databases
-as UTC, without time zones. UTC so that your database doesn't think time has gone
-backwards in the hour when daylight savings ends, without timezones because timezones
-are like character encodings - they're best applied only at the endpoints of an
-application (i.e. convert to UTC upon user input, re-apply desired timezone upon display).
+A function that works like "CURRENT_TIMESTAMP" except applies the
+appropriate conversions so that the time is in UTC time. Timestamps are best
+stored in relational databases as UTC, without time zones. UTC so that your
+database doesn't think time has gone backwards in the hour when daylight
+savings ends, without timezones because timezones are like character
+encodings - they're best applied only at the endpoints of an application
+(i.e. convert to UTC upon user input, re-apply desired timezone upon display).
For Postgresql and Microsoft SQL Server::
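# a sketch of dialect-specific compilation for a UTC timestamp function
from sqlalchemy.sql import expression
from sqlalchemy.ext.compiler import compiles
from sqlalchemy.types import DateTime

class utcnow(expression.FunctionElement):
    type = DateTime()

@compiles(utcnow, 'postgresql')
def pg_utcnow(element, compiler, **kw):
    return "TIMEZONE('utc', CURRENT_TIMESTAMP)"

@compiles(utcnow, 'mssql')
def ms_utcnow(element, compiler, **kw):
    return "GETUTCDATE()"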
"GREATEST" function
-------------------
-The "GREATEST" function is given any number of arguments and returns the one that is
-of the highest value - it's equivalent to Python's ``max`` function. A SQL
-standard version versus a CASE based version which only accommodates two
-arguments::
+The "GREATEST" function is given any number of arguments and returns the one
+that is of the highest value - it's equivalent to Python's ``max``
+function. A SQL standard version versus a CASE based version which only
+accommodates two arguments::
from sqlalchemy.sql import expression
from sqlalchemy.ext.compiler import compiles
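from sqlalchemy.types import Numeric

class greatest(expression.FunctionElement):
    type = Numeric()
    name = 'greatest'

@compiles(greatest)
def default_greatest(element, compiler, **kw):
    # SQL-standard backends: render GREATEST(...) as a plain function
    return compiler.visit_function(element)

@compiles(greatest, 'sqlite')
@compiles(greatest, 'mssql')
@compiles(greatest, 'oracle')
def case_greatest(element, compiler, **kw):
    # two-argument CASE fallback for backends lacking GREATEST
    arg1, arg2 = list(element.clauses)
    return "CASE WHEN %s > %s THEN %s ELSE %s END" % (
        compiler.process(arg1),
        compiler.process(arg2),
        compiler.process(arg1),
        compiler.process(arg2),
    )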
"false" expression
------------------
-Render a "false" constant expression, rendering as "0" on platforms that don't have a "false" constant::
+Render a "false" constant expression, rendering as "0" on platforms that
+don't have a "false" constant::
from sqlalchemy.sql import expression
from sqlalchemy.ext.compiler import compiles
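class sql_false(expression.ColumnElement):
    pass

@compiles(sql_false)
def default_false(element, compiler, **kw):
    return "false"

@compiles(sql_false, 'mssql')
@compiles(sql_false, 'mysql')
def int_false(element, compiler, **kw):
    # these backends have no boolean "false" literal
    return "0"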
from .. import exc
from ..sql import visitors
+
def compiles(class_, *specs):
"""Register a function as a compiler for a
given :class:`.ClauseElement` type."""
existing.specs['default'] = existing_dispatch
# TODO: why is the lambda needed ?
- setattr(class_, '_compiler_dispatch', lambda *arg, **kw: existing(*arg, **kw))
+ setattr(class_, '_compiler_dispatch',
+ lambda *arg, **kw: existing(*arg, **kw))
setattr(class_, '_compiler_dispatcher', existing)
if specs:
return fn
return decorate
+
def deregister(class_):
"""Remove all custom compilers associated with a given
:class:`.ClauseElement` type."""
"%s construct has no default "
"compilation handler." % type(element))
return fn(element, compiler, **kw)
-
"""
-from .. import exc as sa_exc
from .. import util
from ..orm.session import Session
from ..orm.query import Query
__all__ = ['ShardedSession', 'ShardedQuery']
+
class ShardedQuery(Query):
def __init__(self, *args, **kwargs):
super(ShardedQuery, self).__init__(*args, **kwargs)
else:
return None
+
class ShardedSession(Session):
def __init__(self, shard_chooser, id_chooser, query_chooser, shards=None,
query_cls=ShardedQuery, **kwargs):
"""Construct a ShardedSession.
- :param shard_chooser: A callable which, passed a Mapper, a mapped instance, and possibly a
- SQL clause, returns a shard ID. This id may be based off of the
- attributes present within the object, or on some round-robin
- scheme. If the scheme is based on a selection, it should set
- whatever state on the instance to mark it in the future as
+ :param shard_chooser: A callable which, passed a Mapper, a mapped
+          instance, and possibly a SQL clause, returns a shard ID. This ID
+          may be based on the attributes present within the object, or on
+ some round-robin scheme. If the scheme is based on a selection, it
+ should set whatever state on the instance to mark it in the future as
participating in that shard.
- :param id_chooser: A callable, passed a query and a tuple of identity values, which
- should return a list of shard ids where the ID might reside. The
- databases will be queried in the order of this listing.
+ :param id_chooser: A callable, passed a query and a tuple of identity
+ values, which should return a list of shard ids where the ID might
+ reside. The databases will be queried in the order of this listing.
- :param query_chooser: For a given Query, returns the list of shard_ids where the query
- should be issued. Results from all shards returned will be combined
- together into a single listing.
+ :param query_chooser: For a given Query, returns the list of shard_ids
+ where the query should be issued. Results from all shards returned
+ will be combined together into a single listing.
- :param shards: A dictionary of string shard names to :class:`~sqlalchemy.engine.Engine`
- objects.
+ :param shards: A dictionary of string shard names
+ to :class:`~sqlalchemy.engine.Engine` objects.
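
        A usage sketch, assuming chooser callables defined as described
        above (the engine URLs and shard names are illustrative)::

            db1 = create_engine('postgresql://host1/db1')
            db2 = create_engine('postgresql://host2/db2')

            session = ShardedSession(
                shard_chooser=shard_chooser,
                id_chooser=id_chooser,
                query_chooser=query_chooser,
                shards={'shard1': db1, 'shard2': db2})
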
"""
super(ShardedSession, self).__init__(query_cls=query_cls, **kwargs)
shard_id=shard_id,
instance=instance).contextual_connect(**kwargs)
- def get_bind(self, mapper, shard_id=None, instance=None, clause=None, **kw):
+ def get_bind(self, mapper, shard_id=None,
+ instance=None, clause=None, **kw):
if shard_id is None:
shard_id = self.shard_chooser(mapper, instance, clause=clause)
return self.__binds[shard_id]
def bind_shard(self, shard_id, bind):
self.__binds[shard_id] = bind
-
-
in-Python getter/setter methods can treat ``accounts`` as a Python
list available on ``self``.
-However, at the expression level, it's expected that the ``User`` class will be used
-in an appropriate context such that an appropriate join to
+However, at the expression level, it's expected that the ``User`` class will
+be used in an appropriate context such that an appropriate join to
``SavingsAccount`` will be present::
>>> print Session().query(User, User.balance).\\
We can, of course, forego being dependent on the enclosing query's usage
-of joins in favor of the correlated
-subquery, which can portably be packed into a single colunn expression.
-A correlated subquery is more portable, but often performs more poorly
-at the SQL level.
-Using the same technique illustrated at :ref:`mapper_column_property_sql_expressions`,
+of joins in favor of the correlated subquery, which can portably be packed
+into a single column expression. A correlated subquery is more portable, but
+often performs more poorly at the SQL level. Using the same technique
+illustrated at :ref:`mapper_column_property_sql_expressions`,
we can adjust our ``SavingsAccount`` example to aggregate the balances for
*all* accounts, and use a correlated subquery for the column expression::
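
    from sqlalchemy import select, func

    class User(Base):
        # ... (column definitions as in the earlier examples) ...

        @hybrid_property
        def balance(self):
            return sum(acc.balance for acc in self.accounts)

        @balance.expression
        def balance(cls):
            # a sketch: correlate against all SavingsAccount rows
            # owned by this User
            return select([func.sum(SavingsAccount.balance)]).\\
                    where(SavingsAccount.owner_id == cls.id).\\
                    label('total_balance')
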
from .. import util
from ..orm import attributes, interfaces
+
class hybrid_method(object):
"""A decorator which allows definition of a Python object method with both
instance-level and class-level behavior.
self.expr = expr
return self
+
class hybrid_property(object):
"""A decorator which allows definition of a Python descriptor with both
instance-level and class-level behavior.
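
    A sketch of typical use, with a hypothetical ``Interval`` class::

        class Interval(object):
            def __init__(self, start, end):
                self.start = start
                self.end = end

            @hybrid_property
            def length(self):
                # on an instance: plain arithmetic; on the class:
                # Interval.length renders a SQL expression
                return self.end - self.start
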
self.expr = expr
return self
+
class Comparator(interfaces.PropComparator):
"""A helper class that allows easy construction of custom
:class:`~.orm.interfaces.PropComparator`
"""
+
def find_native_user_instrumentation_hook(cls):
"""Find user-specified instrumentation management for a class."""
return getattr(cls, INSTRUMENTATION_MANAGER, None)
"""
+
class ExtendedInstrumentationRegistry(InstrumentationFactory):
"""Extends :class:`.InstrumentationFactory` with additional
bookkeeping, to accommodate multiple types of
def state_of(self, instance):
if instance is None:
raise AttributeError("None has no persistent state.")
- return self._state_finders.get(instance.__class__, _default_state_getter)(instance)
+ return self._state_finders.get(
+ instance.__class__, _default_state_getter)(instance)
def dict_of(self, instance):
if instance is None:
raise AttributeError("None has no persistent state.")
- return self._dict_finders.get(instance.__class__, _default_dict_getter)(instance)
+ return self._dict_finders.get(
+ instance.__class__, _default_dict_getter)(instance)
+
orm_instrumentation._instrumentation_factory = \
_instrumentation_factory = ExtendedInstrumentationRegistry()
orm_instrumentation.instrumentation_finders = instrumentation_finders
+
class InstrumentationManager(object):
"""User-defined class instrumentation extension.
def dict_getter(self, class_):
return lambda inst: self.get_instance_dict(class_, inst)
+
class _ClassInstrumentationAdapter(ClassManager):
"""Adapts a user-defined InstrumentationManager to a ClassManager."""
def dict_getter(self):
return self._get_dict
+
def _install_instrumented_lookups():
"""Replace global class/object management functions
with ExtendedInstrumentationRegistry implementations, which
"""
_install_lookups(
dict(
- instance_state = _instrumentation_factory.state_of,
- instance_dict = _instrumentation_factory.dict_of,
- manager_of_class = _instrumentation_factory.manager_of_class
+ instance_state=_instrumentation_factory.state_of,
+ instance_dict=_instrumentation_factory.dict_of,
+ manager_of_class=_instrumentation_factory.manager_of_class
)
)
+
def _reinstall_default_lookups():
"""Restore simplified lookups."""
_install_lookups(
dict(
- instance_state = _default_state_getter,
- instance_dict = _default_dict_getter,
- manager_of_class = _default_manager_getter
+ instance_state=_default_state_getter,
+ instance_dict=_default_dict_getter,
+ manager_of_class=_default_manager_getter
)
)
+
def _install_lookups(lookups):
global instance_state, instance_dict, manager_of_class
instance_state = lookups['instance_state']
"""Provide support for tracking of in-place changes to scalar values,
which are propagated into ORM change events on owning parent objects.
-The :mod:`sqlalchemy.ext.mutable` extension replaces SQLAlchemy's legacy approach to in-place
-mutations of scalar values, established by the :class:`.types.MutableType`
-class as well as the ``mutable=True`` type flag, with a system that allows
-change events to be propagated from the value to the owning parent, thereby
-removing the need for the ORM to maintain copies of values as well as the very
-expensive requirement of scanning through all "mutable" values on each flush
-call, looking for changes.
+The :mod:`sqlalchemy.ext.mutable` extension replaces SQLAlchemy's legacy
+approach to in-place mutations of scalar values, established by the
+:class:`.types.MutableType` class as well as the ``mutable=True`` type flag,
+with a system that allows change events to be propagated from the value to
+the owning parent, thereby removing the need for the ORM to maintain copies
+of values as well as the very expensive requirement of scanning through all
+"mutable" values on each flush call, looking for changes.
.. _mutable_scalars:
value = json.loads(value)
return value
-The usage of ``json`` is only for the purposes of example. The :mod:`sqlalchemy.ext.mutable`
-extension can be used
+The usage of ``json`` is only for the purposes of example. The
+:mod:`sqlalchemy.ext.mutable` extension can be used
with any type whose target Python type may be mutable, including
:class:`.PickleType`, :class:`.postgresql.ARRAY`, etc.
built-in ``dict`` to produce a dict
subclass which routes all mutation events through ``__setitem__``. There are
variants on this approach, such as subclassing ``UserDict.UserDict`` or
-``collections.MutableMapping``; the part that's important to this
-example is that the :meth:`.Mutable.changed` method is called whenever an in-place change to the
-datastructure takes place.
+``collections.MutableMapping``; the part that's important to this example is
+that the :meth:`.Mutable.changed` method is called whenever an in-place
+change to the datastructure takes place.
We also redefine the :meth:`.Mutable.coerce` method which will be used to
convert any values that are not instances of ``MutableDict``, such
as the plain dictionaries returned by the ``json`` module, into the
-appropriate type. Defining this method is optional; we could just as well created our
-``JSONEncodedDict`` such that it always returns an instance of ``MutableDict``,
-and additionally ensured that all calling code uses ``MutableDict``
-explicitly. When :meth:`.Mutable.coerce` is not overridden, any values
-applied to a parent object which are not instances of the mutable type
-will raise a ``ValueError``.
+appropriate type. Defining this method is optional; we could just as well
+have created our ``JSONEncodedDict`` such that it always returns an
+instance of ``MutableDict``, and additionally ensured that all calling code
+uses ``MutableDict`` explicitly. When :meth:`.Mutable.coerce` is not
+overridden, any values applied to a parent object which are not instances
+of the mutable type will raise a ``ValueError``.
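
A sketch of such a :meth:`.Mutable.coerce` override::

    class MutableDict(Mutable, dict):

        @classmethod
        def coerce(cls, key, value):
            "Convert plain dictionaries to MutableDict."
            if not isinstance(value, MutableDict):
                if isinstance(value, dict):
                    return MutableDict(value)
                # raises ValueError for anything else
                return Mutable.coerce(key, value)
            else:
                return value
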
Our new ``MutableDict`` type offers a class method
:meth:`~.Mutable.as_mutable` which we can use within column metadata
True
The ``MutableDict`` can be associated with all future instances
-of ``JSONEncodedDict`` in one step, using :meth:`~.Mutable.associate_with`. This
-is similar to :meth:`~.Mutable.as_mutable` except it will intercept
-all occurrences of ``MutableDict`` in all mappings unconditionally, without
+of ``JSONEncodedDict`` in one step, using
+:meth:`~.Mutable.associate_with`. This is similar to
+:meth:`~.Mutable.as_mutable` except it will intercept all occurrences
+of ``MutableDict`` in all mappings unconditionally, without
the need to declare it individually::
MutableDict.associate_with(JSONEncodedDict)
from .. import event, types
from ..orm import mapper, object_mapper
from ..util import memoized_property
-from .. import exc
import weakref
+
class MutableBase(object):
- """Common base class to :class:`.Mutable` and :class:`.MutableComposite`."""
+ """Common base class to :class:`.Mutable`
+ and :class:`.MutableComposite`.
+
+ """
@memoized_property
def _parents(self):
"""
if value is None:
return None
- raise ValueError("Attribute '%s' does not accept objects of type %s" % (key, type(value)))
+ msg = "Attribute '%s' does not accept objects of type %s"
+ raise ValueError(msg % (key, type(value)))
@classmethod
def _listen_on_attribute(cls, attribute, coerce, parent_cls):
for val in state_dict['ext.mutable.values']:
val._parents[state.obj()] = key
-        event.listen(parent_cls, 'load', load, raw=True, propagate=True)
-        event.listen(parent_cls, 'refresh', load, raw=True, propagate=True)
-        event.listen(attribute, 'set', set, raw=True, retval=True, propagate=True)
-        event.listen(parent_cls, 'pickle', pickle, raw=True, propagate=True)
-        event.listen(parent_cls, 'unpickle', unpickle, raw=True, propagate=True)
+        event.listen(parent_cls, 'load', load,
+                     raw=True, propagate=True)
+        event.listen(parent_cls, 'refresh', load,
+                     raw=True, propagate=True)
+        event.listen(attribute, 'set', set,
+                     raw=True, retval=True, propagate=True)
+        event.listen(parent_cls, 'pickle', pickle,
+                     raw=True, propagate=True)
+        event.listen(parent_cls, 'unpickle', unpickle,
+                     raw=True, propagate=True)
class Mutable(MutableBase):
"""Mixin that defines transparent propagation of change
"""Associate this wrapper with all future mapped columns
of the given type.
- This is a convenience method that calls ``associate_with_attribute`` automatically.
+ This is a convenience method that calls
+ ``associate_with_attribute`` automatically.
.. warning::
The listeners established by this method are *global*
to all mappers, and are *not* garbage collected. Only use
- :meth:`.associate_with` for types that are permanent to an application,
- not with ad-hoc types else this will cause unbounded growth
- in memory usage.
+ :meth:`.associate_with` for types that are permanent to an
+ application, not with ad-hoc types else this will cause unbounded
+ growth in memory usage.
"""
)
Note that the returned type is always an instance, even if a class
- is given, and that only columns which are declared specifically with that
- type instance receive additional instrumentation.
+ is given, and that only columns which are declared specifically with
+ that type instance receive additional instrumentation.
To associate a particular mutable type with all occurrences of a
particular type, use the :meth:`.Mutable.associate_with` classmethod
return sqltype
+
class _MutableCompositeMeta(type):
def __init__(cls, classname, bases, dict_):
cls._setup_listeners()
return type.__init__(cls, classname, bases, dict_)
+
class MutableComposite(MutableBase):
"""Mixin that defines transparent propagation of change
events on a SQLAlchemy "composite" object to its
.. warning::
The listeners established by the :class:`.MutableComposite`
- class are *global* to all mappers, and are *not* garbage collected. Only use
- :class:`.MutableComposite` for types that are permanent to an application,
- not with ad-hoc types else this will cause unbounded growth
- in memory usage.
+ class are *global* to all mappers, and are *not* garbage
+ collected. Only use :class:`.MutableComposite` for types that are
+ permanent to an application, not with ad-hoc types else this will
+ cause unbounded growth in memory usage.
"""
__metaclass__ = _MutableCompositeMeta
"""Associate this wrapper with all future mapped composites
of the given type.
- This is a convenience method that calls ``associate_with_attribute`` automatically.
+ This is a convenience method that calls ``associate_with_attribute``
+ automatically.
"""
def listen_for_type(mapper, class_):
for prop in mapper.iterate_properties:
- if hasattr(prop, 'composite_class') and issubclass(prop.composite_class, cls):
- cls._listen_on_attribute(getattr(class_, prop.key), False, class_)
+ if (hasattr(prop, 'composite_class') and
+ issubclass(prop.composite_class, cls)):
+ cls._listen_on_attribute(
+ getattr(class_, prop.key), False, class_)
event.listen(mapper, 'mapper_configured', listen_for_type)
-
class MutableDict(Mutable, dict):
"""A dictionary type that implements :class:`.Mutable`.
:author: Jason Kirtland
-``orderinglist`` is a helper for mutable ordered relationships. It will intercept
-list operations performed on a relationship collection and automatically
-synchronize changes in list position with an attribute on the related objects.
-(See :ref:`advdatamapping_entitycollections` for more information on the general pattern.)
+``orderinglist`` is a helper for mutable ordered relationships. It will
+intercept list operations performed on a relationship collection and
+automatically synchronize changes in list position with an attribute on the
+related objects. (See :ref:`advdatamapping_entitycollections` for more
+information on the general pattern.)
Example: Two tables that store slides in a presentation. Each slide
has a number of bullet points, displayed in order by the 'position'
})
mapper(Bullet, bullets_table)
-The standard relationship mapping will produce a list-like attribute on each Slide
-containing all related Bullets, but coping with changes in ordering is totally
-your responsibility. If you insert a Bullet into that list, there is no
-magic- it won't have a position attribute unless you assign it it one, and
+The standard relationship mapping will produce a list-like attribute on each
+Slide containing all related Bullets, but coping with changes in ordering is
+totally your responsibility. If you insert a Bullet into that list, there is
+no magic - it won't have a position attribute unless you assign it one, and
you'll need to manually renumber all the subsequent Bullets in the list to
accommodate the insert.
-An ``orderinglist`` can automate this and manage the 'position' attribute on all
-related bullets for you.
+An ``orderinglist`` can automate this and manage the 'position' attribute on
+all related bullets for you.
.. sourcecode:: python+sql
s.bullets[2].position
>>> 2
-Use the ``ordering_list`` function to set up the ``collection_class`` on relationships
-(as in the mapper example above). This implementation depends on the list
-starting in the proper order, so be SURE to put an order_by on your relationship.
+Use the ``ordering_list`` function to set up the ``collection_class`` on
+relationships (as in the mapper example above). This implementation depends
+on the list starting in the proper order, so be SURE to put an order_by on
+your relationship.
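
A sketch of that configuration, reusing the mapping from the example
above::

    mapper(Slide, slides_table, properties={
        'bullets': relationship(Bullet,
                        collection_class=ordering_list('position'),
                        order_by=[bullets_table.c.position])
    })
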
.. warning::
``ordering_list`` only provides limited functionality when a primary
- key column or unique column is the target of the sort. Since changing the order of
- entries often means that two rows must trade values, this is not possible when
- the value is constrained by a primary key or unique constraint, since one of the rows
- would temporarily have to point to a third available value so that the other row
- could take its old value. ``ordering_list`` doesn't do any of this for you,
+ key column or unique column is the target of the sort. Since changing the
+ order of entries often means that two rows must trade values, this is not
+ possible when the value is constrained by a primary key or unique
+ constraint, since one of the rows would temporarily have to point to a
+ third available value so that the other row could take its old
+ value. ``ordering_list`` doesn't do any of this for you,
nor does SQLAlchemy itself.
``ordering_list`` takes the name of the related object's ordering attribute as
from ..orm.collections import collection
from .. import util
-__all__ = [ 'ordering_list' ]
+__all__ = ['ordering_list']
def ordering_list(attr, count_from=None, **kw):
"""Prepares an OrderingList factory for use in mapper definitions.
- Returns an object suitable for use as an argument to a Mapper relationship's
- ``collection_class`` option. Arguments are:
+ Returns an object suitable for use as an argument to a Mapper
+ relationship's ``collection_class`` option. Arguments are:
attr
Name of the mapped attribute to use for storage and retrieval of
kw = _unsugar_count_from(count_from=count_from, **kw)
return lambda: OrderingList(attr, **kw)
+
# Ordering utility functions
+
+
def count_from_0(index, collection):
"""Numbering function: consecutive integers starting at 0."""
return index
+
def count_from_1(index, collection):
"""Numbering function: consecutive integers starting at 1."""
return index + 1
+
def count_from_n_factory(start):
"""Numbering function: consecutive integers starting at arbitrary start."""
pass
return f
+
def _unsugar_count_from(**kw):
"""Builds counting functions from keyword arguments.
kw['ordering_func'] = count_from_n_factory(count_from)
return kw
+
class OrderingList(list):
"""A custom list that manages position information for its children.
Name of the attribute that stores the object's order in the
relationship.
- :param ordering_func: Optional. A function that maps the position in the Python list to a
- value to store in the ``ordering_attr``. Values returned are
- usually (but need not be!) integers.
+ :param ordering_func: Optional. A function that maps the position in
+ the Python list to a value to store in the
+ ``ordering_attr``. Values returned are usually (but need not be!)
+ integers.
An ``ordering_func`` is called with two positional parameters: the
index of the element in the list, and the list itself.
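
        As a sketch, a hypothetical numbering function that stores stepped
        values rather than consecutive integers::

            def stepped_numbering(index, collection):
                # position values become 0, 10, 20, ... rather than 0, 1, 2
                return index * 10

            ordering_list('position', ordering_func=stepped_numbering)
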
func.__doc__ = getattr(list, func_name).__doc__
del func_name, func
+
def _reconstitute(cls, dict_, items):
""" Reconstitute an ``OrderingList``.
needed for:
* instances of user-defined classes. These contain no references to engines,
- sessions or expression constructs in the typical case and can be serialized directly.
+ sessions or expression constructs in the typical case and can be serialized
+ directly.
-* Table metadata that is to be loaded entirely from the serialized structure (i.e. is
- not already declared in the application). Regular pickle.loads()/dumps() can
- be used to fully dump any ``MetaData`` object, typically one which was reflected
- from an existing database at some previous point in time. The serializer module
- is specifically for the opposite case, where the Table metadata is already present
- in memory.
+* Table metadata that is to be loaded entirely from the serialized structure
+ (i.e. is not already declared in the application). Regular
+ pickle.loads()/dumps() can be used to fully dump any ``MetaData`` object,
+ typically one which was reflected from an existing database at some previous
+ point in time. The serializer module is specifically for the opposite case,
+ where the Table metadata is already present in memory.
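
A usage sketch (``MyClass``, ``metadata`` and ``Session`` are
illustrative names)::

    from sqlalchemy.ext.serializer import loads, dumps

    # pickle a Query referencing mapped classes and tables
    serialized = dumps(
        Session.query(MyClass).filter(MyClass.data == 'foo'))

    # later: rebuild it against live MetaData and a Session registry
    query = loads(serialized, metadata, Session)
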
"""
-from ..orm import class_mapper, Query
+from ..orm import class_mapper
from ..orm.session import Session
from ..orm.mapper import Mapper
from ..orm.attributes import QueryableAttribute
__all__ = ['Serializer', 'Deserializer', 'dumps', 'loads']
-
def Serializer(*args, **kw):
pickler = pickle.Pickler(*args, **kw)
our_ids = re.compile(r'(mapper|table|column|session|attribute|engine):(.*)')
+
def Deserializer(file, metadata=None, scoped_session=None, engine=None):
unpickler = pickle.Unpickler(file)
unpickler.persistent_load = persistent_load
return unpickler
+
def dumps(obj, protocol=0):
buf = byte_buffer()
pickler = Serializer(buf, protocol)
pickler.dump(obj)
return buf.getvalue()
+
def loads(data, metadata=None, scoped_session=None, engine=None):
buf = byte_buffer(data)
unpickler = Deserializer(buf, metadata, scoped_session, engine)
return unpickler.load()
-
-