-*- coding: utf-8; fill-column: 68 -*-
-
=======
CHANGES
=======
+
0.5.0rc5
========
+
- new features
- general
- - Documentation has been converted to Sphinx.
- In particular, the generated API documentation
- has been constructed into a full blown
- "API Reference" section which organizes
- editorial documentation combined with
- generated docstrings. Cross linking between
- sections and API docs are vastly improved,
- a javascript-powered search feature is
- provided, and a full index of all
- classes, functions and members is provided.
-
- - setup.py now imports setuptools only optionally.
- If not present, distutils is used.
- The new "pip" installer is recommended over
- easy_install as it installs in a more simplified
- way.
+    - Documentation has been converted to Sphinx. In particular,
+      the generated API documentation has been constructed into a
+      full-blown "API Reference" section which combines editorial
+      documentation with generated docstrings. Cross-linking
+      between sections and API docs is vastly improved, a
+      javascript-powered search feature is provided, and a full
+      index of all classes, functions and members is included.
+
+    - setup.py now imports setuptools only optionally, falling back
+      to distutils if setuptools is not present (see the sketch
+      below). The new "pip" installer is recommended over
+      easy_install as it installs in a simpler way.
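
A minimal sketch of the optional setuptools import described in the
entry above; the package name and version below are placeholders and
are not taken from this release:

    # Prefer setuptools; fall back to distutils when it is absent.
    try:
        from setuptools import setup
    except ImportError:
        from distutils.core import setup

    setup(name='example-package', version='0.0')
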
- orm
- - Query.with_polymorphic() now accepts a third
- argument "discriminator" which will replace
- the value of mapper.polymorphic_on for that
- query. Mappers themselves no longer require
- polymorphic_on to be set, even if the mapper
- has a polymorphic_identity. When not set,
- the mapper will load non-polymorphically
- by default. Together, these two features allow
- a non-polymorphic concrete inheritance setup
- to use polymorphic loading on a per-query basis,
- since concrete setups are prone to many
- issues when used polymorphically in all cases.
-
+ - Query.with_polymorphic() now accepts a third argument
+ "discriminator" which will replace the value of
+ mapper.polymorphic_on for that query. Mappers themselves no
+ longer require polymorphic_on to be set, even if the mapper
+ has a polymorphic_identity. When not set, the mapper will
+ load non-polymorphically by default. Together, these two
+ features allow a non-polymorphic concrete inheritance setup to
+ use polymorphic loading on a per-query basis, since concrete
+ setups are prone to many issues when used polymorphically in
+ all cases.
+
+ - dynamic_loader accepts a query_class= to customize the Query
+ classes used for both the dynamic collection and the queries
+ built from it.
+
- bugfixes, behavioral changes
- general
- orm
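
The Query.with_polymorphic() entry above can be illustrated with a
hedged sketch of per-query polymorphic loading against a concrete
inheritance setup.  Every name here (Employee/Manager, the employees
and managers tables, punion) is an illustrative assumption and not
part of this changeset:

    from sqlalchemy import (Table, Column, Integer, String, MetaData,
                            create_engine)
    from sqlalchemy.orm import mapper, create_session, polymorphic_union

    metadata = MetaData()
    employees = Table('employees', metadata,
        Column('id', Integer, primary_key=True),
        Column('name', String(50)))
    managers = Table('managers', metadata,
        Column('id', Integer, primary_key=True),
        Column('name', String(50)),
        Column('golf_swing', String(50)))

    class Employee(object): pass
    class Manager(Employee): pass

    # UNION of the concrete tables, producing a "type" discriminator
    # column whose values match the polymorphic identities below.
    punion = polymorphic_union(
        {'employee': employees, 'manager': managers}, 'type', 'punion')

    # No polymorphic_on is configured, so plain queries against these
    # mappers load non-polymorphically by default.
    mapper(Employee, employees, polymorphic_identity='employee')
    mapper(Manager, managers, inherits=Employee, concrete=True,
           polymorphic_identity='manager')

    engine = create_engine('sqlite://')
    metadata.create_all(engine)
    session = create_session(bind=engine)

    # Opt into polymorphic loading for this query only, passing the
    # discriminator column as the new third argument:
    session.query(Employee).with_polymorphic(
        [Manager], punion, punion.c.type).all()
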
"""
return RelationProperty(argument, secondary=secondary, **kwargs)
-def dynamic_loader(argument, secondary=None, primaryjoin=None, secondaryjoin=None,
- foreign_keys=None, backref=None, post_update=False, cascade=False, remote_side=None, enable_typechecks=True,
- passive_deletes=False, order_by=None, comparator_factory=None):
+def dynamic_loader(argument, secondary=None, primaryjoin=None,
+ secondaryjoin=None, foreign_keys=None, backref=None,
+ post_update=False, cascade=False, remote_side=None,
+ enable_typechecks=True, passive_deletes=False,
+ order_by=None, comparator_factory=None, query_class=None):
"""Construct a dynamically-loading mapper property.
This property is similar to :func:`relation`, except read
generally mutually exclusive with the use of the *secondary*
keyword argument.
+ :param query_class:
+ Optional, a custom Query subclass to be used as the basis for
+      the dynamic collection and the queries built from it.
"""
from sqlalchemy.orm.dynamic import DynaLoader
- return RelationProperty(argument, secondary=secondary, primaryjoin=primaryjoin,
- secondaryjoin=secondaryjoin, foreign_keys=foreign_keys, backref=backref,
- post_update=post_update, cascade=cascade, remote_side=remote_side, enable_typechecks=enable_typechecks,
- passive_deletes=passive_deletes, order_by=order_by, comparator_factory=comparator_factory,
- strategy_class=DynaLoader)
+ return RelationProperty(
+ argument, secondary=secondary, primaryjoin=primaryjoin,
+ secondaryjoin=secondaryjoin, foreign_keys=foreign_keys, backref=backref,
+ post_update=post_update, cascade=cascade, remote_side=remote_side,
+ enable_typechecks=enable_typechecks, passive_deletes=passive_deletes,
+ order_by=order_by, comparator_factory=comparator_factory,
+ strategy_class=DynaLoader, query_class=query_class)
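
A hedged usage sketch of the new query_class= argument.  The User and
Address classes and the users/addresses tables are assumed fixtures
(illustrative names, much like those in the test suite); only the
query_class wiring is shown:

    from sqlalchemy.orm import Query, mapper, dynamic_loader, create_session

    class AddressQuery(Query):
        # methods defined here become available both on the dynamic
        # collection and on queries derived from it
        def newest_first(self):
            return self.order_by(addresses.c.id.desc())

    mapper(Address, addresses)
    mapper(User, users, properties={
        'addresses': dynamic_loader(Address, query_class=AddressQuery)
    })

    sess = create_session()
    u = User()
    sess.add(u)
    sess.flush()

    u.addresses.append(Address())       # collection-style mutation
    u.addresses.newest_first().all()    # custom method, emits a SELECT
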
def column_property(*args, **kwargs):
"""Provide a column-level property for use with a Mapper.
class DynaLoader(strategies.AbstractRelationLoader):
def init_class_attribute(self):
self.is_class_level = True
- self._register_attribute(self.parent.class_, impl_class=DynamicAttributeImpl, target_mapper=self.parent_property.mapper, order_by=self.parent_property.order_by)
+        self._register_attribute(
+            self.parent.class_, impl_class=DynamicAttributeImpl,
+            target_mapper=self.parent_property.mapper,
+            order_by=self.parent_property.order_by,
+            query_class=self.parent_property.query_class)
def create_row_processor(self, selectcontext, path, mapper, row, adapter):
return (None, None)
class DynamicAttributeImpl(attributes.AttributeImpl):
uses_objects = True
accepts_scalar_loader = False
-
- def __init__(self, class_, key, typecallable, class_manager, target_mapper, order_by, **kwargs):
- super(DynamicAttributeImpl, self).__init__(class_, key, typecallable, class_manager, **kwargs)
+
+ def __init__(self, class_, key, typecallable, class_manager,
+ target_mapper, order_by, query_class=None, **kwargs):
+ super(DynamicAttributeImpl, self).__init__(
+ class_, key, typecallable, class_manager, **kwargs)
self.target_mapper = target_mapper
self.order_by = order_by
- self.query_class = AppenderQuery
+ if not query_class:
+ self.query_class = AppenderQuery
+ else:
+ self.query_class = mixin_user_query(query_class)
def get(self, state, passive=False):
if passive:
for ext in self.extensions:
ext.remove(state, value, initiator or self)
-
+
def _modified_event(self, state):
state.modified = True
if self.key not in state.committed_state:
# this is a hack to allow the _base.ComparableEntity fixture
# to work
state.dict[self.key] = True
-
return state.committed_state[self.key]
-
+
def set(self, state, value, initiator):
if initiator is self:
return
-
+
collection_history = self._modified_event(state)
if _state_has_identity(state):
old_collection = list(self.get(state))
def delete(self, *args, **kwargs):
raise NotImplementedError()
-
+
def get_history(self, state, passive=False):
c = self._get_collection_history(state, passive)
return attributes.History(c.added_items, c.unchanged_items, c.deleted_items)
-
+
def _get_collection_history(self, state, passive=False):
if self.key in state.committed_state:
c = state.committed_state[self.key]
self.fire_remove_event(state, value, initiator)
-class AppenderQuery(Query):
+class AppenderMixin(object):
+ query_class = None
+
def __init__(self, attr, state):
- super(AppenderQuery, self).__init__(attr.target_mapper, None)
+ Query.__init__(self, attr.target_mapper, None)
self.instance = state.obj()
self.attr = attr
passive=True).added_items)
else:
return self._clone(sess).count()
-
+
def _clone(self, sess=None):
- # note we're returning an entirely new Query class instance here
- # without any assignment capabilities;
- # the class of this query is determined by the session.
+ # note we're returning an entirely new Query class instance
+ # here without any assignment capabilities; the class of this
+ # query is determined by the session.
instance = self.instance
if sess is None:
sess = object_session(instance)
if sess is None:
- raise sa_exc.UnboundExecutionError("Parent instance %s is not bound to a Session, and no contextual session is established; lazy load operation of attribute '%s' cannot proceed" % (mapperutil.instance_str(instance), self.attr.key))
+ raise sa_exc.UnboundExecutionError(
+ "Parent instance %s is not bound to a Session, and no "
+ "contextual session is established; lazy load operation "
+ "of attribute '%s' cannot proceed" % (
+ mapperutil.instance_str(instance), self.attr.key))
+
-        q = sess.query(self.attr.target_mapper).with_parent(instance, self.attr.key)
+        if self.query_class:
+            query = self.query_class(self.attr.target_mapper, session=sess)
+        else:
+            query = sess.query(self.attr.target_mapper)
+        query = query.with_parent(instance, self.attr.key)
if self.attr.order_by:
- q = q.order_by(self.attr.order_by)
-        return q
+ query = query.order_by(self.attr.order_by)
+ return query
def append(self, item):
self.attr.append(attributes.instance_state(self.instance), item, None)
def remove(self, item):
self.attr.remove(attributes.instance_state(self.instance), item, None)
-
+
+class AppenderQuery(AppenderMixin, Query):
+ """A dynamic query that supports basic collection storage operations."""
+
+
+def mixin_user_query(cls):
+ """Return a new class with AppenderQuery functionality layered over."""
+ name = 'Appender' + cls.__name__
+ return type(name, (AppenderMixin, cls), {'query_class': cls})
+
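
For clarity, the class that mixin_user_query() generates for a
user-supplied Query subclass (called MyQuery here as a stand-in) is
roughly equivalent to writing the following by hand:

    class AppenderMyQuery(AppenderMixin, MyQuery):
        # _clone() consults query_class, so queries derived from the
        # collection come back as plain MyQuery instances: they keep
        # any custom methods but lose append()/remove().
        query_class = MyQuery
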
class CollectionHistory(object):
"""Overrides AttributeHistory to receive append/remove events directly."""
"""
def __init__(self, argument,
- secondary=None, primaryjoin=None,
- secondaryjoin=None,
- foreign_keys=None,
- uselist=None,
- order_by=False,
- backref=None,
- _is_backref=False,
- post_update=False,
- cascade=False, extension=None,
- viewonly=False, lazy=True,
- collection_class=None, passive_deletes=False,
- passive_updates=True, remote_side=None,
- enable_typechecks=True, join_depth=None,
- comparator_factory=None,
- strategy_class=None, _local_remote_pairs=None):
-
+ secondary=None, primaryjoin=None, secondaryjoin=None,
+ foreign_keys=None, uselist=None, order_by=False, backref=None,
+ _is_backref=False, post_update=False, cascade=False,
+ extension=None, viewonly=False, lazy=True,
+ collection_class=None, passive_deletes=False,
+ passive_updates=True, remote_side=None,
+ enable_typechecks=True, join_depth=None,
+ comparator_factory=None, strategy_class=None,
+ _local_remote_pairs=None, query_class=None):
self.uselist = uselist
self.argument = argument
self.secondary = secondary
self.passive_updates = passive_updates
self.remote_side = remote_side
self.enable_typechecks = enable_typechecks
-
+ self.query_class = query_class
+
self.join_depth = join_depth
self.local_remote_pairs = _local_remote_pairs
self.extension = extension
from sqlalchemy.orm import dynamic_loader, backref
from testlib import testing
from testlib.sa import Table, Column, Integer, String, ForeignKey, desc
-from testlib.sa.orm import mapper, relation, create_session
+from testlib.sa.orm import mapper, relation, create_session, Query
from testlib.testing import eq_
from testlib.compat import _function_named
from orm import _base, _fixtures
assert u1.addresses.count() == 1
assert u1.addresses[0] == Address()
+ @testing.resolve_artifact_names
+ def test_custom_query(self):
+ class MyQuery(Query):
+ pass
+
+ mapper(User, users, properties={
+ 'addresses':dynamic_loader(mapper(Address, addresses),
+ query_class=MyQuery)
+ })
+ sess = create_session()
+ u = User()
+ sess.add(u)
+
+ col = u.addresses
+ assert isinstance(col, Query)
+ assert isinstance(col, MyQuery)
+ assert hasattr(col, 'append')
+ assert type(col).__name__ == 'AppenderMyQuery'
+
+ q = col.limit(1)
+ assert isinstance(q, Query)
+ assert isinstance(q, MyQuery)
+ assert not hasattr(q, 'append')
+ assert type(q).__name__ == 'MyQuery'
+
+
class FlushTest(_fixtures.FixtureTest):
run_inserts = None