from sqlalchemy.orm.interfaces import (
EXT_CONTINUE,
EXT_STOP,
- ExtensionOption,
InstrumentationManager,
MapperExtension,
PropComparator,
to have fewer properties than its superclass, ``A``.
:param extension: A :class:`.MapperExtension` instance or
- list of :class:`~sqlalchemy.orm.interfaces.MapperExtension`
+ list of :class:`.MapperExtension`
instances which will be applied to all operations by this
- :class:`~sqlalchemy.orm.mapper.Mapper`.
+ :class:`.Mapper`. **Deprecated.** The event
+ system (:class:`.MapperEvents`) is now used instead.
:param include_properties: An inclusive list or set of string column
names to map. As of SQLAlchemy 0.6.4, this collection may also
finally:
mapperlib._COMPILE_MUTEX.release()
-def extension(ext):
- """Return a ``MapperOption`` that will insert the given
- ``MapperExtension`` to the beginning of the list of extensions
- that will be called in the context of the ``Query``.
-
- Used with :meth:`~sqlalchemy.orm.query.Query.options`.
-
- """
- return ExtensionOption(ext)
-
@sa_util.accepts_a_list_as_starargs(list_deprecation='deprecated')
def joinedload(*keys, **kw):
"""Return a ``MapperOption`` that will convert the property of the given
"""
+ @classmethod
+ def _adapt_instrument_class(cls, self, listener):
+ cls._adapt_listener_methods(self, listener, ('instrument_class',))
+
+ @classmethod
+ def _adapt_listener(cls, self, listener):
+ cls._adapt_listener_methods(
+ self, listener,
+ (
+ 'init_instance',
+ 'init_failed',
+ 'translate_row',
+ 'create_instance',
+ 'append_result',
+ 'populate_instance',
+ 'reconstruct_instance',
+ 'before_insert',
+ 'after_insert',
+ 'before_update',
+ 'after_update',
+ 'before_delete',
+ 'after_delete'
+ ))
+
+ @classmethod
+ def _adapt_listener_methods(cls, self, listener, methods):
+ for meth in methods:
+ me_meth = getattr(MapperExtension, meth)
+ ls_meth = getattr(listener, meth)
+ # TODO: this compares the listener's method against the base
+ # MapperExtension method so that only overridden hooks are
+ # adapted; the comparison is probably moot
+ if me_meth is not ls_meth:
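+ # the legacy 'reconstruct_instance' hook has no mapper-level
+ # event; adapt it to the class-level 'on_load' event, wrapping
+ # the listener so it still receives (mapper, instance).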
+ if meth == 'reconstruct_instance':
+ def go(ls_meth):
+ def reconstruct(instance):
+ ls_meth(self, instance)
+ return reconstruct
+ event.listen(go(ls_meth), 'on_load', self.class_manager, raw=False)
+ else:
+ event.listen(ls_meth, "on_%s" % meth, self, raw=False, retval=True)
+
+
def instrument_class(self, mapper, class_):
"""Receive a class when the mapper is first constructed, and has
applied instrumentation to the mapped class.
"""
from sqlalchemy import event, util, exc
+import inspect
class InstrumentationEvents(event.Events):
"""Events related to class instrumentation events.
class MapperEvents(event.Events):
- """"""
+ """Define events specific to mappings.
+
+ e.g.::
+
+ from sqlalchemy import event
+ from sqlalchemy.orm import mapper
+
+ # attach to a class
+ event.listen(my_before_insert_listener, 'on_before_insert', SomeMappedClass)
+
+ # attach to all mappers
+ event.listen(some_listener, 'on_before_insert', mapper)
+
+ Mapper event listeners are propagated to subclass (inheriting)
+ mappers unconditionally.
+
+ Several modifiers are available to the listen() function.
+
+ :param raw=False: When True, the "target" argument to the
+ event, if applicable, will be the :class:`.InstanceState` management
+ object, rather than the mapped instance itself.
+ :param retval=False: when True, the user-defined event listening
+ function must return a value, the purpose of which is either to
+ control subsequent event propagation, or to otherwise alter
+ the operation in progress by the mapper. Possible values
+ here are::
+
+ * `sqlalchemy.orm.interfaces.EXT_CONTINUE` - continue event
+ processing normally.
+ * `sqlalchemy.orm.interfaces.EXT_STOP` - cancel all subsequent
+ event handlers in the chain.
+ * other values - the return value specified by specific listeners,
+ such as ``on_translate_row()`` or ``on_create_instance()``.
+
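+ For example, a minimal sketch of the ``raw`` modifier
+ (``SomeMappedClass`` and the listener body are illustrative)::
+
+ def my_raw_listener(mapper, connection, target):
+     # with raw=True, "target" is the InstanceState; the
+     # mapped instance is available via .obj()
+     instance = target.obj()
+
+ event.listen(my_raw_listener, 'on_before_insert', SomeMappedClass, raw=True)
+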
+ """
+
+ @classmethod
+ def accept_with(cls, target):
+ from sqlalchemy.orm import mapper, class_mapper, Mapper
+ if target is mapper:
+ return Mapper
+ elif isinstance(target, type):
+ return class_mapper(target)
+ else:
+ return target
+
+ @classmethod
+ def listen(cls, fn, identifier, target,
+ raw=False, retval=False):
+ from sqlalchemy.orm.interfaces import EXT_CONTINUE
+
+ if not raw or not retval:
+ if not raw:
+ meth = getattr(cls, identifier)
+ try:
+ target_index = inspect.getargspec(meth)[0].index('target') - 1
+ except ValueError:
+ target_index = None
+
+ wrapped_fn = fn
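+ # the wrapper below converts the InstanceState in the "target"
+ # position to the mapped instance when raw=False, and returns
+ # EXT_CONTINUE on the user's behalf when retval=False.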
+ def wrap(*arg, **kw):
+ if not raw and target_index is not None:
+ arg = list(arg)
+ arg[target_index] = arg[target_index].obj()
+ if not retval:
+ wrapped_fn(*arg, **kw)
+ return EXT_CONTINUE
+ else:
+ return wrapped_fn(*arg, **kw)
+ fn = wrap
+
+ for mapper in target.self_and_descendants:
+ event.Events.listen(fn, identifier, mapper)
+
+
+ def on_instrument_class(self, mapper, class_):
+ """Receive a class when the mapper is first constructed, and has
+ applied instrumentation to the mapped class.
+
+ This listener can generally only be applied to the :class:`.Mapper`
+ class overall.
+
+ """
+
+ def on_init_instance(self, mapper, class_, oldinit, target, args, kwargs):
+ """Receive an instance when it's constructor is called.
+
+ This method is only called during a userland construction of
+ an object. It is not called when an object is loaded from the
+ database.
+
+ The return value is only significant within the event
+ chain; the parent mapper's behavior isn't modified by this listener.
+
+ """
+
+ def on_init_failed(self, mapper, class_, oldinit, target, args, kwargs):
+ """Receive an instance when it's constructor has been called,
+ and raised an exception.
+
+ This method is only called during a userland construction of
+ an object. It is not called when an object is loaded from the
+ database.
+
+ The return value is only significant within the event
+ chain; the parent mapper's behavior isn't modified by this listener.
+
+ """
+
+ def on_translate_row(self, mapper, context, row):
+ """Perform pre-processing on the given result row and return a
+ new row instance.
+
+ This listener is typically registered with ``retval=True``.
+ It is called when the mapper first receives a row, before
+ the object identity or the instance itself has been derived
+ from that row. The given row may or may not be a
+ ``RowProxy`` object - it will always be a dictionary-like
+ object which contains mapped columns as keys. The
+ returned object should also be a dictionary-like object
+ which recognizes mapped columns as keys.
+
+ If the ultimate return value is EXT_CONTINUE, the row
+ is not translated.
+
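+ A minimal sketch, registered with ``retval=True``
+ (``SomeMappedClass`` is illustrative)::
+
+ from sqlalchemy import event
+ from sqlalchemy.orm.interfaces import EXT_CONTINUE
+
+ def translate(mapper, context, row):
+     # inspect or pre-process the row here; returning
+     # EXT_CONTINUE leaves the row untranslated
+     return EXT_CONTINUE
+
+ event.listen(translate, 'on_translate_row', SomeMappedClass, retval=True)
+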
+ """
+
+ def on_create_instance(self, mapper, context, row, class_):
+ """Receive a row when a new object instance is about to be
+ created from that row.
+
+ The method can choose to create the instance itself, or it can return
+ EXT_CONTINUE to indicate normal object creation should take place.
+ This listener is typically registered with ``retval=True``.
+
+ mapper
+ The mapper doing the operation
+
+ context
+ The QueryContext generated from the Query.
+
+ row
+ The result row from the database
+
+ class\_
+ The class we are mapping.
+
+ return value
+ A new object instance, or EXT_CONTINUE
+
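+ A minimal sketch (``SomeMappedClass`` is illustrative; returning
+ EXT_CONTINUE lets the mapper create the instance normally)::
+
+ from sqlalchemy import event
+ from sqlalchemy.orm.interfaces import EXT_CONTINUE
+
+ def create(mapper, context, row, class_):
+     # return an instance of a mapped class here to take over
+     # construction, or EXT_CONTINUE to decline
+     return EXT_CONTINUE
+
+ event.listen(create, 'on_create_instance', SomeMappedClass, retval=True)
+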
+ """
+
+ def on_append_result(self, mapper, context, row, target,
+ result, **flags):
+ """Receive an object instance before that instance is appended
+ to a result list.
+
+ If this method is registered with ``retval=True``,
+ the append operation can be replaced. If any value other than
+ EXT_CONTINUE is returned, result appending will not proceed for
+ this instance, giving this extension an opportunity to do the
+ appending itself, if desired.
+
+ mapper
+ The mapper doing the operation.
+
+ context
+ The QueryContext generated from the Query.
+
+ row
+ The result row from the database.
+
+ target
+ The object instance to be appended to the result, or
+ the InstanceState if registered with ``raw=True``.
+
+ result
+ List to which results are being appended.
+
+ \**flags
+ extra information about the row, same as criterion in
+ ``create_row_processor()`` method of
+ :class:`~sqlalchemy.orm.interfaces.MapperProperty`
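+
+ A minimal sketch that performs the append itself (``SomeMappedClass``
+ is illustrative)::
+
+ from sqlalchemy import event
+ from sqlalchemy.orm.interfaces import EXT_STOP
+
+ def append(mapper, context, row, target, result, **flags):
+     # append the instance ourselves, then suppress the default
+     # append by returning something other than EXT_CONTINUE
+     result.append(target)
+     return EXT_STOP
+
+ event.listen(append, 'on_append_result', SomeMappedClass, retval=True)
+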
+ """
+
+
+ def on_populate_instance(self, mapper, context, row,
+ target, **flags):
+ """Receive an instance before that instance has
+ its attributes populated.
+
+ This usually corresponds to a newly loaded instance but may
+ also correspond to an already-loaded instance which has
+ unloaded attributes to be populated. The method may be called
+ many times for a single instance, as multiple result rows are
+ used to populate eagerly loaded collections.
+
+ If this listener is registered with ``retval=True`` and
+ returns EXT_CONTINUE, instance population will
+ proceed normally. If any other value or None is returned,
+ instance population will not proceed, giving this extension an
+ opportunity to populate the instance itself, if desired.
+
+ As of 0.5, most usages of this hook are obsolete. For a
+ generic "object has been newly created from a row" hook, use
+ ``on_reconstruct_instance()``, or the ``@orm.reconstructor``
+ decorator.
+
+ """
+
+ def on_reconstruct_instance(self, mapper, target):
+ """Receive an object instance after it has been created via
+ ``__new__``, and after initial attribute population has
+ occurred.
+
+ This typically occurs when the instance is created based on
+ incoming result rows, and is only called once for that
+ instance's lifetime.
+
+ Note that during a result-row load, this method is called upon
+ the first row received for this instance. Note that some
+ attributes and collections may or may not be loaded or even
+ initialized, depending on what's present in the result rows.
+
+ The return value is only significant within the event
+ chain; the parent mapper's behavior isn't modified by this listener.
+
+ """
+
+ def on_before_insert(self, mapper, connection, target):
+ """Receive an object instance before that instance is inserted
+ into its table.
+
+ This is a good place to set up primary key values and such
+ that aren't handled otherwise.
+
+ Column-based attributes can be modified within this method
+ which will result in the new value being inserted. However
+ *no* changes to the overall flush plan can be made, and
+ manipulation of the ``Session`` will not have the desired effect.
+ To manipulate the ``Session`` within an extension, use
+ ``SessionExtension``.
+
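+ A minimal sketch (``SomeMappedClass`` and the attribute name are
+ illustrative)::
+
+ from sqlalchemy import event
+
+ def set_defaults(mapper, connection, target):
+     # column-based attribute changes made here are included
+     # in the INSERT statement
+     if target.data is None:
+         target.data = 'default value'
+
+ event.listen(set_defaults, 'on_before_insert', SomeMappedClass)
+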
+ """
+
+
+ def on_after_insert(self, mapper, connection, target):
+ """Receive an object instance after that instance is inserted.
+
+ """
+
+ def on_before_update(self, mapper, connection, target):
+ """Receive an object instance before that instance is updated.
+
+ Note that this method is called for all instances that are marked as
+ "dirty", even those which have no net changes to their column-based
+ attributes. An object is marked as dirty when any of its column-based
+ attributes have a "set attribute" operation called or when any of its
+ collections are modified. If, at update time, no column-based
+ attributes have any net changes, no UPDATE statement will be issued.
+ This means that an instance being sent to on_before_update is *not* a
+ guarantee that an UPDATE statement will be issued (although you can
+ affect the outcome here).
+
+ To detect if the column-based attributes on the object have net
+ changes, and will therefore generate an UPDATE statement, use
+ ``object_session(instance).is_modified(instance,
+ include_collections=False)``.
+
+ Column-based attributes can be modified within this method
+ which will result in the new value being updated. However
+ *no* changes to the overall flush plan can be made, and
+ manipulation of the ``Session`` will not have the desired effect.
+ To manipulate the ``Session`` within an extension, use
+ ``SessionExtension``.
+
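+ A minimal sketch using the check above (``SomeMappedClass`` and the
+ attribute name are illustrative)::
+
+ from sqlalchemy import event
+ from sqlalchemy.orm import object_session
+
+ def check_update(mapper, connection, target):
+     # only act when an UPDATE will actually be emitted
+     session = object_session(target)
+     if session.is_modified(target, include_collections=False):
+         target.last_modified_note = 'updated'
+
+ event.listen(check_update, 'on_before_update', SomeMappedClass)
+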
+ """
+
+ def on_after_update(self, mapper, connection, target):
+ """Receive an object instance after that instance is updated.
+
+ """
+
+ def on_before_delete(self, mapper, connection, target):
+ """Receive an object instance before that instance is deleted.
+
+ Note that *no* changes to the overall flush plan can be made
+ here; and manipulation of the ``Session`` will not have the
+ desired effect. To manipulate the ``Session`` within an
+ extension, use ``SessionExtension``.
+
+ """
+
+ def on_after_delete(self, mapper, connection, target):
+ """Receive an object instance after that instance is deleted.
+
+ """
+
@classmethod
def remove(cls, fn, identifier, target):
raise NotImplementedError("Removal of mapper events not yet implemented")
:param raw=False: When True, the "target" argument to the
event will be the :class:`.InstanceState` management
object, rather than the mapped instance itself.
- :param retval=False:` when True, the user-defined event
+ :param retval=False: when True, the user-defined event
listening must return the "value" argument from the
function. This gives the listening function the opportunity
to change the value that is ultimately used for a "set"
# TODO: for removal, need to package the identity
# of the wrapper with the original function.
- if raw is False or retval is False:
+ if not raw or not retval:
@util.decorator
def wrap(fn, target, value, *arg):
if not raw:
return l, mappers
-class ExtensionOption(MapperOption):
-
- """a MapperOption that applies a MapperExtension to a query
- operation.
-
- This class is **deprecated**.
-
- """
-
- def __init__(self, ext):
- self.ext = ext
-
- def process_query(self, query):
- entity = query._generate_mapper_zero()
- entity.extension = entity.extension.copy()
- entity.extension.push(self.ext)
-
class StrategizedOption(PropertyOption):
"""A MapperOption that affects which LoaderStrategy will be used
from sqlalchemy import sql, util, log, exc as sa_exc, event
from sqlalchemy.sql import expression, visitors, operators, util as sqlutil
from sqlalchemy.orm import instrumentation, attributes, sync, \
- exc as orm_exc, unitofwork
-from sqlalchemy.orm.interfaces import (
- MapperProperty, EXT_CONTINUE, PropComparator
- )
-from sqlalchemy.orm.util import (
- ExtensionCarrier, _INSTRUMENTOR, _class_to_mapper,
- _state_mapper, class_mapper, instance_str, state_str,
- )
+ exc as orm_exc, unitofwork, events
+from sqlalchemy.orm.interfaces import MapperProperty, EXT_CONTINUE, \
+ PropComparator
+
+from sqlalchemy.orm.util import _INSTRUMENTOR, _class_to_mapper, \
+ _state_mapper, class_mapper, instance_str, state_str
+
import sys
__all__ = (
_memoized_compiled_property = util.group_expirable_memoized_property()
-# a list of MapperExtensions that will be installed in all mappers by default
-global_extensions = []
-
# a constant returned by _get_attr_by_column to indicate
# this mapper is not handling an attribute for a particular
# column
self.local_table = local_table
self.inherit_condition = inherit_condition
self.inherit_foreign_keys = inherit_foreign_keys
- self.extension = extension
self._init_properties = properties or {}
self.delete_orphans = []
self.batch = batch
self._inherits_equated_pairs = None
self._memoized_values = {}
self._compiled_cache_size = _compiled_cache_size
+
+ self._deprecated_extensions = extension
if allow_null_pks:
util.warn_deprecated(
_COMPILE_MUTEX.acquire()
try:
self._configure_inheritance()
- self._configure_extensions()
+ self._configure_legacy_instrument_class()
self._configure_class_instrumentation()
+ self._configure_listeners()
self._configure_properties()
self._configure_pks()
global _new_mappers
self._expire_memoizations()
finally:
_COMPILE_MUTEX.release()
+
+ dispatch = event.dispatcher(events.MapperEvents)
def _configure_inheritance(self):
"""Configure settings related to inherting and/or inherited mappers
raise sa_exc.ArgumentError(
"Mapper '%s' does not have a mapped_table specified."
% self)
-
- def _configure_extensions(self):
- """Go through the global_extensions list as well as the list
- of ``MapperExtensions`` specified for this ``Mapper`` and
- creates a linked list of those extensions.
+
+ def _configure_legacy_instrument_class(self):
+ # TODO: tests failing
+ for ext in util.to_list(self._deprecated_extensions or []):
+ ext._adapt_instrument_class(self, ext)
+
+ def _configure_listeners(self):
+ # TODO: this has to be made smarter to look
+ # for existing extensions
+
+ for ext in util.to_list(self._deprecated_extensions or []):
+ ext._adapt_listener(self, ext)
- """
- extlist = util.OrderedSet()
-
- extension = self.extension
- if extension:
- for ext_obj in util.to_list(extension):
- # local MapperExtensions have already instrumented the class
- extlist.add(ext_obj)
-
if self.inherits:
- for ext in self.inherits.extension:
- if ext not in extlist:
- extlist.add(ext)
- else:
- for ext in global_extensions:
- if isinstance(ext, type):
- ext = ext()
- if ext not in extlist:
- extlist.add(ext)
-
- self.extension = ExtensionCarrier()
- for ext in extlist:
- self.extension.append(ext)
+ self.dispatch.update(self.inherits.dispatch)
def _configure_class_instrumentation(self):
"""If this mapper is to be a primary mapper (i.e. the
_mapper_registry[self] = True
- self.extension.instrument_class(self, self.class_)
+ self.dispatch.on_instrument_class(self, self.class_)
if manager is None:
manager = instrumentation.register_class(self.class_,
for name in method.__sa_validators__:
self._validators[name] = method
- if 'reconstruct_instance' in self.extension:
- def reconstruct(instance):
- self.extension.reconstruct_instance(self, instance)
- event.listen(reconstruct, 'on_load', manager, raw=False)
-
manager.info[_INSTRUMENTOR] = self
def dispose(self):
row_switch = None
# call before_XXX extensions
if not has_identity:
- if 'before_insert' in mapper.extension:
- mapper.extension.before_insert(
- mapper, conn, state.obj())
+ mapper.dispatch.on_before_insert(mapper, conn, state)
else:
- if 'before_update' in mapper.extension:
- mapper.extension.before_update(
- mapper, conn, state.obj())
+ mapper.dispatch.on_before_update(mapper, conn, state)
# detect if we have a "pending" instance (i.e. has
# no instance_key attached to it), and another instance
# call after_XXX extensions
if not has_identity:
- if 'after_insert' in mapper.extension:
- mapper.extension.after_insert(
- mapper, connection, state.obj())
+ mapper.dispatch.on_after_insert(mapper, connection, state)
else:
- if 'after_update' in mapper.extension:
- mapper.extension.after_update(
- mapper, connection, state.obj())
+ mapper.dispatch.on_after_update(mapper, connection, state)
def _postfetch(self, uowtransaction, table,
state, dict_, resultproxy,
else:
conn = connection
- if 'before_delete' in mapper.extension:
- mapper.extension.before_delete(mapper, conn, state.obj())
+ mapper.dispatch.on_before_delete(mapper, conn, state)
tups.append((state,
state.dict,
)
for state, state_dict, mapper, has_identity, connection in tups:
- if 'after_delete' in mapper.extension:
- mapper.extension.after_delete(mapper, connection, state.obj())
+ mapper.dispatch.on_after_delete(mapper, connection, state)
def _instance_processor(self, context, path, adapter,
- polymorphic_from=None, extension=None,
+ polymorphic_from=None,
only_load_props=None, refresh_state=None,
polymorphic_discriminator=None):
session_identity_map = context.session.identity_map
- if not extension:
- extension = self.extension
-
- translate_row = extension.get('translate_row', None)
- create_instance = extension.get('create_instance', None)
- populate_instance = extension.get('populate_instance', None)
- append_result = extension.get('append_result', None)
+ listeners = self.dispatch
+
+ translate_row = listeners.on_translate_row or None
+ create_instance = listeners.on_create_instance or None
+ populate_instance = listeners.on_populate_instance or None
+ append_result = listeners.on_append_result or None
populate_existing = context.populate_existing or self.always_refresh
if self.allow_partial_pks:
is_not_primary_key = _none_set.issuperset
def _instance(row, result):
if translate_row:
- ret = translate_row(self, context, row)
- if ret is not EXT_CONTINUE:
- row = ret
-
+ for fn in translate_row:
+ ret = fn(self, context, row)
+ if ret is not EXT_CONTINUE:
+ row = ret
+ break
+
if polymorphic_on is not None:
discriminator = row[polymorphic_on]
if discriminator is not None:
loaded_instance = True
if create_instance:
- instance = create_instance(self,
+ for fn in create_instance:
+ instance = fn(self,
context,
row, self.class_)
- if instance is EXT_CONTINUE:
- instance = self.class_manager.new_instance()
+ if instance is not EXT_CONTINUE:
+ manager = attributes.manager_of_class(
+ instance.__class__)
+ # TODO: if manager is None, raise a friendly error
+ # about returning instances of unmapped types
+ manager.setup_instance(instance)
+ break
else:
- manager = attributes.manager_of_class(
- instance.__class__)
- # TODO: if manager is None, raise a friendly error
- # about returning instances of unmapped types
- manager.setup_instance(instance)
+ instance = self.class_manager.new_instance()
else:
instance = self.class_manager.new_instance()
state.runid = context.runid
context.progress[state] = dict_
- if not populate_instance or \
- populate_instance(self, context, row, instance,
+ if populate_instance:
+ for fn in populate_instance:
+ ret = fn(self, context, row, state,
only_load_props=only_load_props,
- instancekey=identitykey, isnew=isnew) is \
- EXT_CONTINUE:
+ instancekey=identitykey, isnew=isnew)
+ if ret is not EXT_CONTINUE:
+ break
+ else:
+ populate_state(state, dict_, row, isnew, only_load_props)
+ else:
populate_state(state, dict_, row, isnew, only_load_props)
-
+
else:
# populate attributes on non-loading instances which have
# been expired
if loaded_instance:
state._run_on_load()
- if result is not None and \
- (not append_result or
- append_result(self, context, row, instance,
+ if result is not None:
+ if append_result:
+ for fn in append_result:
+ if fn(self, context, row, state,
result, instancekey=identitykey,
- isnew=isnew)
- is EXT_CONTINUE):
- result.append(instance)
+ isnew=isnew) is not EXT_CONTINUE:
+ break
+ else:
+ result.append(instance)
+ else:
+ result.append(instance)
return instance
return _instance
instrumenting_mapper = state.manager.info[_INSTRUMENTOR]
# compile() always compiles all mappers
instrumenting_mapper.compile()
- if 'init_instance' in instrumenting_mapper.extension:
- instrumenting_mapper.extension.init_instance(
- instrumenting_mapper, instrumenting_mapper.class_,
- state.manager.original_init,
- state.obj(), args, kwargs)
+ instrumenting_mapper.dispatch.on_init_instance(
+ instrumenting_mapper, instrumenting_mapper.class_,
+ state.manager.original_init,
+ state, args, kwargs)
def _event_on_init_failure(state, args, kwargs):
"""Run init_failed hooks."""
instrumenting_mapper = state.manager.info[_INSTRUMENTOR]
- if 'init_failed' in instrumenting_mapper.extension:
- util.warn_exception(
- instrumenting_mapper.extension.init_failed,
- instrumenting_mapper, instrumenting_mapper.class_,
- state.manager.original_init, state.obj(), args, kwargs)
+ util.warn_exception(
+ instrumenting_mapper.dispatch.on_init_failed,
+ instrumenting_mapper, instrumenting_mapper.class_,
+ state.manager.original_init, state, args, kwargs)
def _event_on_resurrect(state):
# re-populate the primary key elements
return self._select_from_entity or \
self._entity_zero().entity_zero
- def _extension_zero(self):
- ent = self._entity_zero()
- return getattr(ent, 'extension', ent.mapper.extension)
-
@property
def _mapper_entities(self):
# TODO: this is wrong, it's hardcoded to "primary entity" when
filter = None
custom_rows = single_entity and \
- 'append_result' in self._entities[0].extension
+ self._entities[0].mapper.dispatch.on_append_result
(process, labels) = \
zip(*[
def setup_entity(self, entity, mapper, adapter,
from_obj, is_aliased_class, with_polymorphic):
self.mapper = mapper
- self.extension = self.mapper.extension
self.adapter = adapter
self.selectable = from_obj
self._with_polymorphic = with_polymorphic
context,
(self.path_entity,),
adapter,
- extension=self.extension,
only_load_props=query._only_load_props,
refresh_state=context.refresh_state,
polymorphic_discriminator=
"""A signature-matching decorator factory."""
def decorate(fn):
+ if not inspect.isfunction(fn):
+ raise Exception("not a decoratable function")
spec = inspect.getargspec(fn)
names = tuple(spec[0]) + spec[1:3] + (fn.func_name,)
targ_name, fn_name = unique_symbols(names, 'target', 'fn')