from sqlalchemy.orm import (mapper, relationship, create_session,
InstrumentationManager)
-from sqlalchemy.orm.attributes import set_attribute, get_attribute, del_attribute, is_instrumented
+from sqlalchemy.orm.attributes import set_attribute, get_attribute, del_attribute
+from sqlalchemy.orm.instrumentation import is_instrumented
from sqlalchemy.orm.collections import collection_adapter
import inspect, StringIO, sys, operator
from itertools import izip
-from sqlalchemy import exc, schema, util, types, log, interfaces, event
+from sqlalchemy import exc, schema, util, types, log, interfaces, \
+ event, events
from sqlalchemy.sql import expression
class Dialect(object):
def _do_commit(self):
self.connection._commit_twophase_impl(self.xid, self._is_prepared)
-class EngineEvents(event.Events):
- """Available events for :class:`.Engine`."""
-
- @classmethod
- def listen(cls, fn, identifier, target):
- if target.Connection is Connection:
- target.Connection = _listener_connection_cls(
- Connection,
- target.dispatch)
- event.Events.listen(fn, identifier, target)
-
- def on_before_execute(self, conn, clauseelement, multiparams, params):
- """Intercept high level execute() events."""
-
- def on_after_execute(self, conn, clauseelement, multiparams, params, result):
- """Intercept high level execute() events."""
-
- def on_before_cursor_execute(self, conn, cursor, statement,
- parameters, context, executemany):
- """Intercept low-level cursor execute() events."""
-
- def on_after_cursor_execute(self, conn, cursor, statement,
- parameters, context, executemany):
- """Intercept low-level cursor execute() events."""
-
- def on_begin(self, conn):
- """Intercept begin() events."""
-
- def on_rollback(self, conn):
- """Intercept rollback() events."""
-
- def on_commit(self, conn):
- """Intercept commit() events."""
-
- def on_savepoint(self, conn, name=None):
- """Intercept savepoint() events."""
-
- def on_rollback_savepoint(self, conn, name, context):
- """Intercept rollback_savepoint() events."""
-
- def on_release_savepoint(self, conn, name, context):
- """Intercept release_savepoint() events."""
-
- def on_begin_twophase(self, conn, xid):
- """Intercept begin_twophase() events."""
-
- def on_prepare_twophase(self, conn, xid):
- """Intercept prepare_twophase() events."""
-
- def on_rollback_twophase(self, conn, xid, is_prepared):
- """Intercept rollback_twophase() events."""
-
- def on_commit_twophase(self, conn, xid, is_prepared):
- """Intercept commit_twophase() events."""
class Engine(Connectable, log.Identified):
"""
self.update_execution_options(**execution_options)
- dispatch = event.dispatcher(EngineEvents)
+ dispatch = event.dispatcher(events.EngineEvents)
def update_execution_options(self, **opt):
"""update the execution_options dictionary of this :class:`Engine`.
invoked automatically when the threadlocal engine strategy is used.
"""
-from sqlalchemy import util, event
+from sqlalchemy import util, event, events
from sqlalchemy.engine import base
import weakref
self.__opencount = 0
base.Connection.close(self)
-class TLEvents(base.EngineEvents):
+class TLEvents(events.EngineEvents):
@classmethod
def listen(cls, fn, identifier, target):
if target.TLConnection is TLConnection:
target.TLConnection = base._listener_connection_cls(
TLConnection,
target.dispatch)
- base.EngineEvents.listen(fn, identifier, target)
+ events.EngineEvents.listen(fn, identifier, target)
class TLEngine(base.Engine):
"""An Engine that includes support for thread-local managed transactions."""
--- /dev/null
+"""Core event interfaces."""
+
+from sqlalchemy import event
+
+class DDLEvents(event.Events):
+ """
+    Define create/drop event listeners for schema objects.
+
+ See also:
+
+ :mod:`sqlalchemy.event`
+
+ """
+
+ def on_before_create(self, target, connection, **kw):
+ pass
+
+ def on_after_create(self, target, connection, **kw):
+ pass
+
+ def on_before_drop(self, target, connection, **kw):
+ pass
+
+ def on_after_drop(self, target, connection, **kw):
+ pass
+
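+
+# A minimal usage sketch for DDLEvents (assumption: a schema object such
+# as a Table is an accepted listen() target, and listen() takes the
+# (fn, identifier, target) argument order used elsewhere in this
+# changeset; "users_table" is a hypothetical Table).
+def _example_ddl_listening(users_table):
+    def log_before_create(target, connection, **kw):
+        # fires just before the CREATE statement for "target" is emitted
+        print "about to create %r" % target
+
+    event.listen(log_before_create, 'on_before_create', users_table)
+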
+
+class PoolEvents(event.Events):
+ """Available events for :class:`.Pool`.
+
+ The methods here define the name of an event as well
+ as the names of members that are passed to listener
+ functions.
+
+ e.g.::
+
+        from sqlalchemy import event
+
+ def my_on_checkout(dbapi_conn, connection_rec, connection_proxy):
+ "handle an on checkout event"
+
+        event.listen(my_on_checkout, 'on_checkout', Pool)
+
+ """
+
+ def on_connect(self, dbapi_connection, connection_record):
+ """Called once for each new DB-API connection or Pool's ``creator()``.
+
+        :param dbapi_connection:
+ A newly connected raw DB-API connection (not a SQLAlchemy
+ ``Connection`` wrapper).
+
+        :param connection_record:
+ The ``_ConnectionRecord`` that persistently manages the connection
+
+ """
+
+ def on_first_connect(self, dbapi_connection, connection_record):
+ """Called exactly once for the first DB-API connection.
+
+        :param dbapi_connection:
+ A newly connected raw DB-API connection (not a SQLAlchemy
+ ``Connection`` wrapper).
+
+        :param connection_record:
+ The ``_ConnectionRecord`` that persistently manages the connection
+
+ """
+
+ def on_checkout(self, dbapi_connection, connection_record, connection_proxy):
+ """Called when a connection is retrieved from the Pool.
+
+        :param dbapi_connection:
+ A raw DB-API connection
+
+        :param connection_record:
+ The ``_ConnectionRecord`` that persistently manages the connection
+
+        :param connection_proxy:
+ The ``_ConnectionFairy`` which manages the connection for the span of
+ the current checkout.
+
+ If you raise an ``exc.DisconnectionError``, the current
+ connection will be disposed and a fresh connection retrieved.
+ Processing of all checkout listeners will abort and restart
+ using the new connection.
+ """
+
+ def on_checkin(self, dbapi_connection, connection_record):
+ """Called when a connection returns to the pool.
+
+ Note that the connection may be closed, and may be None if the
+ connection has been invalidated. ``checkin`` will not be called
+ for detached connections. (They do not return to the pool.)
+
+        :param dbapi_connection:
+ A raw DB-API connection
+
+        :param connection_record:
+ The ``_ConnectionRecord`` that persistently manages the connection
+
+ """
+
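+
+# A minimal sketch of an on_checkout listener that forces a reconnect when
+# the raw connection has gone stale (assumption: "SELECT 1" is a valid
+# liveness probe for the DB-API in use; "my_pool" is a hypothetical Pool).
+# Raising DisconnectionError follows the contract described in
+# on_checkout() above: the connection is disposed and checkout restarts.
+def _example_checkout_ping(my_pool):
+    from sqlalchemy import exc
+
+    def ping_connection(dbapi_connection, connection_record,
+                        connection_proxy):
+        cursor = dbapi_connection.cursor()
+        try:
+            cursor.execute("SELECT 1")
+        except:
+            raise exc.DisconnectionError()
+        cursor.close()
+
+    event.listen(ping_connection, 'on_checkout', my_pool)
+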
+class EngineEvents(event.Events):
+ """Available events for :class:`.Engine`."""
+
+ @classmethod
+ def listen(cls, fn, identifier, target):
+ from sqlalchemy.engine.base import Connection, \
+ _listener_connection_cls
+ if target.Connection is Connection:
+ target.Connection = _listener_connection_cls(
+ Connection,
+ target.dispatch)
+ event.Events.listen(fn, identifier, target)
+
+ def on_before_execute(self, conn, clauseelement, multiparams, params):
+ """Intercept high level execute() events."""
+
+ def on_after_execute(self, conn, clauseelement, multiparams, params, result):
+ """Intercept high level execute() events."""
+
+ def on_before_cursor_execute(self, conn, cursor, statement,
+ parameters, context, executemany):
+ """Intercept low-level cursor execute() events."""
+
+ def on_after_cursor_execute(self, conn, cursor, statement,
+ parameters, context, executemany):
+ """Intercept low-level cursor execute() events."""
+
+ def on_begin(self, conn):
+ """Intercept begin() events."""
+
+ def on_rollback(self, conn):
+ """Intercept rollback() events."""
+
+ def on_commit(self, conn):
+ """Intercept commit() events."""
+
+ def on_savepoint(self, conn, name=None):
+ """Intercept savepoint() events."""
+
+ def on_rollback_savepoint(self, conn, name, context):
+ """Intercept rollback_savepoint() events."""
+
+ def on_release_savepoint(self, conn, name, context):
+ """Intercept release_savepoint() events."""
+
+ def on_begin_twophase(self, conn, xid):
+ """Intercept begin_twophase() events."""
+
+ def on_prepare_twophase(self, conn, xid):
+ """Intercept prepare_twophase() events."""
+
+ def on_rollback_twophase(self, conn, xid, is_prepared):
+ """Intercept rollback_twophase() events."""
+
+ def on_commit_twophase(self, conn, xid, is_prepared):
+ """Intercept commit_twophase() events."""
+
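+
+# A minimal sketch of engine-level statement logging (assumption:
+# "some_engine" is a hypothetical Engine instance; listen() uses the
+# (fn, identifier, target) order established by EngineEvents.listen above).
+def _example_engine_logging(some_engine):
+    def log_cursor_execute(conn, cursor, statement, parameters,
+                           context, executemany):
+        # fires immediately before each statement is passed to the cursor
+        print "executing: %s" % statement
+
+    event.listen(log_cursor_execute, 'on_before_cursor_execute', some_engine)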
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
-"""Interfaces and abstract types."""
+"""Interfaces and abstract types.
+
+This module is **deprecated** and is superseded by the
+event system.
+
+"""
from sqlalchemy import event, util
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
-"""Defines SQLAlchemy's system of class instrumentation..
+"""Defines instrumentation for class attributes and their interaction
+with instances.
This module is usually not directly visible to user applications, but
defines a large part of the ORM's interactivity.
-SQLA's instrumentation system is completely customizable, in which
-case an understanding of the general mechanics of this module is helpful.
-An example of full customization is in /examples/custom_attributes.
"""
import operator
-from operator import attrgetter, itemgetter
-import types
-import weakref
+from operator import itemgetter
from sqlalchemy import util, event
-from sqlalchemy.orm import interfaces, collections, exc
+from sqlalchemy.orm import interfaces, collections
import sqlalchemy.exceptions as sa_exc
# lazy imports
PASSIVE_OFF = False #util.symbol('PASSIVE_OFF')
"""Symbol indicating that loader callables should be executed."""
-INSTRUMENTATION_MANAGER = '__sa_instrumentation_manager__'
-"""Attribute, elects custom instrumentation when present on a mapped class.
-
-Allows a class to specify a slightly or wildly different technique for
-tracking changes made to mapped attributes and collections.
-
-Only one instrumentation implementation is allowed in a given object
-inheritance hierarchy.
-
-The value of this attribute must be a callable and will be passed a class
-object. The callable must return one of:
-
- - An instance of an interfaces.InstrumentationManager or subclass
- - An object implementing all or some of InstrumentationManager (TODO)
- - A dictionary of callables, implementing all or some of the above (TODO)
- - An instance of a ClassManager or subclass
-
-interfaces.InstrumentationManager is public API and will remain stable
-between releases. ClassManager is not public and no guarantees are made
-about stability. Caveat emptor.
-
-This attribute is consulted by the default SQLAlchemy instrumentation
-resolution code. If custom finders are installed in the global
-instrumentation_finders list, they may or may not choose to honor this
-attribute.
-
-"""
-
-instrumentation_finders = []
-"""An extensible sequence of instrumentation implementation finding callables.
-
-Finders callables will be passed a class object. If None is returned, the
-next finder in the sequence is consulted. Otherwise the return must be an
-instrumentation factory that follows the same guidelines as
-INSTRUMENTATION_MANAGER.
-
-By default, the only finder is find_native_user_instrumentation_hook, which
-searches for INSTRUMENTATION_MANAGER. If all finders return None, standard
-ClassManager instrumentation is used.
-
-"""
class AttributeEvents(event.Events):
"""Events for ORM attributes.
initiator,
passive=PASSIVE_NO_FETCH)
-class ClassEvents(event.Events):
- def on_init(self, state, instance, args, kwargs):
- """"""
-
- def on_init_failure(self, state, instance, args, kwargs):
- """"""
-
- def on_load(self, instance):
- """"""
-
- def on_resurrect(self, state, instance):
- """"""
-
-class ClassManager(dict):
- """tracks state information at the class level."""
-
- MANAGER_ATTR = '_sa_class_manager'
- STATE_ATTR = '_sa_instance_state'
-
- deferred_scalar_loader = None
-
- original_init = object.__init__
-
- def __init__(self, class_):
- self.class_ = class_
- self.factory = None # where we came from, for inheritance bookkeeping
- self.info = {}
- self.new_init = None
- self.mutable_attributes = set()
- self.local_attrs = {}
- self.originals = {}
- for base in class_.__mro__[-2:0:-1]: # reverse, skipping 1st and last
- if not isinstance(base, type):
- continue
- cls_state = manager_of_class(base)
- if cls_state:
- self.update(cls_state)
- self.manage()
- self._instrument_init()
-
- dispatch = event.dispatcher(ClassEvents)
-
- @property
- def is_mapped(self):
- return 'mapper' in self.__dict__
-
- @util.memoized_property
- def mapper(self):
- raise exc.UnmappedClassError(self.class_)
-
- def _attr_has_impl(self, key):
- """Return True if the given attribute is fully initialized.
-
- i.e. has an impl.
- """
-
- return key in self and self[key].impl is not None
-
- def _configure_create_arguments(self,
- _source=None,
- deferred_scalar_loader=None):
- """Accept extra **kw arguments passed to create_manager_for_cls.
-
- The current contract of ClassManager and other managers is that they
- take a single "cls" argument in their constructor (as per
- test/orm/instrumentation.py InstrumentationCollisionTest). This
- is to provide consistency with the current API of "class manager"
- callables and such which may return various ClassManager and
- ClassManager-like instances. So create_manager_for_cls sends
- in ClassManager-specific arguments via this method once the
- non-proxied ClassManager is available.
-
- """
- if _source:
- deferred_scalar_loader = _source.deferred_scalar_loader
-
- if deferred_scalar_loader:
- self.deferred_scalar_loader = deferred_scalar_loader
-
- def _subclass_manager(self, cls):
- """Create a new ClassManager for a subclass of this ClassManager's
- class.
-
- This is called automatically when attributes are instrumented so that
- the attributes can be propagated to subclasses against their own
- class-local manager, without the need for mappers etc. to have already
- pre-configured managers for the full class hierarchy. Mappers
- can post-configure the auto-generated ClassManager when needed.
-
- """
- manager = manager_of_class(cls)
- if manager is None:
- manager = _create_manager_for_cls(cls, _source=self)
- return manager
-
- def _instrument_init(self):
- # TODO: self.class_.__init__ is often the already-instrumented
- # __init__ from an instrumented superclass. We still need to make
- # our own wrapper, but it would
- # be nice to wrap the original __init__ and not our existing wrapper
- # of such, since this adds method overhead.
- self.original_init = self.class_.__init__
- self.new_init = _generate_init(self.class_, self)
- self.install_member('__init__', self.new_init)
-
- def _uninstrument_init(self):
- if self.new_init:
- self.uninstall_member('__init__')
- self.new_init = None
-
- def _create_instance_state(self, instance):
- if self.mutable_attributes:
- return state.MutableAttrInstanceState(instance, self)
- else:
- return state.InstanceState(instance, self)
-
- def manage(self):
- """Mark this instance as the manager for its class."""
-
- setattr(self.class_, self.MANAGER_ATTR, self)
-
- def dispose(self):
- """Dissasociate this manager from its class."""
-
- delattr(self.class_, self.MANAGER_ATTR)
-
- def manager_getter(self):
- return attrgetter(self.MANAGER_ATTR)
-
- def instrument_attribute(self, key, inst, propagated=False):
- if propagated:
- if key in self.local_attrs:
- return # don't override local attr with inherited attr
- else:
- self.local_attrs[key] = inst
- self.install_descriptor(key, inst)
- self[key] = inst
-
- for cls in self.class_.__subclasses__():
- manager = self._subclass_manager(cls)
- manager.instrument_attribute(key, inst, True)
-
- def post_configure_attribute(self, key):
- pass
-
- def uninstrument_attribute(self, key, propagated=False):
- if key not in self:
- return
- if propagated:
- if key in self.local_attrs:
- return # don't get rid of local attr
- else:
- del self.local_attrs[key]
- self.uninstall_descriptor(key)
- del self[key]
- if key in self.mutable_attributes:
- self.mutable_attributes.remove(key)
- for cls in self.class_.__subclasses__():
- manager = self._subclass_manager(cls)
- manager.uninstrument_attribute(key, True)
-
- def unregister(self):
- """remove all instrumentation established by this ClassManager."""
-
- self._uninstrument_init()
-
- self.mapper = self.dispatch = None
- self.info.clear()
-
- for key in list(self):
- if key in self.local_attrs:
- self.uninstrument_attribute(key)
-
- def install_descriptor(self, key, inst):
- if key in (self.STATE_ATTR, self.MANAGER_ATTR):
- raise KeyError("%r: requested attribute name conflicts with "
- "instrumentation attribute of the same name." %
- key)
- setattr(self.class_, key, inst)
-
- def uninstall_descriptor(self, key):
- delattr(self.class_, key)
-
- def install_member(self, key, implementation):
- if key in (self.STATE_ATTR, self.MANAGER_ATTR):
- raise KeyError("%r: requested attribute name conflicts with "
- "instrumentation attribute of the same name." %
- key)
- self.originals.setdefault(key, getattr(self.class_, key, None))
- setattr(self.class_, key, implementation)
-
- def uninstall_member(self, key):
- original = self.originals.pop(key, None)
- if original is not None:
- setattr(self.class_, key, original)
-
- def instrument_collection_class(self, key, collection_class):
- return collections.prepare_instrumentation(collection_class)
-
- def initialize_collection(self, key, state, factory):
- user_data = factory()
- adapter = collections.CollectionAdapter(
- self.get_impl(key), state, user_data)
- return adapter, user_data
-
- def is_instrumented(self, key, search=False):
- if search:
- return key in self
- else:
- return key in self.local_attrs
-
- def get_impl(self, key):
- return self[key].impl
-
- @property
- def attributes(self):
- return self.itervalues()
-
- ## InstanceState management
-
- def new_instance(self, state=None):
- instance = self.class_.__new__(self.class_)
- setattr(instance, self.STATE_ATTR,
- state or self._create_instance_state(instance))
- return instance
-
- def setup_instance(self, instance, state=None):
- setattr(instance, self.STATE_ATTR,
- state or self._create_instance_state(instance))
-
- def teardown_instance(self, instance):
- delattr(instance, self.STATE_ATTR)
-
- def _new_state_if_none(self, instance):
- """Install a default InstanceState if none is present.
-
- A private convenience method used by the __init__ decorator.
-
- """
- if hasattr(instance, self.STATE_ATTR):
- return False
- elif self.class_ is not instance.__class__ and \
- self.is_mapped:
- # this will create a new ClassManager for the
- # subclass, without a mapper. This is likely a
- # user error situation but allow the object
- # to be constructed, so that it is usable
- # in a non-ORM context at least.
- return self._subclass_manager(instance.__class__).\
- _new_state_if_none(instance)
- else:
- state = self._create_instance_state(instance)
- setattr(instance, self.STATE_ATTR, state)
- return state
-
- def state_getter(self):
- """Return a (instance) -> InstanceState callable.
-
- "state getter" callables should raise either KeyError or
- AttributeError if no InstanceState could be found for the
- instance.
- """
-
- return attrgetter(self.STATE_ATTR)
-
- def dict_getter(self):
- return attrgetter('__dict__')
-
- def has_state(self, instance):
- return hasattr(instance, self.STATE_ATTR)
-
- def has_parent(self, state, key, optimistic=False):
- """TODO"""
- return self.get_impl(key).hasparent(state, optimistic=optimistic)
-
- def __nonzero__(self):
- """All ClassManagers are non-zero regardless of attribute state."""
- return True
-
- def __repr__(self):
- return '<%s of %r at %x>' % (
- self.__class__.__name__, self.class_, id(self))
-
-class _ClassInstrumentationAdapter(ClassManager):
- """Adapts a user-defined InstrumentationManager to a ClassManager."""
-
- def __init__(self, class_, override, **kw):
- self._adapted = override
- self._get_state = self._adapted.state_getter(class_)
- self._get_dict = self._adapted.dict_getter(class_)
-
- ClassManager.__init__(self, class_, **kw)
-
- def manage(self):
- self._adapted.manage(self.class_, self)
-
- def dispose(self):
- self._adapted.dispose(self.class_)
-
- def manager_getter(self):
- return self._adapted.manager_getter(self.class_)
-
- def instrument_attribute(self, key, inst, propagated=False):
- ClassManager.instrument_attribute(self, key, inst, propagated)
- if not propagated:
- self._adapted.instrument_attribute(self.class_, key, inst)
-
- def post_configure_attribute(self, key):
- self._adapted.post_configure_attribute(self.class_, key, self[key])
-
- def install_descriptor(self, key, inst):
- self._adapted.install_descriptor(self.class_, key, inst)
-
- def uninstall_descriptor(self, key):
- self._adapted.uninstall_descriptor(self.class_, key)
-
- def install_member(self, key, implementation):
- self._adapted.install_member(self.class_, key, implementation)
-
- def uninstall_member(self, key):
- self._adapted.uninstall_member(self.class_, key)
-
- def instrument_collection_class(self, key, collection_class):
- return self._adapted.instrument_collection_class(
- self.class_, key, collection_class)
-
- def initialize_collection(self, key, state, factory):
- delegate = getattr(self._adapted, 'initialize_collection', None)
- if delegate:
- return delegate(key, state, factory)
- else:
- return ClassManager.initialize_collection(self, key,
- state, factory)
-
- def new_instance(self, state=None):
- instance = self.class_.__new__(self.class_)
- self.setup_instance(instance, state)
- return instance
-
- def _new_state_if_none(self, instance):
- """Install a default InstanceState if none is present.
-
- A private convenience method used by the __init__ decorator.
- """
- if self.has_state(instance):
- return False
- else:
- return self.setup_instance(instance)
-
- def setup_instance(self, instance, state=None):
- self._adapted.initialize_instance_dict(self.class_, instance)
-
- if state is None:
- state = self._create_instance_state(instance)
-
- # the given instance is assumed to have no state
- self._adapted.install_state(self.class_, instance, state)
- return state
-
- def teardown_instance(self, instance):
- self._adapted.remove_state(self.class_, instance)
-
- def has_state(self, instance):
- try:
- state = self._get_state(instance)
- except exc.NO_STATE:
- return False
- else:
- return True
-
- def state_getter(self):
- return self._get_state
-
- def dict_getter(self):
- return self._get_dict
class History(tuple):
"""A 3-tuple of added, unchanged and deleted values,
state = instance_state(obj)
return manager.has_parent(state, key, optimistic)
-def register_class(class_, **kw):
- """Register class instrumentation.
-
- Returns the existing or newly created class manager.
- """
-
- manager = manager_of_class(class_)
- if manager is None:
- manager = _create_manager_for_cls(class_, **kw)
- return manager
-
-def unregister_class(class_):
- """Unregister class instrumentation."""
-
- instrumentation_registry.unregister(class_)
-
def register_attribute(class_, key, **kw):
proxy_property = kw.pop('proxy_property', None)
state, dict_ = instance_state(instance), instance_dict(instance)
state.get_impl(key).delete(state, dict_)
-def is_instrumented(instance, key):
- """Return True if the given attribute on the given instance is
- instrumented by the attributes package.
-
- This function may be used regardless of instrumentation
- applied directly to the class, i.e. no descriptors are required.
-
- """
- return manager_of_class(instance.__class__).\
- is_instrumented(key, search=True)
-
-class InstrumentationRegistry(object):
- """Private instrumentation registration singleton.
-
- All classes are routed through this registry
- when first instrumented, however the InstrumentationRegistry
- is not actually needed unless custom ClassManagers are in use.
-
- """
-
- _manager_finders = weakref.WeakKeyDictionary()
- _state_finders = util.WeakIdentityMapping()
- _dict_finders = util.WeakIdentityMapping()
- _extended = False
-
- def create_manager_for_cls(self, class_, **kw):
- assert class_ is not None
- assert manager_of_class(class_) is None
-
- for finder in instrumentation_finders:
- factory = finder(class_)
- if factory is not None:
- break
- else:
- factory = ClassManager
-
- existing_factories = self._collect_management_factories_for(class_).\
- difference([factory])
- if existing_factories:
- raise TypeError(
- "multiple instrumentation implementations specified "
- "in %s inheritance hierarchy: %r" % (
- class_.__name__, list(existing_factories)))
-
- manager = factory(class_)
- if not isinstance(manager, ClassManager):
- manager = _ClassInstrumentationAdapter(class_, manager)
-
- if factory != ClassManager and not self._extended:
- # somebody invoked a custom ClassManager.
- # reinstall global "getter" functions with the more
- # expensive ones.
- self._extended = True
- _install_lookup_strategy(self)
-
- manager._configure_create_arguments(**kw)
-
- manager.factory = factory
- self._manager_finders[class_] = manager.manager_getter()
- self._state_finders[class_] = manager.state_getter()
- self._dict_finders[class_] = manager.dict_getter()
- return manager
-
- def _collect_management_factories_for(self, cls):
- """Return a collection of factories in play or specified for a
- hierarchy.
-
- Traverses the entire inheritance graph of a cls and returns a
- collection of instrumentation factories for those classes. Factories
- are extracted from active ClassManagers, if available, otherwise
- instrumentation_finders is consulted.
-
- """
- hierarchy = util.class_hierarchy(cls)
- factories = set()
- for member in hierarchy:
- manager = manager_of_class(member)
- if manager is not None:
- factories.add(manager.factory)
- else:
- for finder in instrumentation_finders:
- factory = finder(member)
- if factory is not None:
- break
- else:
- factory = None
- factories.add(factory)
- factories.discard(None)
- return factories
-
- def manager_of_class(self, cls):
- # this is only called when alternate instrumentation
- # has been established
- if cls is None:
- return None
- try:
- finder = self._manager_finders[cls]
- except KeyError:
- return None
- else:
- return finder(cls)
-
- def state_of(self, instance):
- # this is only called when alternate instrumentation
- # has been established
- if instance is None:
- raise AttributeError("None has no persistent state.")
- try:
- return self._state_finders[instance.__class__](instance)
- except KeyError:
- raise AttributeError("%r is not instrumented" %
- instance.__class__)
-
- def dict_of(self, instance):
- # this is only called when alternate instrumentation
- # has been established
- if instance is None:
- raise AttributeError("None has no persistent state.")
- try:
- return self._dict_finders[instance.__class__](instance)
- except KeyError:
- raise AttributeError("%r is not instrumented" %
- instance.__class__)
-
- def unregister(self, class_):
- if class_ in self._manager_finders:
- manager = self.manager_of_class(class_)
- manager.unregister()
- manager.dispose()
- del self._manager_finders[class_]
- del self._state_finders[class_]
- del self._dict_finders[class_]
- if ClassManager.MANAGER_ATTR in class_.__dict__:
- delattr(class_, ClassManager.MANAGER_ATTR)
-
-instrumentation_registry = InstrumentationRegistry()
-
-def _install_lookup_strategy(implementation):
- """Replace global class/object management functions
- with either faster or more comprehensive implementations,
- based on whether or not extended class instrumentation
- has been detected.
-
- This function is called only by InstrumentationRegistry()
- and unit tests specific to this behavior.
-
- """
- global instance_state, instance_dict, manager_of_class
- if implementation is util.symbol('native'):
- instance_state = attrgetter(ClassManager.STATE_ATTR)
- instance_dict = attrgetter("__dict__")
- def manager_of_class(cls):
- return cls.__dict__.get(ClassManager.MANAGER_ATTR, None)
- else:
- instance_state = instrumentation_registry.state_of
- instance_dict = instrumentation_registry.dict_of
- manager_of_class = instrumentation_registry.manager_of_class
-
-_create_manager_for_cls = instrumentation_registry.create_manager_for_cls
-
-# Install default "lookup" strategies. These are basically
-# very fast attrgetters for key attributes.
-# When a custom ClassManager is installed, more expensive per-class
-# strategies are copied over these.
-_install_lookup_strategy(util.symbol('native'))
-
-def find_native_user_instrumentation_hook(cls):
- """Find user-specified instrumentation management for a class."""
- return getattr(cls, INSTRUMENTATION_MANAGER, None)
-instrumentation_finders.append(find_native_user_instrumentation_hook)
-
-def _generate_init(class_, class_manager):
- """Build an __init__ decorator that triggers ClassManager events."""
-
- # TODO: we should use the ClassManager's notion of the
- # original '__init__' method, once ClassManager is fixed
- # to always reference that.
- original__init__ = class_.__init__
- assert original__init__
-
- # Go through some effort here and don't change the user's __init__
- # calling signature.
- # FIXME: need to juggle local names to avoid constructor argument
- # clashes.
- func_body = """\
-def __init__(%(apply_pos)s):
- new_state = class_manager._new_state_if_none(%(self_arg)s)
- if new_state:
- return new_state.initialize_instance(%(apply_kw)s)
- else:
- return original__init__(%(apply_kw)s)
-"""
- func_vars = util.format_argspec_init(original__init__, grouped=False)
- func_text = func_body % func_vars
-
- # Py3K
- #func_defaults = getattr(original__init__, '__defaults__', None)
- # Py2K
- func = getattr(original__init__, 'im_func', original__init__)
- func_defaults = getattr(func, 'func_defaults', None)
- # end Py2K
-
- env = locals().copy()
- exec func_text in env
- __init__ = env['__init__']
- __init__.__doc__ = original__init__.__doc__
- if func_defaults:
- __init__.func_defaults = func_defaults
- return __init__
--- /dev/null
+from sqlalchemy import event
+from sqlalchemy.orm.interfaces import EXT_CONTINUE
+
+
+class MapperExtension(object):
+ """Base implementation for customizing ``Mapper`` behavior.
+
+ New extension classes subclass ``MapperExtension`` and are specified
+ using the ``extension`` mapper() argument, which is a single
+ ``MapperExtension`` or a list of such. A single mapper
+ can maintain a chain of ``MapperExtension`` objects. When a
+ particular mapping event occurs, the corresponding method
+ on each ``MapperExtension`` is invoked serially, and each method
+ has the ability to halt the chain from proceeding further.
+
+ Each ``MapperExtension`` method returns the symbol
+ EXT_CONTINUE by default. This symbol generally means "move
+ to the next ``MapperExtension`` for processing". For methods
+ that return objects like translated rows or new object
+ instances, EXT_CONTINUE means the result of the method
+    should be ignored. In some cases, returning EXT_CONTINUE is
+    required in order for a default mapper activity to be performed,
+    such as adding a new instance to a result list.
+
+    The symbol EXT_STOP indicates that the chain of
+    ``MapperExtension`` objects should stop processing when it is
+    returned. Like EXT_CONTINUE, in some cases it additionally
+    indicates that a default mapper activity will not be performed.
+
+ """
+
+ def instrument_class(self, mapper, class_):
+ """Receive a class when the mapper is first constructed, and has
+ applied instrumentation to the mapped class.
+
+ The return value is only significant within the ``MapperExtension``
+ chain; the parent mapper's behavior isn't modified by this method.
+
+ """
+ return EXT_CONTINUE
+
+ def init_instance(self, mapper, class_, oldinit, instance, args, kwargs):
+ """Receive an instance when it's constructor is called.
+
+ This method is only called during a userland construction of
+ an object. It is not called when an object is loaded from the
+ database.
+
+ The return value is only significant within the ``MapperExtension``
+ chain; the parent mapper's behavior isn't modified by this method.
+
+ """
+ return EXT_CONTINUE
+
+ def init_failed(self, mapper, class_, oldinit, instance, args, kwargs):
+ """Receive an instance when it's constructor has been called,
+ and raised an exception.
+
+ This method is only called during a userland construction of
+ an object. It is not called when an object is loaded from the
+ database.
+
+ The return value is only significant within the ``MapperExtension``
+ chain; the parent mapper's behavior isn't modified by this method.
+
+ """
+ return EXT_CONTINUE
+
+ def translate_row(self, mapper, context, row):
+ """Perform pre-processing on the given result row and return a
+ new row instance.
+
+ This is called when the mapper first receives a row, before
+ the object identity or the instance itself has been derived
+ from that row. The given row may or may not be a
+ ``RowProxy`` object - it will always be a dictionary-like
+ object which contains mapped columns as keys. The
+ returned object should also be a dictionary-like object
+ which recognizes mapped columns as keys.
+
+ If the ultimate return value is EXT_CONTINUE, the row
+ is not translated.
+
+ """
+ return EXT_CONTINUE
+
+ def create_instance(self, mapper, selectcontext, row, class_):
+ """Receive a row when a new object instance is about to be
+ created from that row.
+
+ The method can choose to create the instance itself, or it can return
+ EXT_CONTINUE to indicate normal object creation should take place.
+
+ mapper
+ The mapper doing the operation
+
+ selectcontext
+ The QueryContext generated from the Query.
+
+ row
+ The result row from the database
+
+ class\_
+ The class we are mapping.
+
+ return value
+ A new object instance, or EXT_CONTINUE
+
+ """
+ return EXT_CONTINUE
+
+ def append_result(self, mapper, selectcontext, row, instance,
+ result, **flags):
+ """Receive an object instance before that instance is appended
+ to a result list.
+
+ If this method returns EXT_CONTINUE, result appending will proceed
+        normally. If this method returns any other value or None,
+ result appending will not proceed for this instance, giving
+ this extension an opportunity to do the appending itself, if
+ desired.
+
+ mapper
+ The mapper doing the operation.
+
+ selectcontext
+ The QueryContext generated from the Query.
+
+ row
+ The result row from the database.
+
+ instance
+ The object instance to be appended to the result.
+
+ result
+ List to which results are being appended.
+
+ \**flags
+ extra information about the row, same as criterion in
+ ``create_row_processor()`` method of
+ :class:`~sqlalchemy.orm.interfaces.MapperProperty`
+ """
+
+ return EXT_CONTINUE
+
+ def populate_instance(self, mapper, selectcontext, row,
+ instance, **flags):
+ """Receive an instance before that instance has
+ its attributes populated.
+
+ This usually corresponds to a newly loaded instance but may
+ also correspond to an already-loaded instance which has
+ unloaded attributes to be populated. The method may be called
+ many times for a single instance, as multiple result rows are
+ used to populate eagerly loaded collections.
+
+ If this method returns EXT_CONTINUE, instance population will
+ proceed normally. If any other value or None is returned,
+ instance population will not proceed, giving this extension an
+ opportunity to populate the instance itself, if desired.
+
+ As of 0.5, most usages of this hook are obsolete. For a
+ generic "object has been newly created from a row" hook, use
+ ``reconstruct_instance()``, or the ``@orm.reconstructor``
+ decorator.
+
+ """
+ return EXT_CONTINUE
+
+ def reconstruct_instance(self, mapper, instance):
+ """Receive an object instance after it has been created via
+ ``__new__``, and after initial attribute population has
+ occurred.
+
+ This typically occurs when the instance is created based on
+ incoming result rows, and is only called once for that
+ instance's lifetime.
+
+ Note that during a result-row load, this method is called upon
+ the first row received for this instance. Note that some
+ attributes and collections may or may not be loaded or even
+ initialized, depending on what's present in the result rows.
+
+ The return value is only significant within the ``MapperExtension``
+ chain; the parent mapper's behavior isn't modified by this method.
+
+ """
+ return EXT_CONTINUE
+
+ def before_insert(self, mapper, connection, instance):
+ """Receive an object instance before that instance is inserted
+ into its table.
+
+ This is a good place to set up primary key values and such
+ that aren't handled otherwise.
+
+        Column-based attributes can be modified within this method,
+ which will result in the new value being inserted. However
+ *no* changes to the overall flush plan can be made, and
+ manipulation of the ``Session`` will not have the desired effect.
+ To manipulate the ``Session`` within an extension, use
+ ``SessionExtension``.
+
+ The return value is only significant within the ``MapperExtension``
+ chain; the parent mapper's behavior isn't modified by this method.
+
+ """
+
+ return EXT_CONTINUE
+
+ def after_insert(self, mapper, connection, instance):
+ """Receive an object instance after that instance is inserted.
+
+ The return value is only significant within the ``MapperExtension``
+ chain; the parent mapper's behavior isn't modified by this method.
+
+ """
+
+ return EXT_CONTINUE
+
+ def before_update(self, mapper, connection, instance):
+ """Receive an object instance before that instance is updated.
+
+ Note that this method is called for all instances that are marked as
+ "dirty", even those which have no net changes to their column-based
+ attributes. An object is marked as dirty when any of its column-based
+ attributes have a "set attribute" operation called or when any of its
+ collections are modified. If, at update time, no column-based
+ attributes have any net changes, no UPDATE statement will be issued.
+ This means that an instance being sent to before_update is *not* a
+ guarantee that an UPDATE statement will be issued (although you can
+ affect the outcome here).
+
+ To detect if the column-based attributes on the object have net
+ changes, and will therefore generate an UPDATE statement, use
+ ``object_session(instance).is_modified(instance,
+ include_collections=False)``.
+
+        Column-based attributes can be modified within this method,
+ which will result in the new value being updated. However
+ *no* changes to the overall flush plan can be made, and
+ manipulation of the ``Session`` will not have the desired effect.
+ To manipulate the ``Session`` within an extension, use
+ ``SessionExtension``.
+
+ The return value is only significant within the ``MapperExtension``
+ chain; the parent mapper's behavior isn't modified by this method.
+
+ """
+
+ return EXT_CONTINUE
+
+ def after_update(self, mapper, connection, instance):
+ """Receive an object instance after that instance is updated.
+
+ The return value is only significant within the ``MapperExtension``
+ chain; the parent mapper's behavior isn't modified by this method.
+
+ """
+
+ return EXT_CONTINUE
+
+ def before_delete(self, mapper, connection, instance):
+ """Receive an object instance before that instance is deleted.
+
+ Note that *no* changes to the overall flush plan can be made
+ here; and manipulation of the ``Session`` will not have the
+ desired effect. To manipulate the ``Session`` within an
+ extension, use ``SessionExtension``.
+
+ The return value is only significant within the ``MapperExtension``
+ chain; the parent mapper's behavior isn't modified by this method.
+
+ """
+
+ return EXT_CONTINUE
+
+ def after_delete(self, mapper, connection, instance):
+ """Receive an object instance after that instance is deleted.
+
+ The return value is only significant within the ``MapperExtension``
+ chain; the parent mapper's behavior isn't modified by this method.
+
+ """
+
+ return EXT_CONTINUE
+
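+
+# A minimal sketch of a MapperExtension subclass (the extension name and
+# its "created_at" attribute are hypothetical); returning EXT_CONTINUE
+# lets the rest of the chain and the default mapper behavior proceed.
+class _ExampleTimestampExtension(MapperExtension):
+    def before_insert(self, mapper, connection, instance):
+        import datetime
+        instance.created_at = datetime.datetime.now()
+        return EXT_CONTINUE
+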
+class SessionExtension(object):
+
+ """An extension hook object for Sessions. Subclasses may be
+ installed into a Session (or sessionmaker) using the ``extension``
+ keyword argument. """
+
+ def before_commit(self, session):
+ """Execute right before commit is called.
+
+ Note that this may not be per-flush if a longer running
+ transaction is ongoing."""
+
+ def after_commit(self, session):
+ """Execute after a commit has occured.
+
+ Note that this may not be per-flush if a longer running
+ transaction is ongoing."""
+
+ def after_rollback(self, session):
+ """Execute after a rollback has occured.
+
+ Note that this may not be per-flush if a longer running
+ transaction is ongoing."""
+
+    def before_flush(self, session, flush_context, instances):
+ """Execute before flush process has started.
+
+ `instances` is an optional list of objects which were passed to
+ the ``flush()`` method. """
+
+ def after_flush(self, session, flush_context):
+ """Execute after flush has completed, but before commit has been
+ called.
+
+ Note that the session's state is still in pre-flush, i.e. 'new',
+ 'dirty', and 'deleted' lists still show pre-flush state as well
+ as the history settings on instance attributes."""
+
+ def after_flush_postexec(self, session, flush_context):
+ """Execute after flush has completed, and after the post-exec
+ state occurs.
+
+ This will be when the 'new', 'dirty', and 'deleted' lists are in
+ their final state. An actual commit() may or may not have
+        occurred, depending on whether or not the flush started its own
+ transaction or participated in a larger transaction. """
+
+    def after_begin(self, session, transaction, connection):
+ """Execute after a transaction is begun on a connection
+
+ `transaction` is the SessionTransaction. This method is called
+ after an engine level transaction is begun on a connection. """
+
+ def after_attach(self, session, instance):
+ """Execute after an instance is attached to a session.
+
+ This is called after an add, delete or merge. """
+
+    def after_bulk_update(self, session, query, query_context, result):
+ """Execute after a bulk update operation to the session.
+
+ This is called after a session.query(...).update()
+
+ `query` is the query object that this update operation was
+        called on. `query_context` is the query context object.
+ `result` is the result object returned from the bulk operation.
+ """
+
+    def after_bulk_delete(self, session, query, query_context, result):
+ """Execute after a bulk delete operation to the session.
+
+ This is called after a session.query(...).delete()
+
+ `query` is the query object that this delete operation was
+        called on. `query_context` is the query context object.
+ `result` is the result object returned from the bulk operation.
+ """
+
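+
+# A minimal sketch of a SessionExtension subclass; the commit counter is
+# hypothetical and only illustrates where the hook fires.
+class _ExampleCountingExtension(SessionExtension):
+    def __init__(self):
+        self.commits = 0
+
+    def after_commit(self, session):
+        # fires once per COMMIT, which may span several flush() calls
+        self.commits += 1
+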
+
+class AttributeExtension(object):
+ """An event handler for individual attribute change events.
+
+ .. note:: :class:`AttributeExtension` is deprecated. Please
+ refer to :func:`event.listen` as well as
+ :attr:`AttributeImpl.events`.
+
+ AttributeExtension is assembled within the descriptors associated
+ with a mapped class.
+
+ """
+
+ active_history = True
+ """indicates that the set() method would like to receive the 'old' value,
+ even if it means firing lazy callables.
+ """
+
+ @classmethod
+ def _adapt_listener(cls, self, listener):
+ event.listen(listener.append, 'on_append', self,
+ active_history=listener.active_history)
+ event.listen(listener.remove, 'on_remove', self,
+ active_history=listener.active_history)
+ event.listen(listener.set, 'on_set', self,
+ active_history=listener.active_history)
+
+
+ def append(self, state, value, initiator):
+ """Receive a collection append event.
+
+ The returned value will be used as the actual value to be
+ appended.
+
+ """
+ return value
+
+ def remove(self, state, value, initiator):
+ """Receive a remove event.
+
+ No return value is defined.
+
+ """
+ pass
+
+ def set(self, state, value, oldvalue, initiator):
+ """Receive a set event.
+
+ The returned value will be used as the actual value to be
+ set.
+
+ """
+ return value
+
+
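+# A minimal sketch of the event-based replacement suggested in the
+# AttributeExtension note above (assumption: "some_attribute" stands for
+# an instrumented attribute accepted as a listen() target; the 'on_set'
+# identifier and active_history flag mirror _adapt_listener()).
+def _example_attribute_listening(some_attribute):
+    def receive_set(state, value, oldvalue, initiator):
+        # observe each set(); whether a return value replaces the incoming
+        # value, as AttributeExtension.set does, is not shown here
+        print "set %r -> %r" % (oldvalue, value)
+
+    event.listen(receive_set, 'on_set', some_attribute, active_history=True)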
--- /dev/null
+"""ORM event interfaces.
+
+"""
+from sqlalchemy import event
+
+class ClassEvents(event.Events):
+ @classmethod
+ def accept_with(cls, target):
+        from sqlalchemy.orm.instrumentation import ClassManager, \
+            manager_of_class
+
+ if isinstance(target, ClassManager):
+ return [target]
+ elif isinstance(target, type):
+ manager = manager_of_class(target)
+ if manager:
+ return [manager]
+ return []
+
+ # TODO: change these to accept "target" -
+ # the target is the state or the instance, depending
+ # on if the listener was registered with "raw=True" -
+ # do the same thing for all the other events here (Mapper, Session, Attributes).
+ # Not sending raw=True means the listen() method of the
+ # Events subclass will wrap incoming listeners to marshall each
+ # "target" argument into "instance". The name "target" can be
+ # used consistently to make it simple.
+ #
+ # this way end users don't have to deal with InstanceState and
+ # the internals can have faster performance.
+
+ def on_init(self, state, instance, args, kwargs):
+ """"""
+
+ def on_init_failure(self, state, instance, args, kwargs):
+ """"""
+
+ def on_load(self, instance):
+ """"""
+
+ def on_resurrect(self, state, instance):
+ """"""
+
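+
+# A minimal sketch of listening for class-level instance events
+# (assumption: "mapped_class" is a hypothetical mapped class, which
+# accept_with() above resolves to its ClassManager).
+def _example_class_listening(mapped_class):
+    def on_load(instance):
+        # fires when an instance is created from a database load
+        print "loaded %r" % instance
+
+    event.listen(on_load, 'on_load', mapped_class)
+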
+class MapperEvents(event.Events):
+ """"""
+
+class SessionEvents(event.Events):
+ """"""
+
+class AttributeEvents(event.Events):
+ """"""
\ No newline at end of file
--- /dev/null
+"""Defines SQLAlchemy's system of class instrumentation.
+
+This module is usually not directly visible to user applications, but
+defines a large part of the ORM's interactivity.
+
+instrumentation.py deals with registration of end-user classes
+for state tracking. It interacts closely with state.py
+and attributes.py which establish per-instance and per-class-attribute
+instrumentation, respectively.
+
+SQLA's instrumentation system is completely customizable, in which
+case an understanding of the general mechanics of this module is helpful.
+An example of full customization is in /examples/custom_attributes.
+
+"""
+
+
+from sqlalchemy.orm import exc, collections, events
+from operator import attrgetter, itemgetter
+from sqlalchemy import event, util
+import weakref
+from sqlalchemy.orm import state, attributes
+
+
+INSTRUMENTATION_MANAGER = '__sa_instrumentation_manager__'
+"""Attribute, elects custom instrumentation when present on a mapped class.
+
+Allows a class to specify a slightly or wildly different technique for
+tracking changes made to mapped attributes and collections.
+
+Only one instrumentation implementation is allowed in a given object
+inheritance hierarchy.
+
+The value of this attribute must be a callable and will be passed a class
+object. The callable must return one of:
+
+ - An instance of an interfaces.InstrumentationManager or subclass
+ - An object implementing all or some of InstrumentationManager (TODO)
+ - A dictionary of callables, implementing all or some of the above (TODO)
+ - An instance of a ClassManager or subclass
+
+interfaces.InstrumentationManager is public API and will remain stable
+between releases. ClassManager is not public and no guarantees are made
+about stability. Caveat emptor.
+
+This attribute is consulted by the default SQLAlchemy instrumentation
+resolution code. If custom finders are installed in the global
+instrumentation_finders list, they may or may not choose to honor this
+attribute.
+
+"""
+
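+# A minimal sketch of a class electing custom instrumentation through this
+# attribute (the manager subclass and mapped class are hypothetical;
+# InstrumentationManager is imported inside the function only to keep the
+# sketch free of module-level side effects).
+def _example_custom_instrumentation():
+    from sqlalchemy.orm import InstrumentationManager
+
+    class MyInstrumentationManager(InstrumentationManager):
+        """Hypothetical manager; overrides omitted in this sketch."""
+
+    class MyMappedClass(object):
+        __sa_instrumentation_manager__ = MyInstrumentationManager
+
+    return MyMappedClass
+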
+instrumentation_finders = []
+"""An extensible sequence of instrumentation implementation finding callables.
+
+Finders callables will be passed a class object. If None is returned, the
+next finder in the sequence is consulted. Otherwise the return must be an
+instrumentation factory that follows the same guidelines as
+INSTRUMENTATION_MANAGER.
+
+By default, the only finder is find_native_user_instrumentation_hook, which
+searches for INSTRUMENTATION_MANAGER. If all finders return None, standard
+ClassManager instrumentation is used.
+
+"""
+
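+# A minimal sketch of a custom finder (the '__my_manager__' attribute name
+# is hypothetical). Returning None defers to the next finder in sequence;
+# an application would activate it via instrumentation_finders.append().
+def _example_finder(cls):
+    return getattr(cls, '__my_manager__', None)
+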
+
+class ClassManager(dict):
+ """tracks state information at the class level."""
+
+ MANAGER_ATTR = '_sa_class_manager'
+ STATE_ATTR = '_sa_instance_state'
+
+ deferred_scalar_loader = None
+
+ original_init = object.__init__
+
+ def __init__(self, class_):
+ self.class_ = class_
+ self.factory = None # where we came from, for inheritance bookkeeping
+ self.info = {}
+ self.new_init = None
+ self.mutable_attributes = set()
+ self.local_attrs = {}
+ self.originals = {}
+ for base in class_.__mro__[-2:0:-1]: # reverse, skipping 1st and last
+ if not isinstance(base, type):
+ continue
+ cls_state = manager_of_class(base)
+ if cls_state:
+ self.update(cls_state)
+ self.manage()
+ self._instrument_init()
+
+ dispatch = event.dispatcher(events.ClassEvents)
+
+ @property
+ def is_mapped(self):
+ return 'mapper' in self.__dict__
+
+ @util.memoized_property
+ def mapper(self):
+ raise exc.UnmappedClassError(self.class_)
+
+ def _attr_has_impl(self, key):
+ """Return True if the given attribute is fully initialized.
+
+ i.e. has an impl.
+ """
+
+ return key in self and self[key].impl is not None
+
+ def _configure_create_arguments(self,
+ _source=None,
+ deferred_scalar_loader=None):
+ """Accept extra **kw arguments passed to create_manager_for_cls.
+
+ The current contract of ClassManager and other managers is that they
+ take a single "cls" argument in their constructor (as per
+ test/orm/instrumentation.py InstrumentationCollisionTest). This
+ is to provide consistency with the current API of "class manager"
+ callables and such which may return various ClassManager and
+ ClassManager-like instances. So create_manager_for_cls sends
+ in ClassManager-specific arguments via this method once the
+ non-proxied ClassManager is available.
+
+ """
+ if _source:
+ deferred_scalar_loader = _source.deferred_scalar_loader
+
+ if deferred_scalar_loader:
+ self.deferred_scalar_loader = deferred_scalar_loader
+
+ def _subclass_manager(self, cls):
+ """Create a new ClassManager for a subclass of this ClassManager's
+ class.
+
+ This is called automatically when attributes are instrumented so that
+ the attributes can be propagated to subclasses against their own
+ class-local manager, without the need for mappers etc. to have already
+ pre-configured managers for the full class hierarchy. Mappers
+ can post-configure the auto-generated ClassManager when needed.
+
+ """
+ manager = manager_of_class(cls)
+ if manager is None:
+ manager = _create_manager_for_cls(cls, _source=self)
+ return manager
+
+ def _instrument_init(self):
+ # TODO: self.class_.__init__ is often the already-instrumented
+ # __init__ from an instrumented superclass. We still need to make
+ # our own wrapper, but it would
+ # be nice to wrap the original __init__ and not our existing wrapper
+ # of such, since this adds method overhead.
+ self.original_init = self.class_.__init__
+ self.new_init = _generate_init(self.class_, self)
+ self.install_member('__init__', self.new_init)
+
+ def _uninstrument_init(self):
+ if self.new_init:
+ self.uninstall_member('__init__')
+ self.new_init = None
+
+ def _create_instance_state(self, instance):
+ if self.mutable_attributes:
+ return state.MutableAttrInstanceState(instance, self)
+ else:
+ return state.InstanceState(instance, self)
+
+ def manage(self):
+ """Mark this instance as the manager for its class."""
+
+ setattr(self.class_, self.MANAGER_ATTR, self)
+
+ def dispose(self):
+ """Dissasociate this manager from its class."""
+
+ delattr(self.class_, self.MANAGER_ATTR)
+
+ def manager_getter(self):
+ return attrgetter(self.MANAGER_ATTR)
+
+ def instrument_attribute(self, key, inst, propagated=False):
+ if propagated:
+ if key in self.local_attrs:
+ return # don't override local attr with inherited attr
+ else:
+ self.local_attrs[key] = inst
+ self.install_descriptor(key, inst)
+ self[key] = inst
+
+ for cls in self.class_.__subclasses__():
+ manager = self._subclass_manager(cls)
+ manager.instrument_attribute(key, inst, True)
+
+ def post_configure_attribute(self, key):
+ pass
+
+ def uninstrument_attribute(self, key, propagated=False):
+ if key not in self:
+ return
+ if propagated:
+ if key in self.local_attrs:
+ return # don't get rid of local attr
+ else:
+ del self.local_attrs[key]
+ self.uninstall_descriptor(key)
+ del self[key]
+ if key in self.mutable_attributes:
+ self.mutable_attributes.remove(key)
+ for cls in self.class_.__subclasses__():
+ manager = self._subclass_manager(cls)
+ manager.uninstrument_attribute(key, True)
+
+ def unregister(self):
+ """remove all instrumentation established by this ClassManager."""
+
+ self._uninstrument_init()
+
+ self.mapper = self.dispatch = None
+ self.info.clear()
+
+ for key in list(self):
+ if key in self.local_attrs:
+ self.uninstrument_attribute(key)
+
+ def install_descriptor(self, key, inst):
+ if key in (self.STATE_ATTR, self.MANAGER_ATTR):
+ raise KeyError("%r: requested attribute name conflicts with "
+ "instrumentation attribute of the same name." %
+ key)
+ setattr(self.class_, key, inst)
+
+ def uninstall_descriptor(self, key):
+ delattr(self.class_, key)
+
+ def install_member(self, key, implementation):
+ if key in (self.STATE_ATTR, self.MANAGER_ATTR):
+ raise KeyError("%r: requested attribute name conflicts with "
+ "instrumentation attribute of the same name." %
+ key)
+ self.originals.setdefault(key, getattr(self.class_, key, None))
+ setattr(self.class_, key, implementation)
+
+ def uninstall_member(self, key):
+ original = self.originals.pop(key, None)
+ if original is not None:
+ setattr(self.class_, key, original)
+
+ def instrument_collection_class(self, key, collection_class):
+ return collections.prepare_instrumentation(collection_class)
+
+ def initialize_collection(self, key, state, factory):
+ user_data = factory()
+ adapter = collections.CollectionAdapter(
+ self.get_impl(key), state, user_data)
+ return adapter, user_data
+
+ def is_instrumented(self, key, search=False):
+ if search:
+ return key in self
+ else:
+ return key in self.local_attrs
+
+ def get_impl(self, key):
+ return self[key].impl
+
+ @property
+ def attributes(self):
+ return self.itervalues()
+
+ ## InstanceState management
+
+ def new_instance(self, state=None):
+ instance = self.class_.__new__(self.class_)
+ setattr(instance, self.STATE_ATTR,
+ state or self._create_instance_state(instance))
+ return instance
+
+ def setup_instance(self, instance, state=None):
+ setattr(instance, self.STATE_ATTR,
+ state or self._create_instance_state(instance))
+
+ def teardown_instance(self, instance):
+ delattr(instance, self.STATE_ATTR)
+
+ def _new_state_if_none(self, instance):
+ """Install a default InstanceState if none is present.
+
+ A private convenience method used by the __init__ decorator.
+
+ """
+ if hasattr(instance, self.STATE_ATTR):
+ return False
+ elif self.class_ is not instance.__class__ and \
+ self.is_mapped:
+ # this will create a new ClassManager for the
+ # subclass, without a mapper. This is likely a
+ # user error situation but allow the object
+ # to be constructed, so that it is usable
+ # in a non-ORM context at least.
+ return self._subclass_manager(instance.__class__).\
+ _new_state_if_none(instance)
+ else:
+ state = self._create_instance_state(instance)
+ setattr(instance, self.STATE_ATTR, state)
+ return state
+
+ def state_getter(self):
+ """Return a (instance) -> InstanceState callable.
+
+ "state getter" callables should raise either KeyError or
+ AttributeError if no InstanceState could be found for the
+ instance.
+ """
+
+ return attrgetter(self.STATE_ATTR)
+
+ def dict_getter(self):
+ return attrgetter('__dict__')
+
+ def has_state(self, instance):
+ return hasattr(instance, self.STATE_ATTR)
+
+ def has_parent(self, state, key, optimistic=False):
+ """TODO"""
+ return self.get_impl(key).hasparent(state, optimistic=optimistic)
+
+ def __nonzero__(self):
+ """All ClassManagers are non-zero regardless of attribute state."""
+ return True
+
+ def __repr__(self):
+ return '<%s of %r at %x>' % (
+ self.__class__.__name__, self.class_, id(self))
+
+class _ClassInstrumentationAdapter(ClassManager):
+ """Adapts a user-defined InstrumentationManager to a ClassManager."""
+
+ def __init__(self, class_, override, **kw):
+ self._adapted = override
+ self._get_state = self._adapted.state_getter(class_)
+ self._get_dict = self._adapted.dict_getter(class_)
+
+ ClassManager.__init__(self, class_, **kw)
+
+ def manage(self):
+ self._adapted.manage(self.class_, self)
+
+ def dispose(self):
+ self._adapted.dispose(self.class_)
+
+ def manager_getter(self):
+ return self._adapted.manager_getter(self.class_)
+
+ def instrument_attribute(self, key, inst, propagated=False):
+ ClassManager.instrument_attribute(self, key, inst, propagated)
+ if not propagated:
+ self._adapted.instrument_attribute(self.class_, key, inst)
+
+ def post_configure_attribute(self, key):
+ self._adapted.post_configure_attribute(self.class_, key, self[key])
+
+ def install_descriptor(self, key, inst):
+ self._adapted.install_descriptor(self.class_, key, inst)
+
+ def uninstall_descriptor(self, key):
+ self._adapted.uninstall_descriptor(self.class_, key)
+
+ def install_member(self, key, implementation):
+ self._adapted.install_member(self.class_, key, implementation)
+
+ def uninstall_member(self, key):
+ self._adapted.uninstall_member(self.class_, key)
+
+ def instrument_collection_class(self, key, collection_class):
+ return self._adapted.instrument_collection_class(
+ self.class_, key, collection_class)
+
+ def initialize_collection(self, key, state, factory):
+ delegate = getattr(self._adapted, 'initialize_collection', None)
+ if delegate:
+ return delegate(key, state, factory)
+ else:
+ return ClassManager.initialize_collection(self, key,
+ state, factory)
+
+ def new_instance(self, state=None):
+ instance = self.class_.__new__(self.class_)
+ self.setup_instance(instance, state)
+ return instance
+
+ def _new_state_if_none(self, instance):
+ """Install a default InstanceState if none is present.
+
+ A private convenience method used by the __init__ decorator.
+ """
+ if self.has_state(instance):
+ return False
+ else:
+ return self.setup_instance(instance)
+
+ def setup_instance(self, instance, state=None):
+ self._adapted.initialize_instance_dict(self.class_, instance)
+
+ if state is None:
+ state = self._create_instance_state(instance)
+
+ # the given instance is assumed to have no state
+ self._adapted.install_state(self.class_, instance, state)
+ return state
+
+ def teardown_instance(self, instance):
+ self._adapted.remove_state(self.class_, instance)
+
+ def has_state(self, instance):
+ try:
+ state = self._get_state(instance)
+ except exc.NO_STATE:
+ return False
+ else:
+ return True
+
+ def state_getter(self):
+ return self._get_state
+
+ def dict_getter(self):
+ return self._get_dict
+
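# Illustrative sketch (not part of this patch; assumes the public
# InstrumentationManager interface imported from sqlalchemy.orm): a manager
# that is *not* a ClassManager subclass gets wrapped by the adapter above
# when the class is registered, e.g.::
#
#     from sqlalchemy.orm import InstrumentationManager
#
#     class MyInstrumentation(InstrumentationManager):
#         pass        # rely on the default hooks
#
#     class MyEntity(object):
#         __sa_instrumentation_manager__ = MyInstrumentation
#
#     manager = register_class(MyEntity)
#     assert isinstance(manager, _ClassInstrumentationAdapter)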
+def register_class(class_, **kw):
+ """Register class instrumentation.
+
+ Returns the existing or newly created class manager.
+ """
+
+ manager = manager_of_class(class_)
+ if manager is None:
+ manager = _create_manager_for_cls(class_, **kw)
+ return manager
+
+def unregister_class(class_):
+ """Unregister class instrumentation."""
+
+ instrumentation_registry.unregister(class_)
+
+
+def is_instrumented(instance, key):
+ """Return True if the given attribute on the given instance is
+ instrumented by the attributes package.
+
+ This function may be used regardless of instrumentation
+ applied directly to the class, i.e. no descriptors are required.
+
+ """
+ return manager_of_class(instance.__class__).\
+ is_instrumented(key, search=True)
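# e.g. the module-level helpers above, mirroring how the attribute tests in
# this changeset use them (illustrative sketch, not part of this patch)::
#
#     from sqlalchemy.orm import attributes, instrumentation
#
#     class User(object):
#         pass
#
#     instrumentation.register_class(User)
#     attributes.register_attribute(User, 'name', uselist=False,
#                                   useobject=False)
#
#     u = User()
#     u.name = 'ed'
#     assert instrumentation.is_instrumented(u, 'name')
#     instrumentation.unregister_class(User)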
+
+class InstrumentationRegistry(object):
+ """Private instrumentation registration singleton.
+
+ All classes are routed through this registry
+ when first instrumented; however, the InstrumentationRegistry
+ is not actually needed unless custom ClassManagers are in use.
+
+ """
+
+ _manager_finders = weakref.WeakKeyDictionary()
+ _state_finders = util.WeakIdentityMapping()
+ _dict_finders = util.WeakIdentityMapping()
+ _extended = False
+
+ def create_manager_for_cls(self, class_, **kw):
+ assert class_ is not None
+ assert manager_of_class(class_) is None
+
+ for finder in instrumentation_finders:
+ factory = finder(class_)
+ if factory is not None:
+ break
+ else:
+ factory = ClassManager
+
+ existing_factories = self._collect_management_factories_for(class_).\
+ difference([factory])
+ if existing_factories:
+ raise TypeError(
+ "multiple instrumentation implementations specified "
+ "in %s inheritance hierarchy: %r" % (
+ class_.__name__, list(existing_factories)))
+
+ manager = factory(class_)
+ if not isinstance(manager, ClassManager):
+ manager = _ClassInstrumentationAdapter(class_, manager)
+
+ if factory != ClassManager and not self._extended:
+ # somebody invoked a custom ClassManager.
+ # reinstall global "getter" functions with the more
+ # expensive ones.
+ self._extended = True
+ _install_lookup_strategy(self)
+
+ manager._configure_create_arguments(**kw)
+
+ manager.factory = factory
+ self._manager_finders[class_] = manager.manager_getter()
+ self._state_finders[class_] = manager.state_getter()
+ self._dict_finders[class_] = manager.dict_getter()
+ return manager
+
+ def _collect_management_factories_for(self, cls):
+ """Return a collection of factories in play or specified for a
+ hierarchy.
+
+ Traverses the entire inheritance graph of a cls and returns a
+ collection of instrumentation factories for those classes. Factories
+ are extracted from active ClassManagers, if available, otherwise
+ instrumentation_finders is consulted.
+
+ """
+ hierarchy = util.class_hierarchy(cls)
+ factories = set()
+ for member in hierarchy:
+ manager = manager_of_class(member)
+ if manager is not None:
+ factories.add(manager.factory)
+ else:
+ for finder in instrumentation_finders:
+ factory = finder(member)
+ if factory is not None:
+ break
+ else:
+ factory = None
+ factories.add(factory)
+ factories.discard(None)
+ return factories
+
+ def manager_of_class(self, cls):
+ # this is only called when alternate instrumentation
+ # has been established
+ if cls is None:
+ return None
+ try:
+ finder = self._manager_finders[cls]
+ except KeyError:
+ return None
+ else:
+ return finder(cls)
+
+ def state_of(self, instance):
+ # this is only called when alternate instrumentation
+ # has been established
+ if instance is None:
+ raise AttributeError("None has no persistent state.")
+ try:
+ return self._state_finders[instance.__class__](instance)
+ except KeyError:
+ raise AttributeError("%r is not instrumented" %
+ instance.__class__)
+
+ def dict_of(self, instance):
+ # this is only called when alternate instrumentation
+ # has been established
+ if instance is None:
+ raise AttributeError("None has no persistent state.")
+ try:
+ return self._dict_finders[instance.__class__](instance)
+ except KeyError:
+ raise AttributeError("%r is not instrumented" %
+ instance.__class__)
+
+ def unregister(self, class_):
+ if class_ in self._manager_finders:
+ manager = self.manager_of_class(class_)
+ manager.unregister()
+ manager.dispose()
+ del self._manager_finders[class_]
+ del self._state_finders[class_]
+ del self._dict_finders[class_]
+ if ClassManager.MANAGER_ATTR in class_.__dict__:
+ delattr(class_, ClassManager.MANAGER_ATTR)
+
+instrumentation_registry = InstrumentationRegistry()
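# e.g. conflicting instrumentation factories within one hierarchy are
# rejected by create_manager_for_cls(); an illustrative sketch mirroring the
# InstrumentationCollisionTest cases later in this changeset (not part of
# this patch)::
#
#     class A(object):
#         pass
#     register_class(A)                   # plain ClassManager
#
#     class B(A):
#         __sa_instrumentation_manager__ = staticmethod(
#             lambda cls: ClassManager(cls))
#
#     register_class(B)   # raises TypeError: "multiple instrumentation
#                         # implementations ..."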
+
+
+def _install_lookup_strategy(implementation):
+ """Replace global class/object management functions
+ with either faster or more comprehensive implementations,
+ based on whether or not extended class instrumentation
+ has been detected.
+
+ This function is called only by InstrumentationRegistry()
+ and unit tests specific to this behavior.
+
+ """
+ global instance_state, instance_dict, manager_of_class
+ if implementation is util.symbol('native'):
+ instance_state = attrgetter(ClassManager.STATE_ATTR)
+ instance_dict = attrgetter("__dict__")
+ def manager_of_class(cls):
+ return cls.__dict__.get(ClassManager.MANAGER_ATTR, None)
+ else:
+ instance_state = instrumentation_registry.state_of
+ instance_dict = instrumentation_registry.dict_of
+ manager_of_class = instrumentation_registry.manager_of_class
+ attributes.instance_state = instance_state
+ attributes.instance_dict = instance_dict
+ attributes.manager_of_class = manager_of_class
+
+_create_manager_for_cls = instrumentation_registry.create_manager_for_cls
+
+# Install default "lookup" strategies. These are basically
+# very fast attrgetters for key attributes.
+# When a custom ClassManager is installed, more expensive per-class
+# strategies are copied over these.
+_install_lookup_strategy(util.symbol('native'))
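# With the 'native' strategy installed, the module-level accessors reduce to
# plain attribute lookups; roughly (illustrative, not part of this patch)::
#
#     instance_state(obj)    # == getattr(obj, ClassManager.STATE_ATTR)
#     instance_dict(obj)     # == obj.__dict__
#     manager_of_class(cls)  # == cls.__dict__.get(ClassManager.MANAGER_ATTR)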
+
+
+def find_native_user_instrumentation_hook(cls):
+ """Find user-specified instrumentation management for a class."""
+ return getattr(cls, INSTRUMENTATION_MANAGER, None)
+instrumentation_finders.append(find_native_user_instrumentation_hook)
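# Illustrative sketch (not part of this patch): the finder registered above
# lets a class nominate its own ClassManager subclass, as exercised by the
# extended instrumentation tests later in this changeset::
#
#     class MyClassManager(ClassManager):
#         pass
#
#     class MyEntity(object):
#         __sa_instrumentation_manager__ = MyClassManager
#
#     manager = register_class(MyEntity)
#     assert isinstance(manager, MyClassManager)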
+
+def _generate_init(class_, class_manager):
+ """Build an __init__ decorator that triggers ClassManager events."""
+
+ # TODO: we should use the ClassManager's notion of the
+ # original '__init__' method, once ClassManager is fixed
+ # to always reference that.
+ original__init__ = class_.__init__
+ assert original__init__
+
+ # Go through some effort here and don't change the user's __init__
+ # calling signature.
+ # FIXME: need to juggle local names to avoid constructor argument
+ # clashes.
+ func_body = """\
+def __init__(%(apply_pos)s):
+ new_state = class_manager._new_state_if_none(%(self_arg)s)
+ if new_state:
+ return new_state.initialize_instance(%(apply_kw)s)
+ else:
+ return original__init__(%(apply_kw)s)
+"""
+ func_vars = util.format_argspec_init(original__init__, grouped=False)
+ func_text = func_body % func_vars
+
+ # Py3K
+ #func_defaults = getattr(original__init__, '__defaults__', None)
+ # Py2K
+ func = getattr(original__init__, 'im_func', original__init__)
+ func_defaults = getattr(func, 'func_defaults', None)
+ # end Py2K
+
+ env = locals().copy()
+ exec func_text in env
+ __init__ = env['__init__']
+ __init__.__doc__ = original__init__.__doc__
+ if func_defaults:
+ __init__.func_defaults = func_defaults
+ return __init__
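# For a mapped class whose constructor is ``def __init__(self, name=None)``,
# the template above renders approximately as below (illustrative; the
# original default values are re-attached via func_defaults rather than
# appearing in the rendered source)::
#
#     def __init__(self, name):
#         new_state = class_manager._new_state_if_none(self)
#         if new_state:
#             return new_state.initialize_instance(self, name=name)
#         else:
#             return original__init__(self, name=name)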
MANYTOONE = util.symbol('MANYTOONE')
MANYTOMANY = util.symbol('MANYTOMANY')
-class MapperExtension(object):
- """Base implementation for customizing ``Mapper`` behavior.
-
- New extension classes subclass ``MapperExtension`` and are specified
- using the ``extension`` mapper() argument, which is a single
- ``MapperExtension`` or a list of such. A single mapper
- can maintain a chain of ``MapperExtension`` objects. When a
- particular mapping event occurs, the corresponding method
- on each ``MapperExtension`` is invoked serially, and each method
- has the ability to halt the chain from proceeding further.
-
- Each ``MapperExtension`` method returns the symbol
- EXT_CONTINUE by default. This symbol generally means "move
- to the next ``MapperExtension`` for processing". For methods
- that return objects like translated rows or new object
- instances, EXT_CONTINUE means the result of the method
- should be ignored. In some cases it's required for a
- default mapper activity to be performed, such as adding a
- new instance to a result list.
-
- The symbol EXT_STOP has significance within a chain
- of ``MapperExtension`` objects that the chain will be stopped
- when this symbol is returned. Like EXT_CONTINUE, it also
- has additional significance in some cases that a default
- mapper activity will not be performed.
-
- """
-
- def instrument_class(self, mapper, class_):
- """Receive a class when the mapper is first constructed, and has
- applied instrumentation to the mapped class.
-
- The return value is only significant within the ``MapperExtension``
- chain; the parent mapper's behavior isn't modified by this method.
-
- """
- return EXT_CONTINUE
-
- def init_instance(self, mapper, class_, oldinit, instance, args, kwargs):
- """Receive an instance when it's constructor is called.
-
- This method is only called during a userland construction of
- an object. It is not called when an object is loaded from the
- database.
-
- The return value is only significant within the ``MapperExtension``
- chain; the parent mapper's behavior isn't modified by this method.
-
- """
- return EXT_CONTINUE
-
- def init_failed(self, mapper, class_, oldinit, instance, args, kwargs):
- """Receive an instance when it's constructor has been called,
- and raised an exception.
-
- This method is only called during a userland construction of
- an object. It is not called when an object is loaded from the
- database.
-
- The return value is only significant within the ``MapperExtension``
- chain; the parent mapper's behavior isn't modified by this method.
-
- """
- return EXT_CONTINUE
-
- def translate_row(self, mapper, context, row):
- """Perform pre-processing on the given result row and return a
- new row instance.
-
- This is called when the mapper first receives a row, before
- the object identity or the instance itself has been derived
- from that row. The given row may or may not be a
- ``RowProxy`` object - it will always be a dictionary-like
- object which contains mapped columns as keys. The
- returned object should also be a dictionary-like object
- which recognizes mapped columns as keys.
-
- If the ultimate return value is EXT_CONTINUE, the row
- is not translated.
-
- """
- return EXT_CONTINUE
-
- def create_instance(self, mapper, selectcontext, row, class_):
- """Receive a row when a new object instance is about to be
- created from that row.
-
- The method can choose to create the instance itself, or it can return
- EXT_CONTINUE to indicate normal object creation should take place.
-
- mapper
- The mapper doing the operation
-
- selectcontext
- The QueryContext generated from the Query.
-
- row
- The result row from the database
-
- class\_
- The class we are mapping.
-
- return value
- A new object instance, or EXT_CONTINUE
-
- """
- return EXT_CONTINUE
-
- def append_result(self, mapper, selectcontext, row, instance,
- result, **flags):
- """Receive an object instance before that instance is appended
- to a result list.
-
- If this method returns EXT_CONTINUE, result appending will proceed
- normally. if this method returns any other value or None,
- result appending will not proceed for this instance, giving
- this extension an opportunity to do the appending itself, if
- desired.
-
- mapper
- The mapper doing the operation.
-
- selectcontext
- The QueryContext generated from the Query.
-
- row
- The result row from the database.
-
- instance
- The object instance to be appended to the result.
-
- result
- List to which results are being appended.
-
- \**flags
- extra information about the row, same as criterion in
- ``create_row_processor()`` method of
- :class:`~sqlalchemy.orm.interfaces.MapperProperty`
- """
-
- return EXT_CONTINUE
-
- def populate_instance(self, mapper, selectcontext, row,
- instance, **flags):
- """Receive an instance before that instance has
- its attributes populated.
-
- This usually corresponds to a newly loaded instance but may
- also correspond to an already-loaded instance which has
- unloaded attributes to be populated. The method may be called
- many times for a single instance, as multiple result rows are
- used to populate eagerly loaded collections.
-
- If this method returns EXT_CONTINUE, instance population will
- proceed normally. If any other value or None is returned,
- instance population will not proceed, giving this extension an
- opportunity to populate the instance itself, if desired.
-
- As of 0.5, most usages of this hook are obsolete. For a
- generic "object has been newly created from a row" hook, use
- ``reconstruct_instance()``, or the ``@orm.reconstructor``
- decorator.
-
- """
- return EXT_CONTINUE
-
- def reconstruct_instance(self, mapper, instance):
- """Receive an object instance after it has been created via
- ``__new__``, and after initial attribute population has
- occurred.
-
- This typically occurs when the instance is created based on
- incoming result rows, and is only called once for that
- instance's lifetime.
-
- Note that during a result-row load, this method is called upon
- the first row received for this instance. Note that some
- attributes and collections may or may not be loaded or even
- initialized, depending on what's present in the result rows.
-
- The return value is only significant within the ``MapperExtension``
- chain; the parent mapper's behavior isn't modified by this method.
-
- """
- return EXT_CONTINUE
-
- def before_insert(self, mapper, connection, instance):
- """Receive an object instance before that instance is inserted
- into its table.
-
- This is a good place to set up primary key values and such
- that aren't handled otherwise.
-
- Column-based attributes can be modified within this method
- which will result in the new value being inserted. However
- *no* changes to the overall flush plan can be made, and
- manipulation of the ``Session`` will not have the desired effect.
- To manipulate the ``Session`` within an extension, use
- ``SessionExtension``.
-
- The return value is only significant within the ``MapperExtension``
- chain; the parent mapper's behavior isn't modified by this method.
-
- """
-
- return EXT_CONTINUE
-
- def after_insert(self, mapper, connection, instance):
- """Receive an object instance after that instance is inserted.
-
- The return value is only significant within the ``MapperExtension``
- chain; the parent mapper's behavior isn't modified by this method.
-
- """
-
- return EXT_CONTINUE
-
- def before_update(self, mapper, connection, instance):
- """Receive an object instance before that instance is updated.
-
- Note that this method is called for all instances that are marked as
- "dirty", even those which have no net changes to their column-based
- attributes. An object is marked as dirty when any of its column-based
- attributes have a "set attribute" operation called or when any of its
- collections are modified. If, at update time, no column-based
- attributes have any net changes, no UPDATE statement will be issued.
- This means that an instance being sent to before_update is *not* a
- guarantee that an UPDATE statement will be issued (although you can
- affect the outcome here).
-
- To detect if the column-based attributes on the object have net
- changes, and will therefore generate an UPDATE statement, use
- ``object_session(instance).is_modified(instance,
- include_collections=False)``.
-
- Column-based attributes can be modified within this method
- which will result in the new value being updated. However
- *no* changes to the overall flush plan can be made, and
- manipulation of the ``Session`` will not have the desired effect.
- To manipulate the ``Session`` within an extension, use
- ``SessionExtension``.
-
- The return value is only significant within the ``MapperExtension``
- chain; the parent mapper's behavior isn't modified by this method.
-
- """
-
- return EXT_CONTINUE
-
- def after_update(self, mapper, connection, instance):
- """Receive an object instance after that instance is updated.
-
- The return value is only significant within the ``MapperExtension``
- chain; the parent mapper's behavior isn't modified by this method.
-
- """
-
- return EXT_CONTINUE
-
- def before_delete(self, mapper, connection, instance):
- """Receive an object instance before that instance is deleted.
-
- Note that *no* changes to the overall flush plan can be made
- here; and manipulation of the ``Session`` will not have the
- desired effect. To manipulate the ``Session`` within an
- extension, use ``SessionExtension``.
-
- The return value is only significant within the ``MapperExtension``
- chain; the parent mapper's behavior isn't modified by this method.
-
- """
-
- return EXT_CONTINUE
-
- def after_delete(self, mapper, connection, instance):
- """Receive an object instance after that instance is deleted.
-
- The return value is only significant within the ``MapperExtension``
- chain; the parent mapper's behavior isn't modified by this method.
-
- """
-
- return EXT_CONTINUE
-
-class SessionExtension(object):
-
- """An extension hook object for Sessions. Subclasses may be
- installed into a Session (or sessionmaker) using the ``extension``
- keyword argument. """
-
- def before_commit(self, session):
- """Execute right before commit is called.
-
- Note that this may not be per-flush if a longer running
- transaction is ongoing."""
-
- def after_commit(self, session):
- """Execute after a commit has occured.
-
- Note that this may not be per-flush if a longer running
- transaction is ongoing."""
+from sqlalchemy.orm.deprecated_interfaces import AttributeExtension, \
+ SessionExtension, MapperExtension
- def after_rollback(self, session):
- """Execute after a rollback has occured.
-
- Note that this may not be per-flush if a longer running
- transaction is ongoing."""
-
- def before_flush( self, session, flush_context, instances):
- """Execute before flush process has started.
-
- `instances` is an optional list of objects which were passed to
- the ``flush()`` method. """
-
- def after_flush(self, session, flush_context):
- """Execute after flush has completed, but before commit has been
- called.
-
- Note that the session's state is still in pre-flush, i.e. 'new',
- 'dirty', and 'deleted' lists still show pre-flush state as well
- as the history settings on instance attributes."""
-
- def after_flush_postexec(self, session, flush_context):
- """Execute after flush has completed, and after the post-exec
- state occurs.
-
- This will be when the 'new', 'dirty', and 'deleted' lists are in
- their final state. An actual commit() may or may not have
- occured, depending on whether or not the flush started its own
- transaction or participated in a larger transaction. """
-
- def after_begin( self, session, transaction, connection):
- """Execute after a transaction is begun on a connection
-
- `transaction` is the SessionTransaction. This method is called
- after an engine level transaction is begun on a connection. """
-
- def after_attach(self, session, instance):
- """Execute after an instance is attached to a session.
-
- This is called after an add, delete or merge. """
-
- def after_bulk_update( self, session, query, query_context, result):
- """Execute after a bulk update operation to the session.
-
- This is called after a session.query(...).update()
-
- `query` is the query object that this update operation was
- called on. `query_context` was the query context object.
- `result` is the result object returned from the bulk operation.
- """
-
- def after_bulk_delete( self, session, query, query_context, result):
- """Execute after a bulk delete operation to the session.
-
- This is called after a session.query(...).delete()
-
- `query` is the query object that this delete operation was
- called on. `query_context` was the query context object.
- `result` is the result object returned from the bulk operation.
- """
class MapperProperty(object):
"""Manage the relationship of a ``Mapper`` to a single class
self.process_query(query)
-class ExtensionOption(MapperOption):
-
- """a MapperOption that applies a MapperExtension to a query
- operation."""
-
- def __init__(self, ext):
- self.ext = ext
-
- def process_query(self, query):
- entity = query._generate_mapper_zero()
- entity.extension = entity.extension.copy()
- entity.extension.push(self.ext)
-
class PropertyOption(MapperOption):
"""A MapperOption that is applied to a property off the mapper or
one of its child mappers, identified by a dot-separated key. """
return [], []
return l, mappers
-class AttributeExtension(object):
- """An event handler for individual attribute change events.
-
- .. note:: :class:`AttributeExtension` is deprecated. Please
- refer to :func:`event.listen` as well as
- :attr:`AttributeImpl.events`.
-
- AttributeExtension is assembled within the descriptors associated
- with a mapped class.
-
- """
- active_history = True
- """indicates that the set() method would like to receive the 'old' value,
- even if it means firing lazy callables.
- """
- @classmethod
- def _adapt_listener(cls, self, listener):
- event.listen(listener.append, 'on_append', self,
- active_history=listener.active_history)
- event.listen(listener.remove, 'on_remove', self,
- active_history=listener.active_history)
- event.listen(listener.set, 'on_set', self,
- active_history=listener.active_history)
-
- def append(self, state, value, initiator):
- """Receive a collection append event.
-
- The returned value will be used as the actual value to be
- appended.
-
- """
- return value
-
- def remove(self, state, value, initiator):
- """Receive a remove event.
-
- No return value is defined.
-
- """
- pass
-
- def set(self, state, value, oldvalue, initiator):
- """Receive a set event.
-
- The returned value will be used as the actual value to be
- set.
-
- """
- return value
+class ExtensionOption(MapperOption):
+ """a MapperOption that applies a MapperExtension to a query
+ operation.
+
+ This class is **deprecated**.
+
+ """
+ def __init__(self, ext):
+ self.ext = ext
+
+ def process_query(self, query):
+ entity = query._generate_mapper_zero()
+ entity.extension = entity.extension.copy()
+ entity.extension.push(self.ext)
class StrategizedOption(PropertyOption):
deque = __import__('collections').deque
from sqlalchemy import sql, util, log, exc as sa_exc, event
-from sqlalchemy.sql import expression, visitors, operators, util as sqlutil
-from sqlalchemy.orm import attributes, sync, exc as orm_exc, unitofwork
-from sqlalchemy.orm.interfaces import (
- MapperProperty, EXT_CONTINUE, PropComparator
- )
-from sqlalchemy.orm.util import (
- ExtensionCarrier, _INSTRUMENTOR, _class_to_mapper,
- _state_mapper, class_mapper, instance_str, state_str,
- )
+from sqlalchemy.sql import expression, visitors, operators, \
+ util as sqlutil
+from sqlalchemy.orm import instrumentation, sync, exc as orm_exc, \
+ unitofwork, attributes
+from sqlalchemy.orm.interfaces import MapperProperty, EXT_CONTINUE, \
+ PropComparator
+from sqlalchemy.orm.util import ExtensionCarrier, _INSTRUMENTOR, \
+ _class_to_mapper, _state_mapper, class_mapper, instance_str, \
+ state_str
__all__ = (
'Mapper',
self.extension.instrument_class(self, self.class_)
if manager is None:
- manager = attributes.register_class(self.class_,
+ manager = instrumentation.register_class(self.class_,
deferred_scalar_loader = _load_scalar_attributes
)
if not self.non_primary and \
self.class_manager.is_mapped and \
self.class_manager.mapper is self:
- attributes.unregister_class(self.class_)
+ instrumentation.unregister_class(self.class_)
def _configure_pks(self):
+"""Defines instrumentation of instances.
+
+This module is usually not directly visible to user applications, but
+defines a large part of the ORM's interactivity.
+
+"""
+
from sqlalchemy.util import EMPTY_SET
import weakref
from sqlalchemy import util
-from sqlalchemy.orm.attributes import PASSIVE_NO_RESULT, PASSIVE_OFF, \
- NEVER_SET, NO_VALUE, manager_of_class, \
- ATTR_WAS_SET
-from sqlalchemy.orm import attributes, exc as orm_exc, interfaces
+
+from sqlalchemy.orm import exc as orm_exc, attributes, interfaces
+from sqlalchemy.orm.attributes import PASSIVE_OFF, PASSIVE_NO_RESULT, \
+ PASSIVE_NO_FETCH, NEVER_SET, ATTR_WAS_SET, NO_VALUE
import sys
-attributes.state = sys.modules['sqlalchemy.orm.state']
class InstanceState(object):
"""tracks state information at the instance level."""
return d
def __setstate__(self, state):
+ from sqlalchemy.orm import instrumentation
self.obj = weakref.ref(state['instance'], self._cleanup)
self.class_ = state['instance'].__class__
- self.manager = manager = manager_of_class(self.class_)
+ self.manager = manager = instrumentation.manager_of_class(self.class_)
if manager is None:
raise orm_exc.UnmappedInstanceError(
state['instance'],
"""
- if kw.get('passive') is attributes.PASSIVE_NO_FETCH:
- return attributes.PASSIVE_NO_RESULT
+ if kw.get('passive') is PASSIVE_NO_FETCH:
+ return PASSIVE_NO_RESULT
toload = self.expired_attributes.\
intersection(self.unmodified)
import weakref, time, threading
-from sqlalchemy import exc, log, event, interfaces, util
+from sqlalchemy import exc, log, event, events, interfaces, util
from sqlalchemy import queue as sqla_queue
from sqlalchemy.util import threading, pickle, memoized_property
manager.close()
proxies.clear()
-class PoolEvents(event.Events):
- """Available events for :class:`.Pool`.
-
- The methods here define the name of an event as well
- as the names of members that are passed to listener
- functions.
-
- e.g.::
-
- from sqlalchemy import events
-
- def my_on_checkout(dbapi_conn, connection_rec, connection_proxy):
- "handle an on checkout event"
-
- events.listen(my_on_checkout, 'on_checkout', Pool)
-
- """
-
- def on_connect(self, dbapi_connection, connection_record):
- """Called once for each new DB-API connection or Pool's ``creator()``.
-
- :param dbapi_con:
- A newly connected raw DB-API connection (not a SQLAlchemy
- ``Connection`` wrapper).
-
- :param con_record:
- The ``_ConnectionRecord`` that persistently manages the connection
-
- """
-
- def on_first_connect(self, dbapi_connection, connection_record):
- """Called exactly once for the first DB-API connection.
-
- :param dbapi_con:
- A newly connected raw DB-API connection (not a SQLAlchemy
- ``Connection`` wrapper).
-
- :param con_record:
- The ``_ConnectionRecord`` that persistently manages the connection
-
- """
-
- def on_checkout(self, dbapi_connection, connection_record, connection_proxy):
- """Called when a connection is retrieved from the Pool.
-
- :param dbapi_con:
- A raw DB-API connection
-
- :param con_record:
- The ``_ConnectionRecord`` that persistently manages the connection
-
- :param con_proxy:
- The ``_ConnectionFairy`` which manages the connection for the span of
- the current checkout.
-
- If you raise an ``exc.DisconnectionError``, the current
- connection will be disposed and a fresh connection retrieved.
- Processing of all checkout listeners will abort and restart
- using the new connection.
- """
-
- def on_checkin(self, dbapi_connection, connection_record):
- """Called when a connection returns to the pool.
-
- Note that the connection may be closed, and may be None if the
- connection has been invalidated. ``checkin`` will not be called
- for detached connections. (They do not return to the pool.)
-
- :param dbapi_con:
- A raw DB-API connection
-
- :param con_record:
- The ``_ConnectionRecord`` that persistently manages the connection
-
- """
class Pool(log.Identified):
"""Abstract base class for connection pools."""
for l in listeners:
self.add_listener(l)
- dispatch = event.dispatcher(PoolEvents)
+ dispatch = event.dispatcher(events.PoolEvents)
@util.deprecated(2.7, "Pool.add_listener is deprecated. Use event.listen()")
def add_listener(self, listener):
import re, inspect
from sqlalchemy import exc, util, dialects
from sqlalchemy.sql import expression, visitors
-from sqlalchemy import event
+from sqlalchemy import event, events
URL = None
else:
return schema + "." + name
-class DDLEvents(event.Events):
- """
- Define create/drop event listers for schema objects.
-
- See also:
-
- :mod:`sqlalchemy.event`
-
- """
-
- def on_before_create(self, target, connection, **kw):
- pass
-
- def on_after_create(self, target, connection, **kw):
- pass
-
- def on_before_drop(self, target, connection, **kw):
- pass
-
- def on_after_drop(self, target, connection, **kw):
- pass
-
class Table(SchemaItem, expression.TableClause):
"""Represent a table in a database.
__visit_name__ = 'table'
- dispatch = event.dispatcher(DDLEvents)
+ dispatch = event.dispatcher(events.DDLEvents)
def __new__(cls, *args, **kw):
if not args:
def supports_alter(ddl, event, schema_item, bind, **kw):
return table in set(kw['tables']) and \
bind.dialect.supports_alter
-
- AddConstraint(self, on=supports_alter).\
- execute_at('after-create', table.metadata)
- DropConstraint(self, on=supports_alter).\
- execute_at('before-drop', table.metadata)
+
+ event.listen(AddConstraint(self, on=supports_alter),
+ "on_after_create", table.metadata)
+ event.listen(DropConstraint(self, on=supports_alter),
+ "on_before_drop", table.metadata)
+
def copy(self, **kw):
return ForeignKeyConstraint(
__visit_name__ = 'metadata'
- dispatch = event.dispatcher(DDLEvents)
+ dispatch = event.dispatcher(events.DDLEvents)
def __init__(self, bind=None, reflect=False):
"""Create a new MetaData object.
def test_table_standalone(self):
users, engine = self.users, self.engine
- DDL('mxyzptlk').execute_at('before-create', users)
- DDL('klptzyxm').execute_at('after-create', users)
- DDL('xyzzy').execute_at('before-drop', users)
- DDL('fnord').execute_at('after-drop', users)
+ event.listen(DDL('mxyzptlk'), 'on_before_create', users)
+ event.listen(DDL('klptzyxm'), 'on_after_create', users)
+ event.listen(DDL('xyzzy'), 'on_before_drop', users)
+ event.listen(DDL('fnord'), 'on_after_drop', users)
users.create()
strings = [str(x) for x in engine.mock]
assert 'xyzzy' in strings
assert 'fnord' in strings
+ @testing.uses_deprecated(r'See DDLEvents')
def test_table_by_metadata_deprecated(self):
metadata, users, engine = self.metadata, self.users, self.engine
DDL('mxyzptlk').execute_at('before-create', users)
assert 'xyzzy' in strings
assert 'fnord' in strings
+ @testing.uses_deprecated(r'See DDLEvents')
def test_metadata_deprecated(self):
metadata, engine = self.metadata, self.engine
strings = ' '.join(str(x) for x in pg_mock.mock)
assert 'my_test_constraint' in strings
+ @testing.uses_deprecated(r'See DDLEvents')
def test_conditional_constraint_deprecated(self):
metadata, users, engine = self.metadata, self.users, self.engine
nonpg_mock = engines.mock_engine(dialect_name='sqlite')
!= 'bogus').
_should_execute(tbl, cx))
+ @testing.uses_deprecated(r'See DDLEvents')
def test_filter_deprecated(self):
cx = self.mock_engine()
import pickle
-import sqlalchemy.orm.attributes as attributes
+from sqlalchemy.orm import attributes, instrumentation
from sqlalchemy.orm.collections import collection
from sqlalchemy.orm.interfaces import AttributeExtension
from sqlalchemy import exc as sa_exc
def test_basic(self):
class User(object):pass
- attributes.register_class(User)
+ instrumentation.register_class(User)
attributes.register_attribute(User, 'user_id', uselist=False, useobject=False)
attributes.register_attribute(User, 'user_name', uselist=False, useobject=False)
attributes.register_attribute(User, 'email_address', uselist=False, useobject=False)
self.assert_(u.user_id == 7 and u.user_name == 'heythere' and u.email_address == 'foo@bar.com')
def test_pickleness(self):
- attributes.register_class(MyTest)
- attributes.register_class(MyTest2)
+ instrumentation.register_class(MyTest)
+ instrumentation.register_class(MyTest2)
attributes.register_attribute(MyTest, 'user_id', uselist=False, useobject=False)
attributes.register_attribute(MyTest, 'user_name', uselist=False, useobject=False)
attributes.register_attribute(MyTest, 'email_address', uselist=False, useobject=False)
class Foo(object):
pass
- attributes.register_class(Foo)
+ instrumentation.register_class(Foo)
f = Foo()
state = attributes.instance_state(f)
f.bar = "foo"
state.dict[k] = data[k]
return attributes.ATTR_WAS_SET
- attributes.register_class(Foo)
+ instrumentation.register_class(Foo)
manager = attributes.manager_of_class(Foo)
manager.deferred_scalar_loader = loader
attributes.register_attribute(Foo, 'a', uselist=False, useobject=False)
state.dict[k] = data[k]
return attributes.ATTR_WAS_SET
- attributes.register_class(MyTest)
+ instrumentation.register_class(MyTest)
manager = attributes.manager_of_class(MyTest)
manager.deferred_scalar_loader=loader
attributes.register_attribute(MyTest, 'a', uselist=False, useobject=False)
class User(object):pass
class Address(object):pass
- attributes.register_class(User)
- attributes.register_class(Address)
+ instrumentation.register_class(User)
+ instrumentation.register_class(Address)
attributes.register_attribute(User, 'user_id', uselist=False, useobject=False)
attributes.register_attribute(User, 'user_name', uselist=False, useobject=False)
attributes.register_attribute(User, 'addresses', uselist = True, useobject=True)
state.commit_all(state.dict)
return child
- attributes.register_class(Foo)
- attributes.register_class(Bar)
+ instrumentation.register_class(Foo)
+ instrumentation.register_class(Bar)
b1, b2, b3, b4 = Bar(id='b1'), Bar(id='b2'), Bar(id='b3'), Bar(id='b4')
def set(self, state, child, oldchild, initiator):
return child
- attributes.register_class(Foo)
- attributes.register_class(Bar)
+ instrumentation.register_class(Foo)
+ instrumentation.register_class(Bar)
bar1, bar2, bar3 = [Bar(id=1), Bar(id=2), Bar(id=3)]
def func1(**kw):
results.append(("set", state.obj(), child, oldchild))
return child
- attributes.register_class(Foo)
+ instrumentation.register_class(Foo)
attributes.register_attribute(Foo, 'x', uselist=False, mutable_scalars=False, useobject=False, extension=ReceiveEvents())
attributes.register_attribute(Foo, 'y', uselist=False, mutable_scalars=True, useobject=False, copy_function=lambda x:x, extension=ReceiveEvents())
class Post(object):pass
class Blog(object):pass
- attributes.register_class(Post)
- attributes.register_class(Blog)
+ instrumentation.register_class(Post)
+ instrumentation.register_class(Blog)
# set up instrumented attributes with backrefs
attributes.register_attribute(Post, 'blog', uselist=False,
class Bar(Foo):pass
- attributes.register_class(Foo)
- attributes.register_class(Bar)
+ instrumentation.register_class(Foo)
+ instrumentation.register_class(Bar)
def func1(**kw):
return "this is the foo attr"
Foo.__init__(self)
- attributes.register_class(Foo)
- attributes.register_class(Bar)
+ instrumentation.register_class(Foo)
+ instrumentation.register_class(Bar)
b = Bar()
eq_(len(states), 1)
class Element(object):
_state = True
- attributes.register_class(Foo)
- attributes.register_class(Bar)
+ instrumentation.register_class(Foo)
+ instrumentation.register_class(Bar)
attributes.register_attribute(Foo, 'element', uselist=False, useobject=True)
el = Element()
x = Bar()
class Bar(_base.BasicEntity):
pass
- attributes.register_class(Foo)
- attributes.register_class(Bar)
+ instrumentation.register_class(Foo)
+ instrumentation.register_class(Bar)
bar1, bar2, bar3, bar4 = [Bar(id=1), Bar(id=2), Bar(id=3), Bar(id=4)]
def func1(**kw):
class Foo(object):pass
class Bar(object):pass
- attributes.register_class(Foo)
- attributes.register_class(Bar)
+ instrumentation.register_class(Foo)
+ instrumentation.register_class(Bar)
attributes.register_attribute(Foo, 'element', uselist=False, trackparent=True, useobject=True)
attributes.register_attribute(Bar, 'element', uselist=False, trackparent=True, useobject=True)
"""test detection of changes on mutable scalar items"""
class Foo(object):pass
- attributes.register_class(Foo)
+ instrumentation.register_class(Foo)
attributes.register_attribute(Foo, 'element', uselist=False, copy_function=lambda x:[y for y in x], mutable_scalars=True, useobject=False)
x = Foo()
x.element = ['one', 'two', 'three']
x.element[1] = 'five'
assert attributes.instance_state(x).modified
- attributes.unregister_class(Foo)
+ instrumentation.unregister_class(Foo)
- attributes.register_class(Foo)
+ instrumentation.register_class(Foo)
attributes.register_attribute(Foo, 'element', uselist=False, useobject=False)
x = Foo()
x.element = ['one', 'two', 'three']
class Foo(object):
A = des()
- attributes.register_class(Foo)
- attributes.unregister_class(Foo)
+ instrumentation.register_class(Foo)
+ instrumentation.unregister_class(Foo)
def test_collectionclasses(self):
class Foo(object):pass
- attributes.register_class(Foo)
+ instrumentation.register_class(Foo)
attributes.register_attribute(Foo, "collection", uselist=True, typecallable=set, useobject=True)
assert attributes.manager_of_class(Foo).is_instrumented("collection")
class Bar(object):
pass
- attributes.register_class(Foo)
- attributes.register_class(Bar)
+ instrumentation.register_class(Foo)
+ instrumentation.register_class(Bar)
attributes.register_attribute(Foo, "coll", uselist=True, useobject=True)
f1 = Foo()
class Student(object):pass
class Course(object):pass
- attributes.register_class(Student)
- attributes.register_class(Course)
+ instrumentation.register_class(Student)
+ instrumentation.register_class(Course)
attributes.register_attribute(Student, 'courses', uselist=True,
extension=attributes.GenericBackrefExtension('students'
), useobject=True)
class Post(object):pass
class Blog(object):pass
- attributes.register_class(Post)
- attributes.register_class(Blog)
+ instrumentation.register_class(Post)
+ instrumentation.register_class(Blog)
attributes.register_attribute(Post, 'blog', uselist=False,
extension=attributes.GenericBackrefExtension('posts'),
trackparent=True, useobject=True)
def test_o2o(self):
class Port(object):pass
class Jack(object):pass
- attributes.register_class(Port)
- attributes.register_class(Jack)
+ instrumentation.register_class(Port)
+ instrumentation.register_class(Jack)
attributes.register_attribute(Port, 'jack', uselist=False,
extension=attributes.GenericBackrefExtension('port'),
useobject=True)
p_token = object()
c_token = object()
- attributes.register_class(Parent)
- attributes.register_class(Child)
- attributes.register_class(SubChild)
+ instrumentation.register_class(Parent)
+ instrumentation.register_class(Child)
+ instrumentation.register_class(SubChild)
attributes.register_attribute(Parent, 'child', uselist=False,
extension=attributes.GenericBackrefExtension('parent'),
parent_token = p_token,
p_token = object()
c_token = object()
- attributes.register_class(Parent)
- attributes.register_class(SubParent)
- attributes.register_class(Child)
+ instrumentation.register_class(Parent)
+ instrumentation.register_class(SubParent)
+ instrumentation.register_class(Child)
attributes.register_attribute(Parent, 'children', uselist=True,
extension=attributes.GenericBackrefExtension('parent'),
parent_token = p_token,
return attributes.PASSIVE_NO_RESULT
return load
- attributes.register_class(Post)
- attributes.register_class(Blog)
+ instrumentation.register_class(Post)
+ instrumentation.register_class(Blog)
attributes.register_attribute(Post, 'blog', uselist=False, extension=attributes.GenericBackrefExtension('posts'), trackparent=True, useobject=True)
attributes.register_attribute(Blog, 'posts', uselist=True, extension=attributes.GenericBackrefExtension('blog'), callable_=lazy_posts, trackparent=True, useobject=True)
class Foo(_base.BasicEntity):
pass
- attributes.register_class(Foo)
+ instrumentation.register_class(Foo)
attributes.register_attribute(Foo, 'someattr', uselist=False, useobject=False)
f = Foo()
class Foo(_base.BasicEntity):
pass
- attributes.register_class(Foo)
+ instrumentation.register_class(Foo)
attributes.register_attribute(Foo, 'someattr', uselist=False, useobject=False)
# case 1. new object
class Foo(_base.BasicEntity):
pass
- attributes.register_class(Foo)
+ instrumentation.register_class(Foo)
attributes.register_attribute(Foo, 'someattr', uselist=False, useobject=False, mutable_scalars=True, copy_function=dict)
# case 1. new object
new = Bar(name='new')
old = Bar(name='old')
- attributes.register_class(Foo)
+ instrumentation.register_class(Foo)
attributes.register_attribute(Foo, 'someattr', uselist=False, useobject=True)
# case 1. new object
def __nonzero__(self):
assert False
- attributes.register_class(Foo)
+ instrumentation.register_class(Foo)
attributes.register_attribute(Foo, 'someattr', uselist=True, useobject=True)
hi = Bar(name='hi')
from sqlalchemy.orm.collections import attribute_mapped_collection
- attributes.register_class(Foo)
+ instrumentation.register_class(Foo)
attributes.register_attribute(Foo, 'someattr', uselist=True, useobject=True, typecallable=attribute_mapped_collection('name'))
hi = Bar(name='hi')
class Bar(_base.BasicEntity):
pass
- attributes.register_class(Foo)
+ instrumentation.register_class(Foo)
attributes.register_attribute(Foo, 'someattr', uselist=True, useobject=True)
attributes.register_attribute(Foo, 'id', uselist=False, useobject=False)
class Bar(_base.BasicEntity):
pass
- attributes.register_class(Foo)
- attributes.register_class(Bar)
+ instrumentation.register_class(Foo)
+ instrumentation.register_class(Bar)
attributes.register_attribute(Foo, 'bars', uselist=True, extension=attributes.GenericBackrefExtension('foo'), trackparent=True, useobject=True)
attributes.register_attribute(Bar, 'foo', uselist=False, extension=attributes.GenericBackrefExtension('bars'), trackparent=True, useobject=True)
return lazy_load
return load
- attributes.register_class(Foo)
- attributes.register_class(Bar)
+ instrumentation.register_class(Foo)
+ instrumentation.register_class(Bar)
attributes.register_attribute(Foo, 'bars', uselist=True, extension=attributes.GenericBackrefExtension('foo'), trackparent=True, callable_=lazyload, useobject=True)
attributes.register_attribute(Bar, 'foo', uselist=False, extension=attributes.GenericBackrefExtension('bars'), trackparent=True, useobject=True)
return lazy_load
return load
- attributes.register_class(Foo)
- attributes.register_class(Bar)
+ instrumentation.register_class(Foo)
+ instrumentation.register_class(Bar)
attributes.register_attribute(Foo, 'bars', uselist=True, callable_=lazyload, trackparent=True, useobject=True)
bar1, bar2, bar3, bar4 = [Bar(id=1), Bar(id=2), Bar(id=3), Bar(id=4)]
return lazy_load
return load
- attributes.register_class(Foo)
+ instrumentation.register_class(Foo)
attributes.register_attribute(Foo, 'bar', uselist=False, callable_=lazyload, useobject=False)
lazy_load = "hi"
return lazy_load
return load
- attributes.register_class(Foo)
+ instrumentation.register_class(Foo)
attributes.register_attribute(Foo, 'bar', uselist=False, callable_=lazyload, useobject=False, active_history=True)
lazy_load = "hi"
return lazy_load
return load
- attributes.register_class(Foo)
- attributes.register_class(Bar)
+ instrumentation.register_class(Foo)
+ instrumentation.register_class(Bar)
attributes.register_attribute(Foo, 'bar', uselist=False, callable_=lazyload, trackparent=True, useobject=True)
bar1, bar2 = [Bar(id=1), Bar(id=2)]
lazy_load = bar1
def set(self, state, value, oldvalue, initiator):
return value + " modified"
- attributes.register_class(Foo)
- attributes.register_class(Bar)
+ instrumentation.register_class(Foo)
+ instrumentation.register_class(Bar)
attributes.register_attribute(Foo, 'data', uselist=False, useobject=False, extension=AlteringListener())
attributes.register_attribute(Foo, 'barlist', uselist=True, useobject=True, extension=AlteringListener())
attributes.register_attribute(Foo, 'barset', typecallable=set, uselist=True, useobject=True, extension=AlteringListener())
from sqlalchemy import Integer, String, ForeignKey, text
from sqlalchemy.test.schema import Table, Column
from sqlalchemy import util, exc as sa_exc
-from sqlalchemy.orm import create_session, mapper, relationship, attributes
+from sqlalchemy.orm import create_session, mapper, relationship, \
+ attributes, instrumentation
from test.orm import _base
from sqlalchemy.test.testing import eq_, assert_raises, assert_raises_message
@classmethod
def setup_class(cls):
- attributes.register_class(cls.Entity)
+ instrumentation.register_class(cls.Entity)
@classmethod
def teardown_class(cls):
- attributes.unregister_class(cls.Entity)
+ instrumentation.unregister_class(cls.Entity)
super(CollectionsTest, cls).teardown_class()
_entity_id = 1
pass
canary = Canary()
- attributes.register_class(Foo)
+ instrumentation.register_class(Foo)
attributes.register_attribute(Foo, 'attr', uselist=True, extension=canary,
typecallable=typecallable, useobject=True)
pass
canary = Canary()
- attributes.register_class(Foo)
+ instrumentation.register_class(Foo)
attributes.register_attribute(Foo, 'attr', uselist=True, extension=canary,
typecallable=typecallable, useobject=True)
pass
canary = Canary()
- attributes.register_class(Foo)
+ instrumentation.register_class(Foo)
attributes.register_attribute(Foo, 'attr', uselist=True, extension=canary,
typecallable=typecallable, useobject=True)
pass
canary = Canary()
- attributes.register_class(Foo)
+ instrumentation.register_class(Foo)
attributes.register_attribute(Foo, 'attr', uselist=True, extension=canary,
typecallable=typecallable, useobject=True)
pass
canary = Canary()
- attributes.register_class(Foo)
+ instrumentation.register_class(Foo)
attributes.register_attribute(Foo, 'attr', uselist=True, extension=canary,
typecallable=typecallable, useobject=True)
pass
canary = Canary()
- attributes.register_class(Foo)
+ instrumentation.register_class(Foo)
attributes.register_attribute(Foo, 'attr', uselist=True, extension=canary,
typecallable=typecallable, useobject=True)
pass
canary = Canary()
- attributes.register_class(Foo)
+ instrumentation.register_class(Foo)
attributes.register_attribute(Foo, 'attr', uselist=True, extension=canary,
typecallable=typecallable, useobject=True)
pass
canary = Canary()
- attributes.register_class(Foo)
+ instrumentation.register_class(Foo)
attributes.register_attribute(Foo, 'attr', uselist=True, extension=canary,
typecallable=typecallable, useobject=True)
class Foo(object):
pass
canary = Canary()
- attributes.register_class(Foo)
+ instrumentation.register_class(Foo)
attributes.register_attribute(Foo, 'attr', uselist=True, extension=canary,
typecallable=Custom, useobject=True)
canary = Canary()
creator = self.entity_maker
- attributes.register_class(Foo)
+ instrumentation.register_class(Foo)
attributes.register_attribute(Foo, 'attr', uselist=True, extension=canary, useobject=True)
obj = Foo()
from sqlalchemy.test.testing import eq_, assert_raises, assert_raises_message
import pickle
from sqlalchemy import util
-import sqlalchemy.orm.attributes as attributes
+from sqlalchemy.orm import attributes, instrumentation
from sqlalchemy.orm.collections import collection
-from sqlalchemy.orm.attributes import set_attribute, get_attribute, del_attribute, is_instrumented
+from sqlalchemy.orm.attributes import set_attribute, get_attribute, del_attribute
+from sqlalchemy.orm.instrumentation import is_instrumented
from sqlalchemy.orm import clear_mappers
from sqlalchemy.orm import InstrumentationManager
from sqlalchemy.test import *
@classmethod
def teardown_class(cls):
clear_mappers()
- attributes._install_lookup_strategy(util.symbol('native'))
+ instrumentation._install_lookup_strategy(util.symbol('native'))
def test_instance_dict(self):
class User(MyClass):
pass
- attributes.register_class(User)
+ instrumentation.register_class(User)
attributes.register_attribute(User, 'user_id', uselist = False, useobject=False)
attributes.register_attribute(User, 'user_name', uselist = False, useobject=False)
attributes.register_attribute(User, 'email_address', uselist = False, useobject=False)
class User(base):
pass
- attributes.register_class(User)
+ instrumentation.register_class(User)
attributes.register_attribute(User, 'user_id', uselist = False, useobject=False)
attributes.register_attribute(User, 'user_name', uselist = False, useobject=False)
attributes.register_attribute(User, 'email_address', uselist = False, useobject=False)
state.dict[k] = data[k]
return attributes.ATTR_WAS_SET
- attributes.register_class(Foo)
- manager = attributes.manager_of_class(Foo)
+ manager = instrumentation.register_class(Foo)
manager.deferred_scalar_loader = loader
attributes.register_attribute(Foo, 'a', uselist=False, useobject=False)
attributes.register_attribute(Foo, 'b', uselist=False, useobject=False)
- assert Foo in attributes.instrumentation_registry._state_finders
+ assert Foo in instrumentation.instrumentation_registry._state_finders
f = Foo()
attributes.instance_state(f).expire_attributes(attributes.instance_dict(f), None)
eq_(f.a, "this is a")
class Foo(base):pass
class Bar(Foo):pass
- attributes.register_class(Foo)
- attributes.register_class(Bar)
+ instrumentation.register_class(Foo)
+ instrumentation.register_class(Bar)
def func1(**kw):
print "func1"
class Post(base):pass
class Blog(base):pass
- attributes.register_class(Post)
- attributes.register_class(Blog)
+ instrumentation.register_class(Post)
+ instrumentation.register_class(Blog)
attributes.register_attribute(Post, 'blog', uselist=False, extension=attributes.GenericBackrefExtension('posts'), trackparent=True, useobject=True)
attributes.register_attribute(Blog, 'posts', uselist=True, extension=attributes.GenericBackrefExtension('blog'), trackparent=True, useobject=True)
b = Blog()
class Bar(base):
pass
- attributes.register_class(Foo)
- attributes.register_class(Bar)
+ instrumentation.register_class(Foo)
+ instrumentation.register_class(Bar)
attributes.register_attribute(Foo, "name", uselist=False, useobject=False)
attributes.register_attribute(Foo, "bars", uselist=True, trackparent=True, useobject=True)
attributes.register_attribute(Bar, "name", uselist=False, useobject=False)
def test_null_instrumentation(self):
class Foo(MyBaseClass):
pass
- attributes.register_class(Foo)
+ instrumentation.register_class(Foo)
attributes.register_attribute(Foo, "name", uselist=False, useobject=False)
attributes.register_attribute(Foo, "bars", uselist=True, trackparent=True, useobject=True)
class Unknown(object): pass
class Known(MyBaseClass): pass
- attributes.register_class(Known)
+ instrumentation.register_class(Known)
k, u = Known(), Unknown()
- assert attributes.manager_of_class(Unknown) is None
- assert attributes.manager_of_class(Known) is not None
- assert attributes.manager_of_class(None) is None
+ assert instrumentation.manager_of_class(Unknown) is None
+ assert instrumentation.manager_of_class(Known) is not None
+ assert instrumentation.manager_of_class(None) is None
assert attributes.instance_state(k) is not None
assert_raises((AttributeError, KeyError),
from sqlalchemy import MetaData, Integer, ForeignKey, util, event
from sqlalchemy.test.schema import Table
from sqlalchemy.test.schema import Column
-from sqlalchemy.orm import mapper, relationship, create_session, attributes, class_mapper, clear_mappers
+from sqlalchemy.orm import mapper, relationship, create_session, \
+ attributes, class_mapper, clear_mappers, instrumentation
from sqlalchemy.test.testing import eq_, ne_
from sqlalchemy.util import function_named
from test.orm import _base
def modifies_instrumentation_finders(fn):
def decorated(*args, **kw):
- pristine = attributes.instrumentation_finders[:]
+ pristine = instrumentation.instrumentation_finders[:]
try:
fn(*args, **kw)
finally:
- del attributes.instrumentation_finders[:]
- attributes.instrumentation_finders.extend(pristine)
+ del instrumentation.instrumentation_finders[:]
+ instrumentation.instrumentation_finders.extend(pristine)
return function_named(decorated, fn.func_name)
def with_lookup_strategy(strategy):
def decorate(fn):
def wrapped(*args, **kw):
try:
- attributes._install_lookup_strategy(strategy)
+ instrumentation._install_lookup_strategy(strategy)
return fn(*args, **kw)
finally:
- attributes._install_lookup_strategy(sa.util.symbol('native'))
+ instrumentation._install_lookup_strategy(sa.util.symbol('native'))
return function_named(wrapped, fn.func_name)
return decorate
def register(self, cls, canary):
original_init = cls.__init__
- attributes.register_class(cls)
+ instrumentation.register_class(cls)
ne_(cls.__init__, original_init)
- manager = attributes.manager_of_class(cls)
+ manager = instrumentation.manager_of_class(cls)
def on_init(state, instance, args, kwargs):
canary.append((cls, 'on_init', type(instance)))
event.listen(on_init, 'on_init', manager)
self_.a = a
self_.b = b
self_.c = c
- attributes.register_class(X)
+ instrumentation.register_class(X)
o = X('foo')
eq_(o.a, 'foo')
self_.u = u
self_.o = o
- attributes.register_class(Y)
+ instrumentation.register_class(Y)
o = Y()
assert o.u is Y.unique
class InstrumentationCollisionTest(_base.ORMTest):
def test_none(self):
class A(object): pass
- attributes.register_class(A)
+ instrumentation.register_class(A)
- mgr_factory = lambda cls: attributes.ClassManager(cls)
+ mgr_factory = lambda cls: instrumentation.ClassManager(cls)
class B(object):
__sa_instrumentation_manager__ = staticmethod(mgr_factory)
- attributes.register_class(B)
+ instrumentation.register_class(B)
class C(object):
- __sa_instrumentation_manager__ = attributes.ClassManager
- attributes.register_class(C)
+ __sa_instrumentation_manager__ = instrumentation.ClassManager
+ instrumentation.register_class(C)
def test_single_down(self):
class A(object): pass
- attributes.register_class(A)
+ instrumentation.register_class(A)
- mgr_factory = lambda cls: attributes.ClassManager(cls)
+ mgr_factory = lambda cls: instrumentation.ClassManager(cls)
class B(A):
__sa_instrumentation_manager__ = staticmethod(mgr_factory)
- assert_raises_message(TypeError, "multiple instrumentation implementations", attributes.register_class, B)
+ assert_raises_message(TypeError, "multiple instrumentation implementations", instrumentation.register_class, B)
def test_single_up(self):
class A(object): pass
# delay registration
- mgr_factory = lambda cls: attributes.ClassManager(cls)
+ mgr_factory = lambda cls: instrumentation.ClassManager(cls)
class B(A):
__sa_instrumentation_manager__ = staticmethod(mgr_factory)
- attributes.register_class(B)
+ instrumentation.register_class(B)
- assert_raises_message(TypeError, "multiple instrumentation implementations", attributes.register_class, A)
+ assert_raises_message(TypeError, "multiple instrumentation implementations", instrumentation.register_class, A)
def test_diamond_b1(self):
- mgr_factory = lambda cls: attributes.ClassManager(cls)
+ mgr_factory = lambda cls: instrumentation.ClassManager(cls)
class A(object): pass
class B1(A): pass
__sa_instrumentation_manager__ = staticmethod(mgr_factory)
class C(object): pass
- assert_raises_message(TypeError, "multiple instrumentation implementations", attributes.register_class, B1)
+ assert_raises_message(TypeError, "multiple instrumentation implementations", instrumentation.register_class, B1)
def test_diamond_b2(self):
- mgr_factory = lambda cls: attributes.ClassManager(cls)
+ mgr_factory = lambda cls: instrumentation.ClassManager(cls)
class A(object): pass
class B1(A): pass
__sa_instrumentation_manager__ = staticmethod(mgr_factory)
class C(object): pass
- attributes.register_class(B2)
- assert_raises_message(TypeError, "multiple instrumentation implementations", attributes.register_class, B1)
+ instrumentation.register_class(B2)
+ assert_raises_message(TypeError, "multiple instrumentation implementations", instrumentation.register_class, B1)
def test_diamond_c_b(self):
- mgr_factory = lambda cls: attributes.ClassManager(cls)
+ mgr_factory = lambda cls: instrumentation.ClassManager(cls)
class A(object): pass
class B1(A): pass
__sa_instrumentation_manager__ = staticmethod(mgr_factory)
class C(object): pass
- attributes.register_class(C)
+ instrumentation.register_class(C)
- assert_raises_message(TypeError, "multiple instrumentation implementations", attributes.register_class, B1)
+ assert_raises_message(TypeError, "multiple instrumentation implementations", instrumentation.register_class, B1)
class OnLoadTest(_base.ORMTest):
"""Check that Events.on_load is not hit in regular attributes operations."""
def canary(instance): assert False
try:
- attributes.register_class(A)
- manager = attributes.manager_of_class(A)
+ instrumentation.register_class(A)
+ manager = instrumentation.manager_of_class(A)
event.listen(canary, 'on_load', manager)
a = A()
@classmethod
def teardown_class(cls):
clear_mappers()
- attributes._install_lookup_strategy(util.symbol('native'))
+ instrumentation._install_lookup_strategy(util.symbol('native'))
class ExtendedEventsTest(_base.ORMTest):
@modifies_instrumentation_finders
def test_subclassed(self):
- class MyEvents(attributes.ClassManager.events):
+ class MyEvents(instrumentation.ClassEvents):
pass
- class MyClassManager(attributes.ClassManager):
- events = event.dispatcher(MyEvents)
+ class MyClassManager(instrumentation.ClassManager):
+ dispatch = event.dispatcher(MyEvents)
- attributes.instrumentation_finders.insert(0, lambda cls: MyClassManager)
+ instrumentation.instrumentation_finders.insert(0, lambda cls: MyClassManager)
class A(object): pass
- attributes.register_class(A)
- manager = attributes.manager_of_class(A)
- assert isinstance(manager.events, MyEvents)
-
+ instrumentation.register_class(A)
+ manager = instrumentation.manager_of_class(A)
+ assert issubclass(manager.dispatch.parent_cls.__dict__['dispatch'].events, MyEvents)
class NativeInstrumentationTest(_base.ORMTest):
def test_register_reserved_attribute(self):
class T(object): pass
- attributes.register_class(T)
- manager = attributes.manager_of_class(T)
+ instrumentation.register_class(T)
+ manager = instrumentation.manager_of_class(T)
- sa = attributes.ClassManager.STATE_ATTR
- ma = attributes.ClassManager.MANAGER_ATTR
+ sa = instrumentation.ClassManager.STATE_ATTR
+ ma = instrumentation.ClassManager.MANAGER_ATTR
fails = lambda method, attr: assert_raises(
KeyError, getattr(manager, method), attr, property())
def test_mapped_stateattr(self):
t = Table('t', MetaData(),
Column('id', Integer, primary_key=True),
- Column(attributes.ClassManager.STATE_ATTR, Integer))
+ Column(instrumentation.ClassManager.STATE_ATTR, Integer))
class T(object): pass
def test_mapped_managerattr(self):
t = Table('t', MetaData(),
Column('id', Integer, primary_key=True),
- Column(attributes.ClassManager.MANAGER_ATTR, Integer))
+ Column(instrumentation.ClassManager.MANAGER_ATTR, Integer))
class T(object): pass
assert_raises(KeyError, mapper, T, t)
def test_uninstrument(self):
class A(object): pass
- manager = attributes.register_class(A)
+ manager = instrumentation.register_class(A)
- assert attributes.manager_of_class(A) is manager
- attributes.unregister_class(A)
- assert attributes.manager_of_class(A) is None
+ assert instrumentation.manager_of_class(A) is manager
+ instrumentation.unregister_class(A)
+ assert instrumentation.manager_of_class(A) is None
def test_compileonattr_rel_backref_a(self):
m = MetaData()
def test_standard(self):
class A(object): pass
- attributes.register_class(A)
+ instrumentation.register_class(A)
- eq_(type(attributes.manager_of_class(A)), attributes.ClassManager)
+ eq_(type(instrumentation.manager_of_class(A)), instrumentation.ClassManager)
def test_nativeext_interfaceexact(self):
class A(object):
__sa_instrumentation_manager__ = sa.orm.interfaces.InstrumentationManager
- attributes.register_class(A)
- ne_(type(attributes.manager_of_class(A)), attributes.ClassManager)
+ instrumentation.register_class(A)
+ ne_(type(instrumentation.manager_of_class(A)), instrumentation.ClassManager)
def test_nativeext_submanager(self):
- class Mine(attributes.ClassManager): pass
+ class Mine(instrumentation.ClassManager): pass
class A(object):
__sa_instrumentation_manager__ = Mine
- attributes.register_class(A)
- eq_(type(attributes.manager_of_class(A)), Mine)
+ instrumentation.register_class(A)
+ eq_(type(instrumentation.manager_of_class(A)), Mine)
@modifies_instrumentation_finders
def test_customfinder_greedy(self):
- class Mine(attributes.ClassManager): pass
+ class Mine(instrumentation.ClassManager): pass
class A(object): pass
def find(cls):
return Mine
- attributes.instrumentation_finders.insert(0, find)
- attributes.register_class(A)
- eq_(type(attributes.manager_of_class(A)), Mine)
+ instrumentation.instrumentation_finders.insert(0, find)
+ instrumentation.register_class(A)
+ eq_(type(instrumentation.manager_of_class(A)), Mine)
@modifies_instrumentation_finders
def test_customfinder_pass(self):
def find(cls):
return None
- attributes.instrumentation_finders.insert(0, find)
- attributes.register_class(A)
- eq_(type(attributes.manager_of_class(A)), attributes.ClassManager)
+ instrumentation.instrumentation_finders.insert(0, find)
+ instrumentation.register_class(A)
+ eq_(type(instrumentation.manager_of_class(A)), instrumentation.ClassManager)
@testing.resolve_artifact_names
def test_direct_stateish(self):
- for reserved in (sa.orm.attributes.ClassManager.STATE_ATTR,
- sa.orm.attributes.ClassManager.MANAGER_ATTR):
+ for reserved in (sa.orm.instrumentation.ClassManager.STATE_ATTR,
+ sa.orm.instrumentation.ClassManager.MANAGER_ATTR):
t = Table('t', sa.MetaData(),
Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
Column(reserved, Integer))
@testing.resolve_artifact_names
def test_indirect_stateish(self):
- for reserved in (sa.orm.attributes.ClassManager.STATE_ATTR,
- sa.orm.attributes.ClassManager.MANAGER_ATTR):
+ for reserved in (sa.orm.instrumentation.ClassManager.STATE_ATTR,
+ sa.orm.instrumentation.ClassManager.MANAGER_ATTR):
class M(object):
pass
from sqlalchemy.test.testing import eq_, ne_
from test.orm import _base, _fixtures
from sqlalchemy.test.schema import Table, Column
+from sqlalchemy import event
class MergeTest(_fixtures.FixtureTest):
"""Session.merge() functionality"""
canary.called += 1
canary.called = 0
- manager = sa.orm.attributes.manager_of_class(cls)
- manager.events.listen(canary, 'on_load', manager)
+ event.listen(canary, 'on_load', cls)
return canary