]> git.ipfire.org Git - thirdparty/sqlalchemy/sqlalchemy.git/commitdiff
- removes the "on_" prefix.
authorMike Bayer <mike_mp@zzzcomputing.com>
Thu, 30 Dec 2010 19:22:43 +0000 (14:22 -0500)
committerMike Bayer <mike_mp@zzzcomputing.com>
Thu, 30 Dec 2010 19:22:43 +0000 (14:22 -0500)
42 files changed:
CHANGES
doc/build/core/event.rst
doc/build/core/schema.rst
doc/build/core/types.rst
doc/build/orm/mapper_config.rst
examples/custom_attributes/listen_for_events.py
lib/sqlalchemy/engine/base.py
lib/sqlalchemy/engine/ddl.py
lib/sqlalchemy/engine/strategies.py
lib/sqlalchemy/events.py
lib/sqlalchemy/ext/mutable.py
lib/sqlalchemy/interfaces.py
lib/sqlalchemy/orm/attributes.py
lib/sqlalchemy/orm/collections.py
lib/sqlalchemy/orm/deprecated_interfaces.py
lib/sqlalchemy/orm/descriptor_props.py
lib/sqlalchemy/orm/dynamic.py
lib/sqlalchemy/orm/events.py
lib/sqlalchemy/orm/instrumentation.py
lib/sqlalchemy/orm/mapper.py
lib/sqlalchemy/orm/query.py
lib/sqlalchemy/orm/session.py
lib/sqlalchemy/orm/state.py
lib/sqlalchemy/orm/strategies.py
lib/sqlalchemy/orm/unitofwork.py
lib/sqlalchemy/orm/util.py
lib/sqlalchemy/pool.py
lib/sqlalchemy/schema.py
test/base/test_events.py
test/dialect/test_mysql.py
test/engine/test_ddlevents.py
test/engine/test_execute.py
test/engine/test_pool.py
test/ext/test_mutable.py
test/lib/engines.py
test/orm/test_attributes.py
test/orm/test_defaults.py
test/orm/test_instrumentation.py
test/orm/test_mapper.py
test/orm/test_merge.py
test/orm/test_session.py
test/orm/test_unitofwork.py

diff --git a/CHANGES b/CHANGES
index 0878133e7df6177607c4c40dbd476991b6287fe3..2e242f78d8fb665cc0367ad3d1b3ce8aca33ac49 100644 (file)
--- a/CHANGES
+++ b/CHANGES
@@ -1825,7 +1825,7 @@ CHANGES
     hex identifier string.  [ticket:1555]
     
   - The visit_pool() method of Dialect is removed, and replaced with
-    on_connect().  This method returns a callable which receives
+    connect().  This method returns a callable which receives
     the raw DBAPI connection after each one is created.   The callable
     is assembled into a first_connect/connect pool listener by the 
     connection strategy if non-None.   Provides a simpler interface 
@@ -4678,8 +4678,8 @@ CHANGES
       joined-table inheritance subclasses, using explicit join
       criteria (i.e. not on a relation).
 
-    - @orm.attributes.on_reconstitute and
-      MapperExtension.on_reconstitute have been renamed to
+    - @orm.attributes.reconstitute and
+      MapperExtension.reconstitute have been renamed to
       @orm.reconstructor and MapperExtension.reconstruct_instance
 
     - Fixed @reconstructor hook for subclasses which inherit from a
index 5ff8573eaf0570a7baf1695765167201daa557f4..f1a428d8ca3a10c5f8ecac1745a3b098e39c4a2b 100644 (file)
@@ -19,7 +19,7 @@ instructions regarding secondary event targets based on the given target.
 
 The name of an event and the argument signature of a corresponding listener function is derived from 
 a class bound specification method, which exists bound to a marker class that's described in the documentation.
-For example, the documentation for :meth:`.PoolEvents.on_connect` indicates that the event name is ``"on_connect"``
+For example, the documentation for :meth:`.PoolEvents.connect` indicates that the event name is ``"connect"``
 and that a user-defined listener function should receive two positional arguments::
 
     from sqlalchemy.event import listen
@@ -28,14 +28,14 @@ and that a user-defined listener function should receive two positional argument
     def my_on_connect(dbapi_con, connection_record):
         print "New DBAPI connection:", dbapi_con
         
-    listen(Pool, 'on_connect', my_on_connect)
+    listen(Pool, 'connect', my_on_connect)
 
 Targets
 -------
 
 The :func:`.listen` function is very flexible regarding targets.  It generally accepts classes, instances of those
 classes, and related classes or objects from which the appropriate target can be derived.  For example,
-the above mentioned ``"on_connect"`` event accepts :class:`.Engine` classes and objects as well as :class:`.Pool`
+the above mentioned ``"connect"`` event accepts :class:`.Engine` classes and objects as well as :class:`.Pool`
 classes and objects::
 
     from sqlalchemy.event import listen
@@ -51,17 +51,17 @@ classes and objects::
     my_engine = create_engine('postgresql://ed@localhost/test')
     
     # associate listener with all instances of Pool
-    listen(Pool, 'on_connect', my_on_connect)
+    listen(Pool, 'connect', my_on_connect)
 
     # associate listener with all instances of Pool
     # via the Engine class
-    listen(Engine, 'on_connect', my_on_connect)
+    listen(Engine, 'connect', my_on_connect)
 
     # associate listener with my_pool
-    listen(my_pool, 'on_connect', my_on_connect)
+    listen(my_pool, 'connect', my_on_connect)
 
     # associate listener with my_engine.pool
-    listen(my_engine, 'on_connect', my_on_connect)
+    listen(my_engine, 'connect', my_on_connect)
 
 Modifiers
 ----------
@@ -78,7 +78,7 @@ which modifies the subsequent handling.   By default, no listener ever requires
         
     # setup listener on UserContact.phone attribute, instructing
     # it to use the return value
-    listen(UserContact.phone, 'on_set', validate_phone, retval=True)
+    listen(UserContact.phone, 'set', validate_phone, retval=True)
 
 Event Reference
 ----------------
index 146514724aa50f85bbafc86bf225a6cd1faeac7e..c1f99feeb86c5921d754dbd6715f359935035353 100644 (file)
@@ -1222,12 +1222,12 @@ constraint will be added via ALTER:
     
     event.listen(
         users,
-        "on_after_create", 
+        "after_create", 
         AddConstraint(constraint)
     )
     event.listen(
         users,
-        "on_before_drop",
+        "before_drop",
         DropConstraint(constraint)
     )
 
@@ -1258,12 +1258,12 @@ Postgresql and not other databases, we could limit its usage to just that dialec
 
     event.listen(
         users,
-        'on_after_create',
+        'after_create',
         AddConstraint(constraint).execute_if(dialect='postgresql')
     )
     event.listen(
         users,
-        'on_before_drop',
+        'before_drop',
         DropConstraint(constraint).execute_if(dialect='postgresql')
     )
 
@@ -1271,12 +1271,12 @@ Or to any set of dialects::
     
     event.listen(
         users,
-        "on_after_create",
+        "after_create",
         AddConstraint(constraint).execute_if(dialect=('postgresql', 'mysql'))
     )
     event.listen(
         users,
-        "on_before_drop",
+        "before_drop",
         DropConstraint(constraint).execute_if(dialect=('postgresql', 'mysql'))
     )
 
@@ -1300,12 +1300,12 @@ that check for the presence of our named constraint:
 
     event.listen(
         users,
-        "on_after_create",
+        "after_create",
         AddConstraint(constraint).execute_if(callable_=should_create)
     )
     event.listen(
         users,
-        "on_before_drop",
+        "before_drop",
         DropConstraint(constraint).execute_if(callable_=should_drop)
     )
 
@@ -1335,7 +1335,7 @@ other DDL elements except it accepts a string which is the text to be emitted:
     
     event.listen(
         metadata,
-        "on_after_create",
+        "after_create",
         DDL("ALTER TABLE users ADD CONSTRAINT "
             "cst_user_name_length "
             " CHECK (length(user_name) >= 8)")
index b7121b5e102c4fe1eb22a94b25b8562ef8deed6c..47bbd87d79ccf616ef71b81262fc56afa1f29a7d 100644 (file)
@@ -395,13 +395,13 @@ described in :ref:`mutable_toplevel`::
             """Detect dictionary set events and emit change events."""
             
             dict.__setitem__(self, key, value)
-            self.on_change()
+            self.change()
 
         def __delitem__(self, key):
             """Detect dictionary del events and emit change events."""
 
             dict.__delitem__(self, key)
-            self.on_change()
+            self.change()
         
         # additional dict methods would be overridden here
 
index 6e0023ae410cd78ec70c67f84e209d7ef63b1515..4c57b8c457615079a4305bb4519c9d1a4073dd39 100644 (file)
@@ -82,7 +82,7 @@ collections (new feature as of 0.6.4)::
 
 It should be noted that insert and update defaults configured on individal
 :class:`.Column` objects, such as those configured by the "default",
-"on_update", "server_default" and "server_onupdate" arguments, will continue
+"update", "server_default" and "server_onupdate" arguments, will continue
 to function normally even if those :class:`.Column` objects are not mapped.
 This functionality is part of the SQL expression and execution system and
 occurs below the level of the ORM.
index 0cf014c82e524035266aa0f2dd82d3dbc5bc3d63..2899f7fb100472b912e02e179a3763a5dc906c7c 100644 (file)
@@ -16,9 +16,9 @@ def configure_listener(class_, key, inst):
     def set_(instance, value, oldvalue, initiator):
         instance.receive_change_event("set", key, value, oldvalue)
 
-    event.listen(inst, 'on_append', append)
-    event.listen(inst, 'on_remove', remove)
-    event.listen(inst, 'on_set', set_)
+    event.listen(inst, 'append', append)
+    event.listen(inst, 'remove', remove)
+    event.listen(inst, 'set', set_)
 
 
 if __name__ == '__main__':
@@ -38,7 +38,7 @@ if __name__ == '__main__':
             
     Base = declarative_base(cls=Base)
 
-    event.listen(Base, 'on_attribute_instrument', configure_listener)
+    event.listen(Base, 'attribute_instrument', configure_listener)
 
     class MyMappedClass(Base):
         __tablename__ = "mytable"
index c1f9905b675f649df6325a0778120447cfb41441..e39e2b1756d50a90a2177f8e7ccf6ed83f0e3f83 100644 (file)
@@ -507,7 +507,7 @@ class Dialect(object):
 
         raise NotImplementedError()
 
-    def on_connect(self):
+    def connect(self):
         """return a callable which sets up a newly created DBAPI connection.
 
         The callable accepts a single argument "conn" which is the 
@@ -1967,14 +1967,14 @@ def _listener_connection_cls(cls, dispatch):
     """
     class EventListenerConnection(cls):
         def execute(self, clauseelement, *multiparams, **params):
-            for fn in dispatch.on_before_execute:
+            for fn in dispatch.before_execute:
                 clauseelement, multiparams, params = \
                     fn(self, clauseelement, multiparams, params)
             
             ret = super(EventListenerConnection, self).\
                     execute(clauseelement, *multiparams, **params)
 
-            for fn in dispatch.on_after_execute:
+            for fn in dispatch.after_execute:
                 fn(self, clauseelement, multiparams, params, ret)
             
             return ret
@@ -1987,7 +1987,7 @@ def _listener_connection_cls(cls, dispatch):
 
         def _before_cursor_execute(self, context, cursor, 
                                             statement, parameters):
-            for fn in dispatch.on_before_cursor_execute:
+            for fn in dispatch.before_cursor_execute:
                 statement, parameters = \
                             fn(self, cursor, statement, parameters, 
                                         context, context.executemany)
@@ -1995,59 +1995,59 @@ def _listener_connection_cls(cls, dispatch):
         
         def _after_cursor_execute(self, context, cursor, 
                                             statement, parameters):
-            dispatch.on_after_cursor_execute(self, cursor, 
+            dispatch.after_cursor_execute(self, cursor, 
                                                 statement, 
                                                 parameters, 
                                                 context, 
                                                 context.executemany)
             
         def _begin_impl(self):
-            dispatch.on_begin(self)
+            dispatch.begin(self)
             return super(EventListenerConnection, self).\
                         _begin_impl()
             
         def _rollback_impl(self):
-            dispatch.on_rollback(self)
+            dispatch.rollback(self)
             return super(EventListenerConnection, self).\
                         _rollback_impl()
 
         def _commit_impl(self):
-            dispatch.on_commit(self)
+            dispatch.commit(self)
             return super(EventListenerConnection, self).\
                         _commit_impl()
 
         def _savepoint_impl(self, name=None):
-            dispatch.on_savepoint(self, name)
+            dispatch.savepoint(self, name)
             return super(EventListenerConnection, self).\
                         _savepoint_impl(name=name)
                 
         def _rollback_to_savepoint_impl(self, name, context):
-            dispatch.on_rollback_savepoint(self, name, context)
+            dispatch.rollback_savepoint(self, name, context)
             return super(EventListenerConnection, self).\
                         _rollback_to_savepoint_impl(name, context)
             
         def _release_savepoint_impl(self, name, context):
-            dispatch.on_release_savepoint(self, name, context)
+            dispatch.release_savepoint(self, name, context)
             return super(EventListenerConnection, self).\
                         _release_savepoint_impl(name, context)
             
         def _begin_twophase_impl(self, xid):
-            dispatch.on_begin_twophase(self, xid)
+            dispatch.begin_twophase(self, xid)
             return super(EventListenerConnection, self).\
                         _begin_twophase_impl(xid)
 
         def _prepare_twophase_impl(self, xid):
-            dispatch.on_prepare_twophase(self, xid)
+            dispatch.prepare_twophase(self, xid)
             return super(EventListenerConnection, self).\
                         _prepare_twophase_impl(xid)
 
         def _rollback_twophase_impl(self, xid, is_prepared):
-            dispatch.on_rollback_twophase(self, xid)
+            dispatch.rollback_twophase(self, xid)
             return super(EventListenerConnection, self).\
                         _rollback_twophase_impl(xid, is_prepared)
 
         def _commit_twophase_impl(self, xid, is_prepared):
-            dispatch.on_commit_twophase(self, xid, is_prepared)
+            dispatch.commit_twophase(self, xid, is_prepared)
             return super(EventListenerConnection, self).\
                         _commit_twophase_impl(xid, is_prepared)
 
index 0e165b8f599a72d7d1ab0fbec4f345b4796ca8ec..76af06529f38513c385f1f68e34caf9d0425c7b2 100644 (file)
@@ -35,20 +35,20 @@ class SchemaGenerator(DDLBase):
             tables = metadata.tables.values()
         collection = [t for t in sql_util.sort_tables(tables) if self._can_create(t)]
         
-        metadata.dispatch.on_before_create(metadata, self.connection,
+        metadata.dispatch.before_create(metadata, self.connection,
                                     tables=collection)
         
         for table in collection:
             self.traverse_single(table, create_ok=True)
             
-        metadata.dispatch.on_after_create(metadata, self.connection,
+        metadata.dispatch.after_create(metadata, self.connection,
                                     tables=collection)
 
     def visit_table(self, table, create_ok=False):
         if not create_ok and not self._can_create(table):
             return
         
-        table.dispatch.on_before_create(table, self.connection)
+        table.dispatch.before_create(table, self.connection)
 
         for column in table.columns:
             if column.default is not None:
@@ -60,7 +60,7 @@ class SchemaGenerator(DDLBase):
             for index in table.indexes:
                 self.traverse_single(index)
 
-        table.dispatch.on_after_create(table, self.connection)
+        table.dispatch.after_create(table, self.connection)
 
     def visit_sequence(self, sequence):
         if self.dialect.supports_sequences:
@@ -89,13 +89,13 @@ class SchemaDropper(DDLBase):
             tables = metadata.tables.values()
         collection = [t for t in reversed(sql_util.sort_tables(tables)) if self._can_drop(t)]
         
-        metadata.dispatch.on_before_drop(metadata, self.connection,
+        metadata.dispatch.before_drop(metadata, self.connection,
                                             tables=collection)
         
         for table in collection:
             self.traverse_single(table, drop_ok=True)
 
-        metadata.dispatch.on_after_drop(metadata, self.connection,
+        metadata.dispatch.after_drop(metadata, self.connection,
                                             tables=collection)
 
     def _can_drop(self, table):
@@ -111,7 +111,7 @@ class SchemaDropper(DDLBase):
         if not drop_ok and not self._can_drop(table):
             return
 
-        table.dispatch.on_before_drop(table, self.connection)
+        table.dispatch.before_drop(table, self.connection)
 
         for column in table.columns:
             if column.default is not None:
@@ -119,7 +119,7 @@ class SchemaDropper(DDLBase):
 
         self.connection.execute(schema.DropTable(table))
         
-        table.dispatch.on_after_drop(table, self.connection)
+        table.dispatch.after_drop(table, self.connection)
 
     def visit_sequence(self, sequence):
         if self.dialect.supports_sequences:
index 5a81bd5f2389b8f9d0a05b682b6d3a97dcf7c6c4..1191006c4bd3bea21fb779f342737538cbfc6daa 100644 (file)
@@ -137,13 +137,13 @@ class DefaultEngineStrategy(EngineStrategy):
                         return
                     do_on_connect(conn)
                 
-                event.listen(pool, 'on_first_connect', on_connect)
-                event.listen(pool, 'on_connect', on_connect)
+                event.listen(pool, 'first_connect', on_connect)
+                event.listen(pool, 'connect', on_connect)
                     
             def first_connect(dbapi_connection, connection_record):
                 c = base.Connection(engine, connection=dbapi_connection)
                 dialect.initialize(c)
-            event.listen(pool, 'on_first_connect', first_connect)
+            event.listen(pool, 'first_connect', first_connect)
 
         return engine
 
index 5801c94a77cfbe5d9311d54a39415fda56f0f33f..6785c79685e15d901452a872566583e09c651069 100644 (file)
@@ -17,11 +17,11 @@ class DDLEvents(event.Events):
         m = MetaData()
         some_table = Table('some_table', m, Column('data', Integer))
         
-        def on_after_create(target, connection, **kw):
+        def after_create(target, connection, **kw):
             connection.execute("ALTER TABLE %s SET name=foo_%s" % 
                                     (target.name, target.name))
                                     
-        event.listen(some_table, "on_after_create", on_after_create)
+        event.listen(some_table, "after_create", after_create)
     
     DDL events integrate closely with the 
     :class:`.DDL` class and the :class:`.DDLElement` hierarchy
@@ -31,7 +31,7 @@ class DDLEvents(event.Events):
         from sqlalchemy import DDL
         event.listen(
             some_table,
-            "on_after_create",
+            "after_create",
             DDL("ALTER TABLE %(table)s SET name=foo_%(table)s")
         )
     
@@ -51,7 +51,7 @@ class DDLEvents(event.Events):
     
     """
     
-    def on_before_create(self, target, connection, **kw):
+    def before_create(self, target, connection, **kw):
         """Called before CREATE statments are emitted.
         
         :param target: the :class:`.MetaData` or :class:`.Table`
@@ -66,7 +66,7 @@ class DDLEvents(event.Events):
            
         """
 
-    def on_after_create(self, target, connection, **kw):
+    def after_create(self, target, connection, **kw):
         """Called after CREATE statments are emitted.
         
         :param target: the :class:`.MetaData` or :class:`.Table`
@@ -81,7 +81,7 @@ class DDLEvents(event.Events):
            
         """
 
-    def on_before_drop(self, target, connection, **kw):
+    def before_drop(self, target, connection, **kw):
         """Called before DROP statments are emitted.
         
         :param target: the :class:`.MetaData` or :class:`.Table`
@@ -96,7 +96,7 @@ class DDLEvents(event.Events):
            
         """
     
-    def on_after_drop(self, target, connection, **kw):
+    def after_drop(self, target, connection, **kw):
         """Called after DROP statments are emitted.
         
         :param target: the :class:`.MetaData` or :class:`.Table`
@@ -126,7 +126,7 @@ class PoolEvents(event.Events):
         def my_on_checkout(dbapi_conn, connection_rec, connection_proxy):
             "handle an on checkout event"
             
-        events.listen(Pool, 'on_checkout', my_on_checkout)
+        events.listen(Pool, 'checkout', my_on_checkout)
 
     In addition to accepting the :class:`.Pool` class and :class:`.Pool` instances,
     :class:`.PoolEvents` also accepts :class:`.Engine` objects and
@@ -137,7 +137,7 @@ class PoolEvents(event.Events):
         engine = create_engine("postgresql://scott:tiger@localhost/test")
         
         # will associate with engine.pool
-        events.listen(engine, 'on_checkout', my_on_checkout)
+        events.listen(engine, 'checkout', my_on_checkout)
 
     """
     
@@ -156,7 +156,7 @@ class PoolEvents(event.Events):
         else:
             return target
     
-    def on_connect(self, dbapi_connection, connection_record):
+    def connect(self, dbapi_connection, connection_record):
         """Called once for each new DB-API connection or Pool's ``creator()``.
 
         :param dbapi_con:
@@ -168,7 +168,7 @@ class PoolEvents(event.Events):
 
         """
 
-    def on_first_connect(self, dbapi_connection, connection_record):
+    def first_connect(self, dbapi_connection, connection_record):
         """Called exactly once for the first DB-API connection.
 
         :param dbapi_con:
@@ -180,7 +180,7 @@ class PoolEvents(event.Events):
 
         """
 
-    def on_checkout(self, dbapi_connection, connection_record, connection_proxy):
+    def checkout(self, dbapi_connection, connection_record, connection_proxy):
         """Called when a connection is retrieved from the Pool.
 
         :param dbapi_con:
@@ -199,7 +199,7 @@ class PoolEvents(event.Events):
         using the new connection.
         """
 
-    def on_checkin(self, dbapi_connection, connection_record):
+    def checkin(self, dbapi_connection, connection_record):
         """Called when a connection returns to the pool.
 
         Note that the connection may be closed, and may be None if the
@@ -223,16 +223,16 @@ class EngineEvents(event.Events):
     
         from sqlalchemy import event, create_engine
         
-        def on_before_execute(conn, clauseelement, multiparams, params):
+        def before_execute(conn, clauseelement, multiparams, params):
             log.info("Received statement: %s" % clauseelement)
         
         engine = create_engine('postgresql://scott:tiger@localhost/test')
-        event.listen(engine, "on_before_execute", on_before_execute)
+        event.listen(engine, "before_execute", before_execute)
     
     Some events allow modifiers to the listen() function.
     
-    :param retval=False: Applies to the :meth:`.on_before_execute` and 
-      :meth:`.on_before_cursor_execute` events only.  When True, the
+    :param retval=False: Applies to the :meth:`.before_execute` and 
+      :meth:`.before_cursor_execute` events only.  When True, the
       user-defined event function must have a return value, which
       is a tuple of parameters that replace the given statement 
       and parameters.  See those methods for a description of
@@ -250,13 +250,13 @@ class EngineEvents(event.Events):
                                         target.dispatch)
         
         if not retval:
-            if identifier == 'on_before_execute':
+            if identifier == 'before_execute':
                 orig_fn = fn
                 def wrap(conn, clauseelement, multiparams, params):
                     orig_fn(conn, clauseelement, multiparams, params)
                     return clauseelement, multiparams, params
                 fn = wrap
-            elif identifier == 'on_before_cursor_execute':
+            elif identifier == 'before_cursor_execute':
                 orig_fn = fn
                 def wrap(conn, cursor, statement, 
                         parameters, context, executemany):
@@ -265,55 +265,55 @@ class EngineEvents(event.Events):
                     return statement, parameters
                 fn = wrap
                     
-        elif retval and identifier not in ('on_before_execute', 'on_before_cursor_execute'):
+        elif retval and identifier not in ('before_execute', 'before_cursor_execute'):
             raise exc.ArgumentError(
-                    "Only the 'on_before_execute' and "
-                    "'on_before_cursor_execute' engine "
+                    "Only the 'before_execute' and "
+                    "'before_cursor_execute' engine "
                     "event listeners accept the 'retval=True' "
                     "argument.")
         event.Events._listen(target, identifier, fn)
 
-    def on_before_execute(self, conn, clauseelement, multiparams, params):
+    def before_execute(self, conn, clauseelement, multiparams, params):
         """Intercept high level execute() events."""
 
-    def on_after_execute(self, conn, clauseelement, multiparams, params, result):
+    def after_execute(self, conn, clauseelement, multiparams, params, result):
         """Intercept high level execute() events."""
         
-    def on_before_cursor_execute(self, conn, cursor, statement, 
+    def before_cursor_execute(self, conn, cursor, statement, 
                         parameters, context, executemany):
         """Intercept low-level cursor execute() events."""
 
-    def on_after_cursor_execute(self, conn, cursor, statement, 
+    def after_cursor_execute(self, conn, cursor, statement, 
                         parameters, context, executemany):
         """Intercept low-level cursor execute() events."""
 
-    def on_begin(self, conn):
+    def begin(self, conn):
         """Intercept begin() events."""
         
-    def on_rollback(self, conn):
+    def rollback(self, conn):
         """Intercept rollback() events."""
         
-    def on_commit(self, conn):
+    def commit(self, conn):
         """Intercept commit() events."""
         
-    def on_savepoint(self, conn, name=None):
+    def savepoint(self, conn, name=None):
         """Intercept savepoint() events."""
         
-    def on_rollback_savepoint(self, conn, name, context):
+    def rollback_savepoint(self, conn, name, context):
         """Intercept rollback_savepoint() events."""
         
-    def on_release_savepoint(self, conn, name, context):
+    def release_savepoint(self, conn, name, context):
         """Intercept release_savepoint() events."""
         
-    def on_begin_twophase(self, conn, xid):
+    def begin_twophase(self, conn, xid):
         """Intercept begin_twophase() events."""
         
-    def on_prepare_twophase(self, conn, xid):
+    def prepare_twophase(self, conn, xid):
         """Intercept prepare_twophase() events."""
         
-    def on_rollback_twophase(self, conn, xid, is_prepared):
+    def rollback_twophase(self, conn, xid, is_prepared):
         """Intercept rollback_twophase() events."""
         
-    def on_commit_twophase(self, conn, xid, is_prepared):
+    def commit_twophase(self, conn, xid, is_prepared):
         """Intercept commit_twophase() events."""
 
index 7dcbfd996a7cbd499af8291885f0aaac56cc66d9..f3bd91efb67f466203d84b7bdd56501e2869b873 100644 (file)
@@ -25,7 +25,7 @@ class Mutable(object):
         
         return weakref.WeakKeyDictionary()
         
-    def on_change(self):
+    def change(self):
         """Subclasses should call this method whenever change events occur."""
         
         for parent, key in self._parents.items():
@@ -51,7 +51,7 @@ class Mutable(object):
         key = attribute.key
         parent_cls = attribute.class_
         
-        def on_load(state):
+        def load(state):
             """Listen for objects loaded or refreshed.   
             
             Wrap the target data member's value with 
@@ -64,7 +64,7 @@ class Mutable(object):
                 state.dict[key] = val
                 val._parents[state.obj()] = key
 
-        def on_set(target, value, oldvalue, initiator):
+        def set(target, value, oldvalue, initiator):
             """Listen for set/replace events on the target
             data member.
             
@@ -81,9 +81,9 @@ class Mutable(object):
                 oldvalue._parents.pop(state.obj(), None)
             return value
         
-        event.listen(parent_cls, 'on_load', on_load, raw=True)
-        event.listen(parent_cls, 'on_refresh', on_load, raw=True)
-        event.listen(attribute, 'on_set', on_set, raw=True, retval=True)
+        event.listen(parent_cls, 'load', load, raw=True)
+        event.listen(parent_cls, 'refresh', load, raw=True)
+        event.listen(attribute, 'set', set, raw=True, retval=True)
 
         # TODO: need a deserialize hook here
 
@@ -109,7 +109,7 @@ class Mutable(object):
                         cls.associate_with_attribute(getattr(class_, prop.key))
                         break
                     
-        event.listen(mapper, 'on_mapper_configured', listen_for_type)
+        event.listen(mapper, 'mapper_configured', listen_for_type)
     
     @classmethod
     def as_mutable(cls, sqltype):
@@ -151,7 +151,7 @@ class Mutable(object):
                         cls.associate_with_attribute(getattr(class_, prop.key))
                         break
                 
-        event.listen(mapper, 'on_mapper_configured', listen_for_type)
+        event.listen(mapper, 'mapper_configured', listen_for_type)
         
         return sqltype
 
@@ -168,7 +168,7 @@ class MutableComposite(object):
     
     Composite classes, in addition to meeting the usage contract
     defined in :ref:`mapper_composite`, also define some system
-    of relaying change events to the given :meth:`.on_change` 
+    of relaying change events to the given :meth:`.change` 
     method, which will notify all parents of the change.  Below
     the special Python method ``__setattr__`` is used to intercept
     all changes::
@@ -180,7 +180,7 @@ class MutableComposite(object):
 
             def __setattr__(self, key, value):
                 object.__setattr__(self, key, value)
-                self.on_change()
+                self.change()
         
             def __composite_values__(self):
                 return self.x, self.y
@@ -210,7 +210,7 @@ class MutableComposite(object):
         
         return weakref.WeakKeyDictionary()
 
-    def on_change(self):
+    def change(self):
         """Subclasses should call this method whenever change events occur."""
         
         for parent, key in self._parents.items():
@@ -230,7 +230,7 @@ class MutableComposite(object):
         key = attribute.key
         parent_cls = attribute.class_
         
-        def on_load(state):
+        def load(state):
             """Listen for objects loaded or refreshed.   
             
             Wrap the target data member's value with 
@@ -242,7 +242,7 @@ class MutableComposite(object):
             if val is not None:
                 val._parents[state.obj()] = key
 
-        def on_set(target, value, oldvalue, initiator):
+        def set(target, value, oldvalue, initiator):
             """Listen for set/replace events on the target
             data member.
             
@@ -257,9 +257,9 @@ class MutableComposite(object):
                 oldvalue._parents.pop(state.obj(), None)
             return value
         
-        event.listen(parent_cls, 'on_load', on_load, raw=True)
-        event.listen(parent_cls, 'on_refresh', on_load, raw=True)
-        event.listen(attribute, 'on_set', on_set, raw=True, retval=True)
+        event.listen(parent_cls, 'load', load, raw=True)
+        event.listen(parent_cls, 'refresh', load, raw=True)
+        event.listen(attribute, 'set', set, raw=True, retval=True)
 
         # TODO: need a deserialize hook here
     
@@ -277,5 +277,5 @@ class MutableComposite(object):
                 if hasattr(prop, 'composite_class') and issubclass(prop.composite_class, cls):
                     cls._listen_on_attribute(getattr(class_, prop.key))
                     
-        event.listen(mapper, 'on_mapper_configured', listen_for_type)
+        event.listen(mapper, 'mapper_configured', listen_for_type)
 
index 26910a5e6b4a7aad30853984bc32fea32329d780..f30602a1e794f0067df297d4e7531428798c81a9 100644 (file)
@@ -78,13 +78,13 @@ class PoolListener(object):
         listener = util.as_interface(listener, methods=('connect',
                                 'first_connect', 'checkout', 'checkin'))
         if hasattr(listener, 'connect'):
-            event.listen(self, 'on_connect', listener.connect)
+            event.listen(self, 'connect', listener.connect)
         if hasattr(listener, 'first_connect'):
-            event.listen(self, 'on_first_connect', listener.first_connect)
+            event.listen(self, 'first_connect', listener.first_connect)
         if hasattr(listener, 'checkout'):
-            event.listen(self, 'on_checkout', listener.checkout)
+            event.listen(self, 'checkout', listener.checkout)
         if hasattr(listener, 'checkin'):
-            event.listen(self, 'on_checkin', listener.checkin)
+            event.listen(self, 'checkin', listener.checkin)
             
         
     def connect(self, dbapi_con, con_record):
@@ -187,7 +187,7 @@ class ConnectionProxy(object):
                                     clauseelement, *multiparams,
                                     **params)
 
-        event.listen(self, 'on_before_execute', adapt_execute)
+        event.listen(self, 'before_execute', adapt_execute)
 
         def adapt_cursor_execute(conn, cursor, statement, 
                                 parameters,context, executemany, ):
@@ -209,7 +209,7 @@ class ConnectionProxy(object):
                 executemany,
                 )
 
-        event.listen(self, 'on_before_cursor_execute', adapt_cursor_execute)
+        event.listen(self, 'before_cursor_execute', adapt_cursor_execute)
 
         def do_nothing_callback(*arg, **kw):
             pass
@@ -221,23 +221,23 @@ class ConnectionProxy(object):
 
             return util.update_wrapper(go, fn)
 
-        event.listen(self, 'on_begin', adapt_listener(listener.begin))
-        event.listen(self, 'on_rollback',
+        event.listen(self, 'begin', adapt_listener(listener.begin))
+        event.listen(self, 'rollback',
                      adapt_listener(listener.rollback))
-        event.listen(self, 'on_commit', adapt_listener(listener.commit))
-        event.listen(self, 'on_savepoint',
+        event.listen(self, 'commit', adapt_listener(listener.commit))
+        event.listen(self, 'savepoint',
                      adapt_listener(listener.savepoint))
-        event.listen(self, 'on_rollback_savepoint',
+        event.listen(self, 'rollback_savepoint',
                      adapt_listener(listener.rollback_savepoint))
-        event.listen(self, 'on_release_savepoint',
+        event.listen(self, 'release_savepoint',
                      adapt_listener(listener.release_savepoint))
-        event.listen(self, 'on_begin_twophase',
+        event.listen(self, 'begin_twophase',
                      adapt_listener(listener.begin_twophase))
-        event.listen(self, 'on_prepare_twophase',
+        event.listen(self, 'prepare_twophase',
                      adapt_listener(listener.prepare_twophase))
-        event.listen(self, 'on_rollback_twophase',
+        event.listen(self, 'rollback_twophase',
                      adapt_listener(listener.rollback_twophase))
-        event.listen(self, 'on_commit_twophase',
+        event.listen(self, 'commit_twophase',
                      adapt_listener(listener.commit_twophase))
         
         
index d32d4f1b17030ad3c2cc34acf90cd7db86ab843f..816a12168253bfad8cf62123e0f3b175916a3a7a 100644 (file)
@@ -447,7 +447,7 @@ class ScalarAttributeImpl(AttributeImpl):
         else:
             old = dict_.get(self.key, NO_VALUE)
 
-        if self.dispatch.on_remove:
+        if self.dispatch.remove:
             self.fire_remove_event(state, dict_, old, None)
         state.modified_event(dict_, self, old)
         del dict_[self.key]
@@ -465,19 +465,19 @@ class ScalarAttributeImpl(AttributeImpl):
         else:
             old = dict_.get(self.key, NO_VALUE)
 
-        if self.dispatch.on_set:
+        if self.dispatch.set:
             value = self.fire_replace_event(state, dict_, 
                                                 value, old, initiator)
         state.modified_event(dict_, self, old)
         dict_[self.key] = value
 
     def fire_replace_event(self, state, dict_, value, previous, initiator):
-        for fn in self.dispatch.on_set:
+        for fn in self.dispatch.set:
             value = fn(state, value, previous, initiator or self)
         return value
 
     def fire_remove_event(self, state, dict_, value, initiator):
-        for fn in self.dispatch.on_remove:
+        for fn in self.dispatch.remove:
             fn(state, value, initiator or self)
 
     @property
@@ -618,7 +618,7 @@ class ScalarObjectAttributeImpl(ScalarAttributeImpl):
         if self.trackparent and value is not None:
             self.sethasparent(instance_state(value), False)
         
-        for fn in self.dispatch.on_remove:
+        for fn in self.dispatch.remove:
             fn(state, value, initiator or self)
 
         state.modified_event(dict_, self, value)
@@ -630,7 +630,7 @@ class ScalarObjectAttributeImpl(ScalarAttributeImpl):
                 previous is not PASSIVE_NO_RESULT):
                 self.sethasparent(instance_state(previous), False)
         
-        for fn in self.dispatch.on_set:
+        for fn in self.dispatch.set:
             value = fn(state, value, previous, initiator or self)
 
         state.modified_event(dict_, self, previous)
@@ -709,7 +709,7 @@ class CollectionAttributeImpl(AttributeImpl):
 
         
     def fire_append_event(self, state, dict_, value, initiator):
-        for fn in self.dispatch.on_append:
+        for fn in self.dispatch.append:
             value = fn(state, value, initiator or self)
 
         state.modified_event(dict_, self, NEVER_SET, True)
@@ -726,7 +726,7 @@ class CollectionAttributeImpl(AttributeImpl):
         if self.trackparent and value is not None:
             self.sethasparent(instance_state(value), False)
 
-        for fn in self.dispatch.on_remove:
+        for fn in self.dispatch.remove:
             fn(state, value, initiator or self)
 
         state.modified_event(dict_, self, NEVER_SET, True)
@@ -927,11 +927,11 @@ def backref_listeners(attribute, key, uselist):
                                             passive=PASSIVE_NO_FETCH)
     
     if uselist:
-        event.listen(attribute, "on_append", append, retval=True, raw=True)
+        event.listen(attribute, "append", append, retval=True, raw=True)
     else:
-        event.listen(attribute, "on_set", set_, retval=True, raw=True)
+        event.listen(attribute, "set", set_, retval=True, raw=True)
     # TODO: need coverage in test/orm/ of remove event
-    event.listen(attribute, "on_remove", remove, retval=True, raw=True)
+    event.listen(attribute, "remove", remove, retval=True, raw=True)
         
 class History(tuple):
     """A 3-tuple of added, unchanged and deleted values,
index 99e6464f2995821b9c46c9155a5db003cb0cf2e3..f891e49018a01c84d73359c304671f025b8ce090 100644 (file)
@@ -180,7 +180,7 @@ class collection(object):
     The decorators fall into two groups: annotations and interception recipes.
 
     The annotating decorators (appender, remover, iterator,
-    internally_instrumented, on_link) indicate the method's purpose and take no
+    internally_instrumented, link) indicate the method's purpose and take no
     arguments.  They are not written with parens::
 
         @collection.appender
@@ -309,7 +309,7 @@ class collection(object):
         return fn
 
     @staticmethod
-    def on_link(fn):
+    def link(fn):
         """Tag the method as a the "linked to attribute" event handler.
 
         This optional event handler will be called when the collection class
@@ -319,7 +319,7 @@ class collection(object):
         that has been linked, or None if unlinking.
 
         """
-        setattr(fn, '_sa_instrument_role', 'on_link')
+        setattr(fn, '_sa_instrument_role', 'link')
         return fn
 
     @staticmethod
@@ -795,7 +795,7 @@ def _instrument_class(cls):
         if hasattr(method, '_sa_instrument_role'):
             role = method._sa_instrument_role
             assert role in ('appender', 'remover', 'iterator',
-                            'on_link', 'converter')
+                            'link', 'converter')
             roles[role] = name
 
         # transfer instrumentation requests from decorated function
index 8ec5c804240f8c3b9a1399392889c3a8cac3ea16..b294a8d7d6b3732ec91891b5c362fae3de206843 100644 (file)
@@ -86,7 +86,7 @@ class MapperExtension(object):
                         def reconstruct(instance):
                             ls_meth(self, instance)
                         return reconstruct
-                    event.listen(self.class_manager, 'on_load', 
+                    event.listen(self.class_manager, 'load', 
                                         go(ls_meth), raw=False, propagate=True)
                 elif meth == 'init_instance':
                     def go(ls_meth):
@@ -95,7 +95,7 @@ class MapperExtension(object):
                                         self.class_manager.original_init, 
                                         instance, args, kwargs)
                         return init_instance
-                    event.listen(self.class_manager, 'on_init', 
+                    event.listen(self.class_manager, 'init', 
                                         go(ls_meth), raw=False, propagate=True)
                 elif meth == 'init_failed':
                     def go(ls_meth):
@@ -105,10 +105,10 @@ class MapperExtension(object):
                                             instance, args, kwargs)
                             
                         return init_failed
-                    event.listen(self.class_manager, 'on_init_failure', 
+                    event.listen(self.class_manager, 'init_failure', 
                                         go(ls_meth), raw=False, propagate=True)
                 else:
-                    event.listen(self, "on_%s" % meth, ls_meth, 
+                    event.listen(self, "%s" % meth, ls_meth, 
                                         raw=False, retval=True, propagate=True)
 
 
@@ -395,16 +395,16 @@ class SessionExtension(object):
 
     @classmethod
     def _adapt_listener(cls, self, listener):
-        event.listen(self, 'on_before_commit', listener.before_commit)
-        event.listen(self, 'on_after_commit', listener.after_commit)
-        event.listen(self, 'on_after_rollback', listener.after_rollback)
-        event.listen(self, 'on_before_flush', listener.before_flush)
-        event.listen(self, 'on_after_flush', listener.after_flush)
-        event.listen(self, 'on_after_flush_postexec', listener.after_flush_postexec)
-        event.listen(self, 'on_after_begin', listener.after_begin)
-        event.listen(self, 'on_after_attach', listener.after_attach)
-        event.listen(self, 'on_after_bulk_update', listener.after_bulk_update)
-        event.listen(self, 'on_after_bulk_delete', listener.after_bulk_delete)
+        event.listen(self, 'before_commit', listener.before_commit)
+        event.listen(self, 'after_commit', listener.after_commit)
+        event.listen(self, 'after_rollback', listener.after_rollback)
+        event.listen(self, 'before_flush', listener.before_flush)
+        event.listen(self, 'after_flush', listener.after_flush)
+        event.listen(self, 'after_flush_postexec', listener.after_flush_postexec)
+        event.listen(self, 'after_begin', listener.after_begin)
+        event.listen(self, 'after_attach', listener.after_attach)
+        event.listen(self, 'after_bulk_update', listener.after_bulk_update)
+        event.listen(self, 'after_bulk_delete', listener.after_bulk_delete)
 
     def before_commit(self, session):
         """Execute right before commit is called.
@@ -534,13 +534,13 @@ class AttributeExtension(object):
 
     @classmethod
     def _adapt_listener(cls, self, listener):
-        event.listen(self, 'on_append', listener.append,
+        event.listen(self, 'append', listener.append,
                             active_history=listener.active_history,
                             raw=True, retval=True)
-        event.listen(self, 'on_remove', listener.remove,
+        event.listen(self, 'remove', listener.remove,
                             active_history=listener.active_history, 
                             raw=True, retval=True)
-        event.listen(self, 'on_set', listener.set,
+        event.listen(self, 'set', listener.set,
                             active_history=listener.active_history, 
                             raw=True, retval=True)
     
index d0f871664280ee23aea0be4fa3e5c07fa7d1663d..2c3a7559db4440475058cbc606c45c0be903e446 100644 (file)
@@ -116,7 +116,7 @@ class CompositeProperty(DescriptorProperty):
             state = attributes.instance_state(instance)
             attr = state.manager[self.key]
             previous = dict_.get(self.key, attributes.NO_VALUE)
-            for fn in attr.dispatch.on_set:
+            for fn in attr.dispatch.set:
                 value = fn(state, value, previous, attr.impl)
             dict_[self.key] = value
             if value is None:
@@ -133,7 +133,7 @@ class CompositeProperty(DescriptorProperty):
             dict_ = attributes.instance_dict(instance)
             previous = dict_.pop(self.key, attributes.NO_VALUE)
             attr = state.manager[self.key]
-            attr.dispatch.on_remove(state, previous, attr.impl)
+            attr.dispatch.remove(state, previous, attr.impl)
             for key in self._attribute_keys:
                 setattr(instance, key, None)
         
@@ -183,13 +183,13 @@ class CompositeProperty(DescriptorProperty):
                     self._attribute_keys]
                 )
             
-        event.listen(self.parent, 'on_after_insert', 
+        event.listen(self.parent, 'after_insert', 
                                     insert_update_handler, raw=True)
-        event.listen(self.parent, 'on_after_update', 
+        event.listen(self.parent, 'after_update', 
                                     insert_update_handler, raw=True)
-        event.listen(self.parent, 'on_load', load_handler, raw=True)
-        event.listen(self.parent, 'on_refresh', load_handler, raw=True)
-        event.listen(self.parent, "on_expire", expire_handler, raw=True)
+        event.listen(self.parent, 'load', load_handler, raw=True)
+        event.listen(self.parent, 'refresh', load_handler, raw=True)
+        event.listen(self.parent, "expire", expire_handler, raw=True)
         
         # TODO: need a deserialize hook here
         
index 92bd78a5830b5329dcf67a6b4324acaf418e7561..9c0211cc5d77d4461cbe82c723799d2537768953 100644 (file)
@@ -76,7 +76,7 @@ class DynamicAttributeImpl(attributes.AttributeImpl):
         collection_history = self._modified_event(state, dict_)
         collection_history.added_items.append(value)
 
-        for fn in self.dispatch.on_append:
+        for fn in self.dispatch.append:
             value = fn(state, value, initiator or self)
 
         if self.trackparent and value is not None:
@@ -89,7 +89,7 @@ class DynamicAttributeImpl(attributes.AttributeImpl):
         if self.trackparent and value is not None:
             self.sethasparent(attributes.instance_state(value), False)
 
-        for fn in self.dispatch.on_remove:
+        for fn in self.dispatch.remove:
             fn(state, value, initiator or self)
 
     def _modified_event(self, state, dict_):
index e8dd07142842e2c085e2e08970aa5416884dd23e..bb011e5f75de45c315223a5e64cb08bbd19c19b4 100644 (file)
@@ -33,7 +33,7 @@ class InstrumentationEvents(event.Events):
     def _remove(cls, identifier, target, fn):
         raise NotImplementedError("Removal of instrumentation events not yet implemented")
 
-    def on_class_instrument(self, cls):
+    def class_instrument(self, cls):
         """Called after the given class is instrumented.
         
         To get at the :class:`.ClassManager`, use
@@ -41,7 +41,7 @@ class InstrumentationEvents(event.Events):
         
         """
 
-    def on_class_uninstrument(self, cls):
+    def class_uninstrument(self, cls):
         """Called before the given class is uninstrumented.
         
         To get at the :class:`.ClassManager`, use
@@ -50,7 +50,7 @@ class InstrumentationEvents(event.Events):
         """
         
         
-    def on_attribute_instrument(self, cls, key, inst):
+    def attribute_instrument(self, cls, key, inst):
         """Called when an attribute is instrumented."""
 
 class InstanceEvents(event.Events):
@@ -97,12 +97,12 @@ class InstanceEvents(event.Events):
     def _remove(cls, identifier, target, fn):
         raise NotImplementedError("Removal of instance events not yet implemented")
 
-    def on_first_init(self, manager, cls):
+    def first_init(self, manager, cls):
         """Called when the first instance of a particular mapping is called.
 
         """
         
-    def on_init(self, target, args, kwargs):
+    def init(self, target, args, kwargs):
         """Receive an instance when it's constructor is called.
         
         This method is only called during a userland construction of 
@@ -111,7 +111,7 @@ class InstanceEvents(event.Events):
 
         """
         
-    def on_init_failure(self, target, args, kwargs):
+    def init_failure(self, target, args, kwargs):
         """Receive an instance when it's constructor has been called, 
         and raised an exception.
         
@@ -121,7 +121,7 @@ class InstanceEvents(event.Events):
 
         """
     
-    def on_load(self, target):
+    def load(self, target):
         """Receive an object instance after it has been created via
         ``__new__``, and after initial attribute population has
         occurred.
@@ -137,7 +137,7 @@ class InstanceEvents(event.Events):
 
         """
 
-    def on_refresh(self, target):
+    def refresh(self, target):
         """Receive an object instance after one or more attributes have 
         been refreshed.
         
@@ -145,7 +145,7 @@ class InstanceEvents(event.Events):
         
         """
     
-    def on_expire(self, target, keys):
+    def expire(self, target, keys):
         """Receive an object instance after its attributes or some subset
         have been expired.
         
@@ -154,7 +154,7 @@ class InstanceEvents(event.Events):
         
         """
         
-    def on_resurrect(self, target):
+    def resurrect(self, target):
         """Receive an object instance as it is 'resurrected' from 
         garbage collection, which occurs when a "dirty" state falls
         out of scope."""
@@ -175,8 +175,8 @@ class MapperEvents(event.Events):
                                         % target.special_number)
         
         # associate the listener function with SomeMappedClass,
-        # to execute during the "on_before_insert" hook
-        event.listen(SomeMappedClass, 'on_before_insert', my_before_insert_listener)
+        # to execute during the "before_insert" hook
+        event.listen(SomeMappedClass, 'before_insert', my_before_insert_listener)
 
     Available targets include mapped classes, instances of
     :class:`.Mapper` (i.e. returned by :func:`.mapper`,
@@ -190,18 +190,18 @@ class MapperEvents(event.Events):
             log.debug("Instance %s being inserted" % target)
             
         # attach to all mappers
-        event.listen(mapper, 'on_before_insert', some_listener)
+        event.listen(mapper, 'before_insert', some_listener)
     
     Mapper events provide hooks into critical sections of the
     mapper, including those related to object instrumentation,
     object loading, and object persistence. In particular, the
-    persistence methods :meth:`~.MapperEvents.on_before_insert`,
-    and :meth:`~.MapperEvents.on_before_update` are popular
+    persistence methods :meth:`~.MapperEvents.before_insert`,
+    and :meth:`~.MapperEvents.before_update` are popular
     places to augment the state being persisted - however, these
     methods operate with several significant restrictions. The
     user is encouraged to evaluate the
-    :meth:`.SessionEvents.on_before_flush` and
-    :meth:`.SessionEvents.on_after_flush` methods as more
+    :meth:`.SessionEvents.before_flush` and
+    :meth:`.SessionEvents.after_flush` methods as more
     flexible and user-friendly hooks in which to apply
     additional database state during a flush.
     
@@ -226,8 +226,8 @@ class MapperEvents(event.Events):
        * ``sqlalchemy.orm.interfaces.EXT_STOP`` - cancel all subsequent
          event handlers in the chain.
        * other values - the return value specified by specific listeners,
-         such as :meth:`~.MapperEvents.on_translate_row` or 
-         :meth:`~.MapperEvents.on_create_instance`.
+         such as :meth:`~.MapperEvents.translate_row` or 
+         :meth:`~.MapperEvents.create_instance`.
      
     """
 
@@ -275,7 +275,7 @@ class MapperEvents(event.Events):
         else:
             event.Events._listen(target, identifier, fn)
         
-    def on_instrument_class(self, mapper, class_):
+    def instrument_class(self, mapper, class_):
         """Receive a class when the mapper is first constructed, 
         before instrumentation is applied to the mapped class.
         
@@ -291,7 +291,7 @@ class MapperEvents(event.Events):
         
         """
     
-    def on_mapper_configured(self, mapper, class_):
+    def mapper_configured(self, mapper, class_):
         """Called when the mapper for the class is fully configured.
 
         This event is the latest phase of mapper construction.
@@ -304,7 +304,7 @@ class MapperEvents(event.Events):
         """
         # TODO: need coverage for this event
         
-    def on_translate_row(self, mapper, context, row):
+    def translate_row(self, mapper, context, row):
         """Perform pre-processing on the given result row and return a
         new row instance.
 
@@ -332,7 +332,7 @@ class MapperEvents(event.Events):
         
         """
 
-    def on_create_instance(self, mapper, context, row, class_):
+    def create_instance(self, mapper, context, row, class_):
         """Receive a row when a new object instance is about to be
         created from that row.
 
@@ -356,7 +356,7 @@ class MapperEvents(event.Events):
 
         """
 
-    def on_append_result(self, mapper, context, row, target, 
+    def append_result(self, mapper, context, row, target, 
                         result, **flags):
         """Receive an object instance before that instance is appended
         to a result list.
@@ -389,7 +389,7 @@ class MapperEvents(event.Events):
         """
 
 
-    def on_populate_instance(self, mapper, context, row, 
+    def populate_instance(self, mapper, context, row, 
                             target, **flags):
         """Receive an instance before that instance has
         its attributes populated.
@@ -402,7 +402,7 @@ class MapperEvents(event.Events):
         
         Most usages of this hook are obsolete.  For a
         generic "object has been newly created from a row" hook, use
-        :meth:`.InstanceEvents.on_load`.
+        :meth:`.InstanceEvents.load`.
 
         :param mapper: the :class:`.Mapper` which is the target
          of this event.
@@ -420,7 +420,7 @@ class MapperEvents(event.Events):
 
         """
 
-    def on_before_insert(self, mapper, connection, target):
+    def before_insert(self, mapper, connection, target):
         """Receive an object instance before an INSERT statement
         is emitted corresponding to that instance.
         
@@ -460,7 +460,7 @@ class MapperEvents(event.Events):
 
         """
 
-    def on_after_insert(self, mapper, connection, target):
+    def after_insert(self, mapper, connection, target):
         """Receive an object instance after an INSERT statement
         is emitted corresponding to that instance.
         
@@ -492,7 +492,7 @@ class MapperEvents(event.Events):
         
         """
 
-    def on_before_update(self, mapper, connection, target):
+    def before_update(self, mapper, connection, target):
         """Receive an object instance before an UPDATE statement
         is emitted corresponding to that instance.
 
@@ -509,7 +509,7 @@ class MapperEvents(event.Events):
         collections are modified. If, at update time, no
         column-based attributes have any net changes, no UPDATE
         statement will be issued. This means that an instance
-        being sent to :meth:`~.MapperEvents.on_before_update` is
+        being sent to :meth:`~.MapperEvents.before_update` is
         *not* a guarantee that an UPDATE statement will be
         issued, although you can affect the outcome here by
         modifying attributes so that a net change in value does
@@ -550,7 +550,7 @@ class MapperEvents(event.Events):
         :return: No return value is supported by this event.
         """
 
-    def on_after_update(self, mapper, connection, target):
+    def after_update(self, mapper, connection, target):
         """Receive an object instance after an UPDATE statement
         is emitted corresponding to that instance.
 
@@ -568,7 +568,7 @@ class MapperEvents(event.Events):
         collections are modified. If, at update time, no
         column-based attributes have any net changes, no UPDATE
         statement will be issued. This means that an instance
-        being sent to :meth:`~.MapperEvents.on_after_update` is
+        being sent to :meth:`~.MapperEvents.after_update` is
         *not* a guarantee that an UPDATE statement has been
         issued.
         
@@ -600,7 +600,7 @@ class MapperEvents(event.Events):
         
         """
 
-    def on_before_delete(self, mapper, connection, target):
+    def before_delete(self, mapper, connection, target):
         """Receive an object instance before a DELETE statement
         is emitted corresponding to that instance.
         
@@ -634,7 +634,7 @@ class MapperEvents(event.Events):
         
         """
 
-    def on_after_delete(self, mapper, connection, target):
+    def after_delete(self, mapper, connection, target):
         """Receive an object instance after a DELETE statement
         has been emitted corresponding to that instance.
         
@@ -677,7 +677,7 @@ class SessionEvents(event.Events):
         
         Session = sessionmaker()
         
-        event.listen(Session, "on_before_commit", my_before_commit)
+        event.listen(Session, "before_commit", my_before_commit)
     
     The :func:`~.event.listen` function will accept
     :class:`.Session` objects as well as the return result
@@ -714,31 +714,31 @@ class SessionEvents(event.Events):
     def _remove(cls, identifier, target, fn):
         raise NotImplementedError("Removal of session events not yet implemented")
 
-    def on_before_commit(self, session):
+    def before_commit(self, session):
         """Execute before commit is called.
         
         Note that this may not be per-flush if a longer running
         transaction is ongoing."""
 
-    def on_after_commit(self, session):
+    def after_commit(self, session):
         """Execute after a commit has occured.
         
         Note that this may not be per-flush if a longer running
         transaction is ongoing."""
 
-    def on_after_rollback(self, session):
+    def after_rollback(self, session):
         """Execute after a rollback has occured.
         
         Note that this may not be per-flush if a longer running
         transaction is ongoing."""
 
-    def on_before_flush( self, session, flush_context, instances):
+    def before_flush( self, session, flush_context, instances):
         """Execute before flush process has started.
         
         `instances` is an optional list of objects which were passed to
         the ``flush()`` method. """
 
-    def on_after_flush(self, session, flush_context):
+    def after_flush(self, session, flush_context):
         """Execute after flush has completed, but before commit has been
         called.
         
@@ -746,7 +746,7 @@ class SessionEvents(event.Events):
         'dirty', and 'deleted' lists still show pre-flush state as well
         as the history settings on instance attributes."""
 
-    def on_after_flush_postexec(self, session, flush_context):
+    def after_flush_postexec(self, session, flush_context):
         """Execute after flush has completed, and after the post-exec
         state occurs.
         
@@ -755,18 +755,18 @@ class SessionEvents(event.Events):
         occurred, depending on whether or not the flush started its own
         transaction or participated in a larger transaction. """
 
-    def on_after_begin( self, session, transaction, connection):
+    def after_begin( self, session, transaction, connection):
         """Execute after a transaction is begun on a connection
         
         `transaction` is the SessionTransaction. This method is called
         after an engine level transaction is begun on a connection. """
 
-    def on_after_attach(self, session, instance):
+    def after_attach(self, session, instance):
         """Execute after an instance is attached to a session.
         
         This is called after an add, delete or merge. """
 
-    def on_after_bulk_update( self, session, query, query_context, result):
+    def after_bulk_update( self, session, query, query_context, result):
         """Execute after a bulk update operation to the session.
         
         This is called after a session.query(...).update()
@@ -776,7 +776,7 @@ class SessionEvents(event.Events):
         `result` is the result object returned from the bulk operation.
         """
 
-    def on_after_bulk_delete( self, session, query, query_context, result):
+    def after_bulk_delete( self, session, query, query_context, result):
         """Execute after a bulk delete operation to the session.
         
         This is called after a session.query(...).delete()
@@ -800,7 +800,7 @@ class AttributeEvents(event.Events):
         def my_append_listener(target, value, initiator):
             print "received append event for target: %s" % target
         
-        event.listen(MyClass.collection, 'on_append', my_append_listener)
+        event.listen(MyClass.collection, 'append', my_append_listener)
     
     Listeners have the option to return a possibly modified version
     of the value, when the ``retval=True`` flag is passed
@@ -813,7 +813,7 @@ class AttributeEvents(event.Events):
         
         # setup listener on UserContact.phone attribute, instructing
         # it to use the return value
-        listen(UserContact.phone, 'on_set', validate_phone, retval=True)
+        listen(UserContact.phone, 'set', validate_phone, retval=True)
     
     A validation function like the above can also raise an exception
     such as :class:`ValueError` to halt the operation.
@@ -821,7 +821,7 @@ class AttributeEvents(event.Events):
     Several modifiers are available to the :func:`~.event.listen` function.
     
     :param active_history=False: When True, indicates that the
-      "on_set" event would like to receive the "old" value being
+      "set" event would like to receive the "old" value being
       replaced unconditionally, even if this requires firing off
       database loads. Note that ``active_history`` can also be
       set directly via :func:`.column_property` and
@@ -889,7 +889,7 @@ class AttributeEvents(event.Events):
     def _remove(cls, identifier, target, fn):
         raise NotImplementedError("Removal of attribute events not yet implemented")
         
-    def on_append(self, target, value, initiator):
+    def append(self, target, value, initiator):
         """Receive a collection append event.
 
         :param target: the object instance receiving the event.
@@ -906,7 +906,7 @@ class AttributeEvents(event.Events):
          
         """
 
-    def on_remove(self, target, value, initiator):
+    def remove(self, target, value, initiator):
         """Receive a collection remove event.
 
         :param target: the object instance receiving the event.
@@ -918,7 +918,7 @@ class AttributeEvents(event.Events):
         :return: No return value is defined for this event.
         """
 
-    def on_set(self, target, value, oldvalue, initiator):
+    def set(self, target, value, oldvalue, initiator):
         """Receive a scalar set event.
 
         :param target: the object instance receiving the event.
index 9876dde3f0c4d2b2388063f3df80155e09eeb073..8cf3b8580a74d129f287a51b60db91707508673e 100644 (file)
@@ -168,7 +168,7 @@ class ClassManager(dict):
     
     @util.memoized_property
     def _state_constructor(self):
-        self.dispatch.on_first_init(self, self.class_)
+        self.dispatch.first_init(self, self.class_)
         if self.mutable_attributes:
             return state.MutableAttrInstanceState
         else:
@@ -211,7 +211,7 @@ class ClassManager(dict):
         
     def post_configure_attribute(self, key):
         instrumentation_registry.dispatch.\
-                on_attribute_instrument(self.class_, key, self[key])
+                attribute_instrument(self.class_, key, self[key])
         
     def uninstrument_attribute(self, key, propagated=False):
         if key not in self:
@@ -527,7 +527,7 @@ class InstrumentationRegistry(object):
         self._state_finders[class_] = manager.state_getter()
         self._dict_finders[class_] = manager.dict_getter()
         
-        self.dispatch.on_class_instrument(class_)
+        self.dispatch.class_instrument(class_)
         
         return manager
 
@@ -595,7 +595,7 @@ class InstrumentationRegistry(object):
     def unregister(self, class_):
         if class_ in self._manager_finders:
             manager = self.manager_of_class(class_)
-            self.dispatch.on_class_uninstrument(class_)
+            self.dispatch.class_uninstrument(class_)
             manager.unregister()
             manager.dispose()
             del self._manager_finders[class_]
index 563de116a5b6c72ecea7e87591f72ad77b981f16..a0265f9a8162c308ef20d0a274478a84de867f8c 100644 (file)
@@ -395,7 +395,7 @@ class Mapper(object):
                 
         _mapper_registry[self] = True
 
-        self.dispatch.on_instrument_class(self, self.class_)
+        self.dispatch.instrument_class(self, self.class_)
 
         if manager is None:
             manager = instrumentation.register_class(self.class_, 
@@ -411,15 +411,15 @@ class Mapper(object):
         if manager.info.get(_INSTRUMENTOR, False):
             return
 
-        event.listen(manager, 'on_first_init', _event_on_first_init, raw=True)
-        event.listen(manager, 'on_init', _event_on_init, raw=True)
-        event.listen(manager, 'on_resurrect', _event_on_resurrect, raw=True)
+        event.listen(manager, 'first_init', _event_on_first_init, raw=True)
+        event.listen(manager, 'init', _event_on_init, raw=True)
+        event.listen(manager, 'resurrect', _event_on_resurrect, raw=True)
         
         for key, method in util.iterate_attributes(self.class_):
             if isinstance(method, types.FunctionType):
                 if hasattr(method, '__sa_reconstructor__'):
                     self._reconstructor = method
-                    event.listen(manager, 'on_load', _event_on_load, raw=True)
+                    event.listen(manager, 'load', _event_on_load, raw=True)
                 elif hasattr(method, '__sa_validators__'):
                     for name in method.__sa_validators__:
                         self._validators[name] = method
@@ -1597,9 +1597,9 @@ class Mapper(object):
             
             # call before_XXX extensions
             if not has_identity:
-                mapper.dispatch.on_before_insert(mapper, conn, state)
+                mapper.dispatch.before_insert(mapper, conn, state)
             else:
-                mapper.dispatch.on_before_update(mapper, conn, state)
+                mapper.dispatch.before_update(mapper, conn, state)
 
             # detect if we have a "pending" instance (i.e. has 
             # no instance_key attached to it), and another instance 
@@ -1911,9 +1911,9 @@ class Mapper(object):
 
             # call after_XXX extensions
             if not has_identity:
-                mapper.dispatch.on_after_insert(mapper, connection, state)
+                mapper.dispatch.after_insert(mapper, connection, state)
             else:
-                mapper.dispatch.on_after_update(mapper, connection, state)
+                mapper.dispatch.after_update(mapper, connection, state)
 
     def _postfetch(self, uowtransaction, table, 
                     state, dict_, prefetch_cols, postfetch_cols,
@@ -1988,7 +1988,7 @@ class Mapper(object):
             else:
                 conn = connection
         
-            mapper.dispatch.on_before_delete(mapper, conn, state)
+            mapper.dispatch.before_delete(mapper, conn, state)
             
             tups.append((state, 
                     state.dict,
@@ -2074,7 +2074,7 @@ class Mapper(object):
                     )
 
         for state, state_dict, mapper, has_identity, connection in tups:
-            mapper.dispatch.on_after_delete(mapper, connection, state)
+            mapper.dispatch.after_delete(mapper, connection, state)
 
     def _instance_processor(self, context, path, reduced_path, adapter, 
                                 polymorphic_from=None, 
@@ -2143,10 +2143,10 @@ class Mapper(object):
 
         listeners = self.dispatch
         
-        translate_row = listeners.on_translate_row or None
-        create_instance = listeners.on_create_instance or None
-        populate_instance = listeners.on_populate_instance or None
-        append_result = listeners.on_append_result or None
+        translate_row = listeners.translate_row or None
+        create_instance = listeners.create_instance or None
+        populate_instance = listeners.populate_instance or None
+        append_result = listeners.append_result or None
         populate_existing = context.populate_existing or self.always_refresh
         if self.allow_partial_pks:
             is_not_primary_key = _none_set.issuperset
@@ -2297,9 +2297,9 @@ class Mapper(object):
                         populate_state(state, dict_, row, isnew, attrs)
 
             if loaded_instance:
-                state.manager.dispatch.on_load(state)
+                state.manager.dispatch.load(state)
             elif isnew:
-                state.manager.dispatch.on_refresh(state)
+                state.manager.dispatch.refresh(state)
                 
             if result is not None:
                 if append_result:
@@ -2408,7 +2408,7 @@ def configure_mappers():
                     try:
                         mapper._post_configure_properties()
                         mapper._expire_memoizations()
-                        mapper.dispatch.on_mapper_configured(mapper, mapper.class_)
+                        mapper.dispatch.mapper_configured(mapper, mapper.class_)
                     except:
                         exc = sys.exc_info()[1]
                         if not hasattr(exc, '_configure_failed'):
index 74678a8d2d27feb4d12f4e4b43f982b0a8382640..22f5e67c6ea43590feb466b6924aecd767d49d01 100644 (file)
@@ -1803,7 +1803,7 @@ class Query(object):
             filter = None
 
         custom_rows = single_entity and \
-                        self._entities[0].mapper.dispatch.on_append_result
+                        self._entities[0].mapper.dispatch.append_result
 
         (process, labels) = \
                     zip(*[
@@ -2193,7 +2193,7 @@ class Query(object):
                         )
                     )
 
-        session.dispatch.on_after_bulk_delete(session, self, context, result)
+        session.dispatch.after_bulk_delete(session, self, context, result)
 
         return result.rowcount
 
@@ -2343,7 +2343,7 @@ class Query(object):
                                 [_attr_as_key(k) for k in values]
                                 )
         
-        session.dispatch.on_after_bulk_update(session, self, context, result)
+        session.dispatch.after_bulk_update(session, self, context, result)
 
         return result.rowcount
 
index eba4ace8c0d1b1de47fd3cb45cefc990236ea1ad..c4c2ee1e7b746ec1b6d8943e35b5f625e8afb1b0 100644 (file)
@@ -345,7 +345,7 @@ class SessionTransaction(object):
 
         self._connections[conn] = self._connections[conn.engine] = \
           (conn, transaction, conn is not bind)
-        self.session.dispatch.on_after_begin(self.session, self, conn)
+        self.session.dispatch.after_begin(self.session, self, conn)
         return conn
 
     def prepare(self):
@@ -357,7 +357,7 @@ class SessionTransaction(object):
     def _prepare_impl(self):
         self._assert_is_active()
         if self._parent is None or self.nested:
-            self.session.dispatch.on_before_commit(self.session)
+            self.session.dispatch.before_commit(self.session)
 
         stx = self.session.transaction
         if stx is not self:
@@ -387,7 +387,7 @@ class SessionTransaction(object):
             for t in set(self._connections.values()):
                 t[1].commit()
 
-            self.session.dispatch.on_after_commit(self.session)
+            self.session.dispatch.after_commit(self.session)
 
             if self.session._enable_transaction_accounting:
                 self._remove_snapshot()
@@ -424,7 +424,7 @@ class SessionTransaction(object):
         if self.session._enable_transaction_accounting:
             self._restore_snapshot()
 
-        self.session.dispatch.on_after_rollback(self.session)
+        self.session.dispatch.after_rollback(self.session)
 
     def _deactivate(self):
         self._active = False
@@ -1255,7 +1255,7 @@ class Session(object):
             merged_state.commit_all(merged_dict, self.identity_map)  
 
         if new_instance:
-            merged_state.manager.dispatch.on_load(merged_state)
+            merged_state.manager.dispatch.load(merged_state)
         return merged
 
     @classmethod
@@ -1339,8 +1339,8 @@ class Session(object):
                                     
         if state.session_id != self.hash_key:
             state.session_id = self.hash_key
-            if self.dispatch.on_after_attach:
-                self.dispatch.on_after_attach(self, state.obj())
+            if self.dispatch.after_attach:
+                self.dispatch.after_attach(self, state.obj())
 
     def __contains__(self, instance):
         """Return True if the instance is associated with this session.
@@ -1415,8 +1415,8 @@ class Session(object):
 
         flush_context = UOWTransaction(self)
         
-        if self.dispatch.on_before_flush:
-            self.dispatch.on_before_flush(self, flush_context, objects)
+        if self.dispatch.before_flush:
+            self.dispatch.before_flush(self, flush_context, objects)
             # re-establish "dirty states" in case the listeners
             # added
             dirty = self._dirty_states
@@ -1470,7 +1470,7 @@ class Session(object):
         try:
             flush_context.execute()
 
-            self.dispatch.on_after_flush(self, flush_context)
+            self.dispatch.after_flush(self, flush_context)
             transaction.commit()
         except:
             transaction.rollback(_capture_exception=True)
@@ -1486,7 +1486,7 @@ class Session(object):
         #            self.identity_map._modified.difference(objects)
         #self.identity_map._modified.clear()
         
-        self.dispatch.on_after_flush_postexec(self, flush_context)
+        self.dispatch.after_flush_postexec(self, flush_context)
 
     def is_modified(self, instance, include_collections=True, passive=False):
         """Return ``True`` if instance has modified attributes.
index 89a84e898473af89a7e77987b4dd6b4a1f794d1a..f007665da5e4b9f7e003aaf9281d427d71dbd272 100644 (file)
@@ -89,7 +89,7 @@ class InstanceState(object):
         self, instance, args = mixed[0], mixed[1], mixed[2:]
         manager = self.manager
 
-        manager.dispatch.on_init(self, args, kwargs)
+        manager.dispatch.init(self, args, kwargs)
             
         #if manager.mutable_attributes:
         #    assert self.__class__ is MutableAttrInstanceState
@@ -97,7 +97,7 @@ class InstanceState(object):
         try:
             return manager.original_init(*mixed[1:], **kwargs)
         except:
-            manager.dispatch.on_init_failure(self, args, kwargs)
+            manager.dispatch.init_failure(self, args, kwargs)
             raise
 
     def get_history(self, key, **kwargs):
@@ -232,7 +232,7 @@ class InstanceState(object):
                 self.callables[key] = self
             dict_.pop(key, None)
         
-        self.manager.dispatch.on_expire(self, None)
+        self.manager.dispatch.expire(self, None)
 
     def expire_attributes(self, dict_, attribute_names):
         pending = self.__dict__.get('pending', None)
@@ -250,7 +250,7 @@ class InstanceState(object):
             if pending:
                 pending.pop(key, None)
 
-        self.manager.dispatch.on_expire(self, attribute_names)
+        self.manager.dispatch.expire(self, attribute_names)
 
     def __call__(self, passive):
         """__call__ allows the InstanceState to act as a deferred
@@ -516,7 +516,7 @@ class MutableAttrInstanceState(InstanceState):
         obj.__dict__.update(self.mutable_dict)
 
         # re-establishes identity attributes from the key
-        self.manager.dispatch.on_resurrect(self)
+        self.manager.dispatch.resurrect(self)
         
         return obj
 
index 92fd74f78d3c5891885853742eed32fae2895119..3dfa15df1185777b499d9455871adac6eb56415f 100644 (file)
@@ -1277,6 +1277,6 @@ def single_parent_validator(desc, prop):
     def set_(state, value, oldvalue, initiator):
         return _do_check(state, value, oldvalue, initiator)
     
-    event.listen(desc, 'on_append', append, raw=True, retval=True, active_history=True)
-    event.listen(desc, 'on_set', set_, raw=True, retval=True, active_history=True)
+    event.listen(desc, 'append', append, raw=True, retval=True, active_history=True)
+    event.listen(desc, 'set', set_, raw=True, retval=True, active_history=True)
     
index 0dd5640a888002c35e6805610f4bfbce1a8529e4..b2798cf839fa9bd54c01137072033e21f5487424 100644 (file)
@@ -74,9 +74,9 @@ def track_cascade_events(descriptor, prop):
                     sess.expunge(oldvalue)
         return newvalue
         
-    event.listen(descriptor, 'on_append', append, raw=True, retval=True)
-    event.listen(descriptor, 'on_remove', remove, raw=True, retval=True)
-    event.listen(descriptor, 'on_set', set_, raw=True, retval=True)
+    event.listen(descriptor, 'append', append, raw=True, retval=True)
+    event.listen(descriptor, 'remove', remove, raw=True, retval=True)
+    event.listen(descriptor, 'set', set_, raw=True, retval=True)
 
 
 class UOWTransaction(object):
index 4a8b1713c7948f19f50166b645436d0753839989..0cfdc64363e039877dc1ffb2385ecb310bc0ab4c 100644 (file)
@@ -63,8 +63,8 @@ def _validator_events(desc, key, validator):
     def set_(state, value, oldvalue, initiator):
         return validator(state.obj(), key, value)
     
-    event.listen(desc, 'on_append', append, raw=True, retval=True)
-    event.listen(desc, 'on_set', set_, raw=True, retval=True)
+    event.listen(desc, 'append', append, raw=True, retval=True)
+    event.listen(desc, 'set', set_, raw=True, retval=True)
     
 def polymorphic_union(table_map, typecolname, aliasname='p_union'):
     """Create a ``UNION`` statement used by a polymorphic mapper.
index 572087217e22e9534821dc30e33d010a2d021d62..050b239c4b61cd9dee6b44c9e82d7896b160c4ad 100644 (file)
@@ -255,8 +255,8 @@ class _ConnectionRecord(object):
         self.connection = self.__connect()
         self.info = {}
 
-        pool.dispatch.on_first_connect.exec_once(self.connection, self)
-        pool.dispatch.on_connect(self.connection, self)
+        pool.dispatch.first_connect.exec_once(self.connection, self)
+        pool.dispatch.connect(self.connection, self)
 
     def close(self):
         if self.connection is not None:
@@ -284,8 +284,8 @@ class _ConnectionRecord(object):
         if self.connection is None:
             self.connection = self.__connect()
             self.info.clear()
-            if self.__pool.dispatch.on_connect:
-                self.__pool.dispatch.on_connect(self.connection, self)
+            if self.__pool.dispatch.connect:
+                self.__pool.dispatch.connect(self.connection, self)
         elif self.__pool._recycle > -1 and \
                 time.time() - self.starttime > self.__pool._recycle:
             self.__pool.logger.info(
@@ -294,8 +294,8 @@ class _ConnectionRecord(object):
             self.__close()
             self.connection = self.__connect()
             self.info.clear()
-            if self.__pool.dispatch.on_connect:
-                self.__pool.dispatch.on_connect(self.connection, self)
+            if self.__pool.dispatch.connect:
+                self.__pool.dispatch.connect(self.connection, self)
         return self.connection
 
     def __close(self):
@@ -348,8 +348,8 @@ def _finalize_fairy(connection, connection_record, pool, ref, echo):
         if echo:
             pool.logger.debug("Connection %r being returned to pool", 
                                     connection)
-        if pool.dispatch.on_checkin:
-            pool.dispatch.on_checkin(connection, connection_record)
+        if pool.dispatch.checkin:
+            pool.dispatch.checkin(connection, connection_record)
         pool._return_conn(connection_record)
 
 _refs = set()
@@ -435,14 +435,14 @@ class _ConnectionFairy(object):
             raise exc.InvalidRequestError("This connection is closed")
         self.__counter += 1
 
-        if not self._pool.dispatch.on_checkout or self.__counter != 1:
+        if not self._pool.dispatch.checkout or self.__counter != 1:
             return self
 
         # Pool listeners can trigger a reconnection on checkout
         attempts = 2
         while attempts > 0:
             try:
-                self._pool.dispatch.on_checkout(self.connection, 
+                self._pool.dispatch.checkout(self.connection, 
                                             self._connection_record,
                                             self)
                 return self
index 8fd758d2d4d048568b495a422a883d4fd08594dc..371181fd8d19f560a6910c20fae5bdead123ad12 100644 (file)
@@ -383,7 +383,7 @@ class Table(SchemaItem, expression.TableClause):
         def adapt_listener(target, connection, **kw):
             listener(event_name, target, connection, **kw)
             
-        event.listen(self, "on_" + event_name.replace('-', '_'), adapt_listener)
+        event.listen(self, "" + event_name.replace('-', '_'), adapt_listener)
 
     def _set_parent(self, metadata):
         metadata._add_table(self.name, self.schema, self)
@@ -1753,8 +1753,8 @@ class ForeignKeyConstraint(Constraint):
                 return table in set(kw['tables']) and \
                             bind.dialect.supports_alter
             
-            event.listen(table.metadata, "on_after_create", AddConstraint(self, on=supports_alter))
-            event.listen(table.metadata, "on_before_drop", DropConstraint(self, on=supports_alter))
+            event.listen(table.metadata, "after_create", AddConstraint(self, on=supports_alter))
+            event.listen(table.metadata, "before_drop", DropConstraint(self, on=supports_alter))
             
             
     def copy(self, **kw):
@@ -2085,7 +2085,7 @@ class MetaData(SchemaItem):
         def adapt_listener(target, connection, **kw):
             listener(event, target, connection, **kw)
             
-        event.listen(self, "on_" + event_name.replace('-', '_'), adapt_listener)
+        event.listen(self, "" + event_name.replace('-', '_'), adapt_listener)
 
     def create_all(self, bind=None, tables=None, checkfirst=True):
         """Create all tables stored in this metadata.
@@ -2219,7 +2219,7 @@ class DDLElement(expression.Executable, expression.ClauseElement):
     
         event.listen(
             users,
-            'on_after_create',
+            'after_create',
             AddConstraint(constraint).execute_if(dialect='postgresql')
         )
 
@@ -2309,7 +2309,7 @@ class DDLElement(expression.Executable, expression.ClauseElement):
                                     target, connection, **kw):
                 return connection.execute(self.against(target))
             
-        event.listen(target, "on_" + event_name.replace('-', '_'), call_event)
+        event.listen(target, "" + event_name.replace('-', '_'), call_event)
 
     @expression._generative
     def against(self, target):
@@ -2326,7 +2326,7 @@ class DDLElement(expression.Executable, expression.ClauseElement):
         
             event.listen(
                         metadata,
-                        'on_before_create', 
+                        'before_create', 
                         DDL("my_ddl").execute_if(dialect='postgresql')
                     )
         
@@ -2446,10 +2446,10 @@ class DDL(DDLElement):
       from sqlalchemy import event, DDL
       
       tbl = Table('users', metadata, Column('uid', Integer))
-      event.listen(tbl, 'on_before_create', DDL('DROP TRIGGER users_trigger'))
+      event.listen(tbl, 'before_create', DDL('DROP TRIGGER users_trigger'))
 
       spow = DDL('ALTER TABLE %(table)s SET secretpowers TRUE')
-      event.listen(tbl, 'on_after_create', spow.execute_if(dialect='somedb'))
+      event.listen(tbl, 'after_create', spow.execute_if(dialect='somedb'))
 
       drop_spow = DDL('ALTER TABLE users SET secretpowers FALSE')
       connection.execute(drop_spow)
index a7b10946b60f9328f05ee1336dddf00868930bbf..e64ec3b81bc0c476d59e8e252087bda3de9fb6d9 100644 (file)
@@ -9,14 +9,14 @@ class TestEvents(TestBase):
     def setUp(self):
         global Target
         
-        assert 'on_event_one' not in event._registrars
-        assert 'on_event_two' not in event._registrars
+        assert 'event_one' not in event._registrars
+        assert 'event_two' not in event._registrars
         
         class TargetEvents(event.Events):
-            def on_event_one(self, x, y):
+            def event_one(self, x, y):
                 pass
             
-            def on_event_two(self, x):
+            def event_two(self, x):
                 pass
                 
         class Target(object):
@@ -29,22 +29,22 @@ class TestEvents(TestBase):
         def listen(x, y):
             pass
         
-        event.listen(Target, "on_event_one", listen)
+        event.listen(Target, "event_one", listen)
         
-        eq_(len(Target().dispatch.on_event_one), 1)
-        eq_(len(Target().dispatch.on_event_two), 0)
+        eq_(len(Target().dispatch.event_one), 1)
+        eq_(len(Target().dispatch.event_two), 0)
 
     def test_register_instance(self):
         def listen(x, y):
             pass
         
         t1 = Target()
-        event.listen(t1, "on_event_one", listen)
+        event.listen(t1, "event_one", listen)
 
-        eq_(len(Target().dispatch.on_event_one), 0)
-        eq_(len(t1.dispatch.on_event_one), 1)
-        eq_(len(Target().dispatch.on_event_two), 0)
-        eq_(len(t1.dispatch.on_event_two), 0)
+        eq_(len(Target().dispatch.event_one), 0)
+        eq_(len(t1.dispatch.event_one), 1)
+        eq_(len(Target().dispatch.event_two), 0)
+        eq_(len(t1.dispatch.event_two), 0)
     
     def test_register_class_instance(self):
         def listen_one(x, y):
@@ -53,22 +53,22 @@ class TestEvents(TestBase):
         def listen_two(x, y):
             pass
 
-        event.listen(Target, "on_event_one", listen_one)
+        event.listen(Target, "event_one", listen_one)
         
         t1 = Target()
-        event.listen(t1, "on_event_one", listen_two)
+        event.listen(t1, "event_one", listen_two)
 
-        eq_(len(Target().dispatch.on_event_one), 1)
-        eq_(len(t1.dispatch.on_event_one), 2)
-        eq_(len(Target().dispatch.on_event_two), 0)
-        eq_(len(t1.dispatch.on_event_two), 0)
+        eq_(len(Target().dispatch.event_one), 1)
+        eq_(len(t1.dispatch.event_one), 2)
+        eq_(len(Target().dispatch.event_two), 0)
+        eq_(len(t1.dispatch.event_two), 0)
         
         def listen_three(x, y):
             pass
         
-        event.listen(Target, "on_event_one", listen_three)
-        eq_(len(Target().dispatch.on_event_one), 2)
-        eq_(len(t1.dispatch.on_event_one), 3)
+        event.listen(Target, "event_one", listen_three)
+        eq_(len(Target().dispatch.event_one), 2)
+        eq_(len(t1.dispatch.event_one), 3)
         
 class TestAcceptTargets(TestBase):
     """Test default target acceptance."""
@@ -77,11 +77,11 @@ class TestAcceptTargets(TestBase):
         global TargetOne, TargetTwo
         
         class TargetEventsOne(event.Events):
-            def on_event_one(self, x, y):
+            def event_one(self, x, y):
                 pass
 
         class TargetEventsTwo(event.Events):
-            def on_event_one(self, x, y):
+            def event_one(self, x, y):
                 pass
                 
         class TargetOne(object):
@@ -111,32 +111,32 @@ class TestAcceptTargets(TestBase):
         def listen_four(x, y):
             pass
             
-        event.listen(TargetOne, "on_event_one", listen_one)
-        event.listen(TargetTwo, "on_event_one", listen_two)
+        event.listen(TargetOne, "event_one", listen_one)
+        event.listen(TargetTwo, "event_one", listen_two)
         
         eq_(
-            list(TargetOne().dispatch.on_event_one),
+            list(TargetOne().dispatch.event_one),
             [listen_one]
         )
 
         eq_(
-            list(TargetTwo().dispatch.on_event_one),
+            list(TargetTwo().dispatch.event_one),
             [listen_two]
         )
 
         t1 = TargetOne()
         t2 = TargetTwo()
 
-        event.listen(t1, "on_event_one", listen_three)
-        event.listen(t2, "on_event_one", listen_four)
+        event.listen(t1, "event_one", listen_three)
+        event.listen(t2, "event_one", listen_four)
         
         eq_(
-            list(t1.dispatch.on_event_one),
+            list(t1.dispatch.event_one),
             [listen_one, listen_three]
         )
 
         eq_(
-            list(t2.dispatch.on_event_one),
+            list(t2.dispatch.event_one),
             [listen_two, listen_four]
         )
         
@@ -154,7 +154,7 @@ class TestCustomTargets(TestBase):
                 else:
                     return None
                     
-            def on_event_one(self, x, y):
+            def event_one(self, x, y):
                 pass
 
         class Target(object):
@@ -167,17 +167,17 @@ class TestCustomTargets(TestBase):
         def listen(x, y):
             pass
         
-        event.listen("one", "on_event_one", listen)
+        event.listen("one", "event_one", listen)
 
         eq_(
-            list(Target().dispatch.on_event_one),
+            list(Target().dispatch.event_one),
             [listen]
         )
         
         assert_raises(
             exc.InvalidRequestError, 
             event.listen,
-            listen, "on_event_one", Target
+            listen, "event_one", Target
         )
         
 class TestListenOverride(TestBase):
@@ -197,7 +197,7 @@ class TestListenOverride(TestBase):
                     
                 event.Events._listen(target, identifier, adapt)
                     
-            def on_event_one(self, x, y):
+            def event_one(self, x, y):
                 pass
 
         class Target(object):
@@ -214,12 +214,12 @@ class TestListenOverride(TestBase):
         def listen_two(x, y):
             result.append((x, y))
         
-        event.listen(Target, "on_event_one", listen_one, add=True)
-        event.listen(Target, "on_event_one", listen_two)
+        event.listen(Target, "event_one", listen_one, add=True)
+        event.listen(Target, "event_one", listen_two)
 
         t1 = Target()
-        t1.dispatch.on_event_one(5, 7)
-        t1.dispatch.on_event_one(10, 5)
+        t1.dispatch.event_one(5, 7)
+        t1.dispatch.event_one(10, 5)
         
         eq_(result,
             [
@@ -232,10 +232,10 @@ class TestPropagate(TestBase):
         global Target
         
         class TargetEvents(event.Events):
-            def on_event_one(self, arg):
+            def event_one(self, arg):
                 pass
             
-            def on_event_two(self, arg):
+            def event_two(self, arg):
                 pass
                 
         class Target(object):
@@ -252,13 +252,13 @@ class TestPropagate(TestBase):
         
         t1 = Target()
         
-        event.listen(t1, "on_event_one", listen_one, propagate=True)
-        event.listen(t1, "on_event_two", listen_two)
+        event.listen(t1, "event_one", listen_one, propagate=True)
+        event.listen(t1, "event_two", listen_two)
 
         t2 = Target()
         
         t2.dispatch._update(t1.dispatch)
         
-        t2.dispatch.on_event_one(t2, 1)
-        t2.dispatch.on_event_two(t2, 2)
+        t2.dispatch.event_one(t2, 1)
+        t2.dispatch.event_two(t2, 2)
         eq_(result, [(t2, 1)])
index 0e0f92d3c43f6087938a7d8e64844bb35bb04984..7d5cffbe260610460903a388506db25d8421e794 100644 (file)
@@ -1281,8 +1281,8 @@ class SQLModeDetectionTest(TestBase):
             cursor.execute("set sql_mode='%s'" % (",".join(modes)))
         e = engines.testing_engine(options={
             'pool_events':[
-                (connect, 'on_first_connect'),
-                (connect, 'on_connect')
+                (connect, 'first_connect'),
+                (connect, 'connect')
             ]
         })
         return e
index 790bc23bc767963a539e25545facb16ac09de973..fbe77b9dc75bb4f21f691e322450867dcb86e445 100644 (file)
@@ -49,7 +49,7 @@ class DDLEventTest(TestBase):
     def test_table_create_before(self):
         table, bind = self.table, self.bind
         canary = self.Canary(table, bind)
-        event.listen(table, 'on_before_create', canary.before_create)
+        event.listen(table, 'before_create', canary.before_create)
 
         table.create(bind)
         assert canary.state == 'before-create'
@@ -59,7 +59,7 @@ class DDLEventTest(TestBase):
     def test_table_create_after(self):
         table, bind = self.table, self.bind
         canary = self.Canary(table, bind)
-        event.listen(table, 'on_after_create', canary.after_create)
+        event.listen(table, 'after_create', canary.after_create)
 
         canary.state = 'skipped'
         table.create(bind)
@@ -70,8 +70,8 @@ class DDLEventTest(TestBase):
     def test_table_create_both(self):
         table, bind = self.table, self.bind
         canary = self.Canary(table, bind)
-        event.listen(table, 'on_before_create', canary.before_create)
-        event.listen(table, 'on_after_create', canary.after_create)
+        event.listen(table, 'before_create', canary.before_create)
+        event.listen(table, 'after_create', canary.after_create)
         
         table.create(bind)
         assert canary.state == 'after-create'
@@ -81,7 +81,7 @@ class DDLEventTest(TestBase):
     def test_table_drop_before(self):
         table, bind = self.table, self.bind
         canary = self.Canary(table, bind)
-        event.listen(table, 'on_before_drop', canary.before_drop)
+        event.listen(table, 'before_drop', canary.before_drop)
 
         table.create(bind)
         assert canary.state is None
@@ -91,7 +91,7 @@ class DDLEventTest(TestBase):
     def test_table_drop_after(self):
         table, bind = self.table, self.bind
         canary = self.Canary(table, bind)
-        event.listen(table, 'on_after_drop', canary.after_drop)
+        event.listen(table, 'after_drop', canary.after_drop)
 
         table.create(bind)
         assert canary.state is None
@@ -103,8 +103,8 @@ class DDLEventTest(TestBase):
         table, bind = self.table, self.bind
         canary = self.Canary(table, bind)
 
-        event.listen(table, 'on_before_drop', canary.before_drop)
-        event.listen(table, 'on_after_drop', canary.after_drop)
+        event.listen(table, 'before_drop', canary.before_drop)
+        event.listen(table, 'after_drop', canary.after_drop)
 
         table.create(bind)
         assert canary.state is None
@@ -115,10 +115,10 @@ class DDLEventTest(TestBase):
         table, bind = self.table, self.bind
         canary = self.Canary(table, bind)
 
-        event.listen(table, 'on_before_create', canary.before_create)
-        event.listen(table, 'on_after_create', canary.after_create)
-        event.listen(table, 'on_before_drop', canary.before_drop)
-        event.listen(table, 'on_after_drop', canary.after_drop)
+        event.listen(table, 'before_create', canary.before_create)
+        event.listen(table, 'after_create', canary.after_create)
+        event.listen(table, 'before_drop', canary.before_drop)
+        event.listen(table, 'after_drop', canary.after_drop)
 
         assert canary.state is None
         table.create(bind)
@@ -130,7 +130,7 @@ class DDLEventTest(TestBase):
     def test_table_create_before(self):
         metadata, bind = self.metadata, self.bind
         canary = self.Canary(metadata, bind)
-        event.listen(metadata, 'on_before_create', canary.before_create)
+        event.listen(metadata, 'before_create', canary.before_create)
 
         metadata.create_all(bind)
         assert canary.state == 'before-create'
@@ -140,7 +140,7 @@ class DDLEventTest(TestBase):
     def test_metadata_create_after(self):
         metadata, bind = self.metadata, self.bind
         canary = self.Canary(metadata, bind)
-        event.listen(metadata, 'on_after_create', canary.after_create)
+        event.listen(metadata, 'after_create', canary.after_create)
 
         canary.state = 'skipped'
         metadata.create_all(bind)
@@ -152,8 +152,8 @@ class DDLEventTest(TestBase):
         metadata, bind = self.metadata, self.bind
         canary = self.Canary(metadata, bind)
             
-        event.listen(metadata, 'on_before_create', canary.before_create)
-        event.listen(metadata, 'on_after_create', canary.after_create)
+        event.listen(metadata, 'before_create', canary.before_create)
+        event.listen(metadata, 'after_create', canary.after_create)
 
         metadata.create_all(bind)
         assert canary.state == 'after-create'
@@ -164,10 +164,10 @@ class DDLEventTest(TestBase):
         metadata, table, bind = self.metadata, self.table, self.bind
         table_canary = self.Canary(table, bind)
 
-        event.listen(table, 'on_before_create', table_canary.before_create)
+        event.listen(table, 'before_create', table_canary.before_create)
 
         metadata_canary = self.Canary(metadata, bind)
-        event.listen(metadata, 'on_before_create', metadata_canary.before_create)
+        event.listen(metadata, 'before_create', metadata_canary.before_create)
         self.table.create(self.bind)
         assert metadata_canary.state == None
 
@@ -196,10 +196,10 @@ class DDLExecutionTest(TestBase):
 
     def test_table_standalone(self):
         users, engine = self.users, self.engine
-        event.listen(users, 'on_before_create', DDL('mxyzptlk'))
-        event.listen(users, 'on_after_create', DDL('klptzyxm'))
-        event.listen(users, 'on_before_drop', DDL('xyzzy'))
-        event.listen(users, 'on_after_drop', DDL('fnord'))
+        event.listen(users, 'before_create', DDL('mxyzptlk'))
+        event.listen(users, 'after_create', DDL('klptzyxm'))
+        event.listen(users, 'before_drop', DDL('xyzzy'))
+        event.listen(users, 'after_drop', DDL('fnord'))
 
         users.create()
         strings = [str(x) for x in engine.mock]
@@ -218,10 +218,10 @@ class DDLExecutionTest(TestBase):
     def test_table_by_metadata(self):
         metadata, users, engine = self.metadata, self.users, self.engine
 
-        event.listen(users, 'on_before_create', DDL('mxyzptlk'))
-        event.listen(users, 'on_after_create', DDL('klptzyxm'))
-        event.listen(users, 'on_before_drop', DDL('xyzzy'))
-        event.listen(users, 'on_after_drop', DDL('fnord'))
+        event.listen(users, 'before_create', DDL('mxyzptlk'))
+        event.listen(users, 'after_create', DDL('klptzyxm'))
+        event.listen(users, 'before_drop', DDL('xyzzy'))
+        event.listen(users, 'after_drop', DDL('fnord'))
 
         metadata.create_all()
         strings = [str(x) for x in engine.mock]
@@ -263,10 +263,10 @@ class DDLExecutionTest(TestBase):
     def test_metadata(self):
         metadata, engine = self.metadata, self.engine
 
-        event.listen(metadata, 'on_before_create', DDL('mxyzptlk'))
-        event.listen(metadata, 'on_after_create', DDL('klptzyxm'))
-        event.listen(metadata, 'on_before_drop', DDL('xyzzy'))
-        event.listen(metadata, 'on_after_drop', DDL('fnord'))
+        event.listen(metadata, 'before_create', DDL('mxyzptlk'))
+        event.listen(metadata, 'after_create', DDL('klptzyxm'))
+        event.listen(metadata, 'before_drop', DDL('xyzzy'))
+        event.listen(metadata, 'after_drop', DDL('fnord'))
 
         metadata.create_all()
         strings = [str(x) for x in engine.mock]
@@ -317,13 +317,13 @@ class DDLExecutionTest(TestBase):
 
         event.listen(
             users,
-            'on_after_create',
+            'after_create',
             AddConstraint(constraint).execute_if(dialect='postgresql'),
         )
         
         event.listen(
             users,
-            'on_before_drop',
+            'before_drop',
             DropConstraint(constraint).execute_if(dialect='postgresql'),
         )
         
index 945641ef1f063372de29c3c0d8eafff81203b434..90d2bc578b3a5b84803cde006010c72bbb4aedca 100644 (file)
@@ -468,8 +468,8 @@ class EngineEventsTest(TestBase):
             engines.testing_engine(options=dict(implicit_returning=False,
                                    strategy='threadlocal'))
             ]:
-            event.listen(engine, 'on_before_execute', execute)
-            event.listen(engine, 'on_before_cursor_execute', cursor_execute)
+            event.listen(engine, 'before_execute', execute)
+            event.listen(engine, 'before_cursor_execute', cursor_execute)
             
             m = MetaData(engine)
             t1 = Table('t1', m, 
@@ -521,15 +521,15 @@ class EngineEventsTest(TestBase):
 
     def test_options(self):
         canary = []
-        def on_execute(conn, *args, **kw):
+        def execute(conn, *args, **kw):
             canary.append('execute')
             
-        def on_cursor_execute(conn, *args, **kw):
+        def cursor_execute(conn, *args, **kw):
             canary.append('cursor_execute')
             
         engine = engines.testing_engine()
-        event.listen(engine, 'on_before_execute', on_execute)
-        event.listen(engine, 'on_before_cursor_execute', on_cursor_execute)
+        event.listen(engine, 'before_execute', execute)
+        event.listen(engine, 'before_cursor_execute', cursor_execute)
         conn = engine.connect()
         c2 = conn.execution_options(foo='bar')
         eq_(c2._execution_options, {'foo':'bar'})
@@ -545,11 +545,11 @@ class EngineEventsTest(TestBase):
                 canary.append(name)
             return go
 
-        def on_execute(conn, clauseelement, multiparams, params):
+        def execute(conn, clauseelement, multiparams, params):
             canary.append('execute')
             return clauseelement, multiparams, params
             
-        def on_cursor_execute(conn, cursor, statement, 
+        def cursor_execute(conn, cursor, statement, 
                         parameters, context, executemany):
             canary.append('cursor_execute')
             return statement, parameters
@@ -558,11 +558,11 @@ class EngineEventsTest(TestBase):
         
         assert_raises(
             tsa.exc.ArgumentError,
-            event.listen, engine, "on_begin", tracker("on_begin"), retval=True
+            event.listen, engine, "begin", tracker("begin"), retval=True
         )
         
-        event.listen(engine, "on_before_execute", on_execute, retval=True)
-        event.listen(engine, "on_before_cursor_execute", on_cursor_execute, retval=True)
+        event.listen(engine, "before_execute", execute, retval=True)
+        event.listen(engine, "before_cursor_execute", cursor_execute, retval=True)
         engine.execute(select([1]))
         eq_(
             canary, ['execute', 'cursor_execute']
@@ -578,11 +578,11 @@ class EngineEventsTest(TestBase):
             return go
             
         engine = engines.testing_engine()
-        event.listen(engine, 'on_before_execute', tracker('execute'))
-        event.listen(engine, 'on_before_cursor_execute', tracker('cursor_execute'))
-        event.listen(engine, 'on_begin', tracker('begin'))
-        event.listen(engine, 'on_commit', tracker('commit'))
-        event.listen(engine, 'on_rollback', tracker('rollback'))
+        event.listen(engine, 'before_execute', tracker('execute'))
+        event.listen(engine, 'before_cursor_execute', tracker('cursor_execute'))
+        event.listen(engine, 'begin', tracker('begin'))
+        event.listen(engine, 'commit', tracker('commit'))
+        event.listen(engine, 'rollback', tracker('rollback'))
         
         conn = engine.connect()
         trans = conn.begin()
@@ -611,7 +611,7 @@ class EngineEventsTest(TestBase):
                     'rollback_savepoint', 'release_savepoint',
                     'rollback', 'begin_twophase', 
                        'prepare_twophase', 'commit_twophase']:
-            event.listen(engine, 'on_%s' % name, tracker(name))
+            event.listen(engine, '%s' % name, tracker(name))
 
         conn = engine.connect()
 
index 392223d79babe507aa34b11a417f74e4efa4d126..e9679fee4b78249b9699e1f121da7ac8d880c9f7 100644 (file)
@@ -196,37 +196,37 @@ class PoolEventsTest(PoolTestBase):
     def _first_connect_event_fixture(self):
         p = self._queuepool_fixture()
         canary = []
-        def on_first_connect(*arg, **kw):
+        def first_connect(*arg, **kw):
             canary.append('first_connect')
         
-        event.listen(p, 'on_first_connect', on_first_connect)
+        event.listen(p, 'first_connect', first_connect)
         
         return p, canary
 
     def _connect_event_fixture(self):
         p = self._queuepool_fixture()
         canary = []
-        def on_connect(*arg, **kw):
+        def connect(*arg, **kw):
             canary.append('connect')
-        event.listen(p, 'on_connect', on_connect)
+        event.listen(p, 'connect', connect)
         
         return p, canary
 
     def _checkout_event_fixture(self):
         p = self._queuepool_fixture()
         canary = []
-        def on_checkout(*arg, **kw):
+        def checkout(*arg, **kw):
             canary.append('checkout')
-        event.listen(p, 'on_checkout', on_checkout)
+        event.listen(p, 'checkout', checkout)
         
         return p, canary
 
     def _checkin_event_fixture(self):
         p = self._queuepool_fixture()
         canary = []
-        def on_checkin(*arg, **kw):
+        def checkin(*arg, **kw):
             canary.append('checkin')
-        event.listen(p, 'on_checkin', on_checkin)
+        event.listen(p, 'checkin', checkin)
         
         return p, canary
         
@@ -361,10 +361,10 @@ class PoolEventsTest(PoolTestBase):
             canary.append("listen_four")
             
         engine = create_engine(testing.db.url)
-        event.listen(pool.Pool, 'on_connect', listen_one)
-        event.listen(engine.pool, 'on_connect', listen_two)
-        event.listen(engine, 'on_connect', listen_three)
-        event.listen(engine.__class__, 'on_connect', listen_four)
+        event.listen(pool.Pool, 'connect', listen_one)
+        event.listen(engine.pool, 'connect', listen_two)
+        event.listen(engine, 'connect', listen_three)
+        event.listen(engine.__class__, 'connect', listen_four)
 
         engine.execute(select([1])).close()
         eq_(
@@ -382,19 +382,19 @@ class PoolEventsTest(PoolTestBase):
         def listen_three(*args):
             canary.append("listen_three")
         
-        event.listen(pool.Pool, 'on_connect', listen_one)
-        event.listen(pool.QueuePool, 'on_connect', listen_two)
-        event.listen(pool.SingletonThreadPool, 'on_connect', listen_three)
+        event.listen(pool.Pool, 'connect', listen_one)
+        event.listen(pool.QueuePool, 'connect', listen_two)
+        event.listen(pool.SingletonThreadPool, 'connect', listen_three)
         
         p1 = pool.QueuePool(creator=MockDBAPI().connect)
         p2 = pool.SingletonThreadPool(creator=MockDBAPI().connect)
         
-        assert listen_one in p1.dispatch.on_connect
-        assert listen_two in p1.dispatch.on_connect
-        assert listen_three not in p1.dispatch.on_connect
-        assert listen_one in p2.dispatch.on_connect
-        assert listen_two not in p2.dispatch.on_connect
-        assert listen_three in p2.dispatch.on_connect
+        assert listen_one in p1.dispatch.connect
+        assert listen_two in p1.dispatch.connect
+        assert listen_three not in p1.dispatch.connect
+        assert listen_one in p2.dispatch.connect
+        assert listen_two not in p2.dispatch.connect
+        assert listen_three in p2.dispatch.connect
 
         p1.connect()
         eq_(canary, ["listen_one", "listen_two"])
@@ -475,10 +475,10 @@ class DeprecatedPoolListenerTest(PoolTestBase):
 
         def assert_listeners(p, total, conn, fconn, cout, cin):
             for instance in (p, p.recreate()):
-                self.assert_(len(instance.dispatch.on_connect) == conn)
-                self.assert_(len(instance.dispatch.on_first_connect) == fconn)
-                self.assert_(len(instance.dispatch.on_checkout) == cout)
-                self.assert_(len(instance.dispatch.on_checkin) == cin)
+                self.assert_(len(instance.dispatch.connect) == conn)
+                self.assert_(len(instance.dispatch.first_connect) == fconn)
+                self.assert_(len(instance.dispatch.checkout) == cout)
+                self.assert_(len(instance.dispatch.checkin) == cin)
 
         p = self._queuepool_fixture()
         assert_listeners(p, 0, 0, 0, 0, 0)
@@ -601,9 +601,9 @@ class DeprecatedPoolListenerTest(PoolTestBase):
 
             def assert_listeners(p, total, conn, cout, cin):
                 for instance in (p, p.recreate()):
-                    eq_(len(instance.dispatch.on_connect), conn)
-                    eq_(len(instance.dispatch.on_checkout), cout)
-                    eq_(len(instance.dispatch.on_checkin), cin)
+                    eq_(len(instance.dispatch.connect), conn)
+                    eq_(len(instance.dispatch.checkout), cout)
+                    eq_(len(instance.dispatch.checkin), cin)
 
             p = self._queuepool_fixture()
             assert_listeners(p, 0, 0, 0, 0)
index 8a51bd3fabb9994aa2c655ffa6ead8d6f98addb1..3e7a23b8db053374d5d4ed0d7b40f7214bf7009b 100644 (file)
@@ -35,11 +35,11 @@ class _MutableDictTestBase(object):
             
             def __setitem__(self, key, value):
                 dict.__setitem__(self, key, value)
-                self.on_change()
+                self.change()
     
             def __delitem__(self, key):
                 dict.__delitem__(self, key)
-                self.on_change()
+                self.change()
         return MutationDict
     
     @testing.resolve_artifact_names
@@ -199,7 +199,7 @@ class MutableCompositesTest(_base.MappedTest):
 
             def __setattr__(self, key, value):
                 object.__setattr__(self, key, value)
-                self.on_change()
+                self.change()
         
             def __composite_values__(self):
                 return self.x, self.y
index a57795f157faa3feaa05ec03084cde258d6bf6f0..60272839bd56870035562c1685b0afb8b9867062 100644 (file)
@@ -132,9 +132,9 @@ def testing_engine(url=None, options=None):
     options = options or config.db_opts
 
     engine = create_engine(url, **options)
-    event.listen(engine, 'on_after_execute', asserter.execute)
-    event.listen(engine, 'on_after_cursor_execute', asserter.cursor_execute)
-    event.listen(engine.pool, 'on_checkout', testing_reaper.checkout)
+    event.listen(engine, 'after_execute', asserter.execute)
+    event.listen(engine, 'after_cursor_execute', asserter.cursor_execute)
+    event.listen(engine.pool, 'checkout', testing_reaper.checkout)
     
     # may want to call this, results
     # in first-connect initializers
index 30156bbb74828bcb997075493eea4654ef735f94..e82e9e854b3bf9711370fd6623aa12f3d8592d74 100644 (file)
@@ -1627,7 +1627,7 @@ class ListenerTest(_base.ORMTest):
         class Bar(object):
             pass
             
-        def on_append(state, child, initiator):
+        def append(state, child, initiator):
             b2 = Bar()
             b2.data = b1.data + " appended"
             return b2
@@ -1642,9 +1642,9 @@ class ListenerTest(_base.ORMTest):
         attributes.register_attribute(Foo, 'barset', typecallable=set, uselist=True, useobject=True)
         attributes.register_attribute(Bar, 'data', uselist=False, useobject=False)
         
-        event.listen(Foo.data, 'on_set', on_set, retval=True)
-        event.listen(Foo.barlist, 'on_append', on_append, retval=True)
-        event.listen(Foo.barset, 'on_append', on_append, retval=True)
+        event.listen(Foo.data, 'set', on_set, retval=True)
+        event.listen(Foo.barlist, 'append', append, retval=True)
+        event.listen(Foo.barset, 'append', append, retval=True)
         
         f1 = Foo()
         f1.data = "some data"
@@ -1694,11 +1694,11 @@ class ListenerTest(_base.ORMTest):
         def attr_c():
             attributes.register_attribute(classes[2], 'attrib', uselist=False, useobject=False)
         
-        def on_set(state, value, oldvalue, initiator):
+        def set(state, value, oldvalue, initiator):
             canary.append(value)
             
         def events_a():
-            event.listen(classes[0].attrib, 'on_set', on_set, propagate=True)
+            event.listen(classes[0].attrib, 'set', set, propagate=True)
         
         def teardown():
             classes[:] = [None, None, None]
index 278eaf0b636fb28fc679312c62b23c6bb4c6454a..f7e39a4fbd7ed5202984f567e71c7778b46eb9a9 100644 (file)
@@ -46,9 +46,9 @@ class TriggerDefaultsTest(_base.MappedTest):
                                 bind.engine.name not in ('oracle', 'mssql', 'sqlite')
                 ),
             ):
-            event.listen(dt, 'on_after_create', ins)
+            event.listen(dt, 'after_create', ins)
         
-        event.listen(dt, 'on_before_drop', sa.DDL("DROP TRIGGER dt_ins"))
+        event.listen(dt, 'before_drop', sa.DDL("DROP TRIGGER dt_ins"))
 
         for up in (
             sa.DDL("CREATE TRIGGER dt_up AFTER UPDATE ON dt "
@@ -71,9 +71,9 @@ class TriggerDefaultsTest(_base.MappedTest):
                                 bind.engine.name not in ('oracle', 'mssql', 'sqlite')
                     ),
             ):
-            event.listen(dt, 'on_after_create', up)
+            event.listen(dt, 'after_create', up)
 
-        event.listen(dt, 'on_before_drop', sa.DDL("DROP TRIGGER dt_up"))
+        event.listen(dt, 'before_drop', sa.DDL("DROP TRIGGER dt_up"))
 
 
     @classmethod
index d624772d04b7b740067fa1358d95f597c1ecb4e9..3d3c96c6a7e5f53e2f7dafb688c67b2d5125640f 100644 (file)
@@ -43,9 +43,9 @@ class InitTest(_base.ORMTest):
         instrumentation.register_class(cls)
         ne_(cls.__init__, original_init)
         manager = instrumentation.manager_of_class(cls)
-        def on_init(state, args, kwargs):
-            canary.append((cls, 'on_init', state.class_))
-        event.listen(manager, 'on_init', on_init, raw=True)
+        def init(state, args, kwargs):
+            canary.append((cls, 'init', state.class_))
+        event.listen(manager, 'init', init, raw=True)
 
     def test_ai(self):
         inits = []
@@ -64,7 +64,7 @@ class InitTest(_base.ORMTest):
         self.register(A, inits)
 
         obj = A()
-        eq_(inits, [(A, 'on_init', A)])
+        eq_(inits, [(A, 'init', A)])
 
     def test_Ai(self):
         inits = []
@@ -75,7 +75,7 @@ class InitTest(_base.ORMTest):
         self.register(A, inits)
 
         obj = A()
-        eq_(inits, [(A, 'on_init', A), (A, '__init__')])
+        eq_(inits, [(A, 'init', A), (A, '__init__')])
 
     def test_ai_B(self):
         inits = []
@@ -93,7 +93,7 @@ class InitTest(_base.ORMTest):
         del inits[:]
 
         obj = B()
-        eq_(inits, [(B, 'on_init', B), (A, '__init__')])
+        eq_(inits, [(B, 'init', B), (A, '__init__')])
 
     def test_ai_Bi(self):
         inits = []
@@ -114,7 +114,7 @@ class InitTest(_base.ORMTest):
         del inits[:]
 
         obj = B()
-        eq_(inits, [(B, 'on_init', B), (B, '__init__'), (A, '__init__')])
+        eq_(inits, [(B, 'init', B), (B, '__init__'), (A, '__init__')])
 
     def test_Ai_bi(self):
         inits = []
@@ -130,12 +130,12 @@ class InitTest(_base.ORMTest):
                 super(B, self).__init__()
 
         obj = A()
-        eq_(inits, [(A, 'on_init', A), (A, '__init__')])
+        eq_(inits, [(A, 'init', A), (A, '__init__')])
 
         del inits[:]
 
         obj = B()
-        eq_(inits, [(B, '__init__'), (A, 'on_init', B), (A, '__init__')])
+        eq_(inits, [(B, '__init__'), (A, 'init', B), (A, '__init__')])
 
     def test_Ai_Bi(self):
         inits = []
@@ -152,12 +152,12 @@ class InitTest(_base.ORMTest):
         self.register(B, inits)
 
         obj = A()
-        eq_(inits, [(A, 'on_init', A), (A, '__init__')])
+        eq_(inits, [(A, 'init', A), (A, '__init__')])
 
         del inits[:]
 
         obj = B()
-        eq_(inits, [(B, 'on_init', B), (B, '__init__'), (A, '__init__')])
+        eq_(inits, [(B, 'init', B), (B, '__init__'), (A, '__init__')])
 
     def test_Ai_B(self):
         inits = []
@@ -171,12 +171,12 @@ class InitTest(_base.ORMTest):
         self.register(B, inits)
 
         obj = A()
-        eq_(inits, [(A, 'on_init', A), (A, '__init__')])
+        eq_(inits, [(A, 'init', A), (A, '__init__')])
 
         del inits[:]
 
         obj = B()
-        eq_(inits, [(B, 'on_init', B), (A, '__init__')])
+        eq_(inits, [(B, 'init', B), (A, '__init__')])
 
     def test_Ai_Bi_Ci(self):
         inits = []
@@ -199,16 +199,16 @@ class InitTest(_base.ORMTest):
         self.register(C, inits)
 
         obj = A()
-        eq_(inits, [(A, 'on_init', A), (A, '__init__')])
+        eq_(inits, [(A, 'init', A), (A, '__init__')])
 
         del inits[:]
 
         obj = B()
-        eq_(inits, [(B, 'on_init', B), (B, '__init__'), (A, '__init__')])
+        eq_(inits, [(B, 'init', B), (B, '__init__'), (A, '__init__')])
 
         del inits[:]
         obj = C()
-        eq_(inits, [(C, 'on_init', C), (C, '__init__'), (B, '__init__'),
+        eq_(inits, [(C, 'init', C), (C, '__init__'), (B, '__init__'),
                    (A, '__init__')])
 
     def test_Ai_bi_Ci(self):
@@ -231,16 +231,16 @@ class InitTest(_base.ORMTest):
         self.register(C, inits)
 
         obj = A()
-        eq_(inits, [(A, 'on_init', A), (A, '__init__')])
+        eq_(inits, [(A, 'init', A), (A, '__init__')])
 
         del inits[:]
 
         obj = B()
-        eq_(inits, [(B, '__init__'), (A, 'on_init', B), (A, '__init__')])
+        eq_(inits, [(B, '__init__'), (A, 'init', B), (A, '__init__')])
 
         del inits[:]
         obj = C()
-        eq_(inits, [(C, 'on_init', C), (C, '__init__'),  (B, '__init__'),
+        eq_(inits, [(C, 'init', C), (C, '__init__'),  (B, '__init__'),
                    (A, '__init__')])
 
     def test_Ai_b_Ci(self):
@@ -260,16 +260,16 @@ class InitTest(_base.ORMTest):
         self.register(C, inits)
 
         obj = A()
-        eq_(inits, [(A, 'on_init', A), (A, '__init__')])
+        eq_(inits, [(A, 'init', A), (A, '__init__')])
 
         del inits[:]
 
         obj = B()
-        eq_(inits, [(A, 'on_init', B), (A, '__init__')])
+        eq_(inits, [(A, 'init', B), (A, '__init__')])
 
         del inits[:]
         obj = C()
-        eq_(inits, [(C, 'on_init', C), (C, '__init__'), (A, '__init__')])
+        eq_(inits, [(C, 'init', C), (C, '__init__'), (A, '__init__')])
 
     def test_Ai_B_Ci(self):
         inits = []
@@ -289,16 +289,16 @@ class InitTest(_base.ORMTest):
         self.register(C, inits)
 
         obj = A()
-        eq_(inits, [(A, 'on_init', A), (A, '__init__')])
+        eq_(inits, [(A, 'init', A), (A, '__init__')])
 
         del inits[:]
 
         obj = B()
-        eq_(inits, [(B, 'on_init', B), (A, '__init__')])
+        eq_(inits, [(B, 'init', B), (A, '__init__')])
 
         del inits[:]
         obj = C()
-        eq_(inits, [(C, 'on_init', C), (C, '__init__'), (A, '__init__')])
+        eq_(inits, [(C, 'init', C), (C, '__init__'), (A, '__init__')])
 
     def test_Ai_B_C(self):
         inits = []
@@ -315,16 +315,16 @@ class InitTest(_base.ORMTest):
         self.register(C, inits)
 
         obj = A()
-        eq_(inits, [(A, 'on_init', A), (A, '__init__')])
+        eq_(inits, [(A, 'init', A), (A, '__init__')])
 
         del inits[:]
 
         obj = B()
-        eq_(inits, [(B, 'on_init', B), (A, '__init__')])
+        eq_(inits, [(B, 'init', B), (A, '__init__')])
 
         del inits[:]
         obj = C()
-        eq_(inits, [(C, 'on_init', C), (A, '__init__')])
+        eq_(inits, [(C, 'init', C), (A, '__init__')])
 
     def test_A_Bi_C(self):
         inits = []
@@ -341,16 +341,16 @@ class InitTest(_base.ORMTest):
         self.register(C, inits)
 
         obj = A()
-        eq_(inits, [(A, 'on_init', A)])
+        eq_(inits, [(A, 'init', A)])
 
         del inits[:]
 
         obj = B()
-        eq_(inits, [(B, 'on_init', B), (B, '__init__')])
+        eq_(inits, [(B, 'init', B), (B, '__init__')])
 
         del inits[:]
         obj = C()
-        eq_(inits, [(C, 'on_init', C), (B, '__init__')])
+        eq_(inits, [(C, 'init', C), (B, '__init__')])
 
     def test_A_B_Ci(self):
         inits = []
@@ -367,16 +367,16 @@ class InitTest(_base.ORMTest):
         self.register(C, inits)
 
         obj = A()
-        eq_(inits, [(A, 'on_init', A)])
+        eq_(inits, [(A, 'init', A)])
 
         del inits[:]
 
         obj = B()
-        eq_(inits, [(B, 'on_init', B)])
+        eq_(inits, [(B, 'init', B)])
 
         del inits[:]
         obj = C()
-        eq_(inits, [(C, 'on_init', C), (C, '__init__')])
+        eq_(inits, [(C, 'init', C), (C, '__init__')])
 
     def test_A_B_C(self):
         inits = []
@@ -391,16 +391,16 @@ class InitTest(_base.ORMTest):
         self.register(C, inits)
 
         obj = A()
-        eq_(inits, [(A, 'on_init', A)])
+        eq_(inits, [(A, 'init', A)])
 
         del inits[:]
 
         obj = B()
-        eq_(inits, [(B, 'on_init', B)])
+        eq_(inits, [(B, 'init', B)])
 
         del inits[:]
         obj = C()
-        eq_(inits, [(C, 'on_init', C)])
+        eq_(inits, [(C, 'init', C)])
 
     def test_defaulted_init(self):
         class X(object):
@@ -537,7 +537,7 @@ class InstrumentationCollisionTest(_base.ORMTest):
         assert_raises_message(TypeError, "multiple instrumentation implementations", instrumentation.register_class, B1)
 
 class OnLoadTest(_base.ORMTest):
-    """Check that Events.on_load is not hit in regular attributes operations."""
+    """Check that Events.load is not hit in regular attributes operations."""
 
     def test_basic(self):
         import pickle
@@ -551,7 +551,7 @@ class OnLoadTest(_base.ORMTest):
         try:
             instrumentation.register_class(A)
             manager = instrumentation.manager_of_class(A)
-            event.listen(manager, 'on_load', canary)
+            event.listen(manager, 'load', canary)
 
             a = A()
             p_a = pickle.dumps(a)
index 11a66c17f19c41350cb68c328124b7712a9ce498..ad0662887d1482de7b1ce2a7a9128f35d55eb90d 100644 (file)
@@ -2271,35 +2271,35 @@ class MapperEventsTest(_fixtures.FixtureTest):
         mapper(A, users)
         mapper(B, addresses, inherits=A)
         
-        def on_init_a(target, args, kwargs):
-            canary.append(('on_init_a', target))
+        def init_a(target, args, kwargs):
+            canary.append(('init_a', target))
             
-        def on_init_b(target, args, kwargs):
-            canary.append(('on_init_b', target))
+        def init_b(target, args, kwargs):
+            canary.append(('init_b', target))
 
-        def on_init_c(target, args, kwargs):
-            canary.append(('on_init_c', target))
+        def init_c(target, args, kwargs):
+            canary.append(('init_c', target))
 
-        def on_init_d(target, args, kwargs):
-            canary.append(('on_init_d', target))
+        def init_d(target, args, kwargs):
+            canary.append(('init_d', target))
 
-        def on_init_e(target, args, kwargs):
-            canary.append(('on_init_e', target))
+        def init_e(target, args, kwargs):
+            canary.append(('init_e', target))
         
-        event.listen(mapper, 'on_init', on_init_a)
-        event.listen(Mapper, 'on_init', on_init_b)
-        event.listen(class_mapper(A), 'on_init', on_init_c)
-        event.listen(A, 'on_init', on_init_d)
-        event.listen(A, 'on_init', on_init_e, propagate=True)
+        event.listen(mapper, 'init', init_a)
+        event.listen(Mapper, 'init', init_b)
+        event.listen(class_mapper(A), 'init', init_c)
+        event.listen(A, 'init', init_d)
+        event.listen(A, 'init', init_e, propagate=True)
         
         a = A()
-        eq_(canary, [('on_init_a', a),('on_init_b', a),
-                        ('on_init_c', a),('on_init_d', a),('on_init_e', a)])
+        eq_(canary, [('init_a', a),('init_b', a),
+                        ('init_c', a),('init_d', a),('init_e', a)])
         
         # test propagate flag
         canary[:] = []
         b = B()
-        eq_(canary, [('on_init_a', b), ('on_init_b', b),('on_init_e', b)])
+        eq_(canary, [('init_a', b), ('init_b', b),('init_e', b)])
     
     def teardown(self):
         # TODO: need to get remove() functionality
@@ -2316,21 +2316,21 @@ class MapperEventsTest(_fixtures.FixtureTest):
             return go
             
         for meth in [
-            'on_init',
-            'on_init_failure',
-            'on_translate_row',
-            'on_create_instance',
-            'on_append_result',
-            'on_populate_instance',
-            'on_load',
-            'on_refresh',
-            'on_expire',
-            'on_before_insert',
-            'on_after_insert',
-            'on_before_update',
-            'on_after_update',
-            'on_before_delete',
-            'on_after_delete'
+            'init',
+            'init_failure',
+            'translate_row',
+            'create_instance',
+            'append_result',
+            'populate_instance',
+            'load',
+            'refresh',
+            'expire',
+            'before_insert',
+            'after_insert',
+            'before_update',
+            'after_update',
+            'before_delete',
+            'after_delete'
         ]:
             event.listen(mapper, meth, evt(meth), **kw)
         return canary
@@ -2354,12 +2354,12 @@ class MapperEventsTest(_fixtures.FixtureTest):
         sess.delete(u)
         sess.flush()
         eq_(canary,
-            ['on_init', 'on_before_insert',
-             'on_after_insert', 'on_expire', 'on_translate_row', 'on_populate_instance',
-             'on_refresh',
-             'on_append_result', 'on_translate_row', 'on_create_instance',
-             'on_populate_instance', 'on_load', 'on_append_result',
-             'on_before_update', 'on_after_update', 'on_before_delete', 'on_after_delete'])
+            ['init', 'before_insert',
+             'after_insert', 'expire', 'translate_row', 'populate_instance',
+             'refresh',
+             'append_result', 'translate_row', 'create_instance',
+             'populate_instance', 'load', 'append_result',
+             'before_update', 'after_update', 'before_delete', 'after_delete'])
 
     @testing.resolve_artifact_names
     def test_inheritance(self):
@@ -2384,24 +2384,24 @@ class MapperEventsTest(_fixtures.FixtureTest):
         sess.flush()
         sess.delete(am)
         sess.flush()
-        eq_(canary1, ['on_init', 'on_before_insert', 'on_after_insert',
-            'on_translate_row', 'on_populate_instance','on_refresh',
-            'on_append_result', 'on_translate_row', 'on_create_instance'
-            , 'on_populate_instance', 'on_load', 'on_append_result',
-            'on_before_update', 'on_after_update', 'on_before_delete',
-            'on_after_delete'])
+        eq_(canary1, ['init', 'before_insert', 'after_insert',
+            'translate_row', 'populate_instance','refresh',
+            'append_result', 'translate_row', 'create_instance'
+            , 'populate_instance', 'load', 'append_result',
+            'before_update', 'after_update', 'before_delete',
+            'after_delete'])
         eq_(canary2, [])
-        eq_(canary3, ['on_init', 'on_before_insert', 'on_after_insert',
-            'on_translate_row', 'on_populate_instance','on_refresh',
-            'on_append_result', 'on_translate_row', 'on_create_instance'
-            , 'on_populate_instance', 'on_load', 'on_append_result',
-            'on_before_update', 'on_after_update', 'on_before_delete',
-            'on_after_delete'])
+        eq_(canary3, ['init', 'before_insert', 'after_insert',
+            'translate_row', 'populate_instance','refresh',
+            'append_result', 'translate_row', 'create_instance'
+            , 'populate_instance', 'load', 'append_result',
+            'before_update', 'after_update', 'before_delete',
+            'after_delete'])
 
     @testing.resolve_artifact_names
     def test_before_after_only_collection(self):
-        """on_before_update is called on parent for collection modifications,
-        on_after_update is called even if no columns were updated.
+        """before_update is called on parent for collection modifications,
+        after_update is called even if no columns were updated.
         
         """
 
@@ -2419,18 +2419,18 @@ class MapperEventsTest(_fixtures.FixtureTest):
         sess.add(k1)
         sess.flush()
         eq_(canary1,
-            ['on_init', 
-            'on_before_insert', 'on_after_insert'])
+            ['init', 
+            'before_insert', 'after_insert'])
         eq_(canary2,
-            ['on_init', 
-            'on_before_insert', 'on_after_insert'])
+            ['init', 
+            'before_insert', 'after_insert'])
 
         canary1[:]= []
         canary2[:]= []
 
         i1.keywords.append(k1)
         sess.flush()
-        eq_(canary1, ['on_before_update', 'on_after_update'])
+        eq_(canary1, ['before_update', 'after_update'])
         eq_(canary2, [])
 
         
@@ -2442,7 +2442,7 @@ class MapperEventsTest(_fixtures.FixtureTest):
             return u
             
         mapper(User, users)
-        event.listen(User, 'on_create_instance', create_instance, retval=True)
+        event.listen(User, 'create_instance', create_instance, retval=True)
         sess = create_session()
         u1 = User()
         u1.name = 'ed'
@@ -2455,10 +2455,10 @@ class MapperEventsTest(_fixtures.FixtureTest):
     @testing.resolve_artifact_names
     def test_instrument_event(self):
         canary = []
-        def on_instrument_class(mapper, cls):
+        def instrument_class(mapper, cls):
             canary.append(cls)
             
-        event.listen(Mapper, 'on_instrument_class', on_instrument_class)
+        event.listen(Mapper, 'instrument_class', instrument_class)
         
         mapper(User, users)
         eq_(canary, [User])
index 5329fb71c0931c211a4064cbb118ff831734054e..d190db96fc25a6ae3df48f14b67b0f95d6e9231f 100644 (file)
@@ -18,13 +18,13 @@ class MergeTest(_fixtures.FixtureTest):
 
     run_inserts = None
 
-    def on_load_tracker(self, cls, canary=None):
+    def load_tracker(self, cls, canary=None):
         if canary is None:
             def canary(instance):
                 canary.called += 1
             canary.called = 0
 
-        event.listen(cls, 'on_load', canary)
+        event.listen(cls, 'load', canary)
 
         return canary
 
@@ -32,12 +32,12 @@ class MergeTest(_fixtures.FixtureTest):
     def test_transient_to_pending(self):
         mapper(User, users)
         sess = create_session()
-        on_load = self.on_load_tracker(User)
+        load = self.load_tracker(User)
 
         u = User(id=7, name='fred')
-        eq_(on_load.called, 0)
+        eq_(load.called, 0)
         u2 = sess.merge(u)
-        eq_(on_load.called, 1)
+        eq_(load.called, 1)
         assert u2 in sess
         eq_(u2, User(id=7, name='fred'))
         sess.flush()
@@ -60,18 +60,18 @@ class MergeTest(_fixtures.FixtureTest):
             'addresses': relationship(Address, backref='user',
                                   collection_class=OrderedSet)})
         mapper(Address, addresses)
-        on_load = self.on_load_tracker(User)
-        self.on_load_tracker(Address, on_load)
+        load = self.load_tracker(User)
+        self.load_tracker(Address, load)
 
         u = User(id=7, name='fred', addresses=OrderedSet([
             Address(id=1, email_address='fred1'),
             Address(id=2, email_address='fred2'),
             ]))
-        eq_(on_load.called, 0)
+        eq_(load.called, 0)
 
         sess = create_session()
         sess.merge(u)
-        eq_(on_load.called, 3)
+        eq_(load.called, 3)
 
         merged_users = [e for e in sess if isinstance(e, User)]
         eq_(len(merged_users), 1)
@@ -90,7 +90,7 @@ class MergeTest(_fixtures.FixtureTest):
     @testing.resolve_artifact_names
     def test_transient_to_persistent(self):
         mapper(User, users)
-        on_load = self.on_load_tracker(User)
+        load = self.load_tracker(User)
 
         sess = create_session()
         u = User(id=7, name='fred')
@@ -98,17 +98,17 @@ class MergeTest(_fixtures.FixtureTest):
         sess.flush()
         sess.expunge_all()
 
-        eq_(on_load.called, 0)
+        eq_(load.called, 0)
 
         _u2 = u2 = User(id=7, name='fred jones')
-        eq_(on_load.called, 0)
+        eq_(load.called, 0)
         u2 = sess.merge(u2)
         assert u2 is not _u2
-        eq_(on_load.called, 1)
+        eq_(load.called, 1)
         sess.flush()
         sess.expunge_all()
         eq_(sess.query(User).first(), User(id=7, name='fred jones'))
-        eq_(on_load.called, 2)
+        eq_(load.called, 2)
 
     @testing.resolve_artifact_names
     def test_transient_to_persistent_collection(self):
@@ -121,8 +121,8 @@ class MergeTest(_fixtures.FixtureTest):
         })
         mapper(Address, addresses)
 
-        on_load = self.on_load_tracker(User)
-        self.on_load_tracker(Address, on_load)
+        load = self.load_tracker(User)
+        self.load_tracker(Address, load)
 
         u = User(id=7, name='fred', addresses=OrderedSet([
             Address(id=1, email_address='fred1'),
@@ -133,7 +133,7 @@ class MergeTest(_fixtures.FixtureTest):
         sess.flush()
         sess.expunge_all()
 
-        eq_(on_load.called, 0)
+        eq_(load.called, 0)
 
         u = User(id=7, name='fred', addresses=OrderedSet([
             Address(id=3, email_address='fred3'),
@@ -146,7 +146,7 @@ class MergeTest(_fixtures.FixtureTest):
         # 2.,3. merges Address ids 3 & 4, saves into session.
         # 4.,5. loads pre-existing elements in "addresses" collection,
         # marks as deleted, Address ids 1 and 2.
-        eq_(on_load.called, 5)
+        eq_(load.called, 5)
 
         eq_(u,
             User(id=7, name='fred', addresses=OrderedSet([
@@ -171,8 +171,8 @@ class MergeTest(_fixtures.FixtureTest):
                                  order_by=addresses.c.id,
                                  collection_class=OrderedSet)})
         mapper(Address, addresses)
-        on_load = self.on_load_tracker(User)
-        self.on_load_tracker(Address, on_load)
+        load = self.load_tracker(User)
+        self.load_tracker(Address, load)
 
         a = Address(id=1, email_address='fred1')
         u = User(id=7, name='fred', addresses=OrderedSet([
@@ -188,9 +188,9 @@ class MergeTest(_fixtures.FixtureTest):
         u.addresses.add(Address(id=3, email_address='fred3'))
         u.addresses.remove(a)
 
-        eq_(on_load.called, 0)
+        eq_(load.called, 0)
         u = sess.merge(u)
-        eq_(on_load.called, 4)
+        eq_(load.called, 4)
         sess.flush()
         sess.expunge_all()
 
@@ -207,8 +207,8 @@ class MergeTest(_fixtures.FixtureTest):
             'addresses':relationship(mapper(Address, addresses),
                                  cascade="all", backref="user")
         })
-        on_load = self.on_load_tracker(User)
-        self.on_load_tracker(Address, on_load)
+        load = self.load_tracker(User)
+        self.load_tracker(Address, load)
         sess = create_session()
 
         u = User(id=7, name='fred')
@@ -218,7 +218,7 @@ class MergeTest(_fixtures.FixtureTest):
         u.addresses.append(a2)
 
         u2 = sess.merge(u)
-        eq_(on_load.called, 3)
+        eq_(load.called, 3)
 
         eq_(u,
             User(id=7, name='fred', addresses=[
@@ -236,7 +236,7 @@ class MergeTest(_fixtures.FixtureTest):
         eq_(u2, User(id=7, name='fred', addresses=[
             Address(email_address='foo@bar.com'),
             Address(email_address='hoho@bar.com')]))
-        eq_(on_load.called, 6)
+        eq_(load.called, 6)
 
     @testing.resolve_artifact_names
     def test_merge_empty_attributes(self):
@@ -324,8 +324,8 @@ class MergeTest(_fixtures.FixtureTest):
         mapper(User, users, properties={
             'addresses':relationship(mapper(Address, addresses), backref='user')
         })
-        on_load = self.on_load_tracker(User)
-        self.on_load_tracker(Address, on_load)
+        load = self.load_tracker(User)
+        self.load_tracker(Address, load)
 
         sess = create_session()
 
@@ -348,12 +348,12 @@ class MergeTest(_fixtures.FixtureTest):
         u.name = 'fred2'
         u.addresses[1].email_address = 'hoho@lalala.com'
 
-        eq_(on_load.called, 3)
+        eq_(load.called, 3)
 
         # new session, merge modified data into session
         sess3 = create_session()
         u3 = sess3.merge(u)
-        eq_(on_load.called, 6)
+        eq_(load.called, 6)
 
         # ensure local changes are pending
         eq_(u3, User(id=7, name='fred2', addresses=[
@@ -369,7 +369,7 @@ class MergeTest(_fixtures.FixtureTest):
         eq_(u, User(id=7, name='fred2', addresses=[
             Address(email_address='foo@bar.com'),
             Address(email_address='hoho@lalala.com')]))
-        eq_(on_load.called, 9)
+        eq_(load.called, 9)
 
         # merge persistent object into another session
         sess4 = create_session()
@@ -381,7 +381,7 @@ class MergeTest(_fixtures.FixtureTest):
             sess4.flush()
         # no changes; therefore flush should do nothing
         self.assert_sql_count(testing.db, go, 0)
-        eq_(on_load.called, 12)
+        eq_(load.called, 12)
 
         # test with "dontload" merge
         sess5 = create_session()
@@ -395,7 +395,7 @@ class MergeTest(_fixtures.FixtureTest):
         # but also, load=False wipes out any difference in committed state,
         # so no flush at all
         self.assert_sql_count(testing.db, go, 0)
-        eq_(on_load.called, 15)
+        eq_(load.called, 15)
 
         sess4 = create_session()
         u = sess4.merge(u, load=False)
@@ -405,13 +405,13 @@ class MergeTest(_fixtures.FixtureTest):
             sess4.flush()
         # afafds change flushes
         self.assert_sql_count(testing.db, go, 1)
-        eq_(on_load.called, 18)
+        eq_(load.called, 18)
 
         sess5 = create_session()
         u2 = sess5.query(User).get(u.id)
         eq_(u2.name, 'fred2')
         eq_(u2.addresses[1].email_address, 'afafds')
-        eq_(on_load.called, 21)
+        eq_(load.called, 21)
 
     @testing.resolve_artifact_names
     def test_no_relationship_cascade(self):
@@ -454,8 +454,8 @@ class MergeTest(_fixtures.FixtureTest):
         mapper(User, users, properties={
             'addresses':relationship(mapper(Address, addresses))})
 
-        on_load = self.on_load_tracker(User)
-        self.on_load_tracker(Address, on_load)
+        load = self.load_tracker(User)
+        self.load_tracker(Address, load)
 
         sess = create_session()
         u = User(name='fred')
@@ -466,24 +466,24 @@ class MergeTest(_fixtures.FixtureTest):
         sess.add(u)
         sess.flush()
 
-        eq_(on_load.called, 0)
+        eq_(load.called, 0)
 
         sess2 = create_session()
         u2 = sess2.query(User).get(u.id)
-        eq_(on_load.called, 1)
+        eq_(load.called, 1)
 
         u.addresses[1].email_address = 'addr 2 modified'
         sess2.merge(u)
         eq_(u2.addresses[1].email_address, 'addr 2 modified')
-        eq_(on_load.called, 3)
+        eq_(load.called, 3)
 
         sess3 = create_session()
         u3 = sess3.query(User).get(u.id)
-        eq_(on_load.called, 4)
+        eq_(load.called, 4)
 
         u.name = 'also fred'
         sess3.merge(u)
-        eq_(on_load.called, 6)
+        eq_(load.called, 6)
         eq_(u3.name, 'also fred')
 
     @testing.resolve_artifact_names
@@ -527,8 +527,8 @@ class MergeTest(_fixtures.FixtureTest):
         mapper(Order, orders, properties={
             'items':relationship(mapper(Item, items), secondary=order_items)})
 
-        on_load = self.on_load_tracker(Order)
-        self.on_load_tracker(Item, on_load)
+        load = self.load_tracker(Order)
+        self.load_tracker(Item, load)
 
         sess = create_session()
 
@@ -546,24 +546,24 @@ class MergeTest(_fixtures.FixtureTest):
         sess.add(o)
         sess.flush()
 
-        eq_(on_load.called, 0)
+        eq_(load.called, 0)
 
         sess2 = create_session()
         o2 = sess2.query(Order).get(o.id)
-        eq_(on_load.called, 1)
+        eq_(load.called, 1)
 
         o.items[1].description = 'item 2 modified'
         sess2.merge(o)
         eq_(o2.items[1].description, 'item 2 modified')
-        eq_(on_load.called,  3)
+        eq_(load.called,  3)
 
         sess3 = create_session()
         o3 = sess3.query(Order).get(o.id)
-        eq_( on_load.called, 4)
+        eq_( load.called, 4)
 
         o.description = 'desc modified'
         sess3.merge(o)
-        eq_(on_load.called, 6)
+        eq_(load.called, 6)
         eq_(o3.description, 'desc modified')
 
     @testing.resolve_artifact_names
@@ -572,8 +572,8 @@ class MergeTest(_fixtures.FixtureTest):
         mapper(User, users, properties={
             'address':relationship(mapper(Address, addresses),uselist = False)
         })
-        on_load = self.on_load_tracker(User)
-        self.on_load_tracker(Address, on_load)
+        load = self.load_tracker(User)
+        self.load_tracker(Address, load)
         sess = create_session()
 
         u = User()
@@ -586,17 +586,17 @@ class MergeTest(_fixtures.FixtureTest):
         sess.add(u)
         sess.flush()
 
-        eq_(on_load.called, 0)
+        eq_(load.called, 0)
 
         sess2 = create_session()
         u2 = sess2.query(User).get(7)
-        eq_(on_load.called, 1)
+        eq_(load.called, 1)
         u2.name = 'fred2'
         u2.address.email_address = 'hoho@lalala.com'
-        eq_(on_load.called, 2)
+        eq_(load.called, 2)
 
         u3 = sess.merge(u2)
-        eq_(on_load.called, 2)
+        eq_(load.called, 2)
         assert u3 is u
 
     @testing.resolve_artifact_names
index 8596e585a1cfe2cabdc4917fba23e08401b7d13b..9aeea828759c8d6dfa991268d79028555857592d 100644 (file)
@@ -1218,10 +1218,10 @@ class SessionEventsTest(_fixtures.FixtureTest):
         def my_listener(*arg, **kw):
             pass
         
-        event.listen(Session, 'on_before_flush', my_listener)
+        event.listen(Session, 'before_flush', my_listener)
         
         s = Session()
-        assert my_listener in s.dispatch.on_before_flush
+        assert my_listener in s.dispatch.before_flush
     
     def test_sessionmaker_listen(self):
         """test that listen can be applied to individual scoped_session() classes."""
@@ -1234,16 +1234,16 @@ class SessionEventsTest(_fixtures.FixtureTest):
         S1 = sessionmaker()
         S2 = sessionmaker()
         
-        event.listen(Session, 'on_before_flush', my_listener_one)
-        event.listen(S1, 'on_before_flush', my_listener_two)
+        event.listen(Session, 'before_flush', my_listener_one)
+        event.listen(S1, 'before_flush', my_listener_two)
         
         s1 = S1()
-        assert my_listener_one in s1.dispatch.on_before_flush
-        assert my_listener_two in s1.dispatch.on_before_flush
+        assert my_listener_one in s1.dispatch.before_flush
+        assert my_listener_two in s1.dispatch.before_flush
         
         s2 = S2()
-        assert my_listener_one in s2.dispatch.on_before_flush
-        assert my_listener_two not in s2.dispatch.on_before_flush
+        assert my_listener_one in s2.dispatch.before_flush
+        assert my_listener_two not in s2.dispatch.before_flush
     
     def test_scoped_session_invalid_callable(self):
         from sqlalchemy.orm import scoped_session
@@ -1257,7 +1257,7 @@ class SessionEventsTest(_fixtures.FixtureTest):
             sa.exc.ArgumentError,
             "Session event listen on a ScopedSession "
             "requries that its creation callable is a Session subclass.",
-            event.listen, scope, "on_before_flush", my_listener_one
+            event.listen, scope, "before_flush", my_listener_one
         )
 
     def test_scoped_session_invalid_class(self):
@@ -1276,7 +1276,7 @@ class SessionEventsTest(_fixtures.FixtureTest):
             sa.exc.ArgumentError,
             "Session event listen on a ScopedSession "
             "requries that its creation callable is a Session subclass.",
-            event.listen, scope, "on_before_flush", my_listener_one
+            event.listen, scope, "before_flush", my_listener_one
         )
     
     def test_scoped_session_listen(self):
@@ -1286,9 +1286,9 @@ class SessionEventsTest(_fixtures.FixtureTest):
             pass
         
         scope = scoped_session(sessionmaker())
-        event.listen(scope, "on_before_flush", my_listener_one)
+        event.listen(scope, "before_flush", my_listener_one)
         
-        assert my_listener_one in scope().dispatch.on_before_flush
+        assert my_listener_one in scope().dispatch.before_flush
     
     def _listener_fixture(self, **kw):
         canary = []
@@ -1300,16 +1300,16 @@ class SessionEventsTest(_fixtures.FixtureTest):
         sess = Session(**kw)
 
         for evt in [
-            'on_before_commit',
-            'on_after_commit',
-            'on_after_rollback',
-            'on_before_flush',
-            'on_after_flush',
-            'on_after_flush_postexec',
-            'on_after_begin',
-            'on_after_attach',
-            'on_after_bulk_update',
-            'on_after_bulk_delete'
+            'before_commit',
+            'after_commit',
+            'after_rollback',
+            'before_flush',
+            'after_flush',
+            'after_flush_postexec',
+            'after_begin',
+            'after_attach',
+            'after_bulk_update',
+            'after_bulk_delete'
         ]:
             event.listen(sess, evt, listener(evt))
         
@@ -1327,9 +1327,9 @@ class SessionEventsTest(_fixtures.FixtureTest):
         sess.flush()
         eq_(
             canary, 
-            [ 'on_after_attach', 'on_before_flush', 'on_after_begin',
-            'on_after_flush', 'on_before_commit', 'on_after_commit',
-            'on_after_flush_postexec', ]
+            [ 'after_attach', 'before_flush', 'after_begin',
+            'after_flush', 'before_commit', 'after_commit',
+            'after_flush_postexec', ]
         )
 
     @testing.resolve_artifact_names
@@ -1341,8 +1341,8 @@ class SessionEventsTest(_fixtures.FixtureTest):
         u = User(name='u1')
         sess.add(u)
         sess.flush()
-        eq_(canary, ['on_after_attach', 'on_before_flush', 'on_after_begin',
-                       'on_after_flush', 'on_after_flush_postexec'])
+        eq_(canary, ['after_attach', 'before_flush', 'after_begin',
+                       'after_flush', 'after_flush_postexec'])
     
     @testing.resolve_artifact_names
     def test_flush_in_commit_hook(self):
@@ -1356,32 +1356,32 @@ class SessionEventsTest(_fixtures.FixtureTest):
         
         u.name = 'ed'
         sess.commit()
-        eq_(canary, ['on_before_commit', 'on_before_flush', 'on_after_flush',
-                       'on_after_flush_postexec', 'on_after_commit'])
+        eq_(canary, ['before_commit', 'before_flush', 'after_flush',
+                       'after_flush_postexec', 'after_commit'])
     
     def test_standalone_on_commit_hook(self):
         sess, canary = self._listener_fixture()
         sess.commit()
-        eq_(canary, ['on_before_commit', 'on_after_commit'])
+        eq_(canary, ['before_commit', 'after_commit'])
         
     @testing.resolve_artifact_names
     def test_on_bulk_update_hook(self):
         sess, canary = self._listener_fixture()
         mapper(User, users)
         sess.query(User).update({'name': 'foo'})
-        eq_(canary, ['on_after_begin', 'on_after_bulk_update'])
+        eq_(canary, ['after_begin', 'after_bulk_update'])
 
     @testing.resolve_artifact_names
     def test_on_bulk_delete_hook(self):
         sess, canary = self._listener_fixture()
         mapper(User, users)
         sess.query(User).delete()
-        eq_(canary, ['on_after_begin', 'on_after_bulk_delete'])
+        eq_(canary, ['after_begin', 'after_bulk_delete'])
     
     def test_connection_emits_after_begin(self):
         sess, canary = self._listener_fixture(bind=testing.db)
         conn = sess.connection()
-        eq_(canary, ['on_after_begin'])
+        eq_(canary, ['after_begin'])
 
     @testing.resolve_artifact_names
     def test_reentrant_flush(self):
@@ -1392,7 +1392,7 @@ class SessionEventsTest(_fixtures.FixtureTest):
             session.flush()
         
         sess = Session()
-        event.listen(sess, 'on_before_flush', before_flush)
+        event.listen(sess, 'before_flush', before_flush)
         sess.add(User(name='foo'))
         assert_raises_message(sa.exc.InvalidRequestError,
                               'already flushing', sess.flush)
@@ -1413,7 +1413,7 @@ class SessionEventsTest(_fixtures.FixtureTest):
                     session.delete(x)
                     
         sess = Session()
-        event.listen(sess, 'on_before_flush', before_flush)
+        event.listen(sess, 'before_flush', before_flush)
 
         u = User(name='u1')
         sess.add(u)
@@ -1460,7 +1460,7 @@ class SessionEventsTest(_fixtures.FixtureTest):
                 obj.name += " modified"
                     
         sess = Session(autoflush=True)
-        event.listen(sess, 'on_before_flush', before_flush)
+        event.listen(sess, 'before_flush', before_flush)
         
         u = User(name='u1')
         sess.add(u)
index 9c472764ec4fc850aed6880c4c9c1a37c5d9491d..e85166f68a8f8563655c9421436c9abb4cad8ea2 100644 (file)
@@ -1374,8 +1374,8 @@ class SaveTest(_fixtures.FixtureTest):
         mapper(User, users, batch=False)
         
         evt = Events()
-        event.listen(User, "on_before_insert", evt.before_insert)
-        event.listen(User, "on_after_insert", evt.after_insert)
+        event.listen(User, "before_insert", evt.before_insert)
+        event.listen(User, "after_insert", evt.after_insert)
         
         u1 = User(name='user1')
         u2 = User(name='user2')
@@ -1399,8 +1399,8 @@ class SaveTest(_fixtures.FixtureTest):
 
         m = mapper(User, users)
         evt = Events()
-        event.listen(User, "on_before_insert", evt.before_insert)
-        event.listen(User, "on_after_insert", evt.after_insert)
+        event.listen(User, "before_insert", evt.before_insert)
+        event.listen(User, "after_insert", evt.after_insert)
 
         u1 = User(name='user1')
         u2 = User(name='user2')