CANCEL = util.symbol('CANCEL')
NO_RETVAL = util.symbol('NO_RETVAL')
+
def listen(target, identifier, fn, *args, **kw):
"""Register a listener function for the given target.
raise exc.InvalidRequestError("No such event '%s' for target '%s'" %
(identifier, target))
+
def listens_for(target, identifier, *args, **kw):
"""Decorate a function as a listener for the given target + identifier.
return fn
return decorate
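# Illustrative sketch of the two registration styles above, assuming an
# Engine target and the 'before_execute' connection event; any valid
# target / identifier pair is registered the same way.
from sqlalchemy import create_engine, event

engine = create_engine('sqlite://')

def log_execute(conn, clauseelement, multiparams, params):
    print "executing:", clauseelement

event.listen(engine, 'before_execute', log_execute)

@event.listens_for(engine, 'before_execute')
def log_execute_decorated(conn, clauseelement, multiparams, params):
    print "executing (decorated):", clauseelement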
+
def remove(target, identifier, fn):
"""Remove an event listener.
_registrars = util.defaultdict(list)
+
def _is_event_name(name):
return not name.startswith('_') and name != 'dispatch'
+
class _UnpickleDispatch(object):
- """Serializable callable that re-generates an instance of :class:`_Dispatch`
- given a particular :class:`.Events` subclass.
+ """Serializable callable that re-generates an instance of
+ :class:`_Dispatch` given a particular :class:`.Events` subclass.
"""
def __call__(self, _parent_cls):
else:
raise AttributeError("No class with a 'dispatch' member present.")
+
class _Dispatch(object):
"""Mirror the event listening definitions of an Events class with
listener collections.
if _is_event_name(attr):
getattr(self, attr).for_modify(self).clear()
+
def _event_descriptors(target):
return [getattr(target, k) for k in dir(target) if _is_event_name(k)]
+
class _EventMeta(type):
"""Intercept new Event subclasses and create
associated _Dispatch classes."""
_create_dispatcher_class(cls, classname, bases, dict_)
return type.__init__(cls, classname, bases, dict_)
+
def _create_dispatcher_class(cls, classname, bases, dict_):
"""Create a :class:`._Dispatch` class corresponding to an
:class:`.Events` class."""
setattr(dispatch_cls, k, _DispatchDescriptor(dict_[k]))
_registrars[k].append(cls)
+
def _remove_dispatcher(cls):
for k in dir(cls):
if _is_event_name(k):
if not _registrars[k]:
del _registrars[k]
+
class Events(object):
"""Define event listening functions for a particular target type."""
-
__metaclass__ = _EventMeta
@classmethod
def _clear(cls):
cls.dispatch._clear()
+
class _DispatchDescriptor(object):
"""Class-level attributes on :class:`._Dispatch` classes."""
def __nonzero__(self):
return bool(self.parent_listeners)
+
class _CompoundListener(object):
_exec_once = False
)
return ret
+
class _JoinedListener(_CompoundListener):
_exec_once = False
+
def __init__(self, parent, name, local):
self.parent = parent
self.name = name
return self.dispatch_cls
obj.__dict__['dispatch'] = disp = self.dispatch_cls(cls)
return disp
-
"""
+
class SchemaEventTarget(object):
"""Base class for elements that are the targets of :class:`.DDLEvents`
events.
self._set_parent(parent)
self.dispatch.after_parent_attach(self, parent)
+
class PoolEvents(event.Events):
"""Available events for :class:`.Pool`.
"""
+
class ConnectionEvents(event.Events):
"""Available events for :class:`.Connectable`, which includes
:class:`.Connection` and :class:`.Engine`.
:meth:`.TwoPhaseTransaction.prepare` was called.
"""
-
"""Exceptions used with SQLAlchemy.
-The base exception class is :class:`.SQLAlchemyError`. Exceptions which are raised as a
-result of DBAPI exceptions are all subclasses of
+The base exception class is :class:`.SQLAlchemyError`. Exceptions which are
+raised as a result of DBAPI exceptions are all subclasses of
:class:`.DBAPIError`.
"""
import traceback
+
class SQLAlchemyError(Exception):
"""Generic error class."""
"""
+
class NoForeignKeysError(ArgumentError):
"""Raised when no foreign keys can be located between two selectables
during a join."""
+
class AmbiguousForeignKeysError(ArgumentError):
"""Raised when more than one foreign key matching can be located
between two selectables during a join."""
+
class CircularDependencyError(SQLAlchemyError):
"""Raised by topological sorts when a circular dependency is detected.
return self.__class__, (None, self.cycles,
self.edges, self.args[0])
+
class CompileError(SQLAlchemyError):
"""Raised when an error occurs during SQL compilation"""
+
class IdentifierError(SQLAlchemyError):
"""Raised when a schema name is beyond the max character limit"""
"""A disconnect is detected on a raw DB-API connection.
This error is raised and consumed internally by a connection pool. It can
- be raised by the :meth:`.PoolEvents.checkout` event
- so that the host pool forces a retry; the exception will be caught
- three times in a row before the pool gives up and raises
- :class:`~sqlalchemy.exc.InvalidRequestError` regarding the connection attempt.
+ be raised by the :meth:`.PoolEvents.checkout` event so that the host pool
+ forces a retry; the exception will be caught three times in a row before
+ the pool gives up and raises :class:`~sqlalchemy.exc.InvalidRequestError`
+ regarding the connection attempt.
"""
-
class TimeoutError(SQLAlchemyError):
"""Raised when a connection pool times out on getting a connection."""
"""
+
class NoInspectionAvailable(InvalidRequestError):
"""A subject passed to :func:`sqlalchemy.inspection.inspect` produced
no context for inspection."""
+
class ResourceClosedError(InvalidRequestError):
"""An operation was requested from a connection, cursor, or other
object that's in a closed state."""
+
class NoSuchColumnError(KeyError, InvalidRequestError):
"""A nonexistent column is requested from a ``RowProxy``."""
+
class NoReferenceError(InvalidRequestError):
"""Raised by ``ForeignKey`` to indicate a reference cannot be resolved."""
+
class NoReferencedTableError(NoReferenceError):
- """Raised by ``ForeignKey`` when the referred ``Table`` cannot be located."""
+ """Raised by ``ForeignKey`` when the referred ``Table`` cannot be
+ located.
+ """
def __init__(self, message, tname):
NoReferenceError.__init__(self, message)
self.table_name = tname
def __reduce__(self):
return self.__class__, (self.args[0], self.table_name)
+
class NoReferencedColumnError(NoReferenceError):
- """Raised by ``ForeignKey`` when the referred ``Column`` cannot be located."""
+ """Raised by ``ForeignKey`` when the referred ``Column`` cannot be
+ located.
+ """
def __init__(self, message, tname, cname):
NoReferenceError.__init__(self, message)
self.table_name = tname
return self.__class__, (self.args[0], self.table_name,
self.column_name)
+
class NoSuchTableError(InvalidRequestError):
"""Table does not exist or is not visible to a connection."""
# Moved to orm.exc; compatibility definition installed by orm import until 0.6
UnmappedColumnError = None
+
class StatementError(SQLAlchemyError):
"""An error occurred during execution of a SQL statement.
def __unicode__(self):
return self.__str__()
+
class DBAPIError(StatementError):
"""Raised when the execution of a database operation fails.
raise the same exception type for any given error condition.
:class:`DBAPIError` features :attr:`~.StatementError.statement`
- and :attr:`~.StatementError.params` attributes which supply context regarding
- the specifics of the statement which had an issue, for the
+ and :attr:`~.StatementError.params` attributes which supply context
+ regarding the specifics of the statement which had an issue, for the
typical case when the error was raised within the context of
emitting a SQL statement.
- The wrapped exception object is available in the :attr:`~.StatementError.orig` attribute.
- Its type and properties are DB-API implementation specific.
+ The wrapped exception object is available in the
+ :attr:`~.StatementError.orig` attribute. Its type and properties are
+ DB-API implementation specific.
"""
# not a DBAPI error, statement is present.
# raise a StatementError
if not isinstance(orig, dbapi_base_err) and statement:
+ msg = traceback.format_exception_only(
+ orig.__class__, orig)[-1].strip()
return StatementError(
- "%s (original cause: %s)" % (
- str(orig),
- traceback.format_exception_only(orig.__class__, orig)[-1].strip()
- ), statement, params, orig)
+ "%s (original cause: %s)" % (str(orig), msg),
+ statement, params, orig
+ )
name, glob = orig.__class__.__name__, globals()
if name in glob and issubclass(glob[name], DBAPIError):
from . import util, exc
_registrars = util.defaultdict(list)
+
def inspect(subject, raiseerr=True):
"""Produce an inspection object for the given target.
type_)
return ret
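# Hedged sketch: inspect() dispatches on the type of its subject; an Engine,
# for example, resolves to an Inspector suitable for schema introspection.
from sqlalchemy import create_engine, inspect

insp = inspect(create_engine('sqlite://'))
print insp.get_table_names()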
+
def _inspects(*types):
def decorate(fn_or_cls):
for type_ in types:
return fn_or_cls
return decorate
+
def _self_inspects(*types):
- _inspects(*types)(True)
\ No newline at end of file
+ _inspects(*types)(True)
from . import event, util
+
class PoolListener(object):
"""Hooks into the lifecycle of connections in a :class:`.Pool`.
if hasattr(listener, 'checkin'):
event.listen(self, 'checkin', listener.checkin)
-
def connect(self, dbapi_con, con_record):
"""Called once for each new DB-API connection or Pool's ``creator()``.
"""
+
class ConnectionProxy(object):
"""Allows interception of statement execution by Connections.
cursor level executions, e.g.::
class MyProxy(ConnectionProxy):
- def execute(self, conn, execute, clauseelement, *multiparams, **params):
+ def execute(self, conn, execute, clauseelement,
+ *multiparams, **params):
print "compiled statement:", clauseelement
return execute(clauseelement, *multiparams, **params)
- def cursor_execute(self, execute, cursor, statement, parameters, context, executemany):
+ def cursor_execute(self, execute, cursor, statement,
+ parameters, context, executemany):
print "raw statement:", statement
return execute(cursor, statement, parameters, context)
event.listen(self, 'before_execute', adapt_execute)
def adapt_cursor_execute(conn, cursor, statement,
- parameters,context, executemany, ):
+ parameters, context, executemany):
def execute_wrapper(
cursor,
event.listen(self, 'commit_twophase',
adapt_listener(listener.commit_twophase))
-
def execute(self, conn, execute, clauseelement, *multiparams, **params):
"""Intercept high level execute() events."""
-
return execute(clauseelement, *multiparams, **params)
- def cursor_execute(self, execute, cursor, statement, parameters, context, executemany):
+ def cursor_execute(self, execute, cursor, statement, parameters,
+ context, executemany):
"""Intercept low-level cursor execute() events."""
return execute(cursor, statement, parameters, context)
"""Intercept commit_twophase() events."""
return commit_twophase(xid, is_prepared)
-
import logging
import sys
-from . import util
# set initial level to WARN. This is so that
# log statements don't occur in the absence of explicit
if rootlogger.level == logging.NOTSET:
rootlogger.setLevel(logging.WARN)
+
def _add_default_handler(logger):
handler = logging.StreamHandler(sys.stdout)
handler.setFormatter(logging.Formatter(
'%(asctime)s %(levelname)s %(name)s %(message)s'))
logger.addHandler(handler)
+
_logged_classes = set()
+
+
def class_logger(cls, enable=False):
logger = logging.getLogger(cls.__module__ + "." + cls.__name__)
if enable == 'debug':
def _should_log_info(self):
return self.logger.isEnabledFor(logging.INFO)
+
class InstanceLogger(object):
"""A logger adapter (wrapper) for :class:`.Identified` subclasses.
level = self.logger.getEffectiveLevel()
return level
+
def instance_logger(instance, echoflag=None):
"""create a logger for an instance that implements :class:`.Identified`."""
instance.logger = logger
+
class echo_property(object):
__doc__ = """\
When ``True``, enable log output for this element.
SQLAlchemy connection pool.
"""
-import weakref, time, traceback
+import time
+import traceback
+import weakref
from . import exc, log, event, events, interfaces, util
from .util import queue as sqla_queue
from .util import threading, memoized_property, \
chop_traceback
+
proxies = {}
+
def manage(module, **params):
"""Return a proxy for a DB-API module that automatically
pools connections.
except KeyError:
return proxies.setdefault(module, _DBProxy(module, **params))
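# Usage sketch for the proxy returned by manage(), assuming the sqlite3
# module; connect() arguments select (or create) the underlying pool.
import sqlite3
from sqlalchemy import pool

sqlite3 = pool.manage(sqlite3)
conn = sqlite3.connect(':memory:')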
+
def clear_managers():
"""Remove all current DB-API 2.0 managers.
dispatch = event.dispatcher(events.PoolEvents)
- @util.deprecated(2.7, "Pool.add_listener is deprecated. Use event.listen()")
+ @util.deprecated(
+ 2.7, "Pool.add_listener is deprecated. Use event.listen()")
def add_listener(self, listener):
"""Add a :class:`.PoolListener`-like object to this pool.
pool.dispatch.checkin(connection, connection_record)
pool._return_conn(connection_record)
+
_refs = set()
+
class _ConnectionFairy(object):
"""Proxies a DB-API connection and provides return-on-dereference
support."""
self.connection = None
self._connection_record = None
+
class SingletonThreadPool(Pool):
"""A Pool that maintains one connection per thread.
self._cleanup()
return c
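# Sketch: this pool is normally selected via the ``poolclass`` argument to
# create_engine(); the SQLite URL below is a placeholder.
from sqlalchemy import create_engine
from sqlalchemy.pool import SingletonThreadPool

engine = create_engine('sqlite:///some.db', poolclass=SingletonThreadPool)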
+
class DummyLock(object):
+
def acquire(self, wait=True):
return True
+
def release(self):
pass
+
class QueuePool(Pool):
"""A :class:`.Pool` that imposes a limit on the number of open connections.
def checkedout(self):
return self._pool.maxsize - self._pool.qsize() + self._overflow
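# Direct-construction sketch, assuming a simple sqlite3 creator; pool_size
# and max_overflow bound the number of simultaneously open connections.
import sqlite3
from sqlalchemy.pool import QueuePool

def getconn():
    return sqlite3.connect(':memory:')

p = QueuePool(getconn, pool_size=5, max_overflow=10)
conn = p.connect()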
+
class NullPool(Pool):
"""A Pool which does not pool connections.
def _do_get(self):
return self.connection
+
class AssertionPool(Pool):
- """A :class:`.Pool` that allows at most one checked out connection at any given
- time.
+ """A :class:`.Pool` that allows at most one checked out connection at
+ any given time.
This will raise an exception if more than one connection is checked out
at a time. Useful for debugging code that is using more connections
self._checkout_traceback = traceback.format_stack()
return self._conn
+
class _DBProxy(object):
"""Layers connection pooling behavior on top of a standard DB-API module.
import re
import datetime
+
def str_to_datetime_processor_factory(regexp, type_):
rmatch = regexp.match
    # Even on Python 2.6, datetime.strptime is both slower than this code
    # and does not support microseconds.
has_named_groups = bool(regexp.groupindex)
+
def process(value):
if value is None:
return None
return type_(*map(int, m.groups(0)))
return process
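# Worked sketch of the factory above: with no named groups in the regexp,
# the matched groups are passed positionally, as ints, to the given type.
parse_date = str_to_datetime_processor_factory(
    re.compile(r"(\d+)-(\d+)-(\d+)"), datetime.date)

assert parse_date("2012-10-15") == datetime.date(2012, 10, 15)
assert parse_date(None) is None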
+
def boolean_to_int(value):
if value is None:
return None
else:
return int(value)
+
def py_fallback():
def to_unicode_processor_factory(encoding, errors=None):
decoder = codecs.getdecoder(encoding)
except ImportError:
globals().update(py_fallback())
-