PEP8 style fixes
author     Brian Jarrett <celttechie@gmail.com>
           Thu, 10 Jul 2014 22:28:49 +0000 (16:28 -0600)
committer  Mike Bayer <mike_mp@zzzcomputing.com>
           Mon, 14 Jul 2014 00:21:06 +0000 (20:21 -0400)
36 files changed:
lib/sqlalchemy/__init__.py
lib/sqlalchemy/connectors/mxodbc.py
lib/sqlalchemy/connectors/pyodbc.py
lib/sqlalchemy/connectors/zxJDBC.py
lib/sqlalchemy/databases/__init__.py
lib/sqlalchemy/engine/__init__.py
lib/sqlalchemy/engine/base.py
lib/sqlalchemy/engine/default.py
lib/sqlalchemy/engine/interfaces.py
lib/sqlalchemy/engine/reflection.py
lib/sqlalchemy/engine/result.py
lib/sqlalchemy/engine/strategies.py
lib/sqlalchemy/engine/threadlocal.py
lib/sqlalchemy/engine/url.py
lib/sqlalchemy/engine/util.py
lib/sqlalchemy/events.py
lib/sqlalchemy/exc.py
lib/sqlalchemy/ext/associationproxy.py
lib/sqlalchemy/ext/automap.py
lib/sqlalchemy/ext/compiler.py
lib/sqlalchemy/ext/declarative/__init__.py
lib/sqlalchemy/ext/declarative/api.py
lib/sqlalchemy/ext/declarative/base.py
lib/sqlalchemy/ext/declarative/clsregistry.py
lib/sqlalchemy/ext/horizontal_shard.py
lib/sqlalchemy/ext/hybrid.py
lib/sqlalchemy/ext/instrumentation.py
lib/sqlalchemy/ext/mutable.py
lib/sqlalchemy/ext/orderinglist.py
lib/sqlalchemy/ext/serializer.py
lib/sqlalchemy/inspection.py
lib/sqlalchemy/interfaces.py
lib/sqlalchemy/log.py
lib/sqlalchemy/pool.py
lib/sqlalchemy/processors.py
lib/sqlalchemy/types.py
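
This commit is indentation-only: nearly every hunk below re-wraps continuation lines to satisfy PEP8, aligning arguments under the opening delimiter or using a uniform four-space hanging indent in place of ad-hoc alignment. A minimal sketch of the pattern, using the warnings call reformatted in connectors/mxodbc.py below (the exact tool and error code, pep8's E128, are an assumption; the commit message names neither):

    import warnings

    errorclass, errorvalue = UserWarning, "example condition"

    # Before: continuation lines sit at an arbitrary column.
    warnings.warn(message=str(errorvalue),
              category=errorclass,
              stacklevel=2)

    # After: continuation lines align with the opening parenthesis.
    warnings.warn(message=str(errorvalue),
                  category=errorclass,
                  stacklevel=2)

Both forms are valid Python; the commit changes layout only, never behavior.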

diff --git a/lib/sqlalchemy/__init__.py b/lib/sqlalchemy/__init__.py
index 154062e42d15b77d94ae473740a118db0aa03903..2815b1ff20b813e59a8da83666146dd4eb2d0438 100644 (file)
@@ -119,6 +119,7 @@ from .engine import create_engine, engine_from_config
 
 __version__ = '0.9.7'
 
+
 def __go(lcls):
     global __all__
 
@@ -128,7 +129,7 @@ def __go(lcls):
     import inspect as _inspect
 
     __all__ = sorted(name for name, obj in lcls.items()
-                 if not (name.startswith('_') or _inspect.ismodule(obj)))
+                     if not (name.startswith('_') or _inspect.ismodule(obj)))
 
     _sa_util.dependencies.resolve_all("sqlalchemy")
 __go(locals())
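
The hunk above only re-wraps the comprehension inside __go(), but the idiom it touches deserves a note: __all__ is computed at import time from the package namespace, excluding private names and submodules. A standalone sketch of the same idiom (a hypothetical module, not part of the commit):

    import inspect as _inspect

    def _public_names(namespace):
        # Keep every name that is neither private (leading underscore)
        # nor a module object; mirrors the comprehension in __go().
        return sorted(name for name, obj in namespace.items()
                      if not (name.startswith('_') or
                              _inspect.ismodule(obj)))

    __all__ = _public_names(globals())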
diff --git a/lib/sqlalchemy/connectors/mxodbc.py b/lib/sqlalchemy/connectors/mxodbc.py
index 355ef24821b03987e12336585e8db9cdf8fc64ba..851dc11e84b8d0698e0bb83aa6ca2d46c7c3f41a 100644 (file)
@@ -80,8 +80,8 @@ class MxODBCConnector(Connector):
             if issubclass(errorclass, MxOdbcWarning):
                 errorclass.__bases__ = (Warning,)
                 warnings.warn(message=str(errorvalue),
-                          category=errorclass,
-                          stacklevel=2)
+                              category=errorclass,
+                              stacklevel=2)
             else:
                 raise errorclass(errorvalue)
         return error_handler
@@ -135,7 +135,7 @@ class MxODBCConnector(Connector):
     def _get_direct(self, context):
         if context:
             native_odbc_execute = context.execution_options.\
-                                        get('native_odbc_execute', 'auto')
+                get('native_odbc_execute', 'auto')
             # default to direct=True in all cases, is more generally
             # compatible especially with SQL Server
             return False if native_odbc_execute is True else True
diff --git a/lib/sqlalchemy/connectors/pyodbc.py b/lib/sqlalchemy/connectors/pyodbc.py
index 8261d072272171b0539a681ae78d0866465ddeda..bc8a2f0b89fbeef0d07e47f8d29ef05ff29c177c 100644 (file)
@@ -66,17 +66,17 @@ class PyODBCConnector(Connector):
             connectors = [util.unquote_plus(keys.pop('odbc_connect'))]
         else:
             dsn_connection = 'dsn' in keys or \
-                            ('host' in keys and 'database' not in keys)
+                ('host' in keys and 'database' not in keys)
             if dsn_connection:
-                connectors = ['dsn=%s' % (keys.pop('host', '') or \
-                            keys.pop('dsn', ''))]
+                connectors = ['dsn=%s' % (keys.pop('host', '') or
+                                          keys.pop('dsn', ''))]
             else:
                 port = ''
-                if 'port' in keys and not 'port' in query:
+                if 'port' in keys and 'port' not in query:
                     port = ',%d' % int(keys.pop('port'))
 
                 connectors = ["DRIVER={%s}" %
-                                keys.pop('driver', self.pyodbc_driver_name),
+                              keys.pop('driver', self.pyodbc_driver_name),
                               'Server=%s%s' % (keys.pop('host', ''), port),
                               'Database=%s' % keys.pop('database', '')]
 
@@ -93,7 +93,7 @@ class PyODBCConnector(Connector):
             # you query a cp1253 encoded database from a latin1 client...
             if 'odbc_autotranslate' in keys:
                 connectors.append("AutoTranslate=%s" %
-                                    keys.pop("odbc_autotranslate"))
+                                  keys.pop("odbc_autotranslate"))
 
             connectors.extend(['%s=%s' % (k, v) for k, v in keys.items()])
         return [[";".join(connectors)], connect_args]
@@ -101,7 +101,7 @@ class PyODBCConnector(Connector):
     def is_disconnect(self, e, connection, cursor):
         if isinstance(e, self.dbapi.ProgrammingError):
             return "The cursor's connection has been closed." in str(e) or \
-                            'Attempt to use a closed connection.' in str(e)
+                'Attempt to use a closed connection.' in str(e)
         elif isinstance(e, self.dbapi.Error):
             return '[08S01]' in str(e)
         else:
@@ -117,9 +117,9 @@ class PyODBCConnector(Connector):
 
         _sql_driver_name = dbapi_con.getinfo(pyodbc.SQL_DRIVER_NAME)
         self.freetds = bool(re.match(r".*libtdsodbc.*\.so", _sql_driver_name
-                            ))
+                                     ))
         self.easysoft = bool(re.match(r".*libessqlsrv.*\.so", _sql_driver_name
-                            ))
+                                      ))
 
         if self.freetds:
             self.freetds_driver_version = dbapi_con.getinfo(
@@ -149,9 +149,9 @@ class PyODBCConnector(Connector):
 
     def _parse_dbapi_version(self, vers):
         m = re.match(
-                r'(?:py.*-)?([\d\.]+)(?:-(\w+))?',
-                vers
-            )
+            r'(?:py.*-)?([\d\.]+)(?:-(\w+))?',
+            vers
+        )
         if not m:
             return ()
         vers = tuple([int(x) for x in m.group(1).split(".")])
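
For context, the regex being re-indented here splits a DBAPI version string such as "py3-3.0.1-beta1" into a numeric tuple plus an optional tag. A standalone approximation (the tag-handling tail falls outside this hunk and is assumed here):

    import re

    def parse_dbapi_version(vers):
        # Optional "py...-" prefix, dotted numeric core, optional "-tag".
        m = re.match(r'(?:py.*-)?([\d\.]+)(?:-(\w+))?', vers)
        if not m:
            return ()
        parsed = tuple(int(x) for x in m.group(1).split("."))
        if m.group(2):
            parsed += (m.group(2),)  # assumption: trailing tag kept verbatim
        return parsed

    print(parse_dbapi_version("py3-3.0.1-beta1"))  # (3, 0, 1, 'beta1')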
diff --git a/lib/sqlalchemy/connectors/zxJDBC.py b/lib/sqlalchemy/connectors/zxJDBC.py
index a3c6bd1f5e81898b91595dc1af5d19c491890dfc..c0af742facdeed51ee91176040ab80b7c23dbf90 100644 (file)
@@ -36,17 +36,17 @@ class ZxJDBCConnector(Connector):
         """Create a JDBC url from a :class:`~sqlalchemy.engine.url.URL`"""
         return 'jdbc:%s://%s%s/%s' % (self.jdbc_db_name, url.host,
                                       url.port is not None
-                                        and ':%s' % url.port or '',
+                                      and ':%s' % url.port or '',
                                       url.database)
 
     def create_connect_args(self, url):
         opts = self._driver_kwargs()
         opts.update(url.query)
         return [
-                [self._create_jdbc_url(url),
-                url.username, url.password,
-                self.jdbc_driver_name],
-                opts]
+            [self._create_jdbc_url(url),
+             url.username, url.password,
+             self.jdbc_driver_name],
+            opts]
 
     def is_disconnect(self, e, connection, cursor):
         if not isinstance(e, self.dbapi.ProgrammingError):
diff --git a/lib/sqlalchemy/databases/__init__.py b/lib/sqlalchemy/databases/__init__.py
index b78f48df029abf24f98ec59c113ae23c5453e05b..19a7ad6ba27f2d93209ed1b7d917733feca37813 100644 (file)
@@ -29,4 +29,4 @@ __all__ = (
     'sqlite',
     'oracle',
     'sybase',
-    )
+)
diff --git a/lib/sqlalchemy/engine/__init__.py b/lib/sqlalchemy/engine/__init__.py
index 6d6e7a3549e4a83b9a8f55a5a3aaa5b187f3a24a..9c9e03821e0ca15aeeb0ca84cf5d810735c7bffd 100644 (file)
@@ -69,7 +69,7 @@ from .base import (
     RootTransaction,
     Transaction,
     TwoPhaseTransaction,
-    )
+)
 
 from .result import (
     BufferedColumnResultProxy,
@@ -78,11 +78,11 @@ from .result import (
     FullyBufferedResultProxy,
     ResultProxy,
     RowProxy,
-    )
+)
 
 from .util import (
     connection_memoize
-    )
+)
 
 
 from . import util, strategies
@@ -371,4 +371,4 @@ def engine_from_config(configuration, prefix='sqlalchemy.', **kwargs):
 __all__ = (
     'create_engine',
     'engine_from_config',
-    )
+)
diff --git a/lib/sqlalchemy/engine/base.py b/lib/sqlalchemy/engine/base.py
index 73c35c38f3f581fd897f87f177f7abd0f92558f8..cf06896261edebcd9e8e5e099b7f33fad7a9a19d 100644 (file)
@@ -72,7 +72,7 @@ class Connection(Connectable):
             # want to handle any of the engine's events in that case.
             self.dispatch = self.dispatch._join(engine.dispatch)
         self._has_events = _has_events or (
-                                _has_events is None and engine._has_events)
+            _has_events is None and engine._has_events)
 
         self._echo = self.engine._should_log_info()
         if _execution_options:
@@ -94,11 +94,11 @@ class Connection(Connectable):
         """
 
         return self.engine._connection_cls(
-                                self.engine,
-                                self.__connection,
-                                _branch=True,
-                                _has_events=self._has_events,
-                                _dispatch=self.dispatch)
+            self.engine,
+            self.__connection,
+            _branch=True,
+            _has_events=self._has_events,
+            _dispatch=self.dispatch)
 
     def _clone(self):
         """Create a shallow copy of this Connection.
@@ -239,8 +239,8 @@ class Connection(Connectable):
         if self.__can_reconnect and self.__invalid:
             if self.__transaction is not None:
                 raise exc.InvalidRequestError(
-                                "Can't reconnect until invalid "
-                                "transaction is rolled back")
+                    "Can't reconnect until invalid "
+                    "transaction is rolled back")
             self.__connection = self.engine.raw_connection()
             self.__invalid = False
             return self.__connection
@@ -324,10 +324,10 @@ class Connection(Connectable):
         :meth:`.Connection.invalidate` method is called, at the DBAPI
         level all state associated with this transaction is lost, as
         the DBAPI connection is closed.  The :class:`.Connection`
-        will not allow a reconnection to proceed until the :class:`.Transaction`
-        object is ended, by calling the :meth:`.Transaction.rollback`
-        method; until that point, any attempt at continuing to use the
-        :class:`.Connection` will raise an
+        will not allow a reconnection to proceed until the
+        :class:`.Transaction` object is ended, by calling the
+        :meth:`.Transaction.rollback` method; until that point, any attempt at
+        continuing to use the :class:`.Connection` will raise an
         :class:`~sqlalchemy.exc.InvalidRequestError`.
         This is to prevent applications from accidentally
         continuing an ongoing transactional operations despite the
@@ -335,8 +335,8 @@ class Connection(Connectable):
         invalidation.
 
         The :meth:`.Connection.invalidate` method, just like auto-invalidation,
-        will at the connection pool level invoke the :meth:`.PoolEvents.invalidate`
-        event.
+        will at the connection pool level invoke the
+        :meth:`.PoolEvents.invalidate` event.
 
         .. seealso::
 
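The behavior this docstring rewrap describes can be exercised directly; note the "Can't reconnect until invalid transaction is rolled back" message raised in the hunk further up. A sketch against an in-memory SQLite engine (illustrative only, not part of the commit):

    from sqlalchemy import create_engine, exc

    engine = create_engine("sqlite://")
    conn = engine.connect()

    trans = conn.begin()
    conn.invalidate()             # DBAPI connection discarded mid-transaction
    try:
        conn.execute("select 1")  # refused until the transaction ends
    except exc.InvalidRequestError as err:
        print(err)

    trans.rollback()              # ends the invalid transaction
    conn.execute("select 1")      # the Connection now reconnects transparently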
@@ -585,7 +585,8 @@ class Connection(Connectable):
         if self._still_open_and_connection_is_valid:
             assert isinstance(self.__transaction, TwoPhaseTransaction)
             try:
-                self.engine.dialect.do_rollback_twophase(self, xid, is_prepared)
+                self.engine.dialect.do_rollback_twophase(
+                    self, xid, is_prepared)
             finally:
                 if self.connection._reset_agent is self.__transaction:
                     self.connection._reset_agent = None
@@ -722,8 +723,8 @@ class Connection(Connectable):
             meth = object._execute_on_connection
         except AttributeError:
             raise exc.InvalidRequestError(
-                                "Unexecutable object type: %s" %
-                                type(object))
+                "Unexecutable object type: %s" %
+                type(object))
         else:
             return meth(self, multiparams, params)
 
@@ -731,7 +732,7 @@ class Connection(Connectable):
         """Execute a sql.FunctionElement object."""
 
         return self._execute_clauseelement(func.select(),
-                                            multiparams, params)
+                                           multiparams, params)
 
     def _execute_default(self, default, multiparams, params):
         """Execute a schema.ColumnDefault object."""
@@ -749,7 +750,7 @@ class Connection(Connectable):
 
             dialect = self.dialect
             ctx = dialect.execution_ctx_cls._init_default(
-                                dialect, self, conn)
+                dialect, self, conn)
         except Exception as e:
             self._handle_dbapi_exception(e, None, None, None, None)
 
@@ -759,7 +760,7 @@ class Connection(Connectable):
 
         if self._has_events or self.engine._has_events:
             self.dispatch.after_execute(self,
-                default, multiparams, params, ret)
+                                        default, multiparams, params, ret)
 
         return ret
 
@@ -783,7 +784,7 @@ class Connection(Connectable):
         )
         if self._has_events or self.engine._has_events:
             self.dispatch.after_execute(self,
-                ddl, multiparams, params, ret)
+                                        ddl, multiparams, params, ret)
         return ret
 
     def _execute_clauseelement(self, elem, multiparams, params):
@@ -809,13 +810,13 @@ class Connection(Connectable):
                 compiled_sql = self._execution_options['compiled_cache'][key]
             else:
                 compiled_sql = elem.compile(
-                                dialect=dialect, column_keys=keys,
-                                inline=len(distilled_params) > 1)
+                    dialect=dialect, column_keys=keys,
+                    inline=len(distilled_params) > 1)
                 self._execution_options['compiled_cache'][key] = compiled_sql
         else:
             compiled_sql = elem.compile(
-                            dialect=dialect, column_keys=keys,
-                            inline=len(distilled_params) > 1)
+                dialect=dialect, column_keys=keys,
+                inline=len(distilled_params) > 1)
 
         ret = self._execute_context(
             dialect,
@@ -826,7 +827,7 @@ class Connection(Connectable):
         )
         if self._has_events or self.engine._has_events:
             self.dispatch.after_execute(self,
-                elem, multiparams, params, ret)
+                                        elem, multiparams, params, ret)
         return ret
 
     def _execute_compiled(self, compiled, multiparams, params):
@@ -848,7 +849,7 @@ class Connection(Connectable):
         )
         if self._has_events or self.engine._has_events:
             self.dispatch.after_execute(self,
-                compiled, multiparams, params, ret)
+                                        compiled, multiparams, params, ret)
         return ret
 
     def _execute_text(self, statement, multiparams, params):
@@ -870,12 +871,12 @@ class Connection(Connectable):
         )
         if self._has_events or self.engine._has_events:
             self.dispatch.after_execute(self,
-                statement, multiparams, params, ret)
+                                        statement, multiparams, params, ret)
         return ret
 
     def _execute_context(self, dialect, constructor,
-                                    statement, parameters,
-                                    *args):
+                         statement, parameters,
+                         *args):
         """Create an :class:`.ExecutionContext` and execute, returning
         a :class:`.ResultProxy`."""
 
@@ -888,15 +889,15 @@ class Connection(Connectable):
             context = constructor(dialect, self, conn, *args)
         except Exception as e:
             self._handle_dbapi_exception(e,
-                        util.text_type(statement), parameters,
-                        None, None)
+                                         util.text_type(statement), parameters,
+                                         None, None)
 
         if context.compiled:
             context.pre_exec()
 
         cursor, statement, parameters = context.cursor, \
-                                        context.statement, \
-                                        context.parameters
+            context.statement, \
+            context.parameters
 
         if not context.executemany:
             parameters = parameters[0]
@@ -904,62 +905,64 @@ class Connection(Connectable):
         if self._has_events or self.engine._has_events:
             for fn in self.dispatch.before_cursor_execute:
                 statement, parameters = \
-                            fn(self, cursor, statement, parameters,
-                                        context, context.executemany)
+                    fn(self, cursor, statement, parameters,
+                       context, context.executemany)
 
         if self._echo:
             self.engine.logger.info(statement)
-            self.engine.logger.info("%r",
-                    sql_util._repr_params(parameters, batches=10))
+            self.engine.logger.info(
+                "%r",
+                sql_util._repr_params(parameters, batches=10)
+            )
         try:
             if context.executemany:
                 for fn in () if not self.dialect._has_events \
-                    else self.dialect.dispatch.do_executemany:
+                        else self.dialect.dispatch.do_executemany:
                     if fn(cursor, statement, parameters, context):
                         break
                 else:
                     self.dialect.do_executemany(
-                                     cursor,
-                                     statement,
-                                     parameters,
-                                     context)
+                        cursor,
+                        statement,
+                        parameters,
+                        context)
 
             elif not parameters and context.no_parameters:
                 for fn in () if not self.dialect._has_events \
-                    else self.dialect.dispatch.do_execute_no_params:
+                        else self.dialect.dispatch.do_execute_no_params:
                     if fn(cursor, statement, context):
                         break
                 else:
                     self.dialect.do_execute_no_params(
-                                     cursor,
-                                     statement,
-                                     context)
+                        cursor,
+                        statement,
+                        context)
 
             else:
                 for fn in () if not self.dialect._has_events \
-                    else self.dialect.dispatch.do_execute:
+                        else self.dialect.dispatch.do_execute:
                     if fn(cursor, statement, parameters, context):
                         break
                 else:
                     self.dialect.do_execute(
-                                     cursor,
-                                     statement,
-                                     parameters,
-                                     context)
+                        cursor,
+                        statement,
+                        parameters,
+                        context)
         except Exception as e:
             self._handle_dbapi_exception(
-                                e,
-                                statement,
-                                parameters,
-                                cursor,
-                                context)
+                e,
+                statement,
+                parameters,
+                cursor,
+                context)
 
         if self._has_events or self.engine._has_events:
             self.dispatch.after_cursor_execute(self, cursor,
-                                                statement,
-                                                parameters,
-                                                context,
-                                                context.executemany)
+                                               statement,
+                                               parameters,
+                                               context,
+                                               context.executemany)
 
         if context.compiled:
             context.post_exec()
@@ -1012,38 +1015,38 @@ class Connection(Connectable):
         if self._has_events or self.engine._has_events:
             for fn in self.dispatch.before_cursor_execute:
                 statement, parameters = \
-                            fn(self, cursor, statement, parameters,
-                                        context,
-                                        False)
+                    fn(self, cursor, statement, parameters,
+                       context,
+                       False)
 
         if self._echo:
             self.engine.logger.info(statement)
             self.engine.logger.info("%r", parameters)
         try:
             for fn in () if not self.dialect._has_events \
-                else self.dialect.dispatch.do_execute:
+                    else self.dialect.dispatch.do_execute:
                 if fn(cursor, statement, parameters, context):
                     break
             else:
                 self.dialect.do_execute(
-                                 cursor,
-                                 statement,
-                                 parameters,
-                                 context)
+                    cursor,
+                    statement,
+                    parameters,
+                    context)
         except Exception as e:
             self._handle_dbapi_exception(
-                                e,
-                                statement,
-                                parameters,
-                                cursor,
-                                context)
+                e,
+                statement,
+                parameters,
+                cursor,
+                context)
 
         if self._has_events or self.engine._has_events:
             self.dispatch.after_cursor_execute(self, cursor,
-                                                statement,
-                                                parameters,
-                                                context,
-                                                False)
+                                               statement,
+                                               parameters,
+                                               context,
+                                               False)
 
     def _safe_close_cursor(self, cursor):
         """Close the given cursor, catching exceptions
@@ -1057,17 +1060,17 @@ class Connection(Connectable):
         except Exception:
             # log the error through the connection pool's logger.
             self.engine.pool.logger.error(
-                                "Error closing cursor", exc_info=True)
+                "Error closing cursor", exc_info=True)
 
     _reentrant_error = False
     _is_disconnect = False
 
     def _handle_dbapi_exception(self,
-                                    e,
-                                    statement,
-                                    parameters,
-                                    cursor,
-                                    context):
+                                e,
+                                statement,
+                                parameters,
+                                cursor,
+                                context):
 
         exc_info = sys.exc_info()
 
@@ -1084,12 +1087,12 @@ class Connection(Connectable):
 
         if self._reentrant_error:
             util.raise_from_cause(
-                        exc.DBAPIError.instance(statement,
-                                            parameters,
-                                            e,
-                                            self.dialect.dbapi.Error),
-                        exc_info
-                        )
+                exc.DBAPIError.instance(statement,
+                                        parameters,
+                                        e,
+                                        self.dialect.dbapi.Error),
+                exc_info
+            )
         self._reentrant_error = True
         try:
             # non-DBAPI error - if we already got a context,
@@ -1113,11 +1116,11 @@ class Connection(Connectable):
                 # legacy dbapi_error event
                 if should_wrap and context:
                     self.dispatch.dbapi_error(self,
-                                                    cursor,
-                                                    statement,
-                                                    parameters,
-                                                    context,
-                                                    e)
+                                              cursor,
+                                              statement,
+                                              parameters,
+                                              context,
+                                              e)
 
                 # new handle_error event
                 ctx = ExceptionContextImpl(
@@ -1153,9 +1156,9 @@ class Connection(Connectable):
                 util.raise_from_cause(newraise, exc_info)
             elif should_wrap:
                 util.raise_from_cause(
-                                    sqlalchemy_exception,
-                                    exc_info
-                                )
+                    sqlalchemy_exception,
+                    exc_info
+                )
             else:
                 util.reraise(*exc_info)
 
@@ -1240,15 +1243,15 @@ class Connection(Connectable):
 
     def _run_visitor(self, visitorcallable, element, **kwargs):
         visitorcallable(self.dialect, self,
-                            **kwargs).traverse_single(element)
+                        **kwargs).traverse_single(element)
 
 
 class ExceptionContextImpl(ExceptionContext):
     """Implement the :class:`.ExceptionContext` interface."""
 
     def __init__(self, exception, sqlalchemy_exception,
-                        connection, cursor, statement, parameters,
-                        context, is_disconnect):
+                 connection, cursor, statement, parameters,
+                 context, is_disconnect):
         self.connection = connection
         self.sqlalchemy_exception = sqlalchemy_exception
         self.original_exception = exception
@@ -1371,6 +1374,7 @@ class NestedTransaction(Transaction):
     The interface is the same as that of :class:`.Transaction`.
 
     """
+
     def __init__(self, connection, parent):
         super(NestedTransaction, self).__init__(connection, parent)
         self._savepoint = self.connection._savepoint_impl()
@@ -1378,12 +1382,12 @@ class NestedTransaction(Transaction):
     def _do_rollback(self):
         if self.is_active:
             self.connection._rollback_to_savepoint_impl(
-                                    self._savepoint, self._parent)
+                self._savepoint, self._parent)
 
     def _do_commit(self):
         if self.is_active:
             self.connection._release_savepoint_impl(
-                                    self._savepoint, self._parent)
+                self._savepoint, self._parent)
 
 
 class TwoPhaseTransaction(Transaction):
@@ -1396,6 +1400,7 @@ class TwoPhaseTransaction(Transaction):
     with the addition of the :meth:`prepare` method.
 
     """
+
     def __init__(self, connection, xid):
         super(TwoPhaseTransaction, self).__init__(connection, None)
         self._is_prepared = False
@@ -1442,9 +1447,9 @@ class Engine(Connectable, log.Identified):
     _connection_cls = Connection
 
     def __init__(self, pool, dialect, url,
-                        logging_name=None, echo=None, proxy=None,
-                        execution_options=None
-                        ):
+                 logging_name=None, echo=None, proxy=None,
+                 execution_options=None
+                 ):
         self.pool = pool
         self.url = url
         self.dialect = dialect
@@ -1477,7 +1482,7 @@ class Engine(Connectable, log.Identified):
 
         """
         self._execution_options = \
-                self._execution_options.union(opt)
+            self._execution_options.union(opt)
         self.dispatch.set_engine_execution_options(self, opt)
         self.dialect.set_engine_execution_options(self, opt)
 
@@ -1526,7 +1531,8 @@ class Engine(Connectable, log.Identified):
             shards = {"default": "base", shard_1: "db1", "shard_2": "db2"}
 
             @event.listens_for(Engine, "before_cursor_execute")
-            def _switch_shard(conn, cursor, stmt, params, context, executemany):
+            def _switch_shard(conn, cursor, stmt,
+                    params, context, executemany):
                 shard_id = conn._execution_options.get('shard_id', "default")
                 current_shard = conn.info.get("current_shard", None)
 
@@ -1606,7 +1612,7 @@ class Engine(Connectable, log.Identified):
             yield connection
 
     def _run_visitor(self, visitorcallable, element,
-                                    connection=None, **kwargs):
+                     connection=None, **kwargs):
         with self._optional_conn_ctx_manager(connection) as conn:
             conn._run_visitor(visitorcallable, element, **kwargs)
 
@@ -1813,8 +1819,8 @@ class Engine(Connectable, log.Identified):
 
         .. seealso::
 
-            :ref:`metadata_reflection_inspector` - detailed schema inspection using
-            the :class:`.Inspector` interface.
+            :ref:`metadata_reflection_inspector` - detailed schema inspection
+            using the :class:`.Inspector` interface.
 
             :class:`.quoted_name` - used to pass quoting information along
             with a schema identifier.
diff --git a/lib/sqlalchemy/engine/default.py b/lib/sqlalchemy/engine/default.py
index 58915fed2c98dfd1ca393d77fc37d565149a8ee5..2fece76b9fe5fafcc3f96b0b4e89352e02d76bff 100644 (file)
@@ -24,9 +24,8 @@ import weakref
 from .. import event
 
 AUTOCOMMIT_REGEXP = re.compile(
-            r'\s*(?:UPDATE|INSERT|CREATE|DELETE|DROP|ALTER)',
-            re.I | re.UNICODE)
-
+    r'\s*(?:UPDATE|INSERT|CREATE|DELETE|DROP|ALTER)',
+    re.I | re.UNICODE)
 
 
 class DefaultDialect(interfaces.Dialect):
@@ -195,26 +194,24 @@ class DefaultDialect(interfaces.Dialect):
 
         if label_length and label_length > self.max_identifier_length:
             raise exc.ArgumentError(
-                    "Label length of %d is greater than this dialect's"
-                    " maximum identifier length of %d" %
-                    (label_length, self.max_identifier_length))
+                "Label length of %d is greater than this dialect's"
+                " maximum identifier length of %d" %
+                (label_length, self.max_identifier_length))
         self.label_length = label_length
 
         if self.description_encoding == 'use_encoding':
             self._description_decoder = \
-                            processors.to_unicode_processor_factory(
-                                            encoding
-                                    )
+                processors.to_unicode_processor_factory(
+                    encoding
+                )
         elif self.description_encoding is not None:
             self._description_decoder = \
-                            processors.to_unicode_processor_factory(
-                                            self.description_encoding
-                                    )
+                processors.to_unicode_processor_factory(
+                    self.description_encoding
+                )
         self._encoder = codecs.getencoder(self.encoding)
         self._decoder = processors.to_unicode_processor_factory(self.encoding)
 
-
-
     @util.memoized_property
     def _type_memos(self):
         return weakref.WeakKeyDictionary()
@@ -230,25 +227,25 @@ class DefaultDialect(interfaces.Dialect):
     def initialize(self, connection):
         try:
             self.server_version_info = \
-                            self._get_server_version_info(connection)
+                self._get_server_version_info(connection)
         except NotImplementedError:
             self.server_version_info = None
         try:
             self.default_schema_name = \
-                            self._get_default_schema_name(connection)
+                self._get_default_schema_name(connection)
         except NotImplementedError:
             self.default_schema_name = None
 
         try:
             self.default_isolation_level = \
-                        self.get_isolation_level(connection.connection)
+                self.get_isolation_level(connection.connection)
         except NotImplementedError:
             self.default_isolation_level = None
 
         self.returns_unicode_strings = self._check_unicode_returns(connection)
 
         if self.description_encoding is not None and \
-            self._check_unicode_description(connection):
+                self._check_unicode_description(connection):
             self._description_decoder = self.description_encoding = None
 
         self.do_rollback(connection.connection)
@@ -279,7 +276,8 @@ class DefaultDialect(interfaces.Dialect):
             parameters = {}
 
         def check_unicode(test):
-            statement = cast_to(expression.select([test]).compile(dialect=self))
+            statement = cast_to(
+                expression.select([test]).compile(dialect=self))
             try:
                 cursor = connection.connection.cursor()
                 connection._cursor_execute(cursor, statement, parameters)
@@ -289,7 +287,7 @@ class DefaultDialect(interfaces.Dialect):
                 # note that _cursor_execute() will have closed the cursor
                 # if an exception is thrown.
                 util.warn("Exception attempting to "
-                        "detect unicode returns: %r" % de)
+                          "detect unicode returns: %r" % de)
                 return False
             else:
                 return isinstance(row[0], util.text_type)
@@ -300,7 +298,8 @@ class DefaultDialect(interfaces.Dialect):
                 expression.literal_column("'test plain returns'"),
                 sqltypes.VARCHAR(60)
             ),
-            # detect if there's an NVARCHAR type with different behavior available
+            # detect if there's an NVARCHAR type with different behavior
+            # available
             expression.cast(
                 expression.literal_column("'test unicode returns'"),
                 sqltypes.Unicode(60)
@@ -351,7 +350,8 @@ class DefaultDialect(interfaces.Dialect):
         """
         return sqltypes.adapt_type(typeobj, self.colspecs)
 
-    def reflecttable(self, connection, table, include_columns, exclude_columns):
+    def reflecttable(
+            self, connection, table, include_columns, exclude_columns):
         insp = reflection.Inspector.from_engine(connection)
         return insp.reflecttable(table, include_columns, exclude_columns)
 
@@ -362,8 +362,8 @@ class DefaultDialect(interfaces.Dialect):
         """
         return {
             'constrained_columns':
-                        self.get_primary_keys(conn, table_name,
-                                                schema=schema, **kw)
+            self.get_primary_keys(conn, table_name,
+                                  schema=schema, **kw)
         }
 
     def validate_identifier(self, ident):
@@ -384,6 +384,7 @@ class DefaultDialect(interfaces.Dialect):
     def set_engine_execution_options(self, engine, opts):
         if 'isolation_level' in opts:
             isolation_level = opts['isolation_level']
+
             @event.listens_for(engine, "engine_connect")
             def set_isolation(connection, branch):
                 if not branch:
@@ -398,7 +399,6 @@ class DefaultDialect(interfaces.Dialect):
         connection.connection._connection_record.\
             finalize_callback.append(self.reset_isolation_level)
 
-
     def do_begin(self, dbapi_connection):
         pass
 
@@ -503,7 +503,7 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
 
     @classmethod
     def _init_compiled(cls, dialect, connection, dbapi_connection,
-                    compiled, parameters):
+                       compiled, parameters):
         """Initialize execution context for a Compiled construct."""
 
         self = cls.__new__(cls)
@@ -530,7 +530,7 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
         self.unicode_statement = util.text_type(compiled)
         if not dialect.supports_unicode_statements:
             self.statement = self.unicode_statement.encode(
-                                        self.dialect.encoding)
+                self.dialect.encoding)
         else:
             self.statement = self.unicode_statement
 
@@ -540,15 +540,15 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
 
         if self.isinsert or self.isupdate or self.isdelete:
             self._is_explicit_returning = bool(compiled.statement._returning)
-            self._is_implicit_returning = bool(compiled.returning and \
-                                            not compiled.statement._returning)
+            self._is_implicit_returning = bool(
+                compiled.returning and not compiled.statement._returning)
 
         if not parameters:
             self.compiled_parameters = [compiled.construct_params()]
         else:
             self.compiled_parameters = \
-                        [compiled.construct_params(m, _group_number=grp) for
-                                        grp, m in enumerate(parameters)]
+                [compiled.construct_params(m, _group_number=grp) for
+                 grp, m in enumerate(parameters)]
 
             self.executemany = len(parameters) > 1
 
@@ -582,10 +582,10 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
                     for key in compiled_params:
                         if key in processors:
                             param[dialect._encoder(key)[0]] = \
-                                        processors[key](compiled_params[key])
+                                processors[key](compiled_params[key])
                         else:
                             param[dialect._encoder(key)[0]] = \
-                                    compiled_params[key]
+                                compiled_params[key]
                 else:
                     for key in compiled_params:
                         if key in processors:
@@ -599,7 +599,7 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
 
     @classmethod
     def _init_statement(cls, dialect, connection, dbapi_connection,
-                                                    statement, parameters):
+                        statement, parameters):
         """Initialize execution context for a string SQL statement."""
 
         self = cls.__new__(cls)
@@ -623,12 +623,12 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
                 self.parameters = parameters
             else:
                 self.parameters = [
-                            dict((dialect._encoder(k)[0], d[k]) for k in d)
-                            for d in parameters
-                        ] or [{}]
+                    dict((dialect._encoder(k)[0], d[k]) for k in d)
+                    for d in parameters
+                ] or [{}]
         else:
             self.parameters = [dialect.execute_sequence_format(p)
-                                    for p in parameters]
+                               for p in parameters]
 
         self.executemany = len(parameters) > 1
 
@@ -701,9 +701,9 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
         if type_ is not None:
             # apply type post processors to the result
             proc = type_._cached_result_processor(
-                        self.dialect,
-                        self.cursor.description[0][1]
-                    )
+                self.dialect,
+                self.cursor.description[0][1]
+            )
             if proc:
                 return proc(r)
         return r
@@ -783,8 +783,8 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
             not self._is_explicit_returning and \
             not self.compiled.inline and \
             self.dialect.postfetch_lastrowid and \
-            (not self.inserted_primary_key or \
-                        None in self.inserted_primary_key):
+            (not self.inserted_primary_key or
+             None in self.inserted_primary_key):
 
             table = self.compiled.statement.table
             lastrowid = self.get_lastrowid()
@@ -792,15 +792,15 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
             if autoinc_col is not None:
                 # apply type post processors to the lastrowid
                 proc = autoinc_col.type._cached_result_processor(
-                                        self.dialect, None)
+                    self.dialect, None)
                 if proc is not None:
                     lastrowid = proc(lastrowid)
 
             self.inserted_primary_key = [
                 lastrowid if c is autoinc_col else v
                 for c, v in zip(
-                                    table.primary_key,
-                                    self.inserted_primary_key)
+                    table.primary_key,
+                    self.inserted_primary_key)
             ]
 
     def _fetch_implicit_returning(self, resultproxy):
@@ -839,29 +839,29 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
             return
 
         types = dict(
-                (self.compiled.bind_names[bindparam], bindparam.type)
-                 for bindparam in self.compiled.bind_names)
+            (self.compiled.bind_names[bindparam], bindparam.type)
+            for bindparam in self.compiled.bind_names)
 
         if self.dialect.positional:
             inputsizes = []
             for key in self.compiled.positiontup:
                 typeengine = types[key]
                 dbtype = typeengine.dialect_impl(self.dialect).\
-                                    get_dbapi_type(self.dialect.dbapi)
+                    get_dbapi_type(self.dialect.dbapi)
                 if dbtype is not None and \
-                    (not exclude_types or dbtype not in exclude_types):
+                        (not exclude_types or dbtype not in exclude_types):
                     inputsizes.append(dbtype)
             try:
                 self.cursor.setinputsizes(*inputsizes)
             except Exception as e:
                 self.root_connection._handle_dbapi_exception(
-                                e, None, None, None, self)
+                    e, None, None, None, self)
         else:
             inputsizes = {}
             for key in self.compiled.bind_names.values():
                 typeengine = types[key]
                 dbtype = typeengine.dialect_impl(self.dialect).\
-                                get_dbapi_type(self.dialect.dbapi)
+                    get_dbapi_type(self.dialect.dbapi)
                 if dbtype is not None and \
                         (not exclude_types or dbtype not in exclude_types):
                     if translate:
@@ -873,7 +873,7 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
                 self.cursor.setinputsizes(**inputsizes)
             except Exception as e:
                 self.root_connection._handle_dbapi_exception(
-                                e, None, None, None, self)
+                    e, None, None, None, self)
 
     def _exec_default(self, default, type_):
         if default.is_sequence:
@@ -935,7 +935,7 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
                 del self.current_parameters
         else:
             self.current_parameters = compiled_parameters = \
-                                        self.compiled_parameters[0]
+                self.compiled_parameters[0]
 
             for c in self.compiled.prefetch:
                 if self.isinsert:
@@ -949,10 +949,10 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
 
             if self.isinsert:
                 self.inserted_primary_key = [
-                                self.compiled_parameters[0].get(key_getter(c), None)
-                                        for c in self.compiled.\
-                                                statement.table.primary_key
-                                ]
+                    self.compiled_parameters[0].get(key_getter(c), None)
+                    for c in self.compiled.
+                    statement.table.primary_key
+                ]
 
 
 DefaultDialect.execution_ctx_cls = DefaultExecutionContext
diff --git a/lib/sqlalchemy/engine/interfaces.py b/lib/sqlalchemy/engine/interfaces.py
index e7f43d82192215899070813a14373b8e3777751b..71df29cac16eef02512354209b0b3de2c4206390 100644 (file)
@@ -12,6 +12,7 @@ from .. import util, event
 # backwards compat
 from ..sql.compiler import Compiled, TypeCompiler
 
+
 class Dialect(object):
     """Define the behavior of a specific database and DB-API combination.
 
@@ -153,7 +154,6 @@ class Dialect(object):
 
     _has_events = False
 
-
     def create_connect_args(self, url):
         """Build DB-API compatible connection arguments.
 
@@ -197,7 +197,8 @@ class Dialect(object):
 
         pass
 
-    def reflecttable(self, connection, table, include_columns, exclude_columns):
+    def reflecttable(
+            self, connection, table, include_columns, exclude_columns):
         """Load table description from the database.
 
         Given a :class:`.Connection` and a
@@ -254,7 +255,8 @@ class Dialect(object):
 
         Deprecated.  This method is only called by the default
         implementation of :meth:`.Dialect.get_pk_constraint`.  Dialects should
-        instead implement the :meth:`.Dialect.get_pk_constraint` method directly.
+        instead implement the :meth:`.Dialect.get_pk_constraint` method
+        directly.
 
         """
 
@@ -346,7 +348,8 @@ class Dialect(object):
 
         raise NotImplementedError()
 
-    def get_unique_constraints(self, connection, table_name, schema=None, **kw):
+    def get_unique_constraints(
+            self, connection, table_name, schema=None, **kw):
         """Return information about unique constraints in `table_name`.
 
         Given a string `table_name` and an optional string `schema`, return
@@ -359,7 +362,8 @@ class Dialect(object):
           list of column names in order
 
         \**kw
-          other options passed to the dialect's get_unique_constraints() method.
+          other options passed to the dialect's get_unique_constraints()
+          method.
 
         .. versionadded:: 0.9.0
 
@@ -465,7 +469,6 @@ class Dialect(object):
 
         raise NotImplementedError()
 
-
     def do_commit(self, dbapi_connection):
         """Provide an implementation of ``connection.commit()``, given a
         DB-API connection.
@@ -551,7 +554,7 @@ class Dialect(object):
         raise NotImplementedError()
 
     def do_rollback_twophase(self, connection, xid, is_prepared=True,
-                            recover=False):
+                             recover=False):
         """Rollback a two phase transaction on the given connection.
 
         :param connection: a :class:`.Connection`.
@@ -565,7 +568,7 @@ class Dialect(object):
         raise NotImplementedError()
 
     def do_commit_twophase(self, connection, xid, is_prepared=True,
-                            recover=False):
+                           recover=False):
         """Commit a two phase transaction on the given connection.
 
 
@@ -742,7 +745,6 @@ class ExecutionContext(object):
 
     """
 
-
     def create_cursor(self):
         """Return a new cursor generated from this ExecutionContext's
         connection.
@@ -878,12 +880,13 @@ class Connectable(object):
         raise NotImplementedError()
 
     def _run_visitor(self, visitorcallable, element,
-                                    **kwargs):
+                     **kwargs):
         raise NotImplementedError()
 
     def _execute_clauseelement(self, elem, multiparams=None, params=None):
         raise NotImplementedError()
 
+
 class ExceptionContext(object):
     """Encapsulate information about an error condition in progress.
 
diff --git a/lib/sqlalchemy/engine/reflection.py b/lib/sqlalchemy/engine/reflection.py
index 2fa5dc4946576b5be5d05915ace32f4ddd32a065..012d1d35ddb60dc8b4ecd8ad9df0dd728ea842a9 100644 (file)
@@ -41,14 +41,14 @@ def cache(fn, self, con, *args, **kw):
     if info_cache is None:
         return fn(self, con, *args, **kw)
     key = (
-            fn.__name__,
-            tuple(a for a in args if isinstance(a, util.string_types)),
-            tuple((k, v) for k, v in kw.items() if
-                    isinstance(v,
-                        util.string_types + util.int_types + (float, )
-                    )
-                )
-        )
+        fn.__name__,
+        tuple(a for a in args if isinstance(a, util.string_types)),
+        tuple((k, v) for k, v in kw.items() if
+              isinstance(v,
+                         util.string_types + util.int_types + (float, )
+                         )
+              )
+    )
     ret = info_cache.get(key)
     if ret is None:
         ret = fn(self, con, *args, **kw)
@@ -155,7 +155,7 @@ class Inspector(object):
 
         if hasattr(self.dialect, 'get_schema_names'):
             return self.dialect.get_schema_names(self.bind,
-                                                    info_cache=self.info_cache)
+                                                 info_cache=self.info_cache)
         return []
 
     def get_table_names(self, schema=None, order_by=None):
@@ -188,8 +188,8 @@ class Inspector(object):
         """
 
         if hasattr(self.dialect, 'get_table_names'):
-            tnames = self.dialect.get_table_names(self.bind,
-            schema, info_cache=self.info_cache)
+            tnames = self.dialect.get_table_names(
+                self.bind, schema, info_cache=self.info_cache)
         else:
             tnames = self.engine.table_names(schema)
         if order_by == 'foreign_key':
@@ -230,7 +230,7 @@ class Inspector(object):
         """
 
         return self.dialect.get_view_names(self.bind, schema,
-                                                  info_cache=self.info_cache)
+                                           info_cache=self.info_cache)
 
     def get_view_definition(self, view_name, schema=None):
         """Return definition for `view_name`.
@@ -293,8 +293,8 @@ class Inspector(object):
         """
 
         return self.dialect.get_pk_constraint(self.bind, table_name, schema,
-                                               info_cache=self.info_cache,
-                                               **kw)['constrained_columns']
+                                              info_cache=self.info_cache,
+                                              **kw)['constrained_columns']
 
     def get_pk_constraint(self, table_name, schema=None, **kw):
         """Return information about primary key constraint on `table_name`.
@@ -352,8 +352,8 @@ class Inspector(object):
         """
 
         return self.dialect.get_foreign_keys(self.bind, table_name, schema,
-                                                info_cache=self.info_cache,
-                                                **kw)
+                                             info_cache=self.info_cache,
+                                             **kw)
 
     def get_indexes(self, table_name, schema=None, **kw):
         """Return information about indexes in `table_name`.
@@ -380,8 +380,8 @@ class Inspector(object):
         """
 
         return self.dialect.get_indexes(self.bind, table_name,
-                                                  schema,
-                                            info_cache=self.info_cache, **kw)
+                                        schema,
+                                        info_cache=self.info_cache, **kw)
 
     def get_unique_constraints(self, table_name, schema=None, **kw):
         """Return information about unique constraints in `table_name`.
@@ -446,7 +446,8 @@ class Inspector(object):
         )
 
         # reflect table options, like mysql_engine
-        tbl_opts = self.get_table_options(table_name, schema, **table.dialect_kwargs)
+        tbl_opts = self.get_table_options(
+            table_name, schema, **table.dialect_kwargs)
         if tbl_opts:
             # add additional kwargs to the Table if the dialect
             # returned them
@@ -461,7 +462,8 @@ class Inspector(object):
         found_table = False
         cols_by_orig_name = {}
 
-        for col_d in self.get_columns(table_name, schema, **table.dialect_kwargs):
+        for col_d in self.get_columns(
+                table_name, schema, **table.dialect_kwargs):
             found_table = True
             orig_name = col_d['name']
 
@@ -503,7 +505,7 @@ class Inspector(object):
                 colargs.append(sequence)
 
             cols_by_orig_name[orig_name] = col = \
-                        sa_schema.Column(name, coltype, *colargs, **col_kw)
+                sa_schema.Column(name, coltype, *colargs, **col_kw)
 
             if col.key in table.primary_key:
                 col.primary_key = True
@@ -512,7 +514,8 @@ class Inspector(object):
         if not found_table:
             raise exc.NoSuchTableError(table.name)
 
-        pk_cons = self.get_pk_constraint(table_name, schema, **table.dialect_kwargs)
+        pk_cons = self.get_pk_constraint(
+            table_name, schema, **table.dialect_kwargs)
         if pk_cons:
             pk_cols = [
                 cols_by_orig_name[pk]
@@ -527,18 +530,19 @@ class Inspector(object):
             # its column collection
             table.primary_key._reload(pk_cols)
 
-        fkeys = self.get_foreign_keys(table_name, schema, **table.dialect_kwargs)
+        fkeys = self.get_foreign_keys(
+            table_name, schema, **table.dialect_kwargs)
         for fkey_d in fkeys:
             conname = fkey_d['name']
             # look for columns by orig name in cols_by_orig_name,
             # but support columns that are in-Python only as fallback
             constrained_columns = [
-                                    cols_by_orig_name[c].key
-                                    if c in cols_by_orig_name else c
-                                    for c in fkey_d['constrained_columns']
-                                ]
+                cols_by_orig_name[c].key
+                if c in cols_by_orig_name else c
+                for c in fkey_d['constrained_columns']
+            ]
             if exclude_columns and set(constrained_columns).intersection(
-                                exclude_columns):
+                    exclude_columns):
                 continue
             referred_schema = fkey_d['referred_schema']
             referred_table = fkey_d['referred_table']
@@ -576,7 +580,7 @@ class Inspector(object):
             unique = index_d['unique']
             flavor = index_d.get('type', 'unknown type')
             if include_columns and \
-                            not set(columns).issubset(include_columns):
+                    not set(columns).issubset(include_columns):
                 util.warn(
                     "Omitting %s KEY for (%s), key covers omitted columns." %
                     (flavor, ', '.join(columns)))
@@ -584,8 +588,8 @@ class Inspector(object):
             # look for columns by orig name in cols_by_orig_name,
             # but support columns that are in-Python only as fallback
             sa_schema.Index(name, *[
-                                cols_by_orig_name[c] if c in cols_by_orig_name
-                                        else table.c[c]
-                                for c in columns
-                        ],
-                         **dict(unique=unique))
+                cols_by_orig_name[c] if c in cols_by_orig_name
+                else table.c[c]
+                for c in columns
+            ],
+                **dict(unique=unique))
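
The :meth:`.Inspector.get_columns`, :meth:`.Inspector.get_pk_constraint` and
:meth:`.Inspector.get_foreign_keys` calls re-wrapped above are the same entry
points used for ad-hoc schema inspection.  A minimal sketch, assuming an
in-memory SQLite engine and an illustrative ``user`` table::

    from sqlalchemy import create_engine
    from sqlalchemy.engine import reflection

    engine = create_engine("sqlite://")
    engine.execute(
        "CREATE TABLE user (id INTEGER PRIMARY KEY, name VARCHAR(50))")

    insp = reflection.Inspector.from_engine(engine)
    print(insp.get_table_names())          # ['user']
    print(insp.get_columns("user"))        # dicts with name, type, nullable, ...
    print(insp.get_pk_constraint("user"))  # {'constrained_columns': ['id'], ...}
    print(insp.get_foreign_keys("user"))   # []
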
index 33676b2aa6d3949c71c51069fd4c989917645674..e7d67c060b84ce3cc3994473ddc54a73dec21a99 100644 (file)
@@ -9,7 +9,6 @@
 and :class:`.RowProxy`."""
 
 
-
 from .. import exc, util
 from ..sql import expression, sqltypes
 import collections
@@ -75,7 +74,7 @@ except ImportError:
                 if isinstance(key, slice):
                     l = []
                     for processor, value in zip(self._processors[key],
-                                                 self._row[key]):
+                                                self._row[key]):
                         if processor is None:
                             l.append(value)
                         else:
@@ -85,8 +84,8 @@ except ImportError:
                     raise
             if index is None:
                 raise exc.InvalidRequestError(
-                        "Ambiguous column name '%s' in result set! "
-                        "try 'use_labels' option on select statement." % key)
+                    "Ambiguous column name '%s' in result set! "
+                    "try 'use_labels' option on select statement." % key)
             if processor is not None:
                 return processor(self._row[index])
             else:
@@ -219,15 +218,14 @@ class ResultMetaData(object):
 
             if context.result_map:
                 try:
-                    name, obj, type_ = context.result_map[colname
-                                                    if self.case_sensitive
-                                                    else colname.lower()]
+                    name, obj, type_ = context.result_map[
+                        colname if self.case_sensitive else colname.lower()]
                 except KeyError:
                     name, obj, type_ = \
                         colname, None, typemap.get(coltype, sqltypes.NULLTYPE)
             else:
                 name, obj, type_ = \
-                        colname, None, typemap.get(coltype, sqltypes.NULLTYPE)
+                    colname, None, typemap.get(coltype, sqltypes.NULLTYPE)
 
             processor = context.get_result_processor(type_, colname, coltype)
 
@@ -240,9 +238,9 @@ class ResultMetaData(object):
 
             # populate primary keymap, looking for conflicts.
             if primary_keymap.setdefault(
-                                name if self.case_sensitive
-                                else name.lower(),
-                                rec) is not rec:
+                    name if self.case_sensitive
+                    else name.lower(),
+                    rec) is not rec:
                 # place a record that doesn't have the "index" - this
                 # is interpreted later as an AmbiguousColumnError,
                 # but only when actually accessed.   Columns
@@ -250,8 +248,8 @@ class ResultMetaData(object):
                 # aren't used; integer access is always
                 # unambiguous.
                 primary_keymap[name
-                                if self.case_sensitive
-                                else name.lower()] = rec = (None, obj, None)
+                               if self.case_sensitive
+                               else name.lower()] = rec = (None, obj, None)
 
             self.keys.append(colname)
             if obj:
@@ -263,7 +261,7 @@ class ResultMetaData(object):
                     #    keymap[o] = (None, obj, None)
 
             if translate_colname and \
-                untranslated:
+                    untranslated:
                 keymap[untranslated] = rec
 
         # overwrite keymap values with those of the
@@ -275,7 +273,7 @@ class ResultMetaData(object):
                 "Col %r", tuple(x[0] for x in metadata))
 
     @util.pending_deprecation("0.8", "sqlite dialect uses "
-                    "_translate_colname() now")
+                              "_translate_colname() now")
     def _set_keymap_synonym(self, name, origname):
         """Set a synonym for the given name.
 
@@ -285,8 +283,8 @@ class ResultMetaData(object):
 
         """
         rec = (processor, obj, i) = self._keymap[origname if
-                                                self.case_sensitive
-                                                else origname.lower()]
+                                                 self.case_sensitive
+                                                 else origname.lower()]
         if self._keymap.setdefault(name, rec) is not rec:
             self._keymap[name] = (processor, obj, None)
 
@@ -301,26 +299,26 @@ class ResultMetaData(object):
         # pickle/unpickle roundtrip
         elif isinstance(key, expression.ColumnElement):
             if key._label and (
-                            key._label
-                            if self.case_sensitive
-                            else key._label.lower()) in map:
+                    key._label
+                    if self.case_sensitive
+                    else key._label.lower()) in map:
                 result = map[key._label
-                            if self.case_sensitive
-                            else key._label.lower()]
+                             if self.case_sensitive
+                             else key._label.lower()]
             elif hasattr(key, 'name') and (
-                                    key.name
-                                    if self.case_sensitive
-                                    else key.name.lower()) in map:
+                    key.name
+                    if self.case_sensitive
+                    else key.name.lower()) in map:
                 # match is only on name.
                 result = map[key.name
-                            if self.case_sensitive
-                            else key.name.lower()]
+                             if self.case_sensitive
+                             else key.name.lower()]
             # search extra hard to make sure this
             # isn't a column/label name overlap.
             # this check isn't currently available if the row
             # was unpickled.
             if result is not None and \
-                 result[1] is not None:
+                    result[1] is not None:
                 for obj in result[1]:
                     if key._compare_name_for_result(obj):
                         break
@@ -330,7 +328,7 @@ class ResultMetaData(object):
             if raiseerr:
                 raise exc.NoSuchColumnError(
                     "Could not locate column in row for column '%s'" %
-                        expression._string_or_unprintable(key))
+                    expression._string_or_unprintable(key))
             else:
                 return None
         else:
@@ -401,7 +399,7 @@ class ResultProxy(object):
         self.cursor = self._saved_cursor = context.cursor
         self.connection = context.root_connection
         self._echo = self.connection._echo and \
-                        context.engine._should_log_debug()
+            context.engine._should_log_debug()
         self._init_metadata()
 
     def _init_metadata(self):
@@ -461,7 +459,7 @@ class ResultProxy(object):
             return self.context.rowcount
         except Exception as e:
             self.connection._handle_dbapi_exception(
-                              e, None, None, self.cursor, self.context)
+                e, None, None, self.cursor, self.context)
 
     @property
     def lastrowid(self):
@@ -483,8 +481,8 @@ class ResultProxy(object):
             return self._saved_cursor.lastrowid
         except Exception as e:
             self.connection._handle_dbapi_exception(
-                                 e, None, None,
-                                 self._saved_cursor, self.context)
+                e, None, None,
+                self._saved_cursor, self.context)
 
     @property
     def returns_rows(self):
@@ -540,7 +538,7 @@ class ResultProxy(object):
             self.closed = True
             self.connection._safe_close_cursor(self.cursor)
             if _autoclose_connection and \
-                self.connection.should_close_with_result:
+                    self.connection.should_close_with_result:
                 self.connection.close()
             # allow consistent errors
             self.cursor = None
@@ -582,17 +580,17 @@ class ResultProxy(object):
 
         if not self.context.compiled:
             raise exc.InvalidRequestError(
-                        "Statement is not a compiled "
-                        "expression construct.")
+                "Statement is not a compiled "
+                "expression construct.")
         elif not self.context.isinsert:
             raise exc.InvalidRequestError(
-                        "Statement is not an insert() "
-                        "expression construct.")
+                "Statement is not an insert() "
+                "expression construct.")
         elif self.context._is_explicit_returning:
             raise exc.InvalidRequestError(
-                        "Can't call inserted_primary_key "
-                        "when returning() "
-                        "is used.")
+                "Can't call inserted_primary_key "
+                "when returning() "
+                "is used.")
 
         return self.context.inserted_primary_key
 
@@ -607,12 +605,12 @@ class ResultProxy(object):
         """
         if not self.context.compiled:
             raise exc.InvalidRequestError(
-                        "Statement is not a compiled "
-                        "expression construct.")
+                "Statement is not a compiled "
+                "expression construct.")
         elif not self.context.isupdate:
             raise exc.InvalidRequestError(
-                        "Statement is not an update() "
-                        "expression construct.")
+                "Statement is not an update() "
+                "expression construct.")
         elif self.context.executemany:
             return self.context.compiled_parameters
         else:
@@ -629,12 +627,12 @@ class ResultProxy(object):
         """
         if not self.context.compiled:
             raise exc.InvalidRequestError(
-                        "Statement is not a compiled "
-                        "expression construct.")
+                "Statement is not a compiled "
+                "expression construct.")
         elif not self.context.isinsert:
             raise exc.InvalidRequestError(
-                        "Statement is not an insert() "
-                        "expression construct.")
+                "Statement is not an insert() "
+                "expression construct.")
         elif self.context.executemany:
             return self.context.compiled_parameters
         else:
@@ -682,12 +680,12 @@ class ResultProxy(object):
 
         if not self.context.compiled:
             raise exc.InvalidRequestError(
-                        "Statement is not a compiled "
-                        "expression construct.")
+                "Statement is not a compiled "
+                "expression construct.")
         elif not self.context.isinsert and not self.context.isupdate:
             raise exc.InvalidRequestError(
-                        "Statement is not an insert() or update() "
-                        "expression construct.")
+                "Statement is not an insert() or update() "
+                "expression construct.")
         return self.context.postfetch_cols
 
     def prefetch_cols(self):
@@ -704,12 +702,12 @@ class ResultProxy(object):
 
         if not self.context.compiled:
             raise exc.InvalidRequestError(
-                        "Statement is not a compiled "
-                        "expression construct.")
+                "Statement is not a compiled "
+                "expression construct.")
         elif not self.context.isinsert and not self.context.isupdate:
             raise exc.InvalidRequestError(
-                        "Statement is not an insert() or update() "
-                        "expression construct.")
+                "Statement is not an insert() or update() "
+                "expression construct.")
         return self.context.prefetch_cols
 
     def supports_sane_rowcount(self):
@@ -754,8 +752,8 @@ class ResultProxy(object):
     def _non_result(self):
         if self._metadata is None:
             raise exc.ResourceClosedError(
-            "This result object does not return rows. "
-            "It has been closed automatically.",
+                "This result object does not return rows. "
+                "It has been closed automatically.",
             )
         else:
             raise exc.ResourceClosedError("This result object is closed.")
@@ -785,8 +783,8 @@ class ResultProxy(object):
             return l
         except Exception as e:
             self.connection._handle_dbapi_exception(
-                                    e, None, None,
-                                    self.cursor, self.context)
+                e, None, None,
+                self.cursor, self.context)
 
     def fetchmany(self, size=None):
         """Fetch many rows, just like DB-API
@@ -804,8 +802,8 @@ class ResultProxy(object):
             return l
         except Exception as e:
             self.connection._handle_dbapi_exception(
-                                    e, None, None,
-                                    self.cursor, self.context)
+                e, None, None,
+                self.cursor, self.context)
 
     def fetchone(self):
         """Fetch one row, just like DB-API ``cursor.fetchone()``.
@@ -823,8 +821,8 @@ class ResultProxy(object):
                 return None
         except Exception as e:
             self.connection._handle_dbapi_exception(
-                                    e, None, None,
-                                    self.cursor, self.context)
+                e, None, None,
+                self.cursor, self.context)
 
     def first(self):
         """Fetch the first row and then close the result set unconditionally.
@@ -839,8 +837,8 @@ class ResultProxy(object):
             row = self._fetchone_impl()
         except Exception as e:
             self.connection._handle_dbapi_exception(
-                                    e, None, None,
-                                    self.cursor, self.context)
+                e, None, None,
+                self.cursor, self.context)
 
         try:
             if row is not None:
@@ -937,6 +935,7 @@ class FullyBufferedResultProxy(ResultProxy):
     such as MSSQL INSERT...OUTPUT after an autocommit.
 
     """
+
     def _init_metadata(self):
         super(FullyBufferedResultProxy, self)._init_metadata()
         self.__rowbuffer = self._buffer_rows()
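
For orientation, the :class:`.ResultProxy` methods reindented above are the
ones driven by everyday statement execution.  A minimal sketch, assuming an
in-memory SQLite engine::

    from sqlalchemy import create_engine

    engine = create_engine("sqlite://")
    engine.execute("CREATE TABLE t (id INTEGER PRIMARY KEY, x VARCHAR(10))")
    engine.execute("INSERT INTO t (x) VALUES ('a')")
    engine.execute("INSERT INTO t (x) VALUES ('b')")

    result = engine.execute("SELECT id, x FROM t ORDER BY id")
    row = result.fetchone()    # a RowProxy; integer and string keys both work
    print(row[0], row["x"])    # 1 a
    print(result.fetchall())   # the remaining rows
    result.close()             # normally implicit once rows are exhausted

As the hunks above show, errors raised inside any of the fetch methods are
routed through ``_handle_dbapi_exception()`` rather than escaping raw.
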
index 23d24e9797b3256c84aedf37f003a7e69908e0f1..38206be89772efef4ff4315e7d41d254df46988f 100644 (file)
@@ -91,8 +91,8 @@ class DefaultEngineStrategy(EngineStrategy):
                 except dialect.dbapi.Error as e:
                     invalidated = dialect.is_disconnect(e, None, None)
                     util.raise_from_cause(
-                        exc.DBAPIError.instance(None, None,
-                            e, dialect.dbapi.Error,
+                        exc.DBAPIError.instance(
+                            None, None, e, dialect.dbapi.Error,
                             connection_invalidated=invalidated
                         )
                     )
@@ -161,7 +161,7 @@ class DefaultEngineStrategy(EngineStrategy):
 
             def first_connect(dbapi_connection, connection_record):
                 c = base.Connection(engine, connection=dbapi_connection,
-                            _has_events=False)
+                                    _has_events=False)
                 dialect.initialize(c)
             event.listen(pool, 'first_connect', first_connect, once=True)
 
@@ -246,11 +246,11 @@ class MockEngineStrategy(EngineStrategy):
                 self.dialect, self, **kwargs).traverse_single(entity)
 
         def _run_visitor(self, visitorcallable, element,
-                                        connection=None,
-                                        **kwargs):
+                         connection=None,
+                         **kwargs):
             kwargs['checkfirst'] = False
             visitorcallable(self.dialect, self,
-                                **kwargs).traverse_single(element)
+                            **kwargs).traverse_single(element)
 
         def execute(self, object, *multiparams, **params):
             raise NotImplementedError()
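
The ``MockEngineStrategy`` edited above backs the documented "mock" engine
recipe for rendering DDL as strings without a live database; a sketch, with
``dump`` as an illustrative executor name::

    from sqlalchemy import (
        Column, Integer, MetaData, String, Table, create_engine)

    metadata = MetaData()
    Table('t', metadata,
          Column('id', Integer, primary_key=True),
          Column('name', String(50)))

    def dump(sql, *multiparams, **params):
        # each DDL element arrives here instead of at a real connection
        print(sql.compile(dialect=engine.dialect))

    engine = create_engine('postgresql://', strategy='mock', executor=dump)
    metadata.create_all(engine)   # prints CREATE TABLE t (...) statements

Note how ``_run_visitor`` above forces ``checkfirst=False``: a mock engine has
no way to query the database for table existence.
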
index eb923e96bed23f2d929e67ae355b74ac201a8119..637523a0e887cbd8e986c29390946edf022d2da3 100644 (file)
@@ -95,20 +95,20 @@ class TLEngine(base.Engine):
 
     def prepare(self):
         if not hasattr(self._connections, 'trans') or \
-            not self._connections.trans:
+                not self._connections.trans:
             return
         self._connections.trans[-1].prepare()
 
     def commit(self):
         if not hasattr(self._connections, 'trans') or \
-            not self._connections.trans:
+                not self._connections.trans:
             return
         trans = self._connections.trans.pop(-1)
         trans.commit()
 
     def rollback(self):
         if not hasattr(self._connections, 'trans') or \
-            not self._connections.trans:
+                not self._connections.trans:
             return
         trans = self._connections.trans.pop(-1)
         trans.rollback()
@@ -120,8 +120,8 @@ class TLEngine(base.Engine):
     @property
     def closed(self):
         return not hasattr(self._connections, 'conn') or \
-                self._connections.conn() is None or \
-                self._connections.conn().closed
+            self._connections.conn() is None or \
+            self._connections.conn().closed
 
     def close(self):
         if not self.closed:
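
The ``prepare()``/``commit()``/``rollback()`` guards reindented above make
those calls safe no-ops when no thread-local transaction is pending.  A sketch
of the strategy in use, assuming an in-memory SQLite engine::

    from sqlalchemy import create_engine

    engine = create_engine('sqlite://', strategy='threadlocal')

    engine.begin()    # transaction scoped to the current thread
    try:
        engine.execute("CREATE TABLE t (id INTEGER)")
        engine.execute("INSERT INTO t (id) VALUES (1)")
        engine.commit()
    except:
        engine.rollback()   # quietly returns if nothing is pending
        raise
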
index 7d61968b9ede714783088b197fd10eaa8469c84a..e3629613fe7aba375b0f92721efd30194be5e279 100644 (file)
@@ -25,8 +25,8 @@ class URL(object):
     Represent the components of a URL used to connect to a database.
 
     This object is suitable to be passed directly to a
-    :func:`~sqlalchemy.create_engine` call.  The fields of the URL are parsed from a
-    string by the :func:`.make_url` function.  the string
+    :func:`~sqlalchemy.create_engine` call.  The fields of the URL are parsed
+    from a string by the :func:`.make_url` function.  The string
     format of the URL is an RFC-1738-style string.
 
     All initialization parameters are available as public attributes.
@@ -119,8 +119,8 @@ class URL(object):
         # would return a module with 'dialect' as the
         # actual class
         if hasattr(cls, 'dialect') and \
-            isinstance(cls.dialect, type) and \
-            issubclass(cls.dialect, Dialect):
+                isinstance(cls.dialect, type) and \
+                issubclass(cls.dialect, Dialect):
             return cls.dialect
         else:
             return cls
@@ -189,7 +189,8 @@ def _parse_rfc1738_args(name):
         if components['database'] is not None:
             tokens = components['database'].split('?', 2)
             components['database'] = tokens[0]
-            query = (len(tokens) > 1 and dict(util.parse_qsl(tokens[1]))) or None
+            query = (
+                len(tokens) > 1 and dict(util.parse_qsl(tokens[1]))) or None
             if util.py2k and query is not None:
                 query = dict((k.encode('ascii'), query[k]) for k in query)
         else:
@@ -215,9 +216,11 @@ def _parse_rfc1738_args(name):
 def _rfc_1738_quote(text):
     return re.sub(r'[:@/]', lambda m: "%%%X" % ord(m.group(0)), text)
 
+
 def _rfc_1738_unquote(text):
     return util.unquote(text)
 
+
 def _parse_keyvalue_args(name):
     m = re.match(r'(\w+)://(.*)', name)
     if m is not None:
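
``_parse_rfc1738_args`` is what backs the public :func:`.make_url` helper,
including the query-string split rewrapped above.  A quick illustration with a
made-up URL::

    from sqlalchemy.engine.url import make_url

    url = make_url("postgresql://scott:tiger@localhost:5432/test?sslmode=require")
    print(url.drivername)      # 'postgresql'
    print(url.username)        # 'scott'
    print(url.host, url.port)  # localhost 5432
    print(url.database)        # 'test'
    print(url.query)           # {'sslmode': 'require'}
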
index 6a6fe6ee1d11376a4e20d992db8ed794af0139c9..d9eb1df10890dbb588d08ad930e383cea7b53701 100644 (file)
@@ -7,6 +7,7 @@
 
 from .. import util
 
+
 def connection_memoize(key):
     """Decorator, memoize a function in a connection.info stash.
 
@@ -61,7 +62,7 @@ def py_fallback():
                 return [[zero]]
         else:
             if hasattr(multiparams[0], '__iter__') and \
-                not hasattr(multiparams[0], 'strip'):
+                    not hasattr(multiparams[0], 'strip'):
                 return multiparams
             else:
                 return [multiparams]
index 3f7a4830486617870d085d9d24e59613f2efb8b9..42bbbfc0fb56ed622fa6fc4b84bede996ec09fc6 100644 (file)
@@ -12,6 +12,7 @@ from .pool import Pool
 from .engine import Connectable, Engine, Dialect
 from .sql.base import SchemaEventTarget
 
+
 class DDLEvents(event.Events):
     """
     Define event listeners for schema objects,
@@ -221,7 +222,6 @@ class DDLEvents(event.Events):
         """
 
 
-
 class PoolEvents(event.Events):
     """Available events for :class:`.Pool`.
 
@@ -291,9 +291,9 @@ class PoolEvents(event.Events):
         :class:`.Pool` refers to a single "creator" function (which in terms
         of a :class:`.Engine` refers to the URL and connection options used),
         it is typically valid to make observations about a single connection
-        that can be safely assumed to be valid about all subsequent connections,
-        such as the database version, the server and client encoding settings,
-        collation settings, and many others.
+        that can be safely assumed to be valid about all subsequent
+        connections, such as the database version, the server and client
+        encoding settings, collation settings, and many others.
 
         :param dbapi_connection: a DBAPI connection.
 
@@ -311,8 +311,8 @@ class PoolEvents(event.Events):
          DBAPI connection.
 
         :param connection_proxy: the :class:`._ConnectionFairy` object which
-          will proxy the public interface of the DBAPI connection for the lifespan
-          of the checkout.
+          will proxy the public interface of the DBAPI connection for the
+          lifespan of the checkout.
 
         If you raise a :class:`~sqlalchemy.exc.DisconnectionError`, the current
         connection will be disposed and a fresh connection retrieved.
@@ -372,8 +372,8 @@ class PoolEvents(event.Events):
 
         This event is called any time the :meth:`._ConnectionRecord.invalidate`
         method is invoked, either from API usage or via "auto-invalidation".
-        The event occurs before a final attempt to call ``.close()`` on the connection
-        occurs.
+        The event occurs before a final attempt to call ``.close()`` on the
+        connection occurs.
 
         :param dbapi_connection: a DBAPI connection.
 
@@ -467,7 +467,6 @@ class ConnectionEvents(event.Events):
     _target_class_doc = "SomeEngine"
     _dispatch_target = Connectable
 
-
     @classmethod
     def _listen(cls, event_key, retval=False):
         target, identifier, fn = \
@@ -480,7 +479,7 @@ class ConnectionEvents(event.Events):
                 orig_fn = fn
 
                 def wrap_before_execute(conn, clauseelement,
-                                                multiparams, params):
+                                        multiparams, params):
                     orig_fn(conn, clauseelement, multiparams, params)
                     return clauseelement, multiparams, params
                 fn = wrap_before_execute
@@ -488,19 +487,20 @@ class ConnectionEvents(event.Events):
                 orig_fn = fn
 
                 def wrap_before_cursor_execute(conn, cursor, statement,
-                        parameters, context, executemany):
+                                               parameters, context,
+                                               executemany):
                     orig_fn(conn, cursor, statement,
-                        parameters, context, executemany)
+                            parameters, context, executemany)
                     return statement, parameters
                 fn = wrap_before_cursor_execute
         elif retval and \
             identifier not in ('before_execute',
-                                    'before_cursor_execute', 'handle_error'):
+                               'before_cursor_execute', 'handle_error'):
             raise exc.ArgumentError(
-                    "Only the 'before_execute', "
-                    "'before_cursor_execute' and 'handle_error' engine "
-                    "event listeners accept the 'retval=True' "
-                    "argument.")
+                "Only the 'before_execute', "
+                "'before_cursor_execute' and 'handle_error' engine "
+                "event listeners accept the 'retval=True' "
+                "argument.")
         event_key.with_wrapper(fn).base_listen()
 
     def before_execute(self, conn, clauseelement, multiparams, params):
@@ -546,7 +546,7 @@ class ConnectionEvents(event.Events):
         """
 
     def before_cursor_execute(self, conn, cursor, statement,
-                        parameters, context, executemany):
+                              parameters, context, executemany):
         """Intercept low-level cursor execute() events before execution,
         receiving the string
         SQL statement and DBAPI-specific parameter list to be invoked
@@ -588,7 +588,7 @@ class ConnectionEvents(event.Events):
         """
 
     def after_cursor_execute(self, conn, cursor, statement,
-                        parameters, context, executemany):
+                             parameters, context, executemany):
         """Intercept low-level cursor execute() events after execution.
 
         :param conn: :class:`.Connection` object
@@ -607,7 +607,7 @@ class ConnectionEvents(event.Events):
         """
 
     def dbapi_error(self, conn, cursor, statement, parameters,
-                        context, exception):
+                    context, exception):
         """Intercept a raw DBAPI error.
 
         This event is called with the DBAPI exception instance
@@ -750,8 +750,8 @@ class ConnectionEvents(event.Events):
         It also differs from the :meth:`.PoolEvents.checkout` event
         in that it is specific to the :class:`.Connection` object, not the
         DBAPI connection that :meth:`.PoolEvents.checkout` deals with, although
-        this DBAPI connection is available here via the :attr:`.Connection.connection`
-        attribute.  But note there can in fact
+        this DBAPI connection is available here via the
+        :attr:`.Connection.connection` attribute.  But note there can in fact
         be multiple :meth:`.PoolEvents.checkout` events within the lifespan
         of a single :class:`.Connection` object, if that :class:`.Connection`
         is invalidated and re-established.  There can also be multiple
@@ -773,8 +773,8 @@ class ConnectionEvents(event.Events):
             :meth:`.PoolEvents.checkout` the lower-level pool checkout event
             for an individual DBAPI connection
 
-            :meth:`.ConnectionEvents.set_connection_execution_options` - a copy of a
-            :class:`.Connection` is also made when the
+            :meth:`.ConnectionEvents.set_connection_execution_options` - a copy
+            of a :class:`.Connection` is also made when the
             :meth:`.Connection.execution_options` method is called.
 
         """
@@ -828,7 +828,8 @@ class ConnectionEvents(event.Events):
         .. seealso::
 
             :meth:`.ConnectionEvents.set_connection_execution_options` - event
-            which is called when :meth:`.Connection.execution_options` is called.
+            which is called when :meth:`.Connection.execution_options` is
+            called.
 
         """
 
@@ -941,10 +942,10 @@ class DialectEvents(event.Events):
 
         :class:`.DialectEvents` hooks should be considered **semi-public**
         and experimental.
-        These hooks are not for general use and are only for those situations where
-        intricate re-statement of DBAPI mechanics must be injected onto an existing
-        dialect.   For general-use statement-interception events, please
-        use the :class:`.ConnectionEvents` interface.
+        These hooks are not for general use and are only for those situations
+        where intricate re-statement of DBAPI mechanics must be injected onto
+        an existing dialect.  For general-use statement-interception events,
+        please use the :class:`.ConnectionEvents` interface.
 
     .. seealso::
 
@@ -1010,4 +1011,3 @@ class DialectEvents(event.Events):
         place within the event handler.
 
         """
-
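
The ``wrap_before_cursor_execute`` shim above is what lets a listener
registered with ``retval=True`` replace the statement and parameters; without
``retval``, the wrapper passes them through unchanged.  A minimal sketch::

    from sqlalchemy import create_engine, event

    engine = create_engine("sqlite://")

    @event.listens_for(engine, "before_cursor_execute", retval=True)
    def add_comment(conn, cursor, statement, parameters, context, executemany):
        # the returned pair replaces what is sent to the DBAPI cursor
        return "/* traced */ " + statement, parameters

    engine.execute("SELECT 1")   # emits "/* traced */ SELECT 1"
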
index 965e5569212e8a40e2f6cf1592bb189f6de25cbd..7d333fc01efd62e11f769199545aab9640ed92a8 100644 (file)
@@ -27,10 +27,12 @@ class ArgumentError(SQLAlchemyError):
 
     """
 
+
 class NoSuchModuleError(ArgumentError):
     """Raised when a dynamically-loaded module (usually a database dialect)
     of a particular name cannot be located."""
 
+
 class NoForeignKeysError(ArgumentError):
     """Raised when no foreign keys can be located between two selectables
     during a join."""
@@ -70,12 +72,13 @@ class CircularDependencyError(SQLAlchemyError):
 
     def __reduce__(self):
         return self.__class__, (None, self.cycles,
-                            self.edges, self.args[0])
+                                self.edges, self.args[0])
 
 
 class CompileError(SQLAlchemyError):
     """Raised when an error occurs during SQL compilation"""
 
+
 class UnsupportedCompilationError(CompileError):
     """Raised when an operation is not supported by the given compiler.
 
@@ -86,8 +89,9 @@ class UnsupportedCompilationError(CompileError):
 
     def __init__(self, compiler, element_type):
         super(UnsupportedCompilationError, self).__init__(
-                    "Compiler %r can't render element of type %s" %
-                                (compiler, element_type))
+            "Compiler %r can't render element of type %s" %
+            (compiler, element_type))
+
 
 class IdentifierError(SQLAlchemyError):
     """Raised when a schema name is beyond the max character limit"""
@@ -160,7 +164,7 @@ class NoReferencedColumnError(NoReferenceError):
 
     def __reduce__(self):
         return self.__class__, (self.args[0], self.table_name,
-                            self.column_name)
+                                self.column_name)
 
 
 class NoSuchTableError(InvalidRequestError):
@@ -237,11 +241,11 @@ class StatementError(SQLAlchemyError):
         params_repr = util._repr_params(self.params, 10)
 
         return ' '.join([
-                            "(%s)" % det for det in self.detail
-                        ] + [
-                            SQLAlchemyError.__str__(self),
-                             repr(self.statement), repr(params_repr)
-                        ])
+            "(%s)" % det for det in self.detail
+            ] + [
+                SQLAlchemyError.__str__(self),
+                repr(self.statement), repr(params_repr)
+            ])
 
     def __unicode__(self):
         return self.__str__()
@@ -272,9 +276,8 @@ class DBAPIError(StatementError):
 
     @classmethod
     def instance(cls, statement, params,
-                        orig,
-                        dbapi_base_err,
-                        connection_invalidated=False):
+                 orig, dbapi_base_err,
+                 connection_invalidated=False):
         # Don't ever wrap these, just return them directly as if
         # DBAPIError didn't exist.
         if isinstance(orig, (KeyboardInterrupt, SystemExit, DontWrapMixin)):
@@ -299,7 +302,7 @@ class DBAPIError(StatementError):
 
     def __reduce__(self):
         return self.__class__, (self.statement, self.params,
-                    self.orig, self.connection_invalidated)
+                                self.orig, self.connection_invalidated)
 
     def __init__(self, statement, params, orig, connection_invalidated=False):
         try:
@@ -309,11 +312,11 @@ class DBAPIError(StatementError):
         except Exception as e:
             text = 'Error in str() of DB-API-generated exception: ' + str(e)
         StatementError.__init__(
-                self,
-                '(%s) %s' % (orig.__class__.__name__, text),
-                statement,
-                params,
-                orig
+            self,
+            '(%s) %s' % (orig.__class__.__name__, text),
+            statement,
+            params,
+            orig
         )
         self.connection_invalidated = connection_invalidated
 
index 92816310ae9021c536c1f612452d941a7096c4eb..a987ab413440eed58ceefaa4928adf4718354900 100644 (file)
@@ -85,13 +85,13 @@ ASSOCIATION_PROXY = util.symbol('ASSOCIATION_PROXY')
 
 """
 
+
 class AssociationProxy(interfaces._InspectionAttr):
     """A descriptor that presents a read/write view of an object attribute."""
 
     is_attribute = False
     extension_type = ASSOCIATION_PROXY
 
-
     def __init__(self, target_collection, attr, creator=None,
                  getset_factory=None, proxy_factory=None,
                  proxy_bulk_set=None):
@@ -230,7 +230,7 @@ class AssociationProxy(interfaces._InspectionAttr):
     @util.memoized_property
     def _value_is_scalar(self):
         return not self._get_property().\
-                    mapper.get_property(self.value_attr).uselist
+            mapper.get_property(self.value_attr).uselist
 
     @util.memoized_property
     def _target_is_object(self):
@@ -349,8 +349,8 @@ class AssociationProxy(interfaces._InspectionAttr):
             proxy.update(values)
         else:
             raise exc.ArgumentError(
-               'no proxy_bulk_set supplied for custom '
-               'collection_class implementation')
+                'no proxy_bulk_set supplied for custom '
+                'collection_class implementation')
 
     @property
     def _comparator(self):
@@ -378,12 +378,12 @@ class AssociationProxy(interfaces._InspectionAttr):
         # the "can't call any() on a scalar" msg is raised.
         if self.scalar and not self._value_is_scalar:
             return self._comparator.has(
-                    value_expr
-                )
+                value_expr
+            )
         else:
             return self._comparator.any(
-                    value_expr
-                )
+                value_expr
+            )
 
     def has(self, criterion=None, **kwargs):
         """Produce a proxied 'has' expression using EXISTS.
@@ -397,14 +397,14 @@ class AssociationProxy(interfaces._InspectionAttr):
 
         if self._target_is_object:
             return self._comparator.has(
-                    getattr(self.target_class, self.value_attr).\
-                        has(criterion, **kwargs)
-                )
+                getattr(self.target_class, self.value_attr).
+                has(criterion, **kwargs)
+            )
         else:
             if criterion is not None or kwargs:
                 raise exc.ArgumentError(
-                        "Non-empty has() not allowed for "
-                        "column-targeted association proxy; use ==")
+                    "Non-empty has() not allowed for "
+                    "column-targeted association proxy; use ==")
             return self._comparator.has()
 
     def contains(self, obj):
@@ -429,9 +429,9 @@ class AssociationProxy(interfaces._InspectionAttr):
         # is only allowed with a scalar.
         if obj is None:
             return or_(
-                        self._comparator.has(**{self.value_attr: obj}),
-                        self._comparator == None
-                    )
+                self._comparator.has(**{self.value_attr: obj}),
+                self._comparator == None
+            )
         else:
             return self._comparator.has(**{self.value_attr: obj})
 
@@ -439,7 +439,7 @@ class AssociationProxy(interfaces._InspectionAttr):
         # note the has() here will fail for collections; eq_()
         # is only allowed with a scalar.
         return self._comparator.has(
-                    getattr(self.target_class, self.value_attr) != obj)
+            getattr(self.target_class, self.value_attr) != obj)
 
 
 class _lazy_collection(object):
@@ -451,8 +451,8 @@ class _lazy_collection(object):
         obj = self.ref()
         if obj is None:
             raise exc.InvalidRequestError(
-               "stale association proxy, parent object has gone out of "
-               "scope")
+                "stale association proxy, parent object has gone out of "
+                "scope")
         return getattr(obj, self.target)
 
     def __getstate__(self):
@@ -698,7 +698,7 @@ class _AssociationList(_AssociationCollection):
 
     for func_name, func in list(locals().items()):
         if (util.callable(func) and func.__name__ == func_name and
-            not func.__doc__ and hasattr(list, func_name)):
+                not func.__doc__ and hasattr(list, func_name)):
             func.__doc__ = getattr(list, func_name).__doc__
     del func_name, func
 
@@ -835,8 +835,8 @@ class _AssociationDict(_AssociationCollection):
                         self[k] = v
                 except ValueError:
                     raise ValueError(
-                            "dictionary update sequence "
-                            "requires 2-element tuples")
+                        "dictionary update sequence "
+                        "requires 2-element tuples")
 
         for key, value in kw:
             self[key] = value
@@ -849,7 +849,7 @@ class _AssociationDict(_AssociationCollection):
 
     for func_name, func in list(locals().items()):
         if (util.callable(func) and func.__name__ == func_name and
-            not func.__doc__ and hasattr(dict, func_name)):
+                not func.__doc__ and hasattr(dict, func_name)):
             func.__doc__ = getattr(dict, func_name).__doc__
     del func_name, func
 
@@ -1049,6 +1049,6 @@ class _AssociationSet(_AssociationCollection):
 
     for func_name, func in list(locals().items()):
         if (util.callable(func) and func.__name__ == func_name and
-            not func.__doc__ and hasattr(set, func_name)):
+                not func.__doc__ and hasattr(set, func_name)):
             func.__doc__ = getattr(set, func_name).__doc__
     del func_name, func
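
The classes adjusted above implement the public :func:`.association_proxy`
helper; a condensed sketch of the usual keyword pattern, with all names
illustrative::

    from sqlalchemy import Column, ForeignKey, Integer, String
    from sqlalchemy.ext.associationproxy import association_proxy
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import relationship

    Base = declarative_base()

    class User(Base):
        __tablename__ = 'user'
        id = Column(Integer, primary_key=True)
        kw = relationship("Keyword")
        # proxy User.kw -> Keyword.keyword; creator wraps plain strings
        keywords = association_proxy(
            'kw', 'keyword', creator=lambda k: Keyword(keyword=k))

    class Keyword(Base):
        __tablename__ = 'keyword'
        id = Column(Integer, primary_key=True)
        user_id = Column(Integer, ForeignKey('user.id'))
        keyword = Column(String(64))

    u = User()
    u.keywords.append('cheese inspector')  # creator builds the Keyword
    print(list(u.keywords))                # ['cheese inspector']
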
index 1da65011ddd9e0f70e2a5da8fba373506625e144..17ebef5b5af9865ee58f09db0515c4b1fc1e7cb5 100644 (file)
@@ -60,7 +60,8 @@ asking it to reflect the schema and produce mappings::
     session.add(Address(email_address="foo@bar.com", user=User(name="foo")))
     session.commit()
 
-    # collection-based relationships are by default named "<classname>_collection"
+    # collection-based relationships are by default named
+    # "<classname>_collection"
     print (u1.address_collection)
 
 Above, calling :meth:`.AutomapBase.prepare` while passing along the
@@ -72,16 +73,17 @@ generated automatically.  The :class:`.ForeignKeyConstraint` objects which
 link the various tables together will be used to produce new, bidirectional
 :func:`.relationship` objects between classes.   The classes and relationships
 follow along a default naming scheme that we can customize.  At this point,
-our basic mapping consisting of related ``User`` and ``Address`` classes is ready
-to use in the traditional way.
+our basic mapping consisting of related ``User`` and ``Address`` classes is
+ready to use in the traditional way.
 
 Generating Mappings from an Existing MetaData
 =============================================
 
 We can pass a pre-declared :class:`.MetaData` object to :func:`.automap_base`.
 This object can be constructed in any way, including programmatically, from
-a serialized file, or from itself being reflected using :meth:`.MetaData.reflect`.
-Below we illustrate a combination of reflection and explicit table declaration::
+a serialized file, or from itself being reflected using
+:meth:`.MetaData.reflect`.  Below we illustrate a combination of reflection and
+explicit table declaration::
 
     from sqlalchemy import create_engine, MetaData, Table, Column, ForeignKey
     engine = create_engine("sqlite:///mydatabase.db")
@@ -106,7 +108,8 @@ Below we illustrate a combination of reflection and explicit table declaration::
     Base.prepare()
 
     # mapped classes are ready
-    User, Address, Order = Base.classes.user, Base.classes.address, Base.classes.user_order
+    User, Address, Order = Base.classes.user, Base.classes.address,\
+        Base.classes.user_order
 
 Specifying Classes Explicitly
 =============================
@@ -114,11 +117,11 @@ Specifying Classes Explcitly
 The :mod:`.sqlalchemy.ext.automap` extension allows classes to be defined
 explicitly, in a way similar to that of the :class:`.DeferredReflection` class.
 Classes that extend from :class:`.AutomapBase` act like regular declarative
-classes, but are not immediately mapped after their construction, and are instead
-mapped when we call :meth:`.AutomapBase.prepare`.  The :meth:`.AutomapBase.prepare`
-method will make use of the classes we've established based on the table name
-we use.  If our schema contains tables ``user`` and ``address``, we can define
-one or both of the classes to be used::
+classes, but are not immediately mapped after their construction, and are
+instead mapped when we call :meth:`.AutomapBase.prepare`.  The
+:meth:`.AutomapBase.prepare` method will make use of the classes we've
+established based on the table name we use.  If our schema contains tables
+``user`` and ``address``, we can define one or both of the classes to be used::
 
     from sqlalchemy.ext.automap import automap_base
     from sqlalchemy import create_engine
@@ -134,9 +137,9 @@ one or both of the classes to be used::
         user_name = Column('name', String)
 
         # override relationships too, if desired.
-        # we must use the same name that automap would use for the relationship,
-        # and also must refer to the class name that automap will generate
-        # for "address"
+        # we must use the same name that automap would use for the
+        # relationship, and also must refer to the class name that automap will
+        # generate for "address"
         address_collection = relationship("address", collection_class=set)
 
     # reflect
@@ -158,10 +161,10 @@ one or both of the classes to be used::
 Above, one of the more intricate details is that we illustrated overriding
 one of the :func:`.relationship` objects that automap would have created.
 To do this, we needed to make sure the names match up with what automap
-would normally generate, in that the relationship name would be ``User.address_collection``
-and the name of the class referred to, from automap's perspective, is called
-``address``, even though we are referring to it as ``Address`` within our usage
-of this class.
+would normally generate, in that the relationship name would be
+``User.address_collection`` and the name of the class referred to, from
+automap's perspective, is called ``address``, even though we are referring to
+it as ``Address`` within our usage of this class.
 
 Overriding Naming Schemes
 =========================
@@ -212,7 +215,8 @@ scheme for class names and a "pluralizer" for collection names using the
         )
 
 From the above mapping, we would now have classes ``User`` and ``Address``,
-where the collection from ``User`` to ``Address`` is called ``User.addresses``::
+where the collection from ``User`` to ``Address`` is called
+``User.addresses``::
 
     User, Address = Base.classes.User, Base.classes.Address
 
@@ -223,7 +227,8 @@ Relationship Detection
 
 The vast majority of what automap accomplishes is the generation of
 :func:`.relationship` structures based on foreign keys.  The mechanism
-by which this works for many-to-one and one-to-many relationships is as follows:
+by which this works for many-to-one and one-to-many relationships is as
+follows:
 
 1. A given :class:`.Table`, known to be mapped to a particular class,
    is examined for :class:`.ForeignKeyConstraint` objects.
@@ -232,10 +237,10 @@ by which this works for many-to-one and one-to-many relationships is as follows:
    object present is matched up to the class to which it is to be mapped,
    if any, else it is skipped.
 
-3. As the :class:`.ForeignKeyConstraint` we are examining corresponds to a reference
-   from the immediate mapped class,
-   the relationship will be set up as a many-to-one referring to the referred class;
-   a corresponding one-to-many backref will be created on the referred class referring
+3. As the :class:`.ForeignKeyConstraint` we are examining corresponds to a
+   reference from the immediate mapped class,  the relationship will be set up
+   as a many-to-one referring to the referred class; a corresponding
+   one-to-many backref will be created on the referred class referring
    to this class.
 
 4. The names of the relationships are determined using the
@@ -248,15 +253,15 @@ by which this works for many-to-one and one-to-many relationships is as follows:
    name will be derived.
 
 5. The classes are inspected for an existing mapped property matching these
-   names.  If one is detected on one side, but none on the other side, :class:`.AutomapBase`
-   attempts to create a relationship on the missing side, then uses the
-   :paramref:`.relationship.back_populates` parameter in order to point
-   the new relationship to the other side.
+   names.  If one is detected on one side, but none on the other side,
+   :class:`.AutomapBase` attempts to create a relationship on the missing side,
+   then uses the :paramref:`.relationship.back_populates` parameter in order to
+   point the new relationship to the other side.
 
 6. In the usual case where no relationship is on either side,
-   :meth:`.AutomapBase.prepare` produces a :func:`.relationship` on the "many-to-one"
-   side and matches it to the other using the :paramref:`.relationship.backref`
-   parameter.
+   :meth:`.AutomapBase.prepare` produces a :func:`.relationship` on the
+   "many-to-one" side and matches it to the other using the
+   :paramref:`.relationship.backref` parameter.
 
 7. Production of the :func:`.relationship` and optionally the :func:`.backref`
    is handed off to the :paramref:`.AutomapBase.prepare.generate_relationship`
@@ -288,7 +293,7 @@ options along to all one-to-many relationships::
         # make use of the built-in function to actually return
         # the result.
         return generate_relationship(base, direction, return_fn,
-                                        attrname, local_cls, referred_cls, **kw)
+                                     attrname, local_cls, referred_cls, **kw)
 
     from sqlalchemy.ext.automap import automap_base
     from sqlalchemy import create_engine
@@ -307,16 +312,17 @@ Many-to-Many relationships
 those which contain a ``secondary`` argument.  The process for producing these
 is as follows:
 
-1. A given :class:`.Table` is examined for :class:`.ForeignKeyConstraint` objects,
-   before any mapped class has been assigned to it.
+1. A given :class:`.Table` is examined for :class:`.ForeignKeyConstraint`
+   objects, before any mapped class has been assigned to it.
 
 2. If the table contains two and exactly two :class:`.ForeignKeyConstraint`
    objects, and all columns within this table are members of these two
    :class:`.ForeignKeyConstraint` objects, the table is assumed to be a
    "secondary" table, and will **not be mapped directly**.
 
-3. The two (or one, for self-referential) external tables to which the :class:`.Table`
-   refers to are matched to the classes to which they will be mapped, if any.
+3. The two (or one, for self-referential) external tables to which the
+   :class:`.Table` refers are matched to the classes to which they will be
+   mapped, if any.
 
 4. If mapped classes for both sides are located, a many-to-many bi-directional
    :func:`.relationship` / :func:`.backref` pair is created between the two
@@ -330,8 +336,8 @@ Relationships with Inheritance
 ------------------------------
 
 :mod:`.sqlalchemy.ext.automap` will not generate any relationships between
-two classes that are in an inheritance relationship.   That is, with two classes
-given as follows::
+two classes that are in an inheritance relationship.   That is, with two
+classes given as follows::
 
     class Employee(Base):
         __tablename__ = 'employee'
@@ -348,8 +354,8 @@ given as follows::
             'polymorphic_identity':'engineer',
         }
 
-The foreign key from ``Engineer`` to ``Employee`` is used not for a relationship,
-but to establish joined inheritance between the two classes.
+The foreign key from ``Engineer`` to ``Employee`` is used not for a
+relationship, but to establish joined inheritance between the two classes.
 
 Note that this means automap will not generate *any* relationships
 for foreign keys that link from a subclass to a superclass.  If a mapping
@@ -373,7 +379,8 @@ SQLAlchemy can guess::
         id = Column(Integer, ForeignKey('employee.id'), primary_key=True)
         favorite_employee_id = Column(Integer, ForeignKey('employee.id'))
 
-        favorite_employee = relationship(Employee, foreign_keys=favorite_employee_id)
+        favorite_employee = relationship(Employee,
+                                         foreign_keys=favorite_employee_id)
 
         __mapper_args__ = {
             'polymorphic_identity':'engineer',
@@ -387,8 +394,8 @@ Using Automap with Explicit Declarations
 As noted previously, automap has no dependency on reflection, and can make
 use of any collection of :class:`.Table` objects within a :class:`.MetaData`
 collection.  From this, it follows that automap can also be used to
-generate missing relationships given an otherwise complete model that fully defines
-table metadata::
+generate missing relationships given an otherwise complete model that fully
+defines table metadata::
 
     from sqlalchemy.ext.automap import automap_base
     from sqlalchemy import Column, Integer, String, ForeignKey
@@ -420,12 +427,12 @@ table metadata::
 
 Above, given mostly complete ``User`` and ``Address`` mappings, the
 :class:`.ForeignKey` which we defined on ``Address.user_id`` allowed a
-bidirectional relationship pair ``Address.user`` and ``User.address_collection``
-to be generated on the mapped classes.
+bidirectional relationship pair ``Address.user`` and
+``User.address_collection`` to be generated on the mapped classes.
 
-Note that when subclassing :class:`.AutomapBase`, the :meth:`.AutomapBase.prepare`
-method is required; if not called, the classes we've declared are in an
-un-mapped state.
+Note that when subclassing :class:`.AutomapBase`,
+the :meth:`.AutomapBase.prepare` method is required; if not called, the classes
+we've declared are in an un-mapped state.
 
 
 """
@@ -459,15 +466,16 @@ def classname_for_table(base, tablename, table):
 
      .. note::
 
-        In Python 2, the string used for the class name **must** be a non-Unicode
-        object, e.g. a ``str()`` object.  The ``.name`` attribute of
-        :class:`.Table` is typically a Python unicode subclass, so the ``str()``
-        function should be applied to this name, after accounting for any non-ASCII
-        characters.
+        In Python 2, the string used for the class name **must** be a
+        non-Unicode object, e.g. a ``str()`` object.  The ``.name`` attribute
+        of :class:`.Table` is typically a Python unicode subclass, so the
+        ``str()`` function should be applied to this name, after accounting for
+        any non-ASCII characters.
 
     """
     return str(tablename)
 
+
 def name_for_scalar_relationship(base, local_cls, referred_cls, constraint):
     """Return the attribute name that should be used to refer from one
     class to another, for a scalar object reference.
@@ -492,7 +500,9 @@ def name_for_scalar_relationship(base, local_cls, referred_cls, constraint):
     """
     return referred_cls.__name__.lower()
 
-def name_for_collection_relationship(base, local_cls, referred_cls, constraint):
+
+def name_for_collection_relationship(
+        base, local_cls, referred_cls, constraint):
     """Return the attribute name that should be used to refer from one
     class to another, for a collection reference.
 
@@ -501,7 +511,8 @@ def name_for_collection_relationship(base, local_cls, referred_cls, constraint):
         return referred_cls.__name__.lower() + "_collection"
 
     Alternate implementations
-    can be specified using the :paramref:`.AutomapBase.prepare.name_for_collection_relationship`
+    can be specified using the
+    :paramref:`.AutomapBase.prepare.name_for_collection_relationship`
     parameter.
 
     :param base: the :class:`.AutomapBase` class doing the prepare.
@@ -516,7 +527,9 @@ def name_for_collection_relationship(base, local_cls, referred_cls, constraint):
     """
     return referred_cls.__name__.lower() + "_collection"
 
-def generate_relationship(base, direction, return_fn, attrname, local_cls, referred_cls, **kw):
+
+def generate_relationship(
+        base, direction, return_fn, attrname, local_cls, referred_cls, **kw):
     """Generate a :func:`.relationship` or :func:`.backref` on behalf of two
     mapped classes.
 
@@ -538,11 +551,11 @@ def generate_relationship(base, direction, return_fn, attrname, local_cls, refer
 be one of :data:`.ONETOMANY`, :data:`.MANYTOONE`, :data:`.MANYTOMANY`.
 
     :param return_fn: the function that is used by default to create the
-     relationship.  This will be either :func:`.relationship` or :func:`.backref`.
-     The :func:`.backref` function's result will be used to produce a new
-     :func:`.relationship` in a second step, so it is critical that user-defined
-     implementations correctly differentiate between the two functions, if
-     a custom relationship function is being used.
+     relationship.  This will be either :func:`.relationship` or
+     :func:`.backref`.  The :func:`.backref` function's result will be used to
+     produce a new :func:`.relationship` in a second step, so it is critical
+     that user-defined implementations correctly differentiate between the two
+     functions, if a custom relationship function is being used.
 
     :param attrname: the attribute name to which this relationship is being assigned.
      If the value of :paramref:`.generate_relationship.return_fn` is the
@@ -552,8 +565,8 @@ def generate_relationship(base, direction, return_fn, attrname, local_cls, refer
     :param local_cls: the "local" class to which this relationship or backref
      will be locally present.
 
-    :param referred_cls: the "referred" class to which the relationship or backref
-     refers to.
+    :param referred_cls: the "referred" class to which the relationship or
+     backref refers.
 
     :param \**kw: all additional keyword arguments are passed along to the
      function.
@@ -569,6 +582,7 @@ def generate_relationship(base, direction, return_fn, attrname, local_cls, refer
     else:
         raise TypeError("Unknown relationship function: %s" % return_fn)
 
+
 class AutomapBase(object):
     """Base class for an "automap" schema.
 
@@ -601,44 +615,45 @@ class AutomapBase(object):
     """
 
     @classmethod
-    def prepare(cls,
-                engine=None,
-                reflect=False,
-                classname_for_table=classname_for_table,
-                collection_class=list,
-                name_for_scalar_relationship=name_for_scalar_relationship,
-                name_for_collection_relationship=name_for_collection_relationship,
-                generate_relationship=generate_relationship):
-
+    def prepare(
+            cls,
+            engine=None,
+            reflect=False,
+            classname_for_table=classname_for_table,
+            collection_class=list,
+            name_for_scalar_relationship=name_for_scalar_relationship,
+            name_for_collection_relationship=name_for_collection_relationship,
+            generate_relationship=generate_relationship):
         """Extract mapped classes and relationships from the :class:`.MetaData` and
         perform mappings.
 
         :param engine: an :class:`.Engine` or :class:`.Connection` with which
          to perform schema reflection, if specified.
-         If the :paramref:`.AutomapBase.prepare.reflect` argument is False, this
-         object is not used.
+         If the :paramref:`.AutomapBase.prepare.reflect` argument is False,
+         this object is not used.
 
         :param reflect: if True, the :meth:`.MetaData.reflect` method is called
          on the :class:`.MetaData` associated with this :class:`.AutomapBase`.
-         The :class:`.Engine` passed via :paramref:`.AutomapBase.prepare.engine` will
-         be used to perform the reflection if present; else, the :class:`.MetaData`
-         should already be bound to some engine else the operation will fail.
+         The :class:`.Engine` passed via
+         :paramref:`.AutomapBase.prepare.engine` will be used to perform the
+         reflection if present; else, the :class:`.MetaData` should already be
+         bound to some engine, or the operation will fail.
 
         :param classname_for_table: callable function which will be used to
          produce new class names, given a table name.  Defaults to
          :func:`.classname_for_table`.
 
-        :param name_for_scalar_relationship: callable function which will be used
-         to produce relationship names for scalar relationships.  Defaults to
-         :func:`.name_for_scalar_relationship`.
+        :param name_for_scalar_relationship: callable function which will be
+         used to produce relationship names for scalar relationships.  Defaults
+         to :func:`.name_for_scalar_relationship`.
 
-        :param name_for_collection_relationship: callable function which will be used
-         to produce relationship names for collection-oriented relationships.  Defaults to
-         :func:`.name_for_collection_relationship`.
+        :param name_for_collection_relationship: callable function which will
+         be used to produce relationship names for collection-oriented
+         relationships.  Defaults to :func:`.name_for_collection_relationship`.
 
         :param generate_relationship: callable function which will be used to
-         actually generate :func:`.relationship` and :func:`.backref` constructs.
-         Defaults to :func:`.generate_relationship`.
+         actually generate :func:`.relationship` and :func:`.backref`
+         constructs.  Defaults to :func:`.generate_relationship`.
 
         :param collection_class: the Python collection class that will be used
          when a new :func:`.relationship` object is created that represents a
@@ -647,16 +662,16 @@ class AutomapBase(object):
         """
         if reflect:
             cls.metadata.reflect(
-                        engine,
-                        extend_existing=True,
-                        autoload_replace=False
-                    )
+                engine,
+                extend_existing=True,
+                autoload_replace=False
+            )
 
         table_to_map_config = dict(
-                                (m.local_table, m)
-                                for m in _DeferredMapperConfig.
-                                    classes_for_base(cls, sort=False)
-                            )
+            (m.local_table, m)
+            for m in _DeferredMapperConfig.
+            classes_for_base(cls, sort=False)
+        )
 
         many_to_many = []
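
Taken together, the reflection and mapping steps above are driven by a single
call; a typical invocation looks like this sketch (the database URL and table
name are hypothetical)::

    from sqlalchemy import create_engine
    from sqlalchemy.ext.automap import automap_base

    Base = automap_base()
    engine = create_engine("sqlite:///mydatabase.db")   # hypothetical URL

    # reflect tables, then generate classes and relationships
    Base.prepare(engine, reflect=True)

    User = Base.classes.user    # assumes a reflected table named 'user'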
 
@@ -678,25 +693,24 @@ class AutomapBase(object):
 
         for map_config in table_to_map_config.values():
             _relationships_for_fks(cls,
-                            map_config,
-                            table_to_map_config,
-                            collection_class,
-                            name_for_scalar_relationship,
-                            name_for_collection_relationship,
-                            generate_relationship)
+                                   map_config,
+                                   table_to_map_config,
+                                   collection_class,
+                                   name_for_scalar_relationship,
+                                   name_for_collection_relationship,
+                                   generate_relationship)
 
         for lcl_m2m, rem_m2m, m2m_const, table in many_to_many:
             _m2m_relationship(cls, lcl_m2m, rem_m2m, m2m_const, table,
-                            table_to_map_config,
-                            collection_class,
-                            name_for_scalar_relationship,
-                            name_for_collection_relationship,
-                            generate_relationship)
+                              table_to_map_config,
+                              collection_class,
+                              name_for_scalar_relationship,
+                              name_for_collection_relationship,
+                              generate_relationship)
 
         for map_config in _DeferredMapperConfig.classes_for_base(cls):
             map_config.map()
 
-
     _sa_decl_prepare = True
     """Indicate that the mapping of classes should be deferred.
 
@@ -718,6 +732,7 @@ class AutomapBase(object):
 
     """
 
+
 def automap_base(declarative_base=None, **kw):
     """Produce a declarative automap base.
 
@@ -731,8 +746,8 @@ def automap_base(declarative_base=None, **kw):
 
     :param declarative_base: an existing class produced by
      :func:`.declarative.declarative_base`.  When this is passed, the function
-     no longer invokes :func:`.declarative.declarative_base` itself, and all other
-     keyword arguments are ignored.
+     no longer invokes :func:`.declarative.declarative_base` itself, and all
+     other keyword arguments are ignored.
 
     :param \**kw: keyword arguments are passed along to
      :func:`.declarative.declarative_base`.
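
A short sketch of the two ways :func:`.automap_base` can be called, per the
parameter descriptions above (names are illustrative)::

    from sqlalchemy.ext.automap import automap_base
    from sqlalchemy.ext.declarative import declarative_base

    # keyword arguments are forwarded to declarative_base()
    Base = automap_base(cls=object)

    # or build on an existing declarative base; other keywords are ignored
    Existing = declarative_base()
    AutoBase = automap_base(declarative_base=Existing)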
@@ -744,20 +759,21 @@ def automap_base(declarative_base=None, **kw):
         Base = declarative_base
 
     return type(
-                Base.__name__,
-                (AutomapBase, Base,),
-                {"__abstract__": True, "classes": util.Properties({})}
-            )
+        Base.__name__,
+        (AutomapBase, Base,),
+        {"__abstract__": True, "classes": util.Properties({})}
+    )
+
 
 def _is_many_to_many(automap_base, table):
     fk_constraints = [const for const in table.constraints
-                    if isinstance(const, ForeignKeyConstraint)]
+                      if isinstance(const, ForeignKeyConstraint)]
     if len(fk_constraints) != 2:
         return None, None, None
 
     cols = sum(
-                [[fk.parent for fk in fk_constraint.elements]
-                for fk_constraint in fk_constraints], [])
+        [[fk.parent for fk in fk_constraint.elements]
+         for fk_constraint in fk_constraints], [])
 
     if set(cols) != set(table.c):
         return None, None, None
@@ -768,11 +784,12 @@ def _is_many_to_many(automap_base, table):
         fk_constraints
     )
 
+
 def _relationships_for_fks(automap_base, map_config, table_to_map_config,
-                                collection_class,
-                                name_for_scalar_relationship,
-                                name_for_collection_relationship,
-                                generate_relationship):
+                           collection_class,
+                           name_for_scalar_relationship,
+                           name_for_collection_relationship,
+                           generate_relationship):
     local_table = map_config.local_table
     local_cls = map_config.cls
 
@@ -787,62 +804,73 @@ def _relationships_for_fks(automap_base, map_config, table_to_map_config,
                 continue
             referred_cls = referred_cfg.cls
 
-            if local_cls is not referred_cls and issubclass(local_cls, referred_cls):
+            if local_cls is not referred_cls and issubclass(
+                    local_cls, referred_cls):
                 continue
 
             relationship_name = name_for_scalar_relationship(
-                                        automap_base,
-                                        local_cls,
-                                        referred_cls, constraint)
+                automap_base,
+                local_cls,
+                referred_cls, constraint)
             backref_name = name_for_collection_relationship(
-                                        automap_base,
-                                        referred_cls,
-                                        local_cls,
-                                        constraint
-                                    )
+                automap_base,
+                referred_cls,
+                local_cls,
+                constraint
+            )
 
             create_backref = backref_name not in referred_cfg.properties
 
             if relationship_name not in map_config.properties:
                 if create_backref:
-                    backref_obj = generate_relationship(automap_base,
-                                        interfaces.ONETOMANY, backref,
-                                        backref_name, referred_cls, local_cls,
-                                        collection_class=collection_class)
+                    backref_obj = generate_relationship(
+                        automap_base,
+                        interfaces.ONETOMANY, backref,
+                        backref_name, referred_cls, local_cls,
+                        collection_class=collection_class)
                 else:
                     backref_obj = None
                 rel = generate_relationship(automap_base,
-                        interfaces.MANYTOONE,
-                        relationship,
-                        relationship_name,
-                        local_cls, referred_cls,
-                        foreign_keys=[fk.parent for fk in constraint.elements],
-                        backref=backref_obj,
-                        remote_side=[fk.column for fk in constraint.elements]
-                    )
+                                            interfaces.MANYTOONE,
+                                            relationship,
+                                            relationship_name,
+                                            local_cls, referred_cls,
+                                            foreign_keys=[
+                                                fk.parent
+                                                for fk in constraint.elements],
+                                            backref=backref_obj,
+                                            remote_side=[
+                                                fk.column
+                                                for fk in constraint.elements]
+                                            )
                 if rel is not None:
                     map_config.properties[relationship_name] = rel
                     if not create_backref:
-                        referred_cfg.properties[backref_name].back_populates = relationship_name
+                        referred_cfg.properties[
+                            backref_name].back_populates = relationship_name
             elif create_backref:
                 rel = generate_relationship(automap_base,
-                        interfaces.ONETOMANY,
-                        relationship,
-                        backref_name,
-                        referred_cls, local_cls,
-                        foreign_keys=[fk.parent for fk in constraint.elements],
-                        back_populates=relationship_name,
-                        collection_class=collection_class)
+                                            interfaces.ONETOMANY,
+                                            relationship,
+                                            backref_name,
+                                            referred_cls, local_cls,
+                                            foreign_keys=[
+                                                fk.parent
+                                                for fk in constraint.elements],
+                                            back_populates=relationship_name,
+                                            collection_class=collection_class)
                 if rel is not None:
                     referred_cfg.properties[backref_name] = rel
-                    map_config.properties[relationship_name].back_populates = backref_name
+                    map_config.properties[
+                        relationship_name].back_populates = backref_name
+
 
 def _m2m_relationship(automap_base, lcl_m2m, rem_m2m, m2m_const, table,
-                            table_to_map_config,
-                            collection_class,
-                            name_for_scalar_relationship,
-                            name_for_collection_relationship,
-                            generate_relationship):
+                      table_to_map_config,
+                      collection_class,
+                      name_for_scalar_relationship,
+                      name_for_collection_relationship,
+                      generate_relationship):
 
     map_config = table_to_map_config.get(lcl_m2m, None)
     referred_cfg = table_to_map_config.get(rem_m2m, None)
@@ -853,56 +881,67 @@ def _m2m_relationship(automap_base, lcl_m2m, rem_m2m, m2m_const, table,
     referred_cls = referred_cfg.cls
 
     relationship_name = name_for_collection_relationship(
-                                automap_base,
-                                local_cls,
-                                referred_cls, m2m_const[0])
+        automap_base,
+        local_cls,
+        referred_cls, m2m_const[0])
     backref_name = name_for_collection_relationship(
-                                automap_base,
-                                referred_cls,
-                                local_cls,
-                                m2m_const[1]
-                            )
+        automap_base,
+        referred_cls,
+        local_cls,
+        m2m_const[1]
+    )
 
     create_backref = backref_name not in referred_cfg.properties
 
     if relationship_name not in map_config.properties:
         if create_backref:
-            backref_obj = generate_relationship(automap_base,
-                            interfaces.MANYTOMANY,
-                            backref,
-                            backref_name,
-                            referred_cls, local_cls,
-                            collection_class=collection_class
-                            )
+            backref_obj = generate_relationship(
+                automap_base,
+                interfaces.MANYTOMANY,
+                backref,
+                backref_name,
+                referred_cls, local_cls,
+                collection_class=collection_class
+            )
         else:
             backref_obj = None
         rel = generate_relationship(automap_base,
-                interfaces.MANYTOMANY,
-                relationship,
-                relationship_name,
-                local_cls, referred_cls,
-                secondary=table,
-                primaryjoin=and_(fk.column == fk.parent for fk in m2m_const[0].elements),
-                secondaryjoin=and_(fk.column == fk.parent for fk in m2m_const[1].elements),
-                backref=backref_obj,
-                collection_class=collection_class
-                )
+                                    interfaces.MANYTOMANY,
+                                    relationship,
+                                    relationship_name,
+                                    local_cls, referred_cls,
+                                    secondary=table,
+                                    primaryjoin=and_(
+                                        fk.column == fk.parent
+                                        for fk in m2m_const[0].elements),
+                                    secondaryjoin=and_(
+                                        fk.column == fk.parent
+                                        for fk in m2m_const[1].elements),
+                                    backref=backref_obj,
+                                    collection_class=collection_class
+                                    )
         if rel is not None:
             map_config.properties[relationship_name] = rel
 
             if not create_backref:
-                referred_cfg.properties[backref_name].back_populates = relationship_name
+                referred_cfg.properties[
+                    backref_name].back_populates = relationship_name
     elif create_backref:
         rel = generate_relationship(automap_base,
-                interfaces.MANYTOMANY,
-                relationship,
-                backref_name,
-                referred_cls, local_cls,
-                secondary=table,
-                primaryjoin=and_(fk.column == fk.parent for fk in m2m_const[1].elements),
-                secondaryjoin=and_(fk.column == fk.parent for fk in m2m_const[0].elements),
-                back_populates=relationship_name,
-                collection_class=collection_class)
+                                    interfaces.MANYTOMANY,
+                                    relationship,
+                                    backref_name,
+                                    referred_cls, local_cls,
+                                    secondary=table,
+                                    primaryjoin=and_(
+                                        fk.column == fk.parent
+                                        for fk in m2m_const[1].elements),
+                                    secondaryjoin=and_(
+                                        fk.column == fk.parent
+                                        for fk in m2m_const[0].elements),
+                                    back_populates=relationship_name,
+                                    collection_class=collection_class)
         if rel is not None:
             referred_cfg.properties[backref_name] = rel
-            map_config.properties[relationship_name].back_populates = backref_name
+            map_config.properties[
+                relationship_name].back_populates = backref_name
index 03fde2668da3e212e389572d57eb503c8787d1e0..8d169aa57daba40e78003dd6a36ed6ec9e9fb767 100644 (file)
@@ -58,7 +58,8 @@ invoked for the dialect in use::
 
     @compiles(AlterColumn, 'postgresql')
     def visit_alter_column(element, compiler, **kw):
-        return "ALTER TABLE %s ALTER COLUMN %s ..." % (element.table.name, element.column.name)
+        return "ALTER TABLE %s ALTER COLUMN %s ..." % (element.table.name,
+                                                       element.column.name)
 
 The second ``visit_alter_column`` will be invoked when any ``postgresql``
 dialect is used.
@@ -93,7 +94,8 @@ method which can be used for compilation of embedded attributes::
 
 Produces::
 
-    "INSERT INTO mytable (SELECT mytable.x, mytable.y, mytable.z FROM mytable WHERE mytable.x > :x_1)"
+    "INSERT INTO mytable (SELECT mytable.x, mytable.y, mytable.z
+                          FROM mytable WHERE mytable.x > :x_1)"
 
 .. note::
 
@@ -408,7 +410,7 @@ def compiles(class_, *specs):
 
             # TODO: why is the lambda needed ?
             setattr(class_, '_compiler_dispatch',
-                lambda *arg, **kw: existing(*arg, **kw))
+                    lambda *arg, **kw: existing(*arg, **kw))
             setattr(class_, '_compiler_dispatcher', existing)
 
         if specs:
@@ -444,6 +446,6 @@ class _dispatcher(object):
                 fn = self.specs['default']
             except KeyError:
                 raise exc.CompileError(
-                        "%s construct has no default "
-                        "compilation handler." % type(element))
+                    "%s construct has no default "
+                    "compilation handler." % type(element))
         return fn(element, compiler, **kw)
index eba6cb808ceb4f134363724902d8b7f3e875131d..3cbc85c0c0ded8cafb67ff2e03e56008c1dcffb7 100644 (file)
@@ -955,9 +955,9 @@ Mapping a class using the above mixin, we will get an error like::
     sqlalchemy.exc.InvalidRequestError: this ForeignKey's parent column is not
     yet associated with a Table.
 
-This is because the ``target_id`` :class:`.Column` we've called upon in our ``target()``
-method is not the same :class:`.Column` that declarative is actually going to map
-to our table.
+This is because the ``target_id`` :class:`.Column` we've called upon in our
+``target()`` method is not the same :class:`.Column` that declarative is
+actually going to map to our table.
 
 The condition above is resolved using a lambda::
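
    # (Reconstructed sketch -- the example body falls outside this hunk.
    # ``Target`` is the mapped class defined earlier in that section; the
    # lambda defers evaluation until mappers are configured, so
    # ``cls.target_id`` refers to the copied, mapped Column.)
    class RefTargetMixin(object):
        @declared_attr
        def target_id(cls):
            return Column('target_id', ForeignKey('target.id'))

        @declared_attr
        def target(cls):
            return relationship(Target,
                                primaryjoin=lambda: Target.id == cls.target_id)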
 
@@ -1220,8 +1220,8 @@ assumed to be completed and the 'configure' step has finished::
 ``__declare_first__()``
 ~~~~~~~~~~~~~~~~~~~~~~~
 
-Like ``__declare_last__()``, but is called at the beginning of mapper configuration
-via the :meth:`.MapperEvents.before_configured` event::
+Like ``__declare_last__()``, but is called at the beginning of mapper
+configuration via the :meth:`.MapperEvents.before_configured` event::
 
     class MyClass(Base):
         @classmethod
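         def __declare_first__(cls):
             # (sketch of the body, which falls outside this hunk: perform
             # validation or setup before any mappers are configured)
             pass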
@@ -1312,6 +1312,6 @@ from .api import declarative_base, synonym_for, comparable_using, \
 
 
 __all__ = ['declarative_base', 'synonym_for', 'has_inherited_table',
-            'comparable_using', 'instrument_declarative', 'declared_attr',
-            'ConcreteBase', 'AbstractConcreteBase', 'DeclarativeMeta',
-            'DeferredReflection']
+           'comparable_using', 'instrument_declarative', 'declared_attr',
+           'ConcreteBase', 'AbstractConcreteBase', 'DeclarativeMeta',
+           'DeferredReflection']
index 5f3d0742ee90b3938aa61760b36318d5c6d5ac96..daf8bffb5c73867cc20c72f19f61f32fea752158 100644 (file)
@@ -9,8 +9,8 @@
 
 from ...schema import Table, MetaData
 from ...orm import synonym as _orm_synonym, mapper,\
-                                comparable_property,\
-                                interfaces, properties
+    comparable_property,\
+    interfaces, properties
 from ...orm.util import polymorphic_union
 from ...orm.base import _mapper_or_none
 from ...util import OrderedDict
@@ -18,11 +18,12 @@ from ... import exc
 import weakref
 
 from .base import _as_declarative, \
-                _declarative_constructor,\
-                _DeferredMapperConfig, _add_attribute
+    _declarative_constructor,\
+    _DeferredMapperConfig, _add_attribute
 from .clsregistry import _class_resolver
 from . import clsregistry
 
+
 def instrument_declarative(cls, registry, metadata):
     """Given a class, configure the class declaratively,
     using the given registry, which can be any dictionary, and
@@ -31,8 +32,8 @@ def instrument_declarative(cls, registry, metadata):
     """
     if '_decl_class_registry' in cls.__dict__:
         raise exc.InvalidRequestError(
-                            "Class %r already has been "
-                            "instrumented declaratively" % cls)
+            "Class %r already has been "
+            "instrumented declaratively" % cls)
     cls._decl_class_registry = registry
     cls.metadata = metadata
     _as_declarative(cls, cls.__name__, cls.__dict__)
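
A minimal sketch of calling this function directly, with a caller-supplied
registry and :class:`.MetaData` (class and table names hypothetical)::

    from sqlalchemy import MetaData, Column, Integer, String
    from sqlalchemy.ext.declarative import instrument_declarative

    class Plain(object):
        __tablename__ = 'plain'
        id = Column(Integer, primary_key=True)
        name = Column(String(50))

    registry = {}
    metadata = MetaData()
    instrument_declarative(Plain, registry, metadata)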
@@ -245,6 +246,7 @@ def declarative_base(bind=None, metadata=None, mapper=None, cls=object,
 
     return metaclass(name, bases, class_dict)
 
+
 def as_declarative(**kw):
     """
     Class decorator for :func:`.declarative_base`.
@@ -282,6 +284,7 @@ def as_declarative(**kw):
 
     return decorate
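
As the docstring above indicates, :func:`.as_declarative` wraps
:func:`.declarative_base` as a class decorator; the documented pattern is
roughly::

    from sqlalchemy.ext.declarative import as_declarative, declared_attr

    @as_declarative()
    class Base(object):
        @declared_attr
        def __tablename__(cls):
            return cls.__name__.lower()
        id = Column(Integer, primary_key=True)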
 
+
 class ConcreteBase(object):
     """A helper class for 'concrete' declarative mappings.
 
@@ -323,7 +326,7 @@ class ConcreteBase(object):
         return polymorphic_union(OrderedDict(
             (mp.polymorphic_identity, mp.local_table)
             for mp in mappers
-         ), 'type', 'pjoin')
+        ), 'type', 'pjoin')
 
     @classmethod
     def __declare_first__(cls):
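
For reference, the documented usage pattern for :class:`.ConcreteBase`
(``Base`` is a declarative base; the columns are condensed)::

    from sqlalchemy.ext.declarative import ConcreteBase

    class Employee(ConcreteBase, Base):
        __tablename__ = 'employee'
        employee_id = Column(Integer, primary_key=True)
        name = Column(String(50))
        __mapper_args__ = {
            'polymorphic_identity': 'employee',
            'concrete': True}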
@@ -478,7 +481,7 @@ class DeferredReflection(object):
             metadata = mapper.class_.metadata
             for rel in mapper._props.values():
                 if isinstance(rel, properties.RelationshipProperty) and \
-                    rel.secondary is not None:
+                        rel.secondary is not None:
                     if isinstance(rel.secondary, Table):
                         cls._reflect_table(rel.secondary, engine)
                     elif isinstance(rel.secondary, _class_resolver):
@@ -506,9 +509,9 @@ class DeferredReflection(object):
     @classmethod
     def _reflect_table(cls, table, engine):
         Table(table.name,
-            table.metadata,
-            extend_existing=True,
-            autoload_replace=False,
-            autoload=True,
-            autoload_with=engine,
-            schema=table.schema)
+              table.metadata,
+              extend_existing=True,
+              autoload_replace=False,
+              autoload=True,
+              autoload_with=engine,
+              schema=table.schema)
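
For context, the public pattern these internals serve is deferred reflection
of mapped tables; a sketch (``engine`` is assumed to exist)::

    from sqlalchemy.ext.declarative import declarative_base, DeferredReflection

    Base = declarative_base()

    class Foo(DeferredReflection, Base):
        __tablename__ = 'foo'   # columns filled in later by reflection

    # reflect all deferred tables and complete the mappings
    DeferredReflection.prepare(engine)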
index 41190e407d27635d07ff0cdcc918c3c18cd72440..94baeeb518c2a49323bccd48e9f1a214212012d2 100644 (file)
@@ -20,6 +20,7 @@ from . import clsregistry
 import collections
 import weakref
 
+
 def _declared_mapping_info(cls):
     # deferred mapping
     if _DeferredMapperConfig.has_cls(cls):
@@ -59,8 +60,7 @@ def _as_declarative(cls, classname, dict_):
                 cls.__declare_first__()
         if '__abstract__' in base.__dict__ and base.__abstract__:
             if (base is cls or
-                (base in cls.__bases__ and not _is_declarative_inherits)
-            ):
+                    (base in cls.__bases__ and not _is_declarative_inherits)):
                 return
 
         class_mapped = _declared_mapping_info(base) is not None
@@ -68,9 +68,9 @@ def _as_declarative(cls, classname, dict_):
         for name, obj in vars(base).items():
             if name == '__mapper_args__':
                 if not mapper_args_fn and (
-                                        not class_mapped or
-                                        isinstance(obj, declarative_props)
-                                    ):
+                    not class_mapped or
+                    isinstance(obj, declarative_props)
+                ):
                     # don't even invoke __mapper_args__ until
                     # after we've determined everything about the
                     # mapped table.
@@ -80,29 +80,29 @@ def _as_declarative(cls, classname, dict_):
                     mapper_args_fn = lambda: dict(cls.__mapper_args__)
             elif name == '__tablename__':
                 if not tablename and (
-                                        not class_mapped or
-                                        isinstance(obj, declarative_props)
-                                    ):
+                    not class_mapped or
+                    isinstance(obj, declarative_props)
+                ):
                     tablename = cls.__tablename__
             elif name == '__table_args__':
                 if not table_args and (
-                                        not class_mapped or
-                                        isinstance(obj, declarative_props)
-                                    ):
+                    not class_mapped or
+                    isinstance(obj, declarative_props)
+                ):
                     table_args = cls.__table_args__
                     if not isinstance(table_args, (tuple, dict, type(None))):
                         raise exc.ArgumentError(
-                                "__table_args__ value must be a tuple, "
-                                "dict, or None")
+                            "__table_args__ value must be a tuple, "
+                            "dict, or None")
                     if base is not cls:
                         inherited_table_args = True
             elif class_mapped:
                 if isinstance(obj, declarative_props):
                     util.warn("Regular (i.e. not __special__) "
-                            "attribute '%s.%s' uses @declared_attr, "
-                            "but owning class %s is mapped - "
-                            "not applying to subclass %s."
-                            % (base.__name__, name, base, cls))
+                              "attribute '%s.%s' uses @declared_attr, "
+                              "but owning class %s is mapped - "
+                              "not applying to subclass %s."
+                              % (base.__name__, name, base, cls))
                 continue
             elif base is not cls:
                 # we're a mixin.
@@ -114,18 +114,18 @@ def _as_declarative(cls, classname, dict_):
                         continue
                     if obj.foreign_keys:
                         raise exc.InvalidRequestError(
-                        "Columns with foreign keys to other columns "
-                        "must be declared as @declared_attr callables "
-                        "on declarative mixin classes. ")
+                            "Columns with foreign keys to other columns "
+                            "must be declared as @declared_attr callables "
+                            "on declarative mixin classes. ")
                     if name not in dict_ and not (
                             '__table__' in dict_ and
                             (obj.name or name) in dict_['__table__'].c
-                            ) and name not in potential_columns:
+                    ) and name not in potential_columns:
                         potential_columns[name] = \
-                                column_copies[obj] = \
-                                obj.copy()
+                            column_copies[obj] = \
+                            obj.copy()
                         column_copies[obj]._creation_order = \
-                                obj._creation_order
+                            obj._creation_order
                 elif isinstance(obj, MapperProperty):
                     raise exc.InvalidRequestError(
                         "Mapper properties (i.e. deferred,"
@@ -134,9 +134,9 @@ def _as_declarative(cls, classname, dict_):
                         "on declarative mixin classes.")
                 elif isinstance(obj, declarative_props):
                     dict_[name] = ret = \
-                            column_copies[obj] = getattr(cls, name)
+                        column_copies[obj] = getattr(cls, name)
                     if isinstance(ret, (Column, MapperProperty)) and \
-                        ret.doc is None:
+                            ret.doc is None:
                         ret.doc = obj.__doc__
 
     # apply inherited columns as we should
@@ -167,9 +167,8 @@ def _as_declarative(cls, classname, dict_):
             value = synonym(value.key)
             setattr(cls, k, value)
 
-
         if (isinstance(value, tuple) and len(value) == 1 and
-            isinstance(value[0], (Column, MapperProperty))):
+                isinstance(value[0], (Column, MapperProperty))):
             util.warn("Ignoring declarative-like tuple value of attribute "
                       "%s: possibly a copy-and-paste error with a comma "
                       "left at the end of the line?" % k)
@@ -198,7 +197,7 @@ def _as_declarative(cls, classname, dict_):
         if isinstance(c, (ColumnProperty, CompositeProperty)):
             for col in c.columns:
                 if isinstance(col, Column) and \
-                    col.table is None:
+                        col.table is None:
                     _undefer_column_name(key, col)
                     if not isinstance(c, CompositeProperty):
                         name_to_prop_key[col.name].add(key)
@@ -280,7 +279,7 @@ def _as_declarative(cls, classname, dict_):
             "Class %r does not have a __table__ or __tablename__ "
             "specified and does not inherit from an existing "
             "table-mapped class." % cls
-            )
+        )
     elif inherits:
         inherited_mapper = _declared_mapping_info(inherits)
         inherited_table = inherited_mapper.local_table
@@ -293,14 +292,14 @@ def _as_declarative(cls, classname, dict_):
                 raise exc.ArgumentError(
                     "Can't place __table_args__ on an inherited class "
                     "with no table."
-                    )
+                )
             # add any columns declared here to the inherited table.
             for c in declared_columns:
                 if c.primary_key:
                     raise exc.ArgumentError(
                         "Can't place primary key columns on an inherited "
                         "class with no table."
-                        )
+                    )
                 if c.name in inherited_table.c:
                     if inherited_table.c[c.name] is c:
                         continue
@@ -311,7 +310,7 @@ def _as_declarative(cls, classname, dict_):
                     )
                 inherited_table.append_column(c)
                 if inherited_mapped_table is not None and \
-                    inherited_mapped_table is not inherited_table:
+                        inherited_mapped_table is not inherited_table:
                     inherited_mapped_table._refresh_for_new_column(c)
 
     defer_map = hasattr(cls, '_sa_decl_prepare')
@@ -320,12 +319,12 @@ def _as_declarative(cls, classname, dict_):
     else:
         cfg_cls = _MapperConfig
     mt = cfg_cls(mapper_cls,
-                       cls, table,
-                       inherits,
-                       declared_columns,
-                       column_copies,
-                       our_stuff,
-                       mapper_args_fn)
+                 cls, table,
+                 inherits,
+                 declared_columns,
+                 column_copies,
+                 our_stuff,
+                 mapper_args_fn)
     if not defer_map:
         mt.map()
 
@@ -335,12 +334,12 @@ class _MapperConfig(object):
     mapped_table = None
 
     def __init__(self, mapper_cls,
-                        cls,
-                        table,
-                        inherits,
-                        declared_columns,
-                        column_copies,
-                        properties, mapper_args_fn):
+                 cls,
+                 table,
+                 inherits,
+                 declared_columns,
+                 column_copies,
+                 properties, mapper_args_fn):
         self.mapper_cls = mapper_cls
         self.cls = cls
         self.local_table = table
@@ -350,7 +349,6 @@ class _MapperConfig(object):
         self.declared_columns = declared_columns
         self.column_copies = column_copies
 
-
     def _prepare_mapper_arguments(self):
         properties = self.properties
         if self.mapper_args_fn:
@@ -384,7 +382,7 @@ class _MapperConfig(object):
                     set([c.key for c in inherited_table.c
                          if c not in inherited_mapper._columntoproperty])
                 exclude_properties.difference_update(
-                        [c.key for c in self.declared_columns])
+                    [c.key for c in self.declared_columns])
 
             # look through columns in the current mapper that
             # are keyed to a propname different than the colname
@@ -413,6 +411,7 @@ class _MapperConfig(object):
             **mapper_args
         )
 
+
 class _DeferredMapperConfig(_MapperConfig):
     _configs = util.OrderedDict()
 
@@ -433,32 +432,31 @@ class _DeferredMapperConfig(_MapperConfig):
     def has_cls(cls, class_):
         # 2.6 fails on weakref if class_ is an old style class
         return isinstance(class_, type) and \
-                weakref.ref(class_) in cls._configs
+            weakref.ref(class_) in cls._configs
 
     @classmethod
     def config_for_cls(cls, class_):
         return cls._configs[weakref.ref(class_)]
 
-
     @classmethod
     def classes_for_base(cls, base_cls, sort=True):
         classes_for_base = [m for m in cls._configs.values()
-                        if issubclass(m.cls, base_cls)]
+                            if issubclass(m.cls, base_cls)]
         if not sort:
             return classes_for_base
 
         all_m_by_cls = dict(
-                            (m.cls, m)
-                            for m in classes_for_base
-                        )
+            (m.cls, m)
+            for m in classes_for_base
+        )
 
         tuples = []
         for m_cls in all_m_by_cls:
             tuples.extend(
-                    (all_m_by_cls[base_cls], all_m_by_cls[m_cls])
-                    for base_cls in m_cls.__bases__
-                    if base_cls in all_m_by_cls
-                )
+                (all_m_by_cls[base_cls], all_m_by_cls[m_cls])
+                for base_cls in m_cls.__bases__
+                if base_cls in all_m_by_cls
+            )
         return list(
             topological.sort(
                 tuples,
index b05c3a6473e474e6ad930ebbdfcb4d965005fc8e..4595b857a80af62134cacf894fc948ef5544347a 100644 (file)
@@ -11,7 +11,7 @@ This system allows specification of classes and expressions used in
 
 """
 from ...orm.properties import ColumnProperty, RelationshipProperty, \
-                            SynonymProperty
+    SynonymProperty
 from ...schema import _get_table_key
 from ...orm import class_mapper, interfaces
 from ... import util
@@ -74,7 +74,7 @@ class _MultipleClassMarker(object):
     def __init__(self, classes, on_remove=None):
         self.on_remove = on_remove
         self.contents = set([
-                weakref.ref(item, self._remove_item) for item in classes])
+            weakref.ref(item, self._remove_item) for item in classes])
         _registries.add(self)
 
     def __iter__(self):
@@ -121,6 +121,7 @@ class _ModuleMarker(object):
     _decl_class_registry.
 
     """
+
     def __init__(self, name, parent):
         self.parent = parent
         self.name = name
@@ -161,8 +162,8 @@ class _ModuleMarker(object):
             existing.add_item(cls)
         else:
             existing = self.contents[name] = \
-                    _MultipleClassMarker([cls],
-                        on_remove=lambda: self._remove_item(name))
+                _MultipleClassMarker([cls],
+                                     on_remove=lambda: self._remove_item(name))
 
 
 class _ModNS(object):
@@ -182,7 +183,8 @@ class _ModNS(object):
                     assert isinstance(value, _MultipleClassMarker)
                     return value.attempt_get(self.__parent.path, key)
         raise AttributeError("Module %r has no mapped classes "
-                    "registered under the name %r" % (self.__parent.name, key))
+                             "registered under the name %r" % (
+                                 self.__parent.name, key))
 
 
 class _GetColumns(object):
@@ -194,8 +196,8 @@ class _GetColumns(object):
         if mp:
             if key not in mp.all_orm_descriptors:
                 raise exc.InvalidRequestError(
-                            "Class %r does not have a mapped column named %r"
-                            % (self.cls, key))
+                    "Class %r does not have a mapped column named %r"
+                    % (self.cls, key))
 
             desc = mp.all_orm_descriptors[key]
             if desc.extension_type is interfaces.NOT_EXTENSION:
@@ -204,13 +206,13 @@ class _GetColumns(object):
                     key = prop.name
                 elif not isinstance(prop, ColumnProperty):
                     raise exc.InvalidRequestError(
-                                "Property %r is not an instance of"
-                                " ColumnProperty (i.e. does not correspond"
-                                " directly to a Column)." % key)
+                        "Property %r is not an instance of"
+                        " ColumnProperty (i.e. does not correspond"
+                        " directly to a Column)." % key)
         return getattr(self.cls, key)
 
 inspection._inspects(_GetColumns)(
-            lambda target: inspection.inspect(target.cls))
+    lambda target: inspection.inspect(target.cls))
 
 
 class _GetTable(object):
@@ -220,8 +222,8 @@ class _GetTable(object):
 
     def __getattr__(self, key):
         return self.metadata.tables[
-                _get_table_key(key, self.key)
-            ]
+            _get_table_key(key, self.key)
+        ]
 
 
 def _determine_container(key, value):
@@ -248,7 +250,7 @@ class _class_resolver(object):
         elif key in cls.metadata._schemas:
             return _GetTable(key, cls.metadata)
         elif '_sa_module_registry' in cls._decl_class_registry and \
-            key in cls._decl_class_registry['_sa_module_registry']:
+                key in cls._decl_class_registry['_sa_module_registry']:
             registry = cls._decl_class_registry['_sa_module_registry']
             return registry.resolve_attr(key)
         elif self._resolvers:
index 233f172ef23574b4aa5e96d0ced697c827211039..d311fb2d4641a471d8e71186cb9a96ddb846805e 100644 (file)
@@ -44,10 +44,10 @@ class ShardedQuery(Query):
         def iter_for_shard(shard_id):
             context.attributes['shard_id'] = shard_id
             result = self._connection_from_session(
-                            mapper=self._mapper_zero(),
-                            shard_id=shard_id).execute(
-                                                context.statement,
-                                                self._params)
+                mapper=self._mapper_zero(),
+                shard_id=shard_id).execute(
+                context.statement,
+                self._params)
             return self.instances(result, context)
 
         if self._shard_id is not None:
@@ -115,9 +115,11 @@ class ShardedSession(Session):
         if self.transaction is not None:
             return self.transaction.connection(mapper, shard_id=shard_id)
         else:
-            return self.get_bind(mapper,
-                                shard_id=shard_id,
-                                instance=instance).contextual_connect(**kwargs)
+            return self.get_bind(
+                mapper,
+                shard_id=shard_id,
+                instance=instance
+            ).contextual_connect(**kwargs)
 
     def get_bind(self, mapper, shard_id=None,
                  instance=None, clause=None, **kw):
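
A configuration sketch for :class:`.ShardedSession` (the chooser callables
and engines are hypothetical placeholders)::

    from sqlalchemy.ext.horizontal_shard import ShardedSession

    session = ShardedSession(
        shard_chooser=shard_chooser,    # picks a shard for new instances
        id_chooser=id_chooser,          # picks shards for a primary key
        query_chooser=query_chooser,    # picks shards for a Query
        shards={'shard1': engine1, 'shard2': engine2})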
index 7f5a9135515d613effd1550a505db5703ad8c0b8..9f4e09e9261863500b64527413d806861038206d 100644 (file)
@@ -474,8 +474,8 @@ of measurement, currencies and encrypted passwords.
 .. seealso::
 
     `Hybrids and Value Agnostic Types
-    <http://techspot.zzzeek.org/2011/10/21/hybrids-and-value-agnostic-types/>`_ -
-    on the techspot.zzzeek.org blog
+    <http://techspot.zzzeek.org/2011/10/21/hybrids-and-value-agnostic-types/>`_
+    on the techspot.zzzeek.org blog
 
     `Value Agnostic Types, Part II
     <http://techspot.zzzeek.org/2011/10/29/value-agnostic-types-part-ii/>`_ -
@@ -659,6 +659,7 @@ HYBRID_PROPERTY = util.symbol('HYBRID_PROPERTY')
 
 """
 
+
 class hybrid_method(interfaces._InspectionAttr):
     """A decorator which allows definition of a Python object method with both
     instance-level and class-level behavior.
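
The canonical illustration (condensed from the module documentation, where
``Interval`` is a mapped class) is roughly::

    from sqlalchemy.ext.hybrid import hybrid_method

    class Interval(object):
        def __init__(self, start, end):
            self.start = start
            self.end = end

        @hybrid_method
        def contains(self, point):
            # evaluates in Python on an instance; renders SQL when called
            # on the class, e.g. query.filter(Interval.contains(7))
            return (self.start <= point) & (point <= self.end)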
@@ -780,7 +781,7 @@ class hybrid_property(interfaces._InspectionAttr):
         """
 
         proxy_attr = attributes.\
-                        create_proxied_attribute(self)
+            create_proxied_attribute(self)
 
         def expr(owner):
             return proxy_attr(owner, self.__name__, self, comparator(owner))
index 2cf36e9bd96c3f29b9cc2fb760eec6bcd2061a1b..0241366612bb05ecdc5b0c26b5de48435ef1efc3 100644 (file)
@@ -105,7 +105,7 @@ class ExtendedInstrumentationRegistry(InstrumentationFactory):
 
     def _check_conflicts(self, class_, factory):
         existing_factories = self._collect_management_factories_for(class_).\
-                                difference([factory])
+            difference([factory])
         if existing_factories:
             raise TypeError(
                 "multiple instrumentation implementations specified "
@@ -182,7 +182,7 @@ class ExtendedInstrumentationRegistry(InstrumentationFactory):
 
 
 orm_instrumentation._instrumentation_factory = \
-        _instrumentation_factory = ExtendedInstrumentationRegistry()
+    _instrumentation_factory = ExtendedInstrumentationRegistry()
 orm_instrumentation.instrumentation_finders = instrumentation_finders
 
 
@@ -316,7 +316,7 @@ class _ClassInstrumentationAdapter(ClassManager):
             return delegate(key, state, factory)
         else:
             return ClassManager.initialize_collection(self, key,
-                                                        state, factory)
+                                                      state, factory)
 
     def new_instance(self, state=None):
         instance = self.class_.__new__(self.class_)
index 0f268de5f43f59ba63fa7a29aa1d1d195fb6337a..7469bcbdae7b11a5ba88651d2b060a17e6f636c3 100644 (file)
@@ -462,15 +462,15 @@ class MutableBase(object):
                     val._parents[state.obj()] = key
 
         event.listen(parent_cls, 'load', load,
-            raw=True, propagate=True)
+                     raw=True, propagate=True)
         event.listen(parent_cls, 'refresh', load,
-            raw=True, propagate=True)
+                     raw=True, propagate=True)
         event.listen(attribute, 'set', set,
-            raw=True, retval=True, propagate=True)
+                     raw=True, retval=True, propagate=True)
         event.listen(parent_cls, 'pickle', pickle,
-            raw=True, propagate=True)
+                     raw=True, propagate=True)
         event.listen(parent_cls, 'unpickle', unpickle,
-            raw=True, propagate=True)
+                     raw=True, propagate=True)
 
 
 class Mutable(MutableBase):
@@ -565,7 +565,6 @@ class Mutable(MutableBase):
         return sqltype
 
 
-
 class MutableComposite(MutableBase):
     """Mixin that defines transparent propagation of change
     events on a SQLAlchemy "composite" object to its
@@ -582,16 +581,17 @@ class MutableComposite(MutableBase):
 
             prop = object_mapper(parent).get_property(key)
             for value, attr_name in zip(
-                                    self.__composite_values__(),
-                                    prop._attribute_keys):
+                    self.__composite_values__(),
+                    prop._attribute_keys):
                 setattr(parent, attr_name, value)
 
+
 def _setup_composite_listener():
     def _listen_for_type(mapper, class_):
         for prop in mapper.iterate_properties:
             if (hasattr(prop, 'composite_class') and
-                isinstance(prop.composite_class, type) and
-                 issubclass(prop.composite_class, MutableComposite)):
+                    isinstance(prop.composite_class, type) and
+                    issubclass(prop.composite_class, MutableComposite)):
                 prop.composite_class._listen_on_attribute(
                     getattr(class_, prop.key), False, class_)
     if not event.contains(Mapper, "mapper_configured", _listen_for_type):
@@ -611,7 +611,6 @@ class MutableDict(Mutable, dict):
         dict.__setitem__(self, key, value)
         self.changed()
 
-
     def setdefault(self, key, value):
         result = dict.setdefault(self, key, value)
         self.changed()
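
As a usage sketch for :class:`.MutableDict` (``Base`` and the model are
hypothetical; ``PickleType`` stands in for any type holding a dict)::

    from sqlalchemy import Column, Integer, PickleType
    from sqlalchemy.ext.mutable import MutableDict

    class MyModel(Base):
        __tablename__ = 'my_model'
        id = Column(Integer, primary_key=True)
        data = Column(MutableDict.as_mutable(PickleType))

    # in-place mutations now mark the parent as changed:
    #     obj.data['key'] = 'value'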
index 8ffac5fea3684381f5d8635c5f8717a80a33043c..67fda44c40a82127840a3332906aa9c462ca4512 100644 (file)
@@ -83,11 +83,11 @@ With the above mapping the ``Bullet.position`` attribute is managed::
     s.bullets[2].position
     >>> 2
 
-The :class:`.OrderingList` construct only works with **changes** to a collection,
-and not the initial load from the database, and requires that the list be
-sorted when loaded.  Therefore, be sure to
-specify ``order_by`` on the :func:`.relationship` against the target ordering
-attribute, so that the ordering is correct when first loaded.
+The :class:`.OrderingList` construct only works with **changes** to a
+collection, and not the initial load from the database, and requires that the
+list be sorted when loaded.  Therefore, be sure to specify ``order_by`` on the
+:func:`.relationship` against the target ordering attribute, so that the
+ordering is correct when first loaded.
 
 .. warning::
 
@@ -111,11 +111,11 @@ attribute, so that the ordering is correct when first loaded.
       explicit configuration at the mapper level for sets of columns that
       are to be handled in this way.
 
-:func:`.ordering_list` takes the name of the related object's ordering attribute as
-an argument.  By default, the zero-based integer index of the object's
-position in the :func:`.ordering_list` is synchronized with the ordering attribute:
-index 0 will get position 0, index 1 position 1, etc.  To start numbering at 1
-or some other integer, provide ``count_from=1``.
+:func:`.ordering_list` takes the name of the related object's ordering
+attribute as an argument.  By default, the zero-based integer index of the
+object's position in the :func:`.ordering_list` is synchronized with the
+ordering attribute: index 0 will get position 0, index 1 position 1, etc.  To
+start numbering at 1 or some other integer, provide ``count_from=1``.
 
 
 """
@@ -359,7 +359,7 @@ class OrderingList(list):
 
     for func_name, func in list(locals().items()):
         if (util.callable(func) and func.__name__ == func_name and
-            not func.__doc__ and hasattr(list, func_name)):
+                not func.__doc__ and hasattr(list, func_name)):
             func.__doc__ = getattr(list, func_name).__doc__
     del func_name, func
 
index 17c1ed30c2775fb03b4e92dadd2012c75bef2995..bf8d67d8e1c83c5242a280eec51b3eab24df2591 100644 (file)
@@ -22,7 +22,8 @@ Usage is nearly the same as that of the standard Python pickle module::
 
     # ... define mappers
 
-    query = Session.query(MyClass).filter(MyClass.somedata=='foo').order_by(MyClass.sortkey)
+    query = Session.query(MyClass).\
+        filter(MyClass.somedata == 'foo').order_by(MyClass.sortkey)
 
     # pickle the query
     serialized = dumps(query)
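
The round trip completes with ``loads()``, as the module documentation
continues; roughly::

    from sqlalchemy.ext.serializer import loads

    # pass the same metadata and scoped session used for the mappings
    query2 = loads(serialized, metadata, Session)
    print(query2.all())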
@@ -70,7 +71,7 @@ def Serializer(*args, **kw):
     pickler = pickle.Pickler(*args, **kw)
 
     def persistent_id(obj):
-        #print "serializing:", repr(obj)
+        # print "serializing:", repr(obj)
         if isinstance(obj, QueryableAttribute):
             cls = obj.impl.class_
             key = obj.impl.key
@@ -79,11 +80,12 @@ def Serializer(*args, **kw):
             id = "mapper:" + b64encode(pickle.dumps(obj.class_))
         elif isinstance(obj, MapperProperty) and not obj.parent.non_primary:
             id = "mapperprop:" + b64encode(pickle.dumps(obj.parent.class_)) + \
-                                    ":" + obj.key
+                ":" + obj.key
         elif isinstance(obj, Table):
             id = "table:" + text_type(obj.key)
         elif isinstance(obj, Column) and isinstance(obj.table, Table):
-            id = "column:" + text_type(obj.table.key) + ":" + text_type(obj.key)
+            id = "column:" + \
+                text_type(obj.table.key) + ":" + text_type(obj.key)
         elif isinstance(obj, Session):
             id = "session:"
         elif isinstance(obj, Engine):
@@ -96,7 +98,7 @@ def Serializer(*args, **kw):
     return pickler
 
 our_ids = re.compile(
-            r'(mapperprop|mapper|table|column|session|attribute|engine):(.*)')
+    r'(mapperprop|mapper|table|column|session|attribute|engine):(.*)')
 
 
 def Deserializer(file, metadata=None, scoped_session=None, engine=None):
index dcd3b441dba2b53467ce7fe96a5529eeff1bfd4a..ab9f2ae384b2dff1aa5de1917d5d068980419b0f 100644 (file)
@@ -68,7 +68,7 @@ def inspect(subject, raiseerr=True):
 
     if raiseerr and (
             reg is None or ret is None
-        ):
+            ):
         raise exc.NoInspectionAvailable(
             "No inspection system is "
             "available for object of type %s" %
@@ -81,8 +81,8 @@ def _inspects(*types):
         for type_ in types:
             if type_ in _registrars:
                 raise AssertionError(
-                            "Type %s is already "
-                            "registered" % type_)
+                    "Type %s is already "
+                    "registered" % type_)
             _registrars[type_] = fn_or_cls
         return fn_or_cls
     return decorate
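
For orientation, the public entry point backed by this registry is
:func:`.inspect`; a sketch (the engine is hypothetical)::

    from sqlalchemy import create_engine, inspect

    engine = create_engine("sqlite://")   # hypothetical engine
    insp = inspect(engine)                # resolves to an Inspector
    print(insp.get_table_names())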
index f09a3ff81b7e7b400245af2be103f3a7e48f8088..ae11d1930585d1c59c7d7885c9dedd615de01c12 100644 (file)
@@ -80,8 +80,9 @@ class PoolListener(object):
 
         """
 
-        listener = util.as_interface(listener, methods=('connect',
-                                'first_connect', 'checkout', 'checkin'))
+        listener = util.as_interface(listener,
+                                     methods=('connect', 'first_connect',
+                                              'checkout', 'checkin'))
         if hasattr(listener, 'connect'):
             event.listen(self, 'connect', listener.connect)
         if hasattr(listener, 'first_connect'):
@@ -206,7 +207,7 @@ class ConnectionProxy(object):
                 statement,
                 parameters,
                 context,
-                ):
+            ):
                 return statement, parameters
 
             return listener.cursor_execute(
index 88d6de08933948b1b04d792ab85e023d83f0a1ba..b3c9ae024bc68f0b11f4feff59486170b406d538 100644 (file)
@@ -47,6 +47,7 @@ def class_logger(cls):
     _logged_classes.add(cls)
     return cls
 
+
 class Identified(object):
     logging_name = None
 
@@ -91,7 +92,7 @@ class InstanceLogger(object):
         # if echo flag is enabled and no handlers,
         # add a handler to the list
         if self._echo_map[echo] <= logging.INFO \
-            and not self.logger.handlers:
+           and not self.logger.handlers:
             _add_default_handler(self.logger)
 
     #
@@ -174,10 +175,11 @@ def instance_logger(instance, echoflag=None):
 
     if instance.logging_name:
         name = "%s.%s.%s" % (instance.__class__.__module__,
-                      instance.__class__.__name__, instance.logging_name)
+                             instance.__class__.__name__,
+                             instance.logging_name)
     else:
         name = "%s.%s" % (instance.__class__.__module__,
-                  instance.__class__.__name__)
+                          instance.__class__.__name__)
 
     instance._echo = echoflag
 
index 7150ce81fda998f58661d4bbff96ff10f62f4ec8..d26bbf32c19043528131a7e32c524187554b85d0 100644 (file)
@@ -67,7 +67,9 @@ reset_rollback = util.symbol('reset_rollback')
 reset_commit = util.symbol('reset_commit')
 reset_none = util.symbol('reset_none')
 
+
 class _ConnDialect(object):
+
     """partial implementation of :class:`.Dialect`
     which provides DBAPI connection methods.
 
@@ -76,6 +78,7 @@ class _ConnDialect(object):
     :class:`.Dialect`.
 
     """
+
     def do_rollback(self, dbapi_connection):
         dbapi_connection.rollback()
 
@@ -85,20 +88,22 @@ class _ConnDialect(object):
     def do_close(self, dbapi_connection):
         dbapi_connection.close()
 
+
 class Pool(log.Identified):
+
     """Abstract base class for connection pools."""
 
     _dialect = _ConnDialect()
 
     def __init__(self,
-                    creator, recycle=-1, echo=None,
-                    use_threadlocal=False,
-                    logging_name=None,
-                    reset_on_return=True,
-                    listeners=None,
-                    events=None,
-                    _dispatch=None,
-                    _dialect=None):
+                 creator, recycle=-1, echo=None,
+                 use_threadlocal=False,
+                 logging_name=None,
+                 reset_on_return=True,
+                 listeners=None,
+                 events=None,
+                 _dispatch=None,
+                 _dialect=None):
         """
         Construct a Pool.
 
@@ -134,10 +139,10 @@ class Pool(log.Identified):
 
           .. warning::  The :paramref:`.Pool.use_threadlocal` flag
              **does not affect the behavior** of :meth:`.Engine.connect`.
-             :meth:`.Engine.connect` makes use of the :meth:`.Pool.unique_connection`
-             method which **does not use thread local context**.
-             To produce a :class:`.Connection` which refers to the
-             :meth:`.Pool.connect` method, use
+             :meth:`.Engine.connect` makes use of the
+             :meth:`.Pool.unique_connection` method which **does not use thread
+             local context**.  To produce a :class:`.Connection` which refers
+             to the :meth:`.Pool.connect` method, use
              :meth:`.Engine.contextual_connect`.
 
              Note that other SQLAlchemy connectivity systems such as
@@ -221,8 +226,8 @@ class Pool(log.Identified):
             self._reset_on_return = reset_commit
         else:
             raise exc.ArgumentError(
-                        "Invalid value for 'reset_on_return': %r"
-                                    % reset_on_return)
+                "Invalid value for 'reset_on_return': %r"
+                % reset_on_return)
 
         self.echo = echo
         if _dispatch:
@@ -234,8 +239,8 @@ class Pool(log.Identified):
                 event.listen(self, target, fn)
         if listeners:
             util.warn_deprecated(
-                        "The 'listeners' argument to Pool (and "
-                        "create_engine()) is deprecated.  Use event.listen().")
+                "The 'listeners' argument to Pool (and "
+                "create_engine()) is deprecated.  Use event.listen().")
             for l in listeners:
                 self.add_listener(l)
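
As the deprecation message says, ``event.listen()`` is the replacement for
the ``listeners`` argument; a minimal sketch of the event-based form
(listener bodies are placeholders):

    from sqlalchemy import event
    from sqlalchemy.pool import Pool

    @event.listens_for(Pool, "connect")
    def on_connect(dbapi_con, connection_record):
        pass   # called once per newly created DBAPI connection

    @event.listens_for(Pool, "checkout")
    def on_checkout(dbapi_con, connection_record, connection_proxy):
        pass   # called each time a connection is checked out of the pool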
 
@@ -247,7 +252,7 @@ class Pool(log.Identified):
             raise
         except:
             self.logger.error("Exception closing connection %r",
-                            connection, exc_info=True)
+                              connection, exc_info=True)
 
     @util.deprecated(
         2.7, "Pool.add_listener is deprecated.  Use event.listen()")
@@ -267,8 +272,9 @@ class Pool(log.Identified):
 
         This method is equivalent to :meth:`.Pool.connect` when the
         :paramref:`.Pool.use_threadlocal` flag is not set to True.
-        When :paramref:`.Pool.use_threadlocal` is True, the :meth:`.Pool.unique_connection`
-        method provides a means of bypassing the threadlocal context.
+        When :paramref:`.Pool.use_threadlocal` is True, the
+        :meth:`.Pool.unique_connection` method provides a means of bypassing
+        the threadlocal context.
 
         """
         return _ConnectionFairy._checkout(self)
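
A sketch of the behavior described in this docstring, using an in-memory
SQLite creator purely for illustration:

    import sqlite3
    from sqlalchemy.pool import QueuePool

    p = QueuePool(lambda: sqlite3.connect(":memory:"), use_threadlocal=True)

    c1 = p.connect()             # with use_threadlocal=True, a second connect()
    c2 = p.connect()             # in the same thread returns the same connection
    assert c1.connection is c2.connection

    c3 = p.unique_connection()   # bypasses the threadlocal context
    assert c3.connection is not c1.connection
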
@@ -295,7 +301,6 @@ class Pool(log.Identified):
         if getattr(connection, 'is_valid', False):
             connection.invalidate(exception)
 
-
     def recreate(self):
         """Return a new :class:`.Pool`, of the same class as this one
         and configured with identical creation arguments.
@@ -371,6 +376,7 @@ class Pool(log.Identified):
 
 
 class _ConnectionRecord(object):
+
     """Internal object which maintains an individual DBAPI connection
     referenced by a :class:`.Pool`.
 
@@ -406,8 +412,8 @@ class _ConnectionRecord(object):
         self.finalize_callback = deque()
 
         pool.dispatch.first_connect.\
-                    for_modify(pool.dispatch).\
-                    exec_once(self.connection, self)
+            for_modify(pool.dispatch).\
+            exec_once(self.connection, self)
         pool.dispatch.connect(self.connection, self)
 
     connection = None
@@ -439,16 +445,16 @@ class _ConnectionRecord(object):
             raise
         fairy = _ConnectionFairy(dbapi_connection, rec)
         rec.fairy_ref = weakref.ref(
-                        fairy,
-                        lambda ref: _finalize_fairy and \
-                            _finalize_fairy(
-                                    dbapi_connection,
-                                    rec, pool, ref, pool._echo)
-                    )
+            fairy,
+            lambda ref: _finalize_fairy and
+            _finalize_fairy(
+                dbapi_connection,
+                rec, pool, ref, pool._echo)
+        )
         _refs.add(rec)
         if pool._echo:
             pool.logger.debug("Connection %r checked out from pool",
-                       dbapi_connection)
+                              dbapi_connection)
         return fairy
 
     def checkin(self):
@@ -462,7 +468,6 @@ class _ConnectionRecord(object):
             pool.dispatch.checkin(connection, self)
         pool._return_conn(self)
 
-
     def close(self):
         if self.connection is not None:
             self.__close()
@@ -471,9 +476,9 @@ class _ConnectionRecord(object):
         """Invalidate the DBAPI connection held by this :class:`._ConnectionRecord`.
 
         This method is called for all connection invalidations, including
-        when the :meth:`._ConnectionFairy.invalidate` or :meth:`.Connection.invalidate`
-        methods are called, as well as when any so-called "automatic invalidation"
-        condition occurs.
+        when the :meth:`._ConnectionFairy.invalidate` or
+        :meth:`.Connection.invalidate` methods are called, as well as when any
+        so-called "automatic invalidation" condition occurs.
 
         .. seealso::
 
@@ -504,14 +509,15 @@ class _ConnectionRecord(object):
         elif self.__pool._recycle > -1 and \
                 time.time() - self.starttime > self.__pool._recycle:
             self.__pool.logger.info(
-                    "Connection %r exceeded timeout; recycling",
-                    self.connection)
+                "Connection %r exceeded timeout; recycling",
+                self.connection)
             recycle = True
         elif self.__pool._invalidate_time > self.starttime:
             self.__pool.logger.info(
-                    "Connection %r invalidated due to pool invalidation; recycling",
-                    self.connection
-                    )
+                "Connection %r invalidated due to pool invalidation; "
+                "recycling",
+                self.connection
+            )
             recycle = True
 
         if recycle:
@@ -536,7 +542,8 @@ class _ConnectionRecord(object):
             raise
 
 
-def _finalize_fairy(connection, connection_record, pool, ref, echo, fairy=None):
+def _finalize_fairy(connection, connection_record,
+                    pool, ref, echo, fairy=None):
     """Cleanup for a :class:`._ConnectionFairy` whether or not it's already
     been garbage collected.
 
@@ -544,13 +551,13 @@ def _finalize_fairy(connection, connection_record, pool, ref, echo, fairy=None):
     _refs.discard(connection_record)
 
     if ref is not None and \
-                connection_record.fairy_ref is not ref:
+            connection_record.fairy_ref is not ref:
         return
 
     if connection is not None:
         if connection_record and echo:
             pool.logger.debug("Connection %r being returned to pool",
-                                    connection)
+                              connection)
 
         try:
             fairy = fairy or _ConnectionFairy(connection, connection_record)
@@ -561,7 +568,8 @@ def _finalize_fairy(connection, connection_record, pool, ref, echo, fairy=None):
             if not connection_record:
                 pool._close_connection(connection)
         except Exception as e:
-            pool.logger.error("Exception during reset or similar", exc_info=True)
+            pool.logger.error(
+                "Exception during reset or similar", exc_info=True)
             if connection_record:
                 connection_record.invalidate(e=e)
             if isinstance(e, (SystemExit, KeyboardInterrupt)):
@@ -575,6 +583,7 @@ _refs = set()
 
 
 class _ConnectionFairy(object):
+
     """Proxies a DBAPI connection and provides return-on-dereference
     support.
 
@@ -582,10 +591,11 @@ class _ConnectionFairy(object):
     to provide context management to a DBAPI connection delivered by
     that :class:`.Pool`.
 
-    The name "fairy" is inspired by the fact that the :class:`._ConnectionFairy`
-    object's lifespan is transitory, as it lasts only for the length of a
-    specific DBAPI connection being checked out from the pool, and additionally
-    that as a transparent proxy, it is mostly invisible.
+    The name "fairy" is inspired by the fact that the
+    :class:`._ConnectionFairy` object's lifespan is transitory, as it lasts
+    only for the length of a specific DBAPI connection being checked out from
+    the pool, and additionally that as a transparent proxy, it is mostly
+    invisible.
 
     .. seealso::
 
@@ -611,8 +621,8 @@ class _ConnectionFairy(object):
     _reset_agent = None
     """Refer to an object with a ``.commit()`` and ``.rollback()`` method;
     if non-None, the "reset-on-return" feature will call upon this object
-    rather than directly against the dialect-level do_rollback() and do_commit()
-    methods.
+    rather than directly against the dialect-level do_rollback() and
+    do_commit() methods.
 
     In practice, a :class:`.Connection` assigns a :class:`.Transaction` object
     to this variable when one is in scope so that the :class:`.Transaction`
@@ -649,8 +659,8 @@ class _ConnectionFairy(object):
         while attempts > 0:
             try:
                 pool.dispatch.checkout(fairy.connection,
-                                            fairy._connection_record,
-                                            fairy)
+                                       fairy._connection_record,
+                                       fairy)
                 return fairy
             except exc.DisconnectionError as e:
                 pool.logger.info(
@@ -668,7 +678,7 @@ class _ConnectionFairy(object):
 
     def _checkin(self):
         _finalize_fairy(self.connection, self._connection_record,
-                            self._pool, None, self._echo, fairy=self)
+                        self._pool, None, self._echo, fairy=self)
         self.connection = None
         self._connection_record = None
 
@@ -680,9 +690,9 @@ class _ConnectionFairy(object):
         if pool._reset_on_return is reset_rollback:
             if echo:
                 pool.logger.debug("Connection %s rollback-on-return%s",
-                                                self.connection,
-                                                ", via agent"
-                                                if self._reset_agent else "")
+                                  self.connection,
+                                  ", via agent"
+                                  if self._reset_agent else "")
             if self._reset_agent:
                 self._reset_agent.rollback()
             else:
@@ -690,9 +700,9 @@ class _ConnectionFairy(object):
         elif pool._reset_on_return is reset_commit:
             if echo:
                 pool.logger.debug("Connection %s commit-on-return%s",
-                                                self.connection,
-                                                ", via agent"
-                                                if self._reset_agent else "")
+                                  self.connection,
+                                  ", via agent"
+                                  if self._reset_agent else "")
             if self._reset_agent:
                 self._reset_agent.commit()
             else:
@@ -759,7 +769,6 @@ class _ConnectionFairy(object):
     def __getattr__(self, key):
         return getattr(self.connection, key)
 
-
     def detach(self):
         """Separate this connection from its Pool.
 
@@ -788,8 +797,8 @@ class _ConnectionFairy(object):
             self._checkin()
 
 
-
 class SingletonThreadPool(Pool):
+
     """A Pool that maintains one connection per thread.
 
     Maintains one connection per thread, never moving a connection to a
@@ -816,14 +825,14 @@ class SingletonThreadPool(Pool):
     def recreate(self):
         self.logger.info("Pool recreating")
         return self.__class__(self._creator,
-            pool_size=self.size,
-            recycle=self._recycle,
-            echo=self.echo,
-            logging_name=self._orig_logging_name,
-            use_threadlocal=self._use_threadlocal,
-            reset_on_return=self._reset_on_return,
-            _dispatch=self.dispatch,
-            _dialect=self._dialect)
+                              pool_size=self.size,
+                              recycle=self._recycle,
+                              echo=self.echo,
+                              logging_name=self._orig_logging_name,
+                              use_threadlocal=self._use_threadlocal,
+                              reset_on_return=self._reset_on_return,
+                              _dispatch=self.dispatch,
+                              _dialect=self._dialect)
 
     def dispose(self):
         """Dispose of this pool."""
@@ -847,7 +856,7 @@ class SingletonThreadPool(Pool):
 
     def status(self):
         return "SingletonThreadPool id:%d size: %d" % \
-                            (id(self), len(self._all_conns))
+            (id(self), len(self._all_conns))
 
     def _do_return_conn(self, conn):
         pass
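
SingletonThreadPool is primarily intended for SQLite, where a connection may
not travel between threads; a usage sketch (this pool is selected by default
for SQLite ``:memory:`` databases, shown explicitly here):

    from sqlalchemy import create_engine
    from sqlalchemy.pool import SingletonThreadPool

    engine = create_engine("sqlite://", poolclass=SingletonThreadPool,
                           pool_size=5)
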
@@ -868,6 +877,7 @@ class SingletonThreadPool(Pool):
 
 
 class QueuePool(Pool):
+
     """A :class:`.Pool` that imposes a limit on the number of open connections.
 
     :class:`.QueuePool` is the default pooling implementation used for
@@ -908,9 +918,10 @@ class QueuePool(Pool):
         :param timeout: The number of seconds to wait before giving up
           on returning a connection. Defaults to 30.
 
-        :param \**kw: Other keyword arguments including :paramref:`.Pool.recycle`,
-         :paramref:`.Pool.echo`, :paramref:`.Pool.reset_on_return` and others
-         are passed to the :class:`.Pool` constructor.
+        :param \**kw: Other keyword arguments including
+          :paramref:`.Pool.recycle`, :paramref:`.Pool.echo`,
+          :paramref:`.Pool.reset_on_return` and others are passed to the
+          :class:`.Pool` constructor.
 
         """
         Pool.__init__(self, creator, **kw)
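
These parameters are most often passed through ``create_engine()``; a sketch
with a hypothetical database URL:

    from sqlalchemy import create_engine

    engine = create_engine(
        "postgresql://scott:tiger@localhost/test",   # hypothetical URL
        pool_size=5,         # connections held open in the pool
        max_overflow=10,     # extra connections allowed beyond pool_size
        pool_timeout=30,     # seconds to wait before raising TimeoutError
        pool_recycle=3600,   # recycle connections older than one hour
    )
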
@@ -941,9 +952,9 @@ class QueuePool(Pool):
                     return self._do_get()
                 else:
                     raise exc.TimeoutError(
-                            "QueuePool limit of size %d overflow %d reached, "
-                            "connection timed out, timeout %d" %
-                            (self.size(), self.overflow(), self._timeout))
+                        "QueuePool limit of size %d overflow %d reached, "
+                        "connection timed out, timeout %d" %
+                        (self.size(), self.overflow(), self._timeout))
 
             if self._inc_overflow():
                 try:
@@ -976,14 +987,14 @@ class QueuePool(Pool):
     def recreate(self):
         self.logger.info("Pool recreating")
         return self.__class__(self._creator, pool_size=self._pool.maxsize,
-                          max_overflow=self._max_overflow,
-                          timeout=self._timeout,
-                          recycle=self._recycle, echo=self.echo,
-                          logging_name=self._orig_logging_name,
-                          use_threadlocal=self._use_threadlocal,
-                          reset_on_return=self._reset_on_return,
-                          _dispatch=self.dispatch,
-                          _dialect=self._dialect)
+                              max_overflow=self._max_overflow,
+                              timeout=self._timeout,
+                              recycle=self._recycle, echo=self.echo,
+                              logging_name=self._orig_logging_name,
+                              use_threadlocal=self._use_threadlocal,
+                              reset_on_return=self._reset_on_return,
+                              _dispatch=self.dispatch,
+                              _dialect=self._dialect)
 
     def dispose(self):
         while True:
@@ -998,11 +1009,11 @@ class QueuePool(Pool):
 
     def status(self):
         return "Pool size: %d  Connections in pool: %d "\
-                "Current Overflow: %d Current Checked out "\
-                "connections: %d" % (self.size(),
-                                    self.checkedin(),
-                                    self.overflow(),
-                                    self.checkedout())
+            "Current Overflow: %d Current Checked out "\
+            "connections: %d" % (self.size(),
+                                 self.checkedin(),
+                                 self.overflow(),
+                                 self.checkedout())
 
     def size(self):
         return self._pool.maxsize
@@ -1018,6 +1029,7 @@ class QueuePool(Pool):
 
 
 class NullPool(Pool):
+
     """A Pool which does not pool connections.
 
     Instead it literally opens and closes the underlying DB-API connection
@@ -1046,19 +1058,20 @@ class NullPool(Pool):
         self.logger.info("Pool recreating")
 
         return self.__class__(self._creator,
-            recycle=self._recycle,
-            echo=self.echo,
-            logging_name=self._orig_logging_name,
-            use_threadlocal=self._use_threadlocal,
-            reset_on_return=self._reset_on_return,
-            _dispatch=self.dispatch,
-            _dialect=self._dialect)
+                              recycle=self._recycle,
+                              echo=self.echo,
+                              logging_name=self._orig_logging_name,
+                              use_threadlocal=self._use_threadlocal,
+                              reset_on_return=self._reset_on_return,
+                              _dispatch=self.dispatch,
+                              _dialect=self._dialect)
 
     def dispose(self):
         pass
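
A usage sketch: with NullPool every checkout opens a brand-new DBAPI
connection and every checkin closes it, so there is nothing to dispose:

    from sqlalchemy import create_engine
    from sqlalchemy.pool import NullPool

    engine = create_engine("sqlite:///some.db", poolclass=NullPool)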
 
 
 class StaticPool(Pool):
+
     """A Pool of exactly one connection, used for all requests.
 
     Reconnect-related functions such as ``recycle`` and connection
@@ -1106,6 +1119,7 @@ class StaticPool(Pool):
 
 
 class AssertionPool(Pool):
+
     """A :class:`.Pool` that allows at most one checked out connection at
     any given time.
 
@@ -1119,6 +1133,7 @@ class AssertionPool(Pool):
         this in the assertion error raised.
 
     """
+
     def __init__(self, *args, **kw):
         self._conn = None
         self._checked_out = False
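
A sketch of the assertion behavior, which is mainly useful in test suites:

    from sqlalchemy import create_engine
    from sqlalchemy.pool import AssertionPool

    engine = create_engine("sqlite://", poolclass=AssertionPool)
    c1 = engine.connect()
    c2 = engine.connect()   # raises AssertionError; a connection is
                            # already checked out
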
@@ -1143,9 +1158,9 @@ class AssertionPool(Pool):
     def recreate(self):
         self.logger.info("Pool recreating")
         return self.__class__(self._creator, echo=self.echo,
-                            logging_name=self._orig_logging_name,
-                            _dispatch=self.dispatch,
-                            _dialect=self._dialect)
+                              logging_name=self._orig_logging_name,
+                              _dispatch=self.dispatch,
+                              _dialect=self._dialect)
 
     def _do_get(self):
         if self._checked_out:
@@ -1166,6 +1181,7 @@ class AssertionPool(Pool):
 
 
 class _DBProxy(object):
+
     """Layers connection pooling behavior on top of a standard DB-API module.
 
     Proxies a DB-API 2.0 connect() call to a connection pool keyed to the
@@ -1211,8 +1227,8 @@ class _DBProxy(object):
             try:
                 if key not in self.pools:
                     kw.pop('sa_pool_key', None)
-                    pool = self.poolclass(lambda:
-                                self.module.connect(*args, **kw), **self.kw)
+                    pool = self.poolclass(
+                        lambda: self.module.connect(*args, **kw), **self.kw)
                     self.pools[key] = pool
                     return pool
                 else:
index 0f47f4e66af5bb5f0eb7571dfd02f7a0a312b68a..3794b01f5892708047159c5ed7c965309864d9fa 100644 (file)
@@ -33,15 +33,17 @@ def str_to_datetime_processor_factory(regexp, type_):
                 m = rmatch(value)
             except TypeError:
                 raise ValueError("Couldn't parse %s string '%r' "
-                                "- value is not a string." %
-                                (type_.__name__, value))
+                                 "- value is not a string." %
+                                 (type_.__name__, value))
             if m is None:
                 raise ValueError("Couldn't parse %s string: "
-                                "'%s'" % (type_.__name__, value))
+                                 "'%s'" % (type_.__name__, value))
             if has_named_groups:
                 groups = m.groupdict(0)
-                return type_(**dict(list(zip(iter(groups.keys()),
-                                        list(map(int, iter(groups.values())))))))
+                return type_(**dict(list(zip(
+                    iter(groups.keys()),
+                    list(map(int, iter(groups.values())))
+                ))))
             else:
                 return type_(*list(map(int, m.groups(0))))
     return process
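
The factory's result behaves as follows; a sketch reusing the DATETIME_RE
pattern that appears further down in this file:

    import datetime
    import re
    from sqlalchemy.processors import str_to_datetime_processor_factory

    DATETIME_RE = re.compile(r"(\d+)-(\d+)-(\d+) (\d+):(\d+):(\d+)(?:\.(\d+))?")
    process = str_to_datetime_processor_factory(DATETIME_RE, datetime.datetime)

    process("2014-07-10 22:28:49")   # datetime.datetime(2014, 7, 10, 22, 28, 49)
    process("not a date")            # ValueError: Couldn't parse datetime string
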
@@ -112,7 +114,7 @@ def py_fallback():
             return value and True or False
 
     DATETIME_RE = re.compile(
-                        "(\d+)-(\d+)-(\d+) (\d+):(\d+):(\d+)(?:\.(\d+))?")
+        "(\d+)-(\d+)-(\d+) (\d+):(\d+):(\d+)(?:\.(\d+))?")
     TIME_RE = re.compile("(\d+):(\d+):(\d+)(?:\.(\d+))?")
     DATE_RE = re.compile("(\d+)-(\d+)-(\d+)")
 
@@ -124,10 +126,10 @@ def py_fallback():
 
 try:
     from sqlalchemy.cprocessors import UnicodeResultProcessor, \
-                                       DecimalResultProcessor, \
-                                       to_float, to_str, int_to_boolean, \
-                                       str_to_datetime, str_to_time, \
-                                       str_to_date
+        DecimalResultProcessor, \
+        to_float, to_str, int_to_boolean, \
+        str_to_datetime, str_to_time, \
+        str_to_date
 
     def to_unicode_processor_factory(encoding, errors=None):
         if errors is not None:
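
The truncated ``try`` block above implements the optional C-extension
pattern: the compiled versions are imported when available, otherwise the
module falls back to the pure-Python implementations. Reduced to a sketch:

    try:
        # compiled variants from the optional C extensions
        from sqlalchemy.cprocessors import str_to_datetime
    except ImportError:
        # processors.py restores the pure-Python versions here,
        # via globals().update(py_fallback())
        pass
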
index 75fabcad81d5d8c7d11e2bf0f2a9a2a9e90e783f..b49e389ac35769d59243087e4357ea1e131001e1 100644 (file)
 """
 
 __all__ = ['TypeEngine', 'TypeDecorator', 'UserDefinedType',
-            'INT', 'CHAR', 'VARCHAR', 'NCHAR', 'NVARCHAR', 'TEXT', 'Text',
-            'FLOAT', 'NUMERIC', 'REAL', 'DECIMAL', 'TIMESTAMP', 'DATETIME',
-            'CLOB', 'BLOB', 'BINARY', 'VARBINARY', 'BOOLEAN', 'BIGINT',
-            'SMALLINT', 'INTEGER', 'DATE', 'TIME', 'String', 'Integer',
-            'SmallInteger', 'BigInteger', 'Numeric', 'Float', 'DateTime',
-            'Date', 'Time', 'LargeBinary', 'Binary', 'Boolean', 'Unicode',
-            'Concatenable', 'UnicodeText', 'PickleType', 'Interval', 'Enum']
+           'INT', 'CHAR', 'VARCHAR', 'NCHAR', 'NVARCHAR', 'TEXT', 'Text',
+           'FLOAT', 'NUMERIC', 'REAL', 'DECIMAL', 'TIMESTAMP', 'DATETIME',
+           'CLOB', 'BLOB', 'BINARY', 'VARBINARY', 'BOOLEAN', 'BIGINT',
+           'SMALLINT', 'INTEGER', 'DATE', 'TIME', 'String', 'Integer',
+           'SmallInteger', 'BigInteger', 'Numeric', 'Float', 'DateTime',
+           'Date', 'Time', 'LargeBinary', 'Binary', 'Boolean', 'Unicode',
+           'Concatenable', 'UnicodeText', 'PickleType', 'Interval', 'Enum']
 
 from .sql.type_api import (
     adapt_type,
@@ -75,4 +75,3 @@ from .sql.sqltypes import (
     VARCHAR,
     _type_map
     )
-