__version__ = '1.0.0'
+
def __go(lcls):
global __all__
import inspect as _inspect
__all__ = sorted(name for name, obj in lcls.items()
- if not (name.startswith('_') or _inspect.ismodule(obj)))
+ if not (name.startswith('_') or _inspect.ismodule(obj)))
_sa_util.dependencies.resolve_all("sqlalchemy")
-__go(locals())
\ No newline at end of file
+__go(locals())
if issubclass(errorclass, MxOdbcWarning):
errorclass.__bases__ = (Warning,)
warnings.warn(message=str(errorvalue),
- category=errorclass,
- stacklevel=2)
+ category=errorclass,
+ stacklevel=2)
else:
raise errorclass(errorvalue)
return error_handler
def _get_direct(self, context):
if context:
native_odbc_execute = context.execution_options.\
- get('native_odbc_execute', 'auto')
+ get('native_odbc_execute', 'auto')
# default to direct=True in all cases; it is more generally
# compatible, especially with SQL Server
return False if native_odbc_execute is True else True
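# Illustrative only: how a caller might set the execution option read
# above (engine and stmt are hypothetical):
#
#     conn = engine.connect()
#     conn.execution_options(native_odbc_execute=True).execute(stmt)
#
# With the option forced to True, _get_direct() returns False, i.e.
# the statement does not run in "direct" mode.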
connectors = [util.unquote_plus(keys.pop('odbc_connect'))]
else:
dsn_connection = 'dsn' in keys or \
- ('host' in keys and 'database' not in keys)
+ ('host' in keys and 'database' not in keys)
if dsn_connection:
- connectors = ['dsn=%s' % (keys.pop('host', '') or \
- keys.pop('dsn', ''))]
+ connectors = ['dsn=%s' % (keys.pop('host', '') or
+ keys.pop('dsn', ''))]
else:
port = ''
- if 'port' in keys and not 'port' in query:
+ if 'port' in keys and 'port' not in query:
port = ',%d' % int(keys.pop('port'))
connectors = ["DRIVER={%s}" %
- keys.pop('driver', self.pyodbc_driver_name),
+ keys.pop('driver', self.pyodbc_driver_name),
'Server=%s%s' % (keys.pop('host', ''), port),
'Database=%s' % keys.pop('database', '')]
# you query a cp1253 encoded database from a latin1 client...
if 'odbc_autotranslate' in keys:
connectors.append("AutoTranslate=%s" %
- keys.pop("odbc_autotranslate"))
+ keys.pop("odbc_autotranslate"))
connectors.extend(['%s=%s' % (k, v) for k, v in keys.items()])
return [[";".join(connectors)], connect_args]
def is_disconnect(self, e, connection, cursor):
if isinstance(e, self.dbapi.ProgrammingError):
return "The cursor's connection has been closed." in str(e) or \
- 'Attempt to use a closed connection.' in str(e)
+ 'Attempt to use a closed connection.' in str(e)
elif isinstance(e, self.dbapi.Error):
return '[08S01]' in str(e)
else:
_sql_driver_name = dbapi_con.getinfo(pyodbc.SQL_DRIVER_NAME)
self.freetds = bool(re.match(r".*libtdsodbc.*\.so", _sql_driver_name
- ))
+ ))
self.easysoft = bool(re.match(r".*libessqlsrv.*\.so", _sql_driver_name
- ))
+ ))
if self.freetds:
self.freetds_driver_version = dbapi_con.getinfo(
def _parse_dbapi_version(self, vers):
m = re.match(
- r'(?:py.*-)?([\d\.]+)(?:-(\w+))?',
- vers
- )
+ r'(?:py.*-)?([\d\.]+)(?:-(\w+))?',
+ vers
+ )
if not m:
return ()
vers = tuple([int(x) for x in m.group(1).split(".")])
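# Worked examples of the regex above (inputs illustrative):
#   "py3-3.0.1-beta4" -> group 1 "3.0.1" -> (3, 0, 1), tag "beta4" in group 2
#   "2.1.9"           -> (2, 1, 9), no trailing tag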
"""Create a JDBC url from a :class:`~sqlalchemy.engine.url.URL`"""
return 'jdbc:%s://%s%s/%s' % (self.jdbc_db_name, url.host,
url.port is not None
- and ':%s' % url.port or '',
+ and ':%s' % url.port or '',
url.database)
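# e.g. (hypothetical values): with jdbc_db_name 'sybase', host 'dbhost',
# port 5000 and database 'test', the template yields:
#     jdbc:sybase://dbhost:5000/test
# (the ':port' fragment is omitted when url.port is None)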
def create_connect_args(self, url):
opts = self._driver_kwargs()
opts.update(url.query)
return [
- [self._create_jdbc_url(url),
- url.username, url.password,
- self.jdbc_driver_name],
- opts]
+ [self._create_jdbc_url(url),
+ url.username, url.password,
+ self.jdbc_driver_name],
+ opts]
def is_disconnect(self, e, connection, cursor):
if not isinstance(e, self.dbapi.ProgrammingError):
'sqlite',
'oracle',
'sybase',
- )
+)
RootTransaction,
Transaction,
TwoPhaseTransaction,
- )
+)
from .result import (
BufferedColumnResultProxy,
FullyBufferedResultProxy,
ResultProxy,
RowProxy,
- )
+)
from .util import (
connection_memoize
- )
+)
from . import util, strategies
__all__ = (
'create_engine',
'engine_from_config',
- )
+)
# want to handle any of the engine's events in that case.
self.dispatch = self.dispatch._join(engine.dispatch)
self._has_events = _has_events or (
- _has_events is None and engine._has_events)
+ _has_events is None and engine._has_events)
self._echo = self.engine._should_log_info()
if _execution_options:
"""
return self.engine._connection_cls(
- self.engine,
- self.__connection,
- _branch=True,
- _has_events=self._has_events,
- _dispatch=self.dispatch)
+ self.engine,
+ self.__connection,
+ _branch=True,
+ _has_events=self._has_events,
+ _dispatch=self.dispatch)
def _clone(self):
"""Create a shallow copy of this Connection.
if self.__can_reconnect and self.__invalid:
if self.__transaction is not None:
raise exc.InvalidRequestError(
- "Can't reconnect until invalid "
- "transaction is rolled back")
+ "Can't reconnect until invalid "
+ "transaction is rolled back")
self.__connection = self.engine.raw_connection()
self.__invalid = False
return self.__connection
:meth:`.Connection.invalidate` method is called, at the DBAPI
level all state associated with this transaction is lost, as
the DBAPI connection is closed. The :class:`.Connection`
- will not allow a reconnection to proceed until the :class:`.Transaction`
- object is ended, by calling the :meth:`.Transaction.rollback`
- method; until that point, any attempt at continuing to use the
- :class:`.Connection` will raise an
+ will not allow a reconnection to proceed until the
+ :class:`.Transaction` object is ended, by calling the
+ :meth:`.Transaction.rollback` method; until that point, any attempt at
+ continuing to use the :class:`.Connection` will raise an
:class:`~sqlalchemy.exc.InvalidRequestError`.
This is to prevent applications from accidentally
continuing ongoing transactional operations despite the
invalidation.
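For example, a minimal sketch (``conn`` is assumed to be a
:class:`.Connection` with a transaction in progress, and ``select`` is
imported from ``sqlalchemy``)::

    trans = conn.begin()
    conn.invalidate()
    conn.execute(select([1]))   # raises InvalidRequestError
    trans.rollback()
    conn.execute(select([1]))   # proceeds on a newly acquired connection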
The :meth:`.Connection.invalidate` method, just like auto-invalidation,
- will at the connection pool level invoke the :meth:`.PoolEvents.invalidate`
- event.
+ will at the connection pool level invoke the
+ :meth:`.PoolEvents.invalidate` event.
.. seealso::
if self._still_open_and_connection_is_valid:
assert isinstance(self.__transaction, TwoPhaseTransaction)
try:
- self.engine.dialect.do_rollback_twophase(self, xid, is_prepared)
+ self.engine.dialect.do_rollback_twophase(
+ self, xid, is_prepared)
finally:
if self.connection._reset_agent is self.__transaction:
self.connection._reset_agent = None
meth = object._execute_on_connection
except AttributeError:
raise exc.InvalidRequestError(
- "Unexecutable object type: %s" %
- type(object))
+ "Unexecutable object type: %s" %
+ type(object))
else:
return meth(self, multiparams, params)
"""Execute a sql.FunctionElement object."""
return self._execute_clauseelement(func.select(),
- multiparams, params)
+ multiparams, params)
def _execute_default(self, default, multiparams, params):
"""Execute a schema.ColumnDefault object."""
dialect = self.dialect
ctx = dialect.execution_ctx_cls._init_default(
- dialect, self, conn)
+ dialect, self, conn)
except Exception as e:
self._handle_dbapi_exception(e, None, None, None, None)
if self._has_events or self.engine._has_events:
self.dispatch.after_execute(self,
- default, multiparams, params, ret)
+ default, multiparams, params, ret)
return ret
)
if self._has_events or self.engine._has_events:
self.dispatch.after_execute(self,
- ddl, multiparams, params, ret)
+ ddl, multiparams, params, ret)
return ret
def _execute_clauseelement(self, elem, multiparams, params):
compiled_sql = self._execution_options['compiled_cache'][key]
else:
compiled_sql = elem.compile(
- dialect=dialect, column_keys=keys,
- inline=len(distilled_params) > 1)
+ dialect=dialect, column_keys=keys,
+ inline=len(distilled_params) > 1)
self._execution_options['compiled_cache'][key] = compiled_sql
else:
compiled_sql = elem.compile(
- dialect=dialect, column_keys=keys,
- inline=len(distilled_params) > 1)
+ dialect=dialect, column_keys=keys,
+ inline=len(distilled_params) > 1)
ret = self._execute_context(
dialect,
)
if self._has_events or self.engine._has_events:
self.dispatch.after_execute(self,
- elem, multiparams, params, ret)
+ elem, multiparams, params, ret)
return ret
def _execute_compiled(self, compiled, multiparams, params):
)
if self._has_events or self.engine._has_events:
self.dispatch.after_execute(self,
- compiled, multiparams, params, ret)
+ compiled, multiparams, params, ret)
return ret
def _execute_text(self, statement, multiparams, params):
)
if self._has_events or self.engine._has_events:
self.dispatch.after_execute(self,
- statement, multiparams, params, ret)
+ statement, multiparams, params, ret)
return ret
def _execute_context(self, dialect, constructor,
- statement, parameters,
- *args):
+ statement, parameters,
+ *args):
"""Create an :class:`.ExecutionContext` and execute, returning
a :class:`.ResultProxy`."""
context = constructor(dialect, self, conn, *args)
except Exception as e:
self._handle_dbapi_exception(e,
- util.text_type(statement), parameters,
- None, None)
+ util.text_type(statement), parameters,
+ None, None)
if context.compiled:
context.pre_exec()
cursor, statement, parameters = context.cursor, \
- context.statement, \
- context.parameters
+ context.statement, \
+ context.parameters
if not context.executemany:
parameters = parameters[0]
if self._has_events or self.engine._has_events:
for fn in self.dispatch.before_cursor_execute:
statement, parameters = \
- fn(self, cursor, statement, parameters,
- context, context.executemany)
+ fn(self, cursor, statement, parameters,
+ context, context.executemany)
if self._echo:
self.engine.logger.info(statement)
- self.engine.logger.info("%r",
- sql_util._repr_params(parameters, batches=10))
+ self.engine.logger.info(
+ "%r",
+ sql_util._repr_params(parameters, batches=10)
+ )
try:
if context.executemany:
for fn in () if not self.dialect._has_events \
- else self.dialect.dispatch.do_executemany:
+ else self.dialect.dispatch.do_executemany:
if fn(cursor, statement, parameters, context):
break
else:
self.dialect.do_executemany(
- cursor,
- statement,
- parameters,
- context)
+ cursor,
+ statement,
+ parameters,
+ context)
elif not parameters and context.no_parameters:
for fn in () if not self.dialect._has_events \
- else self.dialect.dispatch.do_execute_no_params:
+ else self.dialect.dispatch.do_execute_no_params:
if fn(cursor, statement, context):
break
else:
self.dialect.do_execute_no_params(
- cursor,
- statement,
- context)
+ cursor,
+ statement,
+ context)
else:
for fn in () if not self.dialect._has_events \
- else self.dialect.dispatch.do_execute:
+ else self.dialect.dispatch.do_execute:
if fn(cursor, statement, parameters, context):
break
else:
self.dialect.do_execute(
- cursor,
- statement,
- parameters,
- context)
+ cursor,
+ statement,
+ parameters,
+ context)
except Exception as e:
self._handle_dbapi_exception(
- e,
- statement,
- parameters,
- cursor,
- context)
+ e,
+ statement,
+ parameters,
+ cursor,
+ context)
if self._has_events or self.engine._has_events:
self.dispatch.after_cursor_execute(self, cursor,
- statement,
- parameters,
- context,
- context.executemany)
+ statement,
+ parameters,
+ context,
+ context.executemany)
if context.compiled:
context.post_exec()
if self._has_events or self.engine._has_events:
for fn in self.dispatch.before_cursor_execute:
statement, parameters = \
- fn(self, cursor, statement, parameters,
- context,
- False)
+ fn(self, cursor, statement, parameters,
+ context,
+ False)
if self._echo:
self.engine.logger.info(statement)
self.engine.logger.info("%r", parameters)
try:
for fn in () if not self.dialect._has_events \
- else self.dialect.dispatch.do_execute:
+ else self.dialect.dispatch.do_execute:
if fn(cursor, statement, parameters, context):
break
else:
self.dialect.do_execute(
- cursor,
- statement,
- parameters,
- context)
+ cursor,
+ statement,
+ parameters,
+ context)
except Exception as e:
self._handle_dbapi_exception(
- e,
- statement,
- parameters,
- cursor,
- context)
+ e,
+ statement,
+ parameters,
+ cursor,
+ context)
if self._has_events or self.engine._has_events:
self.dispatch.after_cursor_execute(self, cursor,
- statement,
- parameters,
- context,
- False)
+ statement,
+ parameters,
+ context,
+ False)
def _safe_close_cursor(self, cursor):
"""Close the given cursor, catching exceptions
except Exception:
# log the error through the connection pool's logger.
self.engine.pool.logger.error(
- "Error closing cursor", exc_info=True)
+ "Error closing cursor", exc_info=True)
_reentrant_error = False
_is_disconnect = False
def _handle_dbapi_exception(self,
- e,
- statement,
- parameters,
- cursor,
- context):
+ e,
+ statement,
+ parameters,
+ cursor,
+ context):
exc_info = sys.exc_info()
if self._reentrant_error:
util.raise_from_cause(
- exc.DBAPIError.instance(statement,
- parameters,
- e,
- self.dialect.dbapi.Error),
- exc_info
- )
+ exc.DBAPIError.instance(statement,
+ parameters,
+ e,
+ self.dialect.dbapi.Error),
+ exc_info
+ )
self._reentrant_error = True
try:
# non-DBAPI error - if we already got a context,
# legacy dbapi_error event
if should_wrap and context:
self.dispatch.dbapi_error(self,
- cursor,
- statement,
- parameters,
- context,
- e)
+ cursor,
+ statement,
+ parameters,
+ context,
+ e)
# new handle_error event
ctx = ExceptionContextImpl(
util.raise_from_cause(newraise, exc_info)
elif should_wrap:
util.raise_from_cause(
- sqlalchemy_exception,
- exc_info
- )
+ sqlalchemy_exception,
+ exc_info
+ )
else:
util.reraise(*exc_info)
def _run_visitor(self, visitorcallable, element, **kwargs):
visitorcallable(self.dialect, self,
- **kwargs).traverse_single(element)
+ **kwargs).traverse_single(element)
class ExceptionContextImpl(ExceptionContext):
"""Implement the :class:`.ExceptionContext` interface."""
def __init__(self, exception, sqlalchemy_exception,
- connection, cursor, statement, parameters,
- context, is_disconnect):
+ connection, cursor, statement, parameters,
+ context, is_disconnect):
self.connection = connection
self.sqlalchemy_exception = sqlalchemy_exception
self.original_exception = exception
The interface is the same as that of :class:`.Transaction`.
"""
+
def __init__(self, connection, parent):
super(NestedTransaction, self).__init__(connection, parent)
self._savepoint = self.connection._savepoint_impl()
def _do_rollback(self):
if self.is_active:
self.connection._rollback_to_savepoint_impl(
- self._savepoint, self._parent)
+ self._savepoint, self._parent)
def _do_commit(self):
if self.is_active:
self.connection._release_savepoint_impl(
- self._savepoint, self._parent)
+ self._savepoint, self._parent)
class TwoPhaseTransaction(Transaction):
with the addition of the :meth:`prepare` method.
"""
+
def __init__(self, connection, xid):
super(TwoPhaseTransaction, self).__init__(connection, None)
self._is_prepared = False
_connection_cls = Connection
def __init__(self, pool, dialect, url,
- logging_name=None, echo=None, proxy=None,
- execution_options=None
- ):
+ logging_name=None, echo=None, proxy=None,
+ execution_options=None
+ ):
self.pool = pool
self.url = url
self.dialect = dialect
"""
self._execution_options = \
- self._execution_options.union(opt)
+ self._execution_options.union(opt)
self.dispatch.set_engine_execution_options(self, opt)
self.dialect.set_engine_execution_options(self, opt)
shards = {"default": "base", shard_1: "db1", "shard_2": "db2"}
@event.listens_for(Engine, "before_cursor_execute")
- def _switch_shard(conn, cursor, stmt, params, context, executemany):
+ def _switch_shard(conn, cursor, stmt,
+ params, context, executemany):
shard_id = conn._execution_options.get('shard_id', "default")
current_shard = conn.info.get("current_shard", None)
yield connection
def _run_visitor(self, visitorcallable, element,
- connection=None, **kwargs):
+ connection=None, **kwargs):
with self._optional_conn_ctx_manager(connection) as conn:
conn._run_visitor(visitorcallable, element, **kwargs)
.. seealso::
- :ref:`metadata_reflection_inspector` - detailed schema inspection using
- the :class:`.Inspector` interface.
+ :ref:`metadata_reflection_inspector` - detailed schema inspection
+ using the :class:`.Inspector` interface.
:class:`.quoted_name` - used to pass quoting information along
with a schema identifier.
from .. import event
AUTOCOMMIT_REGEXP = re.compile(
- r'\s*(?:UPDATE|INSERT|CREATE|DELETE|DROP|ALTER)',
- re.I | re.UNICODE)
-
+ r'\s*(?:UPDATE|INSERT|CREATE|DELETE|DROP|ALTER)',
+ re.I | re.UNICODE)
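# e.g. "UPDATE t SET x = 1" matches the pattern above and is treated as
# an autocommit-worthy statement; a plain "SELECT ..." does not match.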
class DefaultDialect(interfaces.Dialect):
if label_length and label_length > self.max_identifier_length:
raise exc.ArgumentError(
- "Label length of %d is greater than this dialect's"
- " maximum identifier length of %d" %
- (label_length, self.max_identifier_length))
+ "Label length of %d is greater than this dialect's"
+ " maximum identifier length of %d" %
+ (label_length, self.max_identifier_length))
self.label_length = label_length
if self.description_encoding == 'use_encoding':
self._description_decoder = \
- processors.to_unicode_processor_factory(
- encoding
- )
+ processors.to_unicode_processor_factory(
+ encoding
+ )
elif self.description_encoding is not None:
self._description_decoder = \
- processors.to_unicode_processor_factory(
- self.description_encoding
- )
+ processors.to_unicode_processor_factory(
+ self.description_encoding
+ )
self._encoder = codecs.getencoder(self.encoding)
self._decoder = processors.to_unicode_processor_factory(self.encoding)
-
-
@util.memoized_property
def _type_memos(self):
return weakref.WeakKeyDictionary()
def initialize(self, connection):
try:
self.server_version_info = \
- self._get_server_version_info(connection)
+ self._get_server_version_info(connection)
except NotImplementedError:
self.server_version_info = None
try:
self.default_schema_name = \
- self._get_default_schema_name(connection)
+ self._get_default_schema_name(connection)
except NotImplementedError:
self.default_schema_name = None
try:
self.default_isolation_level = \
- self.get_isolation_level(connection.connection)
+ self.get_isolation_level(connection.connection)
except NotImplementedError:
self.default_isolation_level = None
self.returns_unicode_strings = self._check_unicode_returns(connection)
if self.description_encoding is not None and \
- self._check_unicode_description(connection):
+ self._check_unicode_description(connection):
self._description_decoder = self.description_encoding = None
self.do_rollback(connection.connection)
parameters = {}
def check_unicode(test):
- statement = cast_to(expression.select([test]).compile(dialect=self))
+ statement = cast_to(
+ expression.select([test]).compile(dialect=self))
try:
cursor = connection.connection.cursor()
connection._cursor_execute(cursor, statement, parameters)
# note that _cursor_execute() will have closed the cursor
# if an exception is thrown.
util.warn("Exception attempting to "
- "detect unicode returns: %r" % de)
+ "detect unicode returns: %r" % de)
return False
else:
return isinstance(row[0], util.text_type)
expression.literal_column("'test plain returns'"),
sqltypes.VARCHAR(60)
),
- # detect if there's an NVARCHAR type with different behavior available
+ # detect if there's an NVARCHAR type with different behavior
+ # available
expression.cast(
expression.literal_column("'test unicode returns'"),
sqltypes.Unicode(60)
"""
return sqltypes.adapt_type(typeobj, self.colspecs)
- def reflecttable(self, connection, table, include_columns, exclude_columns):
+ def reflecttable(
+ self, connection, table, include_columns, exclude_columns):
insp = reflection.Inspector.from_engine(connection)
return insp.reflecttable(table, include_columns, exclude_columns)
"""
return {
'constrained_columns':
- self.get_primary_keys(conn, table_name,
- schema=schema, **kw)
+ self.get_primary_keys(conn, table_name,
+ schema=schema, **kw)
}
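# Shape of the dictionary returned above (values illustrative):
#   {'constrained_columns': ['id']}
# Dialects implementing get_pk_constraint() directly typically also
# include a 'name' key for the constraint name.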
def validate_identifier(self, ident):
def set_engine_execution_options(self, engine, opts):
if 'isolation_level' in opts:
isolation_level = opts['isolation_level']
+
@event.listens_for(engine, "engine_connect")
def set_isolation(connection, branch):
if not branch:
connection.connection._connection_record.\
finalize_callback.append(self.reset_isolation_level)
-
def do_begin(self, dbapi_connection):
pass
@classmethod
def _init_compiled(cls, dialect, connection, dbapi_connection,
- compiled, parameters):
+ compiled, parameters):
"""Initialize execution context for a Compiled construct."""
self = cls.__new__(cls)
self.unicode_statement = util.text_type(compiled)
if not dialect.supports_unicode_statements:
self.statement = self.unicode_statement.encode(
- self.dialect.encoding)
+ self.dialect.encoding)
else:
self.statement = self.unicode_statement
if self.isinsert or self.isupdate or self.isdelete:
self._is_explicit_returning = bool(compiled.statement._returning)
- self._is_implicit_returning = bool(compiled.returning and \
- not compiled.statement._returning)
+ self._is_implicit_returning = bool(
+ compiled.returning and not compiled.statement._returning)
if not parameters:
self.compiled_parameters = [compiled.construct_params()]
else:
self.compiled_parameters = \
- [compiled.construct_params(m, _group_number=grp) for
- grp, m in enumerate(parameters)]
+ [compiled.construct_params(m, _group_number=grp) for
+ grp, m in enumerate(parameters)]
self.executemany = len(parameters) > 1
for key in compiled_params:
if key in processors:
param[dialect._encoder(key)[0]] = \
- processors[key](compiled_params[key])
+ processors[key](compiled_params[key])
else:
param[dialect._encoder(key)[0]] = \
- compiled_params[key]
+ compiled_params[key]
else:
for key in compiled_params:
if key in processors:
@classmethod
def _init_statement(cls, dialect, connection, dbapi_connection,
- statement, parameters):
+ statement, parameters):
"""Initialize execution context for a string SQL statement."""
self = cls.__new__(cls)
self.parameters = parameters
else:
self.parameters = [
- dict((dialect._encoder(k)[0], d[k]) for k in d)
- for d in parameters
- ] or [{}]
+ dict((dialect._encoder(k)[0], d[k]) for k in d)
+ for d in parameters
+ ] or [{}]
else:
self.parameters = [dialect.execute_sequence_format(p)
- for p in parameters]
+ for p in parameters]
self.executemany = len(parameters) > 1
if type_ is not None:
# apply type post processors to the result
proc = type_._cached_result_processor(
- self.dialect,
- self.cursor.description[0][1]
- )
+ self.dialect,
+ self.cursor.description[0][1]
+ )
if proc:
return proc(r)
return r
not self._is_explicit_returning and \
not self.compiled.inline and \
self.dialect.postfetch_lastrowid and \
- (not self.inserted_primary_key or \
- None in self.inserted_primary_key):
+ (not self.inserted_primary_key or
+ None in self.inserted_primary_key):
table = self.compiled.statement.table
lastrowid = self.get_lastrowid()
if autoinc_col is not None:
# apply type post processors to the lastrowid
proc = autoinc_col.type._cached_result_processor(
- self.dialect, None)
+ self.dialect, None)
if proc is not None:
lastrowid = proc(lastrowid)
self.inserted_primary_key = [
lastrowid if c is autoinc_col else v
for c, v in zip(
- table.primary_key,
- self.inserted_primary_key)
+ table.primary_key,
+ self.inserted_primary_key)
]
def _fetch_implicit_returning(self, resultproxy):
return
types = dict(
- (self.compiled.bind_names[bindparam], bindparam.type)
- for bindparam in self.compiled.bind_names)
+ (self.compiled.bind_names[bindparam], bindparam.type)
+ for bindparam in self.compiled.bind_names)
if self.dialect.positional:
inputsizes = []
for key in self.compiled.positiontup:
typeengine = types[key]
dbtype = typeengine.dialect_impl(self.dialect).\
- get_dbapi_type(self.dialect.dbapi)
+ get_dbapi_type(self.dialect.dbapi)
if dbtype is not None and \
- (not exclude_types or dbtype not in exclude_types):
+ (not exclude_types or dbtype not in exclude_types):
inputsizes.append(dbtype)
try:
self.cursor.setinputsizes(*inputsizes)
except Exception as e:
self.root_connection._handle_dbapi_exception(
- e, None, None, None, self)
+ e, None, None, None, self)
else:
inputsizes = {}
for key in self.compiled.bind_names.values():
typeengine = types[key]
dbtype = typeengine.dialect_impl(self.dialect).\
- get_dbapi_type(self.dialect.dbapi)
+ get_dbapi_type(self.dialect.dbapi)
if dbtype is not None and \
(not exclude_types or dbtype not in exclude_types):
if translate:
self.cursor.setinputsizes(**inputsizes)
except Exception as e:
self.root_connection._handle_dbapi_exception(
- e, None, None, None, self)
+ e, None, None, None, self)
def _exec_default(self, default, type_):
if default.is_sequence:
del self.current_parameters
else:
self.current_parameters = compiled_parameters = \
- self.compiled_parameters[0]
+ self.compiled_parameters[0]
for c in self.compiled.prefetch:
if self.isinsert:
if self.isinsert:
self.inserted_primary_key = [
- self.compiled_parameters[0].get(key_getter(c), None)
- for c in self.compiled.\
- statement.table.primary_key
- ]
+ self.compiled_parameters[0].get(key_getter(c), None)
+ for c in self.compiled.
+ statement.table.primary_key
+ ]
DefaultDialect.execution_ctx_cls = DefaultExecutionContext
# backwards compat
from ..sql.compiler import Compiled, TypeCompiler
+
class Dialect(object):
"""Define the behavior of a specific database and DB-API combination.
_has_events = False
-
def create_connect_args(self, url):
"""Build DB-API compatible connection arguments.
pass
- def reflecttable(self, connection, table, include_columns, exclude_columns):
+ def reflecttable(
+ self, connection, table, include_columns, exclude_columns):
"""Load table description from the database.
Given a :class:`.Connection` and a
Deprecated. This method is only called by the default
implementation of :meth:`.Dialect.get_pk_constraint`. Dialects should
- instead implement the :meth:`.Dialect.get_pk_constraint` method directly.
+ instead implement the :meth:`.Dialect.get_pk_constraint` method
+ directly.
"""
raise NotImplementedError()
- def get_unique_constraints(self, connection, table_name, schema=None, **kw):
+ def get_unique_constraints(
+ self, connection, table_name, schema=None, **kw):
"""Return information about unique constraints in `table_name`.
Given a string `table_name` and an optional string `schema`, return
list of column names in order
\**kw
- other options passed to the dialect's get_unique_constraints() method.
+ other options passed to the dialect's get_unique_constraints()
+ method.
.. versionadded:: 0.9.0
raise NotImplementedError()
-
def do_commit(self, dbapi_connection):
"""Provide an implementation of ``connection.commit()``, given a
DB-API connection.
raise NotImplementedError()
def do_rollback_twophase(self, connection, xid, is_prepared=True,
- recover=False):
+ recover=False):
"""Rollback a two phase transaction on the given connection.
:param connection: a :class:`.Connection`.
raise NotImplementedError()
def do_commit_twophase(self, connection, xid, is_prepared=True,
- recover=False):
+ recover=False):
"""Commit a two phase transaction on the given connection.
"""
-
def create_cursor(self):
"""Return a new cursor generated from this ExecutionContext's
connection.
raise NotImplementedError()
def _run_visitor(self, visitorcallable, element,
- **kwargs):
+ **kwargs):
raise NotImplementedError()
def _execute_clauseelement(self, elem, multiparams=None, params=None):
raise NotImplementedError()
+
class ExceptionContext(object):
"""Encapsulate information about an error condition in progress.
if info_cache is None:
return fn(self, con, *args, **kw)
key = (
- fn.__name__,
- tuple(a for a in args if isinstance(a, util.string_types)),
- tuple((k, v) for k, v in kw.items() if
- isinstance(v,
- util.string_types + util.int_types + (float, )
- )
- )
- )
+ fn.__name__,
+ tuple(a for a in args if isinstance(a, util.string_types)),
+ tuple((k, v) for k, v in kw.items() if
+ isinstance(v,
+ util.string_types + util.int_types + (float, )
+ )
+ )
+ )
ret = info_cache.get(key)
if ret is None:
ret = fn(self, con, *args, **kw)
if hasattr(self.dialect, 'get_schema_names'):
return self.dialect.get_schema_names(self.bind,
- info_cache=self.info_cache)
+ info_cache=self.info_cache)
return []
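# Illustrative behavior of the guard above: dialects lacking
# get_schema_names() simply report no schemas.
#
#     insp = reflection.Inspector.from_engine(engine)  # engine assumed
#     insp.get_schema_names()   # -> [] for such dialects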
def get_table_names(self, schema=None, order_by=None):
"""
if hasattr(self.dialect, 'get_table_names'):
- tnames = self.dialect.get_table_names(self.bind,
- schema, info_cache=self.info_cache)
+ tnames = self.dialect.get_table_names(
+ self.bind, schema, info_cache=self.info_cache)
else:
tnames = self.engine.table_names(schema)
if order_by == 'foreign_key':
"""
return self.dialect.get_view_names(self.bind, schema,
- info_cache=self.info_cache)
+ info_cache=self.info_cache)
def get_view_definition(self, view_name, schema=None):
"""Return definition for `view_name`.
"""
return self.dialect.get_pk_constraint(self.bind, table_name, schema,
- info_cache=self.info_cache,
- **kw)['constrained_columns']
+ info_cache=self.info_cache,
+ **kw)['constrained_columns']
def get_pk_constraint(self, table_name, schema=None, **kw):
"""Return information about primary key constraint on `table_name`.
"""
return self.dialect.get_foreign_keys(self.bind, table_name, schema,
- info_cache=self.info_cache,
- **kw)
+ info_cache=self.info_cache,
+ **kw)
def get_indexes(self, table_name, schema=None, **kw):
"""Return information about indexes in `table_name`.
"""
return self.dialect.get_indexes(self.bind, table_name,
- schema,
- info_cache=self.info_cache, **kw)
+ schema,
+ info_cache=self.info_cache, **kw)
def get_unique_constraints(self, table_name, schema=None, **kw):
"""Return information about unique constraints in `table_name`.
)
# reflect table options, like mysql_engine
- tbl_opts = self.get_table_options(table_name, schema, **table.dialect_kwargs)
+ tbl_opts = self.get_table_options(
+ table_name, schema, **table.dialect_kwargs)
if tbl_opts:
# add additional kwargs to the Table if the dialect
# returned them
found_table = False
cols_by_orig_name = {}
- for col_d in self.get_columns(table_name, schema, **table.dialect_kwargs):
+ for col_d in self.get_columns(
+ table_name, schema, **table.dialect_kwargs):
found_table = True
orig_name = col_d['name']
colargs.append(sequence)
cols_by_orig_name[orig_name] = col = \
- sa_schema.Column(name, coltype, *colargs, **col_kw)
+ sa_schema.Column(name, coltype, *colargs, **col_kw)
if col.key in table.primary_key:
col.primary_key = True
if not found_table:
raise exc.NoSuchTableError(table.name)
- pk_cons = self.get_pk_constraint(table_name, schema, **table.dialect_kwargs)
+ pk_cons = self.get_pk_constraint(
+ table_name, schema, **table.dialect_kwargs)
if pk_cons:
pk_cols = [
cols_by_orig_name[pk]
# its column collection
table.primary_key._reload(pk_cols)
- fkeys = self.get_foreign_keys(table_name, schema, **table.dialect_kwargs)
+ fkeys = self.get_foreign_keys(
+ table_name, schema, **table.dialect_kwargs)
for fkey_d in fkeys:
conname = fkey_d['name']
# look for columns by orig name in cols_by_orig_name,
# but support columns that are in-Python only as fallback
constrained_columns = [
- cols_by_orig_name[c].key
- if c in cols_by_orig_name else c
- for c in fkey_d['constrained_columns']
- ]
+ cols_by_orig_name[c].key
+ if c in cols_by_orig_name else c
+ for c in fkey_d['constrained_columns']
+ ]
if exclude_columns and set(constrained_columns).intersection(
- exclude_columns):
+ exclude_columns):
continue
referred_schema = fkey_d['referred_schema']
referred_table = fkey_d['referred_table']
unique = index_d['unique']
flavor = index_d.get('type', 'unknown type')
if include_columns and \
- not set(columns).issubset(include_columns):
+ not set(columns).issubset(include_columns):
util.warn(
"Omitting %s KEY for (%s), key covers omitted columns." %
(flavor, ', '.join(columns)))
# look for columns by orig name in cols_by_orig_name,
# but support columns that are in-Python only as fallback
sa_schema.Index(name, *[
- cols_by_orig_name[c] if c in cols_by_orig_name
- else table.c[c]
- for c in columns
- ],
- **dict(unique=unique))
+ cols_by_orig_name[c] if c in cols_by_orig_name
+ else table.c[c]
+ for c in columns
+ ],
+ **dict(unique=unique))
and :class:`.RowProxy`."""
-
from .. import exc, util
from ..sql import expression, sqltypes
import collections
if isinstance(key, slice):
l = []
for processor, value in zip(self._processors[key],
- self._row[key]):
+ self._row[key]):
if processor is None:
l.append(value)
else:
raise
if index is None:
raise exc.InvalidRequestError(
- "Ambiguous column name '%s' in result set! "
- "try 'use_labels' option on select statement." % key)
+ "Ambiguous column name '%s' in result set! "
+ "try 'use_labels' option on select statement." % key)
if processor is not None:
return processor(self._row[index])
else:
if context.result_map:
try:
- name, obj, type_ = context.result_map[colname
- if self.case_sensitive
- else colname.lower()]
+ name, obj, type_ = context.result_map[
+ colname if self.case_sensitive else colname.lower()]
except KeyError:
name, obj, type_ = \
colname, None, typemap.get(coltype, sqltypes.NULLTYPE)
else:
name, obj, type_ = \
- colname, None, typemap.get(coltype, sqltypes.NULLTYPE)
+ colname, None, typemap.get(coltype, sqltypes.NULLTYPE)
processor = context.get_result_processor(type_, colname, coltype)
# populate primary keymap, looking for conflicts.
if primary_keymap.setdefault(
- name if self.case_sensitive
- else name.lower(),
- rec) is not rec:
+ name if self.case_sensitive
+ else name.lower(),
+ rec) is not rec:
# place a record that doesn't have the "index" - this
# is interpreted later as an AmbiguousColumnError,
# but only when actually accessed. Columns
# aren't used; integer access is always
# unambiguous.
primary_keymap[name
- if self.case_sensitive
- else name.lower()] = rec = (None, obj, None)
+ if self.case_sensitive
+ else name.lower()] = rec = (None, obj, None)
self.keys.append(colname)
if obj:
# keymap[o] = (None, obj, None)
if translate_colname and \
- untranslated:
+ untranslated:
keymap[untranslated] = rec
# overwrite keymap values with those of the
# high precedence keymap.
keymap.update(primary_keymap)
-
@util.pending_deprecation("0.8", "sqlite dialect uses "
- "_translate_colname() now")
+ "_translate_colname() now")
def _set_keymap_synonym(self, name, origname):
"""Set a synonym for the given name.
"""
rec = (processor, obj, i) = self._keymap[origname if
- self.case_sensitive
- else origname.lower()]
+ self.case_sensitive
+ else origname.lower()]
if self._keymap.setdefault(name, rec) is not rec:
self._keymap[name] = (processor, obj, None)
# pickle/unpickle roundtrip
elif isinstance(key, expression.ColumnElement):
if key._label and (
- key._label
- if self.case_sensitive
- else key._label.lower()) in map:
+ key._label
+ if self.case_sensitive
+ else key._label.lower()) in map:
result = map[key._label
- if self.case_sensitive
- else key._label.lower()]
+ if self.case_sensitive
+ else key._label.lower()]
elif hasattr(key, 'name') and (
- key.name
- if self.case_sensitive
- else key.name.lower()) in map:
+ key.name
+ if self.case_sensitive
+ else key.name.lower()) in map:
# match is only on name.
result = map[key.name
- if self.case_sensitive
- else key.name.lower()]
+ if self.case_sensitive
+ else key.name.lower()]
# search extra hard to make sure this
# isn't a column/label name overlap.
# this check isn't currently available if the row
# was unpickled.
if result is not None and \
- result[1] is not None:
+ result[1] is not None:
for obj in result[1]:
if key._compare_name_for_result(obj):
break
if raiseerr:
raise exc.NoSuchColumnError(
"Could not locate column in row for column '%s'" %
- expression._string_or_unprintable(key))
+ expression._string_or_unprintable(key))
else:
return None
else:
self.cursor = self._saved_cursor = context.cursor
self.connection = context.root_connection
self._echo = self.connection._echo and \
- context.engine._should_log_debug()
+ context.engine._should_log_debug()
self._init_metadata()
def _init_metadata(self):
else:
self._metadata = ResultMetaData(self, metadata)
if self._echo:
- self.context.engine.logger.debug(
+ self.context.engine.logger.debug(
"Col %r", tuple(x[0] for x in metadata))
def keys(self):
return self.context.rowcount
except Exception as e:
self.connection._handle_dbapi_exception(
- e, None, None, self.cursor, self.context)
+ e, None, None, self.cursor, self.context)
@property
def lastrowid(self):
return self._saved_cursor.lastrowid
except Exception as e:
self.connection._handle_dbapi_exception(
- e, None, None,
- self._saved_cursor, self.context)
+ e, None, None,
+ self._saved_cursor, self.context)
@property
def returns_rows(self):
self.closed = True
self.connection._safe_close_cursor(self.cursor)
if _autoclose_connection and \
- self.connection.should_close_with_result:
+ self.connection.should_close_with_result:
self.connection.close()
# allow consistent errors
self.cursor = None
if not self.context.compiled:
raise exc.InvalidRequestError(
- "Statement is not a compiled "
- "expression construct.")
+ "Statement is not a compiled "
+ "expression construct.")
elif not self.context.isinsert:
raise exc.InvalidRequestError(
- "Statement is not an insert() "
- "expression construct.")
+ "Statement is not an insert() "
+ "expression construct.")
elif self.context._is_explicit_returning:
raise exc.InvalidRequestError(
- "Can't call inserted_primary_key "
- "when returning() "
- "is used.")
+ "Can't call inserted_primary_key "
+ "when returning() "
+ "is used.")
return self.context.inserted_primary_key
"""
if not self.context.compiled:
raise exc.InvalidRequestError(
- "Statement is not a compiled "
- "expression construct.")
+ "Statement is not a compiled "
+ "expression construct.")
elif not self.context.isupdate:
raise exc.InvalidRequestError(
- "Statement is not an update() "
- "expression construct.")
+ "Statement is not an update() "
+ "expression construct.")
elif self.context.executemany:
return self.context.compiled_parameters
else:
"""
if not self.context.compiled:
raise exc.InvalidRequestError(
- "Statement is not a compiled "
- "expression construct.")
+ "Statement is not a compiled "
+ "expression construct.")
elif not self.context.isinsert:
raise exc.InvalidRequestError(
- "Statement is not an insert() "
- "expression construct.")
+ "Statement is not an insert() "
+ "expression construct.")
elif self.context.executemany:
return self.context.compiled_parameters
else:
if not self.context.compiled:
raise exc.InvalidRequestError(
- "Statement is not a compiled "
- "expression construct.")
+ "Statement is not a compiled "
+ "expression construct.")
elif not self.context.isinsert and not self.context.isupdate:
raise exc.InvalidRequestError(
- "Statement is not an insert() or update() "
- "expression construct.")
+ "Statement is not an insert() or update() "
+ "expression construct.")
return self.context.postfetch_cols
def prefetch_cols(self):
if not self.context.compiled:
raise exc.InvalidRequestError(
- "Statement is not a compiled "
- "expression construct.")
+ "Statement is not a compiled "
+ "expression construct.")
elif not self.context.isinsert and not self.context.isupdate:
raise exc.InvalidRequestError(
- "Statement is not an insert() or update() "
- "expression construct.")
+ "Statement is not an insert() or update() "
+ "expression construct.")
return self.context.prefetch_cols
def supports_sane_rowcount(self):
def _non_result(self):
if self._metadata is None:
raise exc.ResourceClosedError(
- "This result object does not return rows. "
- "It has been closed automatically.",
+ "This result object does not return rows. "
+ "It has been closed automatically.",
)
else:
raise exc.ResourceClosedError("This result object is closed.")
return l
except Exception as e:
self.connection._handle_dbapi_exception(
- e, None, None,
- self.cursor, self.context)
+ e, None, None,
+ self.cursor, self.context)
def fetchmany(self, size=None):
"""Fetch many rows, just like DB-API
return l
except Exception as e:
self.connection._handle_dbapi_exception(
- e, None, None,
- self.cursor, self.context)
+ e, None, None,
+ self.cursor, self.context)
def fetchone(self):
"""Fetch one row, just like DB-API ``cursor.fetchone()``.
return None
except Exception as e:
self.connection._handle_dbapi_exception(
- e, None, None,
- self.cursor, self.context)
+ e, None, None,
+ self.cursor, self.context)
def first(self):
"""Fetch the first row and then close the result set unconditionally.
row = self._fetchone_impl()
except Exception as e:
self.connection._handle_dbapi_exception(
- e, None, None,
- self.cursor, self.context)
+ e, None, None,
+ self.cursor, self.context)
try:
if row is not None:
such as MSSQL INSERT...OUTPUT after an autocommit.
"""
+
def _init_metadata(self):
super(FullyBufferedResultProxy, self)._init_metadata()
self.__rowbuffer = self._buffer_rows()
except dialect.dbapi.Error as e:
invalidated = dialect.is_disconnect(e, None, None)
util.raise_from_cause(
- exc.DBAPIError.instance(None, None,
- e, dialect.dbapi.Error,
+ exc.DBAPIError.instance(
+ None, None, e, dialect.dbapi.Error,
connection_invalidated=invalidated
)
)
def first_connect(dbapi_connection, connection_record):
c = base.Connection(engine, connection=dbapi_connection,
- _has_events=False)
+ _has_events=False)
dialect.initialize(c)
event.listen(pool, 'first_connect', first_connect, once=True)
self.dialect, self, **kwargs).traverse_single(entity)
def _run_visitor(self, visitorcallable, element,
- connection=None,
- **kwargs):
+ connection=None,
+ **kwargs):
kwargs['checkfirst'] = False
visitorcallable(self.dialect, self,
- **kwargs).traverse_single(element)
+ **kwargs).traverse_single(element)
def execute(self, object, *multiparams, **params):
raise NotImplementedError()
def prepare(self):
if not hasattr(self._connections, 'trans') or \
- not self._connections.trans:
+ not self._connections.trans:
return
self._connections.trans[-1].prepare()
def commit(self):
if not hasattr(self._connections, 'trans') or \
- not self._connections.trans:
+ not self._connections.trans:
return
trans = self._connections.trans.pop(-1)
trans.commit()
def rollback(self):
if not hasattr(self._connections, 'trans') or \
- not self._connections.trans:
+ not self._connections.trans:
return
trans = self._connections.trans.pop(-1)
trans.rollback()
@property
def closed(self):
return not hasattr(self._connections, 'conn') or \
- self._connections.conn() is None or \
- self._connections.conn().closed
+ self._connections.conn() is None or \
+ self._connections.conn().closed
def close(self):
if not self.closed:
Represent the components of a URL used to connect to a database.
This object is suitable to be passed directly to a
- :func:`~sqlalchemy.create_engine` call. The fields of the URL are parsed from a
- string by the :func:`.make_url` function. the string
+ :func:`~sqlalchemy.create_engine` call. The fields of the URL are parsed
+ from a string by the :func:`.make_url` function. The string
format of the URL is an RFC-1738-style string.
All initialization parameters are available as public attributes.
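For example (values are illustrative)::

    from sqlalchemy.engine.url import make_url

    url = make_url("postgresql://scott:tiger@localhost/test")
    url.drivername   # 'postgresql'
    url.host         # 'localhost'
    url.database     # 'test'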
# would return a module with 'dialect' as the
# actual class
if hasattr(cls, 'dialect') and \
- isinstance(cls.dialect, type) and \
- issubclass(cls.dialect, Dialect):
+ isinstance(cls.dialect, type) and \
+ issubclass(cls.dialect, Dialect):
return cls.dialect
else:
return cls
if components['database'] is not None:
tokens = components['database'].split('?', 2)
components['database'] = tokens[0]
- query = (len(tokens) > 1 and dict(util.parse_qsl(tokens[1]))) or None
+ query = (
+ len(tokens) > 1 and dict(util.parse_qsl(tokens[1]))) or None
if util.py2k and query is not None:
query = dict((k.encode('ascii'), query[k]) for k in query)
else:
def _rfc_1738_quote(text):
return re.sub(r'[:@/]', lambda m: "%%%X" % ord(m.group(0)), text)
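# e.g. _rfc_1738_quote("p@ss:wd/x") -> "p%40ss%3Awd%2Fx"
# (':' -> %3A, '@' -> %40, '/' -> %2F per the substitution above)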
+
def _rfc_1738_unquote(text):
return util.unquote(text)
+
def _parse_keyvalue_args(name):
m = re.match(r'(\w+)://(.*)', name)
if m is not None:
from .. import util
+
def connection_memoize(key):
"""Decorator, memoize a function in a connection.info stash.
return [[zero]]
else:
if hasattr(multiparams[0], '__iter__') and \
- not hasattr(multiparams[0], 'strip'):
+ not hasattr(multiparams[0], 'strip'):
return multiparams
else:
return [multiparams]
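# Illustrative outcomes of the branches above (stmt is hypothetical):
#   conn.execute(stmt, {"x": 5}, {"x": 6})  -> both dicts pass through
#                                              as an executemany list
#   conn.execute(stmt, "v1", "v2")          -> wrapped as [("v1", "v2")],
#                                              one positional set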
from .engine import Connectable, Engine, Dialect
from .sql.base import SchemaEventTarget
+
class DDLEvents(event.Events):
"""
Define event listeners for schema objects,
"""
-
class PoolEvents(event.Events):
"""Available events for :class:`.Pool`.
:class:`.Pool` refers to a single "creator" function (which in terms
of a :class:`.Engine` refers to the URL and connection options used),
it is typically valid to make observations about a single connection
- that can be safely assumed to be valid about all subsequent connections,
- such as the database version, the server and client encoding settings,
- collation settings, and many others.
+ that can be safely assumed to be valid about all subsequent
+ connections, such as the database version, the server and client
+ encoding settings, collation settings, and many others.
:param dbapi_connection: a DBAPI connection.
DBAPI connection.
:param connection_proxy: the :class:`._ConnectionFairy` object which
- will proxy the public interface of the DBAPI connection for the lifespan
- of the checkout.
+ will proxy the public interface of the DBAPI connection for the
+ lifespan of the checkout.
If you raise a :class:`~sqlalchemy.exc.DisconnectionError`, the current
connection will be disposed and a fresh connection retrieved.
This event is called any time the :meth:`._ConnectionRecord.invalidate`
method is invoked, either from API usage or via "auto-invalidation".
- The event occurs before a final attempt to call ``.close()`` on the connection
- occurs.
+ The event occurs before a final attempt to call ``.close()`` on the
+ connection occurs.
:param dbapi_connection: a DBAPI connection.
_target_class_doc = "SomeEngine"
_dispatch_target = Connectable
-
@classmethod
def _listen(cls, event_key, retval=False):
target, identifier, fn = \
orig_fn = fn
def wrap_before_execute(conn, clauseelement,
- multiparams, params):
+ multiparams, params):
orig_fn(conn, clauseelement, multiparams, params)
return clauseelement, multiparams, params
fn = wrap_before_execute
orig_fn = fn
def wrap_before_cursor_execute(conn, cursor, statement,
- parameters, context, executemany):
+ parameters, context,
+ executemany):
orig_fn(conn, cursor, statement,
- parameters, context, executemany)
+ parameters, context, executemany)
return statement, parameters
fn = wrap_before_cursor_execute
elif retval and \
identifier not in ('before_execute',
- 'before_cursor_execute', 'handle_error'):
+ 'before_cursor_execute', 'handle_error'):
raise exc.ArgumentError(
- "Only the 'before_execute', "
- "'before_cursor_execute' and 'handle_error' engine "
- "event listeners accept the 'retval=True' "
- "argument.")
+ "Only the 'before_execute', "
+ "'before_cursor_execute' and 'handle_error' engine "
+ "event listeners accept the 'retval=True' "
+ "argument.")
event_key.with_wrapper(fn).base_listen()
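# A sketch of a listener the retval wrapping above supports; the
# statement rewrite is illustrative only:
#
#     @event.listens_for(engine, "before_cursor_execute", retval=True)
#     def add_comment(conn, cursor, statement, parameters, context,
#                     executemany):
#         return "/* traced */ " + statement, parameters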
def before_execute(self, conn, clauseelement, multiparams, params):
"""
def before_cursor_execute(self, conn, cursor, statement,
- parameters, context, executemany):
+ parameters, context, executemany):
"""Intercept low-level cursor execute() events before execution,
receiving the string
SQL statement and DBAPI-specific parameter list to be invoked
"""
def after_cursor_execute(self, conn, cursor, statement,
- parameters, context, executemany):
+ parameters, context, executemany):
"""Intercept low-level cursor execute() events after execution.
:param conn: :class:`.Connection` object
"""
def dbapi_error(self, conn, cursor, statement, parameters,
- context, exception):
+ context, exception):
"""Intercept a raw DBAPI error.
This event is called with the DBAPI exception instance
It also differs from the :meth:`.PoolEvents.checkout` event
in that it is specific to the :class:`.Connection` object, not the
DBAPI connection that :meth:`.PoolEvents.checkout` deals with, although
- this DBAPI connection is available here via the :attr:`.Connection.connection`
- attribute. But note there can in fact
+ this DBAPI connection is available here via the
+ :attr:`.Connection.connection` attribute. But note there can in fact
be multiple :meth:`.PoolEvents.checkout` events within the lifespan
of a single :class:`.Connection` object, if that :class:`.Connection`
is invalidated and re-established. There can also be multiple
:meth:`.PoolEvents.checkout` the lower-level pool checkout event
for an individual DBAPI connection
- :meth:`.ConnectionEvents.set_connection_execution_options` - a copy of a
- :class:`.Connection` is also made when the
+ :meth:`.ConnectionEvents.set_connection_execution_options` - a copy
+ of a :class:`.Connection` is also made when the
:meth:`.Connection.execution_options` method is called.
"""
.. seealso::
:meth:`.ConnectionEvents.set_connection_execution_options` - event
- which is called when :meth:`.Connection.execution_options` is called.
+ which is called when :meth:`.Connection.execution_options` is
+ called.
"""
:class:`.DialectEvents` hooks should be considered **semi-public**
and experimental.
- These hooks are not for general use and are only for those situations where
- intricate re-statement of DBAPI mechanics must be injected onto an existing
- dialect. For general-use statement-interception events, please
- use the :class:`.ConnectionEvents` interface.
+ These hooks are not for general use and are only for those situations
+ where intricate re-statement of DBAPI mechanics must be injected onto
+ an existing dialect. For general-use statement-interception events,
+ please use the :class:`.ConnectionEvents` interface.
.. seealso::
place within the event handler.
"""
-
"""
+
class NoSuchModuleError(ArgumentError):
"""Raised when a dynamically-loaded module (usually a database dialect)
of a particular name cannot be located."""
+
class NoForeignKeysError(ArgumentError):
"""Raised when no foreign keys can be located between two selectables
during a join."""
def __reduce__(self):
return self.__class__, (None, self.cycles,
- self.edges, self.args[0])
+ self.edges, self.args[0])
class CompileError(SQLAlchemyError):
"""Raised when an error occurs during SQL compilation"""
+
class UnsupportedCompilationError(CompileError):
"""Raised when an operation is not supported by the given compiler.
def __init__(self, compiler, element_type):
super(UnsupportedCompilationError, self).__init__(
- "Compiler %r can't render element of type %s" %
- (compiler, element_type))
+ "Compiler %r can't render element of type %s" %
+ (compiler, element_type))
+
class IdentifierError(SQLAlchemyError):
"""Raised when a schema name is beyond the max character limit"""
def __reduce__(self):
return self.__class__, (self.args[0], self.table_name,
- self.column_name)
+ self.column_name)
class NoSuchTableError(InvalidRequestError):
params_repr = util._repr_params(self.params, 10)
return ' '.join([
- "(%s)" % det for det in self.detail
- ] + [
- SQLAlchemyError.__str__(self),
- repr(self.statement), repr(params_repr)
- ])
+ "(%s)" % det for det in self.detail
+ ] + [
+ SQLAlchemyError.__str__(self),
+ repr(self.statement), repr(params_repr)
+ ])
def __unicode__(self):
return self.__str__()
@classmethod
def instance(cls, statement, params,
- orig,
- dbapi_base_err,
- connection_invalidated=False):
+ orig, dbapi_base_err,
+ connection_invalidated=False):
# Don't ever wrap these, just return them directly as if
# DBAPIError didn't exist.
if isinstance(orig, (KeyboardInterrupt, SystemExit, DontWrapMixin)):
def __reduce__(self):
return self.__class__, (self.statement, self.params,
- self.orig, self.connection_invalidated)
+ self.orig, self.connection_invalidated)
def __init__(self, statement, params, orig, connection_invalidated=False):
try:
except Exception as e:
text = 'Error in str() of DB-API-generated exception: ' + str(e)
StatementError.__init__(
- self,
- '(%s) %s' % (orig.__class__.__name__, text),
- statement,
- params,
- orig
+ self,
+ '(%s) %s' % (orig.__class__.__name__, text),
+ statement,
+ params,
+ orig
)
self.connection_invalidated = connection_invalidated
"""
+
class AssociationProxy(interfaces._InspectionAttr):
"""A descriptor that presents a read/write view of an object attribute."""
is_attribute = False
extension_type = ASSOCIATION_PROXY
-
def __init__(self, target_collection, attr, creator=None,
getset_factory=None, proxy_factory=None,
proxy_bulk_set=None):
@util.memoized_property
def _value_is_scalar(self):
return not self._get_property().\
- mapper.get_property(self.value_attr).uselist
+ mapper.get_property(self.value_attr).uselist
@util.memoized_property
def _target_is_object(self):
proxy.update(values)
else:
raise exc.ArgumentError(
- 'no proxy_bulk_set supplied for custom '
- 'collection_class implementation')
+ 'no proxy_bulk_set supplied for custom '
+ 'collection_class implementation')
@property
def _comparator(self):
# the "can't call any() on a scalar" msg is raised.
if self.scalar and not self._value_is_scalar:
return self._comparator.has(
- value_expr
- )
+ value_expr
+ )
else:
return self._comparator.any(
- value_expr
- )
+ value_expr
+ )
def has(self, criterion=None, **kwargs):
"""Produce a proxied 'has' expression using EXISTS.
if self._target_is_object:
return self._comparator.has(
- getattr(self.target_class, self.value_attr).\
- has(criterion, **kwargs)
- )
+ getattr(self.target_class, self.value_attr).
+ has(criterion, **kwargs)
+ )
else:
if criterion is not None or kwargs:
raise exc.ArgumentError(
- "Non-empty has() not allowed for "
- "column-targeted association proxy; use ==")
+ "Non-empty has() not allowed for "
+ "column-targeted association proxy; use ==")
return self._comparator.has()
def contains(self, obj):
# is only allowed with a scalar.
if obj is None:
return or_(
- self._comparator.has(**{self.value_attr: obj}),
- self._comparator == None
- )
+ self._comparator.has(**{self.value_attr: obj}),
+ self._comparator == None
+ )
else:
return self._comparator.has(**{self.value_attr: obj})
# note the has() here will fail for collections; eq_()
# is only allowed with a scalar.
return self._comparator.has(
- getattr(self.target_class, self.value_attr) != obj)
+ getattr(self.target_class, self.value_attr) != obj)
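# Usage sketch (model and attribute names are hypothetical): given a
# scalar association proxy User.setting proxying through to
# Preference.value, the comparators above render EXISTS subqueries:
#
#     session.query(User).filter(User.setting == "dark")
#     session.query(User).filter(User.setting != "dark")
#     session.query(User).filter(User.setting == None)
#
# The None comparison also matches rows with no related object at all,
# per the or_() branch above.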
class _lazy_collection(object):
obj = self.ref()
if obj is None:
raise exc.InvalidRequestError(
- "stale association proxy, parent object has gone out of "
- "scope")
+ "stale association proxy, parent object has gone out of "
+ "scope")
return getattr(obj, self.target)
def __getstate__(self):
for func_name, func in list(locals().items()):
if (util.callable(func) and func.__name__ == func_name and
- not func.__doc__ and hasattr(list, func_name)):
+ not func.__doc__ and hasattr(list, func_name)):
func.__doc__ = getattr(list, func_name).__doc__
del func_name, func
self[k] = v
except ValueError:
raise ValueError(
- "dictionary update sequence "
- "requires 2-element tuples")
+ "dictionary update sequence "
+ "requires 2-element tuples")
for key, value in kw.items():
self[key] = value
for func_name, func in list(locals().items()):
if (util.callable(func) and func.__name__ == func_name and
- not func.__doc__ and hasattr(dict, func_name)):
+ not func.__doc__ and hasattr(dict, func_name)):
func.__doc__ = getattr(dict, func_name).__doc__
del func_name, func
for func_name, func in list(locals().items()):
if (util.callable(func) and func.__name__ == func_name and
- not func.__doc__ and hasattr(set, func_name)):
+ not func.__doc__ and hasattr(set, func_name)):
func.__doc__ = getattr(set, func_name).__doc__
del func_name, func
session.add(Address(email_address="foo@bar.com", user=User(name="foo")))
session.commit()
- # collection-based relationships are by default named "<classname>_collection"
+ # collection-based relationships are by default named
+ # "<classname>_collection"
print (u1.address_collection)
Above, calling :meth:`.AutomapBase.prepare` while passing along the
link the various tables together will be used to produce new, bidirectional
:func:`.relationship` objects between classes. The classes and relationships
follow along a default naming scheme that we can customize. At this point,
-our basic mapping consisting of related ``User`` and ``Address`` classes is ready
-to use in the traditional way.
+our basic mapping consisting of related ``User`` and ``Address`` classes is
+ready to use in the traditional way.
Generating Mappings from an Existing MetaData
=============================================
We can pass a pre-declared :class:`.MetaData` object to :func:`.automap_base`.
This object can be constructed in any way, including programmatically, from
-a serialized file, or from itself being reflected using :meth:`.MetaData.reflect`.
-Below we illustrate a combination of reflection and explicit table declaration::
+a serialized file, or from itself being reflected using
+:meth:`.MetaData.reflect`. Below we illustrate a combination of reflection and
+explicit table declaration::
from sqlalchemy import create_engine, MetaData, Table, Column, ForeignKey
engine = create_engine("sqlite:///mydatabase.db")
Base.prepare()
# mapped classes are ready
- User, Address, Order = Base.classes.user, Base.classes.address, Base.classes.user_order
+ User, Address, Order = Base.classes.user, Base.classes.address,\
+ Base.classes.user_order
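These classes are now full-fledged mapped classes; as a rough usage
sketch (assuming the reflected ``user`` table has a ``name`` column)::

    from sqlalchemy.orm import Session

    session = Session(engine)
    session.add(User(name="some user"))
    session.commit()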
Specifying Classes Explicitly
=============================
The :mod:`.sqlalchemy.ext.automap` extension allows classes to be defined
explicitly, in a way similar to that of the :class:`.DeferredReflection` class.
Classes that extend from :class:`.AutomapBase` act like regular declarative
-classes, but are not immediately mapped after their construction, and are instead
-mapped when we call :meth:`.AutomapBase.prepare`. The :meth:`.AutomapBase.prepare`
-method will make use of the classes we've established based on the table name
-we use. If our schema contains tables ``user`` and ``address``, we can define
-one or both of the classes to be used::
+classes, but are not immediately mapped after their construction, and are
+instead mapped when we call :meth:`.AutomapBase.prepare`. The
+:meth:`.AutomapBase.prepare` method will make use of the classes we've
+established based on the table name we use. If our schema contains tables
+``user`` and ``address``, we can define one or both of the classes to be used::
from sqlalchemy.ext.automap import automap_base
from sqlalchemy import create_engine
user_name = Column('name', String)
# override relationships too, if desired.
- # we must use the same name that automap would use for the relationship,
- # and also must refer to the class name that automap will generate
- # for "address"
+ # we must use the same name that automap would use for the
+ # relationship, and also must refer to the class name that automap will
+ # generate for "address"
address_collection = relationship("address", collection_class=set)
# reflect
Above, one of the more intricate details is that we illustrated overriding
one of the :func:`.relationship` objects that automap would have created.
To do this, we needed to make sure the names match up with what automap
-would normally generate, in that the relationship name would be ``User.address_collection``
-and the name of the class referred to, from automap's perspective, is called
-``address``, even though we are referring to it as ``Address`` within our usage
-of this class.
+would normally generate, in that the relationship name would be
+``User.address_collection`` and the name of the class referred to, from
+automap's perspective, is called ``address``, even though we are referring to
+it as ``Address`` within our usage of this class.
Overriding Naming Schemes
=========================
)
From the above mapping, we would now have classes ``User`` and ``Address``,
-where the collection from ``User`` to ``Address`` is called ``User.addresses``::
+where the collection from ``User`` to ``Address`` is called
+``User.addresses``::
User, Address = Base.classes.User, Base.classes.Address
The vast majority of what automap accomplishes is the generation of
:func:`.relationship` structures based on foreign keys. The mechanism
-by which this works for many-to-one and one-to-many relationships is as follows:
+by which this works for many-to-one and one-to-many relationships is as
+follows:
1. A given :class:`.Table`, known to be mapped to a particular class,
is examined for :class:`.ForeignKeyConstraint` objects.
2. From each :class:`.ForeignKeyConstraint`, the remote :class:`.Table`
   object present is matched up to the class to which it is to be mapped,
if any, else it is skipped.
-3. As the :class:`.ForeignKeyConstraint` we are examining corresponds to a reference
- from the immediate mapped class,
- the relationship will be set up as a many-to-one referring to the referred class;
- a corresponding one-to-many backref will be created on the referred class referring
+3. As the :class:`.ForeignKeyConstraint` we are examining corresponds to a
+ reference from the immediate mapped class, the relationship will be set up
+ as a many-to-one referring to the referred class; a corresponding
+ one-to-many backref will be created on the referred class referring
to this class.
4. The names of the relationships are determined using the
   :paramref:`.AutomapBase.prepare.name_for_scalar_relationship` and
   :paramref:`.AutomapBase.prepare.name_for_collection_relationship`
   callables; note that it is from the actual class name that the default
   name will be derived.
5. The classes are inspected for an existing mapped property matching these
- names. If one is detected on one side, but none on the other side, :class:`.AutomapBase`
- attempts to create a relationship on the missing side, then uses the
- :paramref:`.relationship.back_populates` parameter in order to point
- the new relationship to the other side.
+ names. If one is detected on one side, but none on the other side,
+ :class:`.AutomapBase` attempts to create a relationship on the missing side,
+ then uses the :paramref:`.relationship.back_populates` parameter in order to
+ point the new relationship to the other side.
6. In the usual case where no relationship is on either side,
- :meth:`.AutomapBase.prepare` produces a :func:`.relationship` on the "many-to-one"
- side and matches it to the other using the :paramref:`.relationship.backref`
- parameter.
+ :meth:`.AutomapBase.prepare` produces a :func:`.relationship` on the
+ "many-to-one" side and matches it to the other using the
+ :paramref:`.relationship.backref` parameter.
7. Production of the :func:`.relationship` and optionally the :func:`.backref`
is handed off to the :paramref:`.AutomapBase.prepare.generate_relationship`
# make use of the built-in function to actually return
# the result.
return generate_relationship(base, direction, return_fn,
- attrname, local_cls, referred_cls, **kw)
+ attrname, local_cls, referred_cls, **kw)
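Putting the fragment above together, a condensed sketch of a complete
override might look as follows (the cascade choice is purely
illustrative)::

    from sqlalchemy.ext.automap import generate_relationship
    from sqlalchemy.orm import interfaces

    def _gen_relationship(base, direction, return_fn,
                          attrname, local_cls, referred_cls, **kw):
        if direction is interfaces.ONETOMANY:
            kw['cascade'] = 'all, delete-orphan'
        # defer to the default function for the actual construct
        return generate_relationship(base, direction, return_fn,
                                     attrname, local_cls, referred_cls, **kw)

which would then be passed to :meth:`.AutomapBase.prepare` via the
:paramref:`.AutomapBase.prepare.generate_relationship` parameter.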
from sqlalchemy.ext.automap import automap_base
from sqlalchemy import create_engine
those which contain a ``secondary`` argument. The process for producing these
is as follows:
-1. A given :class:`.Table` is examined for :class:`.ForeignKeyConstraint` objects,
- before any mapped class has been assigned to it.
+1. A given :class:`.Table` is examined for :class:`.ForeignKeyConstraint`
+ objects, before any mapped class has been assigned to it.
2. If the table contains two and exactly two :class:`.ForeignKeyConstraint`
objects, and all columns within this table are members of these two
:class:`.ForeignKeyConstraint` objects, the table is assumed to be a
"secondary" table, and will **not be mapped directly**.
-3. The two (or one, for self-referential) external tables to which the :class:`.Table`
- refers to are matched to the classes to which they will be mapped, if any.
+3. The two (or one, for self-referential) external tables to which the
+   :class:`.Table` refers are matched to the classes to which they will be
+   mapped, if any.
4. If mapped classes for both sides are located, a many-to-many bi-directional
:func:`.relationship` / :func:`.backref` pair is created between the two
------------------------------
:mod:`.sqlalchemy.ext.automap` will not generate any relationships between
-two classes that are in an inheritance relationship. That is, with two classes
-given as follows::
+two classes that are in an inheritance relationship. That is, with two
+classes given as follows::
class Employee(Base):
__tablename__ = 'employee'
'polymorphic_identity':'engineer',
}
-The foreign key from ``Engineer`` to ``Employee`` is used not for a relationship,
-but to establish joined inheritance between the two classes.
+The foreign key from ``Engineer`` to ``Employee`` is used not for a
+relationship, but to establish joined inheritance between the two classes.
Note that this means automap will not generate *any* relationships
for foreign keys that link from a subclass to a superclass. If a mapping
requires such a relationship, it needs to be stated explicitly, as below::
id = Column(Integer, ForeignKey('employee.id'), primary_key=True)
favorite_employee_id = Column(Integer, ForeignKey('employee.id'))
- favorite_employee = relationship(Employee, foreign_keys=favorite_employee_id)
+ favorite_employee = relationship(Employee,
+ foreign_keys=favorite_employee_id)
__mapper_args__ = {
'polymorphic_identity':'engineer',
As noted previously, automap has no dependency on reflection, and can make
use of any collection of :class:`.Table` objects within a :class:`.MetaData`
collection. From this, it follows that automap can also be used
-generate missing relationships given an otherwise complete model that fully defines
-table metadata::
+to generate missing relationships given an otherwise complete model that
+fully defines table metadata::
from sqlalchemy.ext.automap import automap_base
from sqlalchemy import Column, Integer, String, ForeignKey
Above, given mostly complete ``User`` and ``Address`` mappings, the
:class:`.ForeignKey` which we defined on ``Address.user_id`` allowed a
-bidirectional relationship pair ``Address.user`` and ``User.address_collection``
-to be generated on the mapped classes.
+bidirectional relationship pair ``Address.user`` and
+``User.address_collection`` to be generated on the mapped classes.
-Note that when subclassing :class:`.AutomapBase`, the :meth:`.AutomapBase.prepare`
-method is required; if not called, the classes we've declared are in an
-un-mapped state.
+Note that when subclassing :class:`.AutomapBase`,
+the :meth:`.AutomapBase.prepare` method is required; if not called, the classes
+we've declared are in an un-mapped state.
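Restated as a compact sketch, the pattern looks like the following,
mirroring the ``user`` / ``address`` schema of the earlier examples::

    from sqlalchemy.ext.automap import automap_base
    from sqlalchemy import Column, Integer, String, ForeignKey

    Base = automap_base()

    class User(Base):
        __tablename__ = 'user'
        id = Column(Integer, primary_key=True)
        name = Column(String)

    class Address(Base):
        __tablename__ = 'address'
        id = Column(Integer, primary_key=True)
        email = Column(String)
        user_id = Column(Integer, ForeignKey('user.id'))

    # the required step; generates Address.user / User.address_collection
    Base.prepare()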
"""
.. note::
- In Python 2, the string used for the class name **must** be a non-Unicode
- object, e.g. a ``str()`` object. The ``.name`` attribute of
- :class:`.Table` is typically a Python unicode subclass, so the ``str()``
- function should be applied to this name, after accounting for any non-ASCII
- characters.
+ In Python 2, the string used for the class name **must** be a
+ non-Unicode object, e.g. a ``str()`` object. The ``.name`` attribute
+ of :class:`.Table` is typically a Python unicode subclass, so the
+ ``str()`` function should be applied to this name, after accounting for
+ any non-ASCII characters.
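    A sketch of an alternate callable which produces CamelCase class
    names (purely illustrative)::

        def pascal_classname_for_table(base, tablename, table):
            # e.g. "user_order" -> "UserOrder"; str() keeps the result
            # a non-unicode string on Python 2
            return str("".join(
                part.capitalize() for part in tablename.split("_")))

    Such a callable is passed via
    :paramref:`.AutomapBase.prepare.classname_for_table`.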
"""
return str(tablename)
+
def name_for_scalar_relationship(base, local_cls, referred_cls, constraint):
"""Return the attribute name that should be used to refer from one
class to another, for a scalar object reference.
"""
return referred_cls.__name__.lower()
-def name_for_collection_relationship(base, local_cls, referred_cls, constraint):
+
+def name_for_collection_relationship(
+ base, local_cls, referred_cls, constraint):
"""Return the attribute name that should be used to refer from one
class to another, for a collection reference.
return referred_cls.__name__.lower() + "_collection"
Alternate implementations
- can be specified using the :paramref:`.AutomapBase.prepare.name_for_collection_relationship`
+ can be specified using the
+ :paramref:`.AutomapBase.prepare.name_for_collection_relationship`
parameter.
:param base: the :class:`.AutomapBase` class doing the prepare.
"""
return referred_cls.__name__.lower() + "_collection"
-def generate_relationship(base, direction, return_fn, attrname, local_cls, referred_cls, **kw):
+
+def generate_relationship(
+ base, direction, return_fn, attrname, local_cls, referred_cls, **kw):
"""Generate a :func:`.relationship` or :func:`.backref` on behalf of two
mapped classes.
be one of :data:`.ONETOMANY`, :data:`.MANYTOONE`, :data:`.MANYTOMANY`.
:param return_fn: the function that is used by default to create the
- relationship. This will be either :func:`.relationship` or :func:`.backref`.
- The :func:`.backref` function's result will be used to produce a new
- :func:`.relationship` in a second step, so it is critical that user-defined
- implementations correctly differentiate between the two functions, if
- a custom relationship function is being used.
+ relationship. This will be either :func:`.relationship` or
+ :func:`.backref`. The :func:`.backref` function's result will be used to
+ produce a new :func:`.relationship` in a second step, so it is critical
+ that user-defined implementations correctly differentiate between the two
+ functions, if a custom relationship function is being used.
:param attrname: the attribute name to which this relationship is being assigned.
If the value of :paramref:`.generate_relationship.return_fn` is the
:param local_cls: the "local" class to which this relationship or backref
will be locally present.
- :param referred_cls: the "referred" class to which the relationship or backref
- refers to.
+ :param referred_cls: the "referred" class to which the relationship or
+     backref refers.
:param \**kw: all additional keyword arguments are passed along to the
function.
else:
raise TypeError("Unknown relationship function: %s" % return_fn)
+
class AutomapBase(object):
"""Base class for an "automap" schema.
"""
@classmethod
- def prepare(cls,
- engine=None,
- reflect=False,
- classname_for_table=classname_for_table,
- collection_class=list,
- name_for_scalar_relationship=name_for_scalar_relationship,
- name_for_collection_relationship=name_for_collection_relationship,
- generate_relationship=generate_relationship):
-
+ def prepare(
+ cls,
+ engine=None,
+ reflect=False,
+ classname_for_table=classname_for_table,
+ collection_class=list,
+ name_for_scalar_relationship=name_for_scalar_relationship,
+ name_for_collection_relationship=name_for_collection_relationship,
+ generate_relationship=generate_relationship):
"""Extract mapped classes and relationships from the :class:`.MetaData` and
perform mappings.
:param engine: an :class:`.Engine` or :class:`.Connection` with which
to perform schema reflection, if specified.
- If the :paramref:`.AutomapBase.prepare.reflect` argument is False, this
- object is not used.
+ If the :paramref:`.AutomapBase.prepare.reflect` argument is False,
+ this object is not used.
:param reflect: if True, the :meth:`.MetaData.reflect` method is called
on the :class:`.MetaData` associated with this :class:`.AutomapBase`.
- The :class:`.Engine` passed via :paramref:`.AutomapBase.prepare.engine` will
- be used to perform the reflection if present; else, the :class:`.MetaData`
- should already be bound to some engine else the operation will fail.
+ The :class:`.Engine` passed via
+ :paramref:`.AutomapBase.prepare.engine` will be used to perform the
+        reflection if present; otherwise, the :class:`.MetaData` should
+        already be bound to some engine, else the operation will fail.
:param classname_for_table: callable function which will be used to
produce new class names, given a table name. Defaults to
:func:`.classname_for_table`.
- :param name_for_scalar_relationship: callable function which will be used
- to produce relationship names for scalar relationships. Defaults to
- :func:`.name_for_scalar_relationship`.
+ :param name_for_scalar_relationship: callable function which will be
+ used to produce relationship names for scalar relationships. Defaults
+ to :func:`.name_for_scalar_relationship`.
- :param name_for_collection_relationship: callable function which will be used
- to produce relationship names for collection-oriented relationships. Defaults to
- :func:`.name_for_collection_relationship`.
+ :param name_for_collection_relationship: callable function which will
+ be used to produce relationship names for collection-oriented
+ relationships. Defaults to :func:`.name_for_collection_relationship`.
:param generate_relationship: callable function which will be used to
- actually generate :func:`.relationship` and :func:`.backref` constructs.
- Defaults to :func:`.generate_relationship`.
+ actually generate :func:`.relationship` and :func:`.backref`
+ constructs. Defaults to :func:`.generate_relationship`.
:param collection_class: the Python collection class that will be used
when a new :func:`.relationship` object is created that represents a
"""
if reflect:
cls.metadata.reflect(
- engine,
- extend_existing=True,
- autoload_replace=False
- )
+ engine,
+ extend_existing=True,
+ autoload_replace=False
+ )
table_to_map_config = dict(
- (m.local_table, m)
- for m in _DeferredMapperConfig.
- classes_for_base(cls, sort=False)
- )
+ (m.local_table, m)
+ for m in _DeferredMapperConfig.
+ classes_for_base(cls, sort=False)
+ )
many_to_many = []
for map_config in table_to_map_config.values():
_relationships_for_fks(cls,
- map_config,
- table_to_map_config,
- collection_class,
- name_for_scalar_relationship,
- name_for_collection_relationship,
- generate_relationship)
+ map_config,
+ table_to_map_config,
+ collection_class,
+ name_for_scalar_relationship,
+ name_for_collection_relationship,
+ generate_relationship)
for lcl_m2m, rem_m2m, m2m_const, table in many_to_many:
_m2m_relationship(cls, lcl_m2m, rem_m2m, m2m_const, table,
- table_to_map_config,
- collection_class,
- name_for_scalar_relationship,
- name_for_collection_relationship,
- generate_relationship)
+ table_to_map_config,
+ collection_class,
+ name_for_scalar_relationship,
+ name_for_collection_relationship,
+ generate_relationship)
for map_config in _DeferredMapperConfig.classes_for_base(cls):
map_config.map()
-
_sa_decl_prepare = True
"""Indicate that the mapping of classes should be deferred.
"""
+
def automap_base(declarative_base=None, **kw):
"""Produce a declarative automap base.
:param declarative_base: an existing class produced by
:func:`.declarative.declarative_base`. When this is passed, the function
- no longer invokes :func:`.declarative.declarative_base` itself, and all other
- keyword arguments are ignored.
+ no longer invokes :func:`.declarative.declarative_base` itself, and all
+ other keyword arguments are ignored.
:param \**kw: keyword arguments are passed along to
:func:`.declarative.declarative_base`.
Base = declarative_base
return type(
- Base.__name__,
- (AutomapBase, Base,),
- {"__abstract__": True, "classes": util.Properties({})}
- )
+ Base.__name__,
+ (AutomapBase, Base,),
+ {"__abstract__": True, "classes": util.Properties({})}
+ )
+
def _is_many_to_many(automap_base, table):
fk_constraints = [const for const in table.constraints
- if isinstance(const, ForeignKeyConstraint)]
+ if isinstance(const, ForeignKeyConstraint)]
if len(fk_constraints) != 2:
return None, None, None
cols = sum(
- [[fk.parent for fk in fk_constraint.elements]
- for fk_constraint in fk_constraints], [])
+ [[fk.parent for fk in fk_constraint.elements]
+ for fk_constraint in fk_constraints], [])
if set(cols) != set(table.c):
return None, None, None
fk_constraints
)
+
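# For illustration, a table shaped like the classic association table
# below satisfies the checks above (two ForeignKeyConstraints which
# between them cover every column) and is hence treated as "secondary":
#
#     Table('user_keyword', metadata,
#           Column('user_id', ForeignKey('user.id'), primary_key=True),
#           Column('keyword_id', ForeignKey('keyword.id'),
#                  primary_key=True))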
def _relationships_for_fks(automap_base, map_config, table_to_map_config,
- collection_class,
- name_for_scalar_relationship,
- name_for_collection_relationship,
- generate_relationship):
+ collection_class,
+ name_for_scalar_relationship,
+ name_for_collection_relationship,
+ generate_relationship):
local_table = map_config.local_table
local_cls = map_config.cls
continue
referred_cls = referred_cfg.cls
- if local_cls is not referred_cls and issubclass(local_cls, referred_cls):
+ if local_cls is not referred_cls and issubclass(
+ local_cls, referred_cls):
continue
relationship_name = name_for_scalar_relationship(
- automap_base,
- local_cls,
- referred_cls, constraint)
+ automap_base,
+ local_cls,
+ referred_cls, constraint)
backref_name = name_for_collection_relationship(
- automap_base,
- referred_cls,
- local_cls,
- constraint
- )
+ automap_base,
+ referred_cls,
+ local_cls,
+ constraint
+ )
create_backref = backref_name not in referred_cfg.properties
if relationship_name not in map_config.properties:
if create_backref:
- backref_obj = generate_relationship(automap_base,
- interfaces.ONETOMANY, backref,
- backref_name, referred_cls, local_cls,
- collection_class=collection_class)
+ backref_obj = generate_relationship(
+ automap_base,
+ interfaces.ONETOMANY, backref,
+ backref_name, referred_cls, local_cls,
+ collection_class=collection_class)
else:
backref_obj = None
rel = generate_relationship(automap_base,
- interfaces.MANYTOONE,
- relationship,
- relationship_name,
- local_cls, referred_cls,
- foreign_keys=[fk.parent for fk in constraint.elements],
- backref=backref_obj,
- remote_side=[fk.column for fk in constraint.elements]
- )
+ interfaces.MANYTOONE,
+ relationship,
+ relationship_name,
+ local_cls, referred_cls,
+ foreign_keys=[
+ fk.parent
+ for fk in constraint.elements],
+ backref=backref_obj,
+ remote_side=[
+ fk.column
+ for fk in constraint.elements]
+ )
if rel is not None:
map_config.properties[relationship_name] = rel
if not create_backref:
- referred_cfg.properties[backref_name].back_populates = relationship_name
+ referred_cfg.properties[
+ backref_name].back_populates = relationship_name
elif create_backref:
rel = generate_relationship(automap_base,
- interfaces.ONETOMANY,
- relationship,
- backref_name,
- referred_cls, local_cls,
- foreign_keys=[fk.parent for fk in constraint.elements],
- back_populates=relationship_name,
- collection_class=collection_class)
+ interfaces.ONETOMANY,
+ relationship,
+ backref_name,
+ referred_cls, local_cls,
+ foreign_keys=[
+ fk.parent
+ for fk in constraint.elements],
+ back_populates=relationship_name,
+ collection_class=collection_class)
if rel is not None:
referred_cfg.properties[backref_name] = rel
- map_config.properties[relationship_name].back_populates = backref_name
+ map_config.properties[
+ relationship_name].back_populates = backref_name
+
def _m2m_relationship(automap_base, lcl_m2m, rem_m2m, m2m_const, table,
- table_to_map_config,
- collection_class,
- name_for_scalar_relationship,
- name_for_collection_relationship,
- generate_relationship):
+ table_to_map_config,
+ collection_class,
+ name_for_scalar_relationship,
+ name_for_collection_relationship,
+ generate_relationship):
map_config = table_to_map_config.get(lcl_m2m, None)
referred_cfg = table_to_map_config.get(rem_m2m, None)
referred_cls = referred_cfg.cls
relationship_name = name_for_collection_relationship(
- automap_base,
- local_cls,
- referred_cls, m2m_const[0])
+ automap_base,
+ local_cls,
+ referred_cls, m2m_const[0])
backref_name = name_for_collection_relationship(
- automap_base,
- referred_cls,
- local_cls,
- m2m_const[1]
- )
+ automap_base,
+ referred_cls,
+ local_cls,
+ m2m_const[1]
+ )
create_backref = backref_name not in referred_cfg.properties
if relationship_name not in map_config.properties:
if create_backref:
- backref_obj = generate_relationship(automap_base,
- interfaces.MANYTOMANY,
- backref,
- backref_name,
- referred_cls, local_cls,
- collection_class=collection_class
- )
+ backref_obj = generate_relationship(
+ automap_base,
+ interfaces.MANYTOMANY,
+ backref,
+ backref_name,
+ referred_cls, local_cls,
+ collection_class=collection_class
+ )
else:
backref_obj = None
rel = generate_relationship(automap_base,
- interfaces.MANYTOMANY,
- relationship,
- relationship_name,
- local_cls, referred_cls,
- secondary=table,
- primaryjoin=and_(fk.column == fk.parent for fk in m2m_const[0].elements),
- secondaryjoin=and_(fk.column == fk.parent for fk in m2m_const[1].elements),
- backref=backref_obj,
- collection_class=collection_class
- )
+ interfaces.MANYTOMANY,
+ relationship,
+ relationship_name,
+ local_cls, referred_cls,
+ secondary=table,
+ primaryjoin=and_(
+ fk.column == fk.parent
+ for fk in m2m_const[0].elements),
+ secondaryjoin=and_(
+ fk.column == fk.parent
+ for fk in m2m_const[1].elements),
+ backref=backref_obj,
+ collection_class=collection_class
+ )
if rel is not None:
map_config.properties[relationship_name] = rel
if not create_backref:
- referred_cfg.properties[backref_name].back_populates = relationship_name
+ referred_cfg.properties[
+ backref_name].back_populates = relationship_name
elif create_backref:
rel = generate_relationship(automap_base,
- interfaces.MANYTOMANY,
- relationship,
- backref_name,
- referred_cls, local_cls,
- secondary=table,
- primaryjoin=and_(fk.column == fk.parent for fk in m2m_const[1].elements),
- secondaryjoin=and_(fk.column == fk.parent for fk in m2m_const[0].elements),
- back_populates=relationship_name,
- collection_class=collection_class)
+ interfaces.MANYTOMANY,
+ relationship,
+ backref_name,
+ referred_cls, local_cls,
+ secondary=table,
+ primaryjoin=and_(
+ fk.column == fk.parent
+ for fk in m2m_const[1].elements),
+ secondaryjoin=and_(
+ fk.column == fk.parent
+ for fk in m2m_const[0].elements),
+ back_populates=relationship_name,
+ collection_class=collection_class)
if rel is not None:
referred_cfg.properties[backref_name] = rel
- map_config.properties[relationship_name].back_populates = backref_name
+ map_config.properties[
+ relationship_name].back_populates = backref_name
@compiles(AlterColumn, 'postgresql')
def visit_alter_column(element, compiler, **kw):
- return "ALTER TABLE %s ALTER COLUMN %s ..." % (element.table.name, element.column.name)
+ return "ALTER TABLE %s ALTER COLUMN %s ..." % (element.table.name,
+ element.column.name)
The second ``visit_alter_column`` will be invoked when any ``postgresql``
dialect is used.
Produces::
- "INSERT INTO mytable (SELECT mytable.x, mytable.y, mytable.z FROM mytable WHERE mytable.x > :x_1)"
+ "INSERT INTO mytable (SELECT mytable.x, mytable.y, mytable.z
+ FROM mytable WHERE mytable.x > :x_1)"
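For orientation, a condensed sketch of the :func:`.compiles` recipe that
yields output of this shape, following the usual documented pattern (the
``InsertFromSelect`` construct is illustrative)::

    from sqlalchemy.ext.compiler import compiles
    from sqlalchemy.sql.expression import Executable, ClauseElement

    class InsertFromSelect(Executable, ClauseElement):
        def __init__(self, table, select):
            self.table = table
            self.select = select

    @compiles(InsertFromSelect)
    def visit_insert_from_select(element, compiler, **kw):
        return "INSERT INTO %s (%s)" % (
            compiler.process(element.table, asfrom=True),
            compiler.process(element.select)
        )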
.. note::
# TODO: why is the lambda needed ?
setattr(class_, '_compiler_dispatch',
- lambda *arg, **kw: existing(*arg, **kw))
+ lambda *arg, **kw: existing(*arg, **kw))
setattr(class_, '_compiler_dispatcher', existing)
if specs:
fn = self.specs['default']
except KeyError:
raise exc.CompileError(
- "%s construct has no default "
- "compilation handler." % type(element))
+ "%s construct has no default "
+ "compilation handler." % type(element))
return fn(element, compiler, **kw)
sqlalchemy.exc.InvalidRequestError: this ForeignKey's parent column is not
yet associated with a Table.
-This is because the ``target_id`` :class:`.Column` we've called upon in our ``target()``
-method is not the same :class:`.Column` that declarative is actually going to map
-to our table.
+This is because the ``target_id`` :class:`.Column` we've called upon in our
+``target()`` method is not the same :class:`.Column` that declarative is
+actually going to map to our table.
The condition above is resolved using a lambda::
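    class RefTargetMixin(object):
        @declared_attr
        def target_id(cls):
            return Column('target_id', ForeignKey('target.id'))

        @declared_attr
        def target(cls):
            # the lambda defers evaluation of cls.target_id until
            # mappers are configured; a sketch -- the mixin and
            # ``Target`` names are illustrative
            return relationship(
                Target,
                primaryjoin=lambda: Target.id == cls.target_id)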
``__declare_first__()``
~~~~~~~~~~~~~~~~~~~~~~~
-Like ``__declare_last__()``, but is called at the beginning of mapper configuration
-via the :meth:`.MapperEvents.before_configured` event::
+Like ``__declare_last__()``, but is called at the beginning of mapper
+configuration via the :meth:`.MapperEvents.before_configured` event::
class MyClass(Base):
@classmethod
__all__ = ['declarative_base', 'synonym_for', 'has_inherited_table',
- 'comparable_using', 'instrument_declarative', 'declared_attr',
- 'ConcreteBase', 'AbstractConcreteBase', 'DeclarativeMeta',
- 'DeferredReflection']
+ 'comparable_using', 'instrument_declarative', 'declared_attr',
+ 'ConcreteBase', 'AbstractConcreteBase', 'DeclarativeMeta',
+ 'DeferredReflection']
from ...schema import Table, MetaData
from ...orm import synonym as _orm_synonym, mapper,\
- comparable_property,\
- interfaces, properties
+ comparable_property,\
+ interfaces, properties
from ...orm.util import polymorphic_union
from ...orm.base import _mapper_or_none
from ...util import OrderedDict
import weakref
from .base import _as_declarative, \
- _declarative_constructor,\
- _DeferredMapperConfig, _add_attribute
+ _declarative_constructor,\
+ _DeferredMapperConfig, _add_attribute
from .clsregistry import _class_resolver
from . import clsregistry
+
def instrument_declarative(cls, registry, metadata):
"""Given a class, configure the class declaratively,
using the given registry, which can be any dictionary, and
"""
if '_decl_class_registry' in cls.__dict__:
raise exc.InvalidRequestError(
- "Class %r already has been "
- "instrumented declaratively" % cls)
+ "Class %r already has been "
+ "instrumented declaratively" % cls)
cls._decl_class_registry = registry
cls.metadata = metadata
_as_declarative(cls, cls.__name__, cls.__dict__)
return metaclass(name, bases, class_dict)
+
def as_declarative(**kw):
"""
Class decorator for :func:`.declarative_base`.
return decorate
+
class ConcreteBase(object):
"""A helper class for 'concrete' declarative mappings.
return polymorphic_union(OrderedDict(
(mp.polymorphic_identity, mp.local_table)
for mp in mappers
- ), 'type', 'pjoin')
+ ), 'type', 'pjoin')
@classmethod
def __declare_first__(cls):
metadata = mapper.class_.metadata
for rel in mapper._props.values():
if isinstance(rel, properties.RelationshipProperty) and \
- rel.secondary is not None:
+ rel.secondary is not None:
if isinstance(rel.secondary, Table):
cls._reflect_table(rel.secondary, engine)
elif isinstance(rel.secondary, _class_resolver):
@classmethod
def _reflect_table(cls, table, engine):
Table(table.name,
- table.metadata,
- extend_existing=True,
- autoload_replace=False,
- autoload=True,
- autoload_with=engine,
- schema=table.schema)
+ table.metadata,
+ extend_existing=True,
+ autoload_replace=False,
+ autoload=True,
+ autoload_with=engine,
+ schema=table.schema)
import collections
import weakref
+
def _declared_mapping_info(cls):
# deferred mapping
if _DeferredMapperConfig.has_cls(cls):
cls.__declare_first__()
if '__abstract__' in base.__dict__ and base.__abstract__:
if (base is cls or
- (base in cls.__bases__ and not _is_declarative_inherits)
- ):
+ (base in cls.__bases__ and not _is_declarative_inherits)):
return
class_mapped = _declared_mapping_info(base) is not None
for name, obj in vars(base).items():
if name == '__mapper_args__':
if not mapper_args_fn and (
- not class_mapped or
- isinstance(obj, declarative_props)
- ):
+ not class_mapped or
+ isinstance(obj, declarative_props)
+ ):
# don't even invoke __mapper_args__ until
# after we've determined everything about the
# mapped table.
mapper_args_fn = lambda: dict(cls.__mapper_args__)
elif name == '__tablename__':
if not tablename and (
- not class_mapped or
- isinstance(obj, declarative_props)
- ):
+ not class_mapped or
+ isinstance(obj, declarative_props)
+ ):
tablename = cls.__tablename__
elif name == '__table_args__':
if not table_args and (
- not class_mapped or
- isinstance(obj, declarative_props)
- ):
+ not class_mapped or
+ isinstance(obj, declarative_props)
+ ):
table_args = cls.__table_args__
if not isinstance(table_args, (tuple, dict, type(None))):
raise exc.ArgumentError(
- "__table_args__ value must be a tuple, "
- "dict, or None")
+ "__table_args__ value must be a tuple, "
+ "dict, or None")
if base is not cls:
inherited_table_args = True
elif class_mapped:
if isinstance(obj, declarative_props):
util.warn("Regular (i.e. not __special__) "
- "attribute '%s.%s' uses @declared_attr, "
- "but owning class %s is mapped - "
- "not applying to subclass %s."
- % (base.__name__, name, base, cls))
+ "attribute '%s.%s' uses @declared_attr, "
+ "but owning class %s is mapped - "
+ "not applying to subclass %s."
+ % (base.__name__, name, base, cls))
continue
elif base is not cls:
# we're a mixin.
continue
if obj.foreign_keys:
raise exc.InvalidRequestError(
- "Columns with foreign keys to other columns "
- "must be declared as @declared_attr callables "
- "on declarative mixin classes. ")
+ "Columns with foreign keys to other columns "
+ "must be declared as @declared_attr callables "
+                        "on declarative mixin classes.")
if name not in dict_ and not (
'__table__' in dict_ and
(obj.name or name) in dict_['__table__'].c
- ) and name not in potential_columns:
+ ) and name not in potential_columns:
potential_columns[name] = \
- column_copies[obj] = \
- obj.copy()
+ column_copies[obj] = \
+ obj.copy()
column_copies[obj]._creation_order = \
- obj._creation_order
+ obj._creation_order
elif isinstance(obj, MapperProperty):
raise exc.InvalidRequestError(
"Mapper properties (i.e. deferred,"
"on declarative mixin classes.")
elif isinstance(obj, declarative_props):
dict_[name] = ret = \
- column_copies[obj] = getattr(cls, name)
+ column_copies[obj] = getattr(cls, name)
if isinstance(ret, (Column, MapperProperty)) and \
- ret.doc is None:
+ ret.doc is None:
ret.doc = obj.__doc__
# apply inherited columns as we should
value = synonym(value.key)
setattr(cls, k, value)
-
if (isinstance(value, tuple) and len(value) == 1 and
- isinstance(value[0], (Column, MapperProperty))):
+ isinstance(value[0], (Column, MapperProperty))):
util.warn("Ignoring declarative-like tuple value of attribute "
"%s: possibly a copy-and-paste error with a comma "
"left at the end of the line?" % k)
if isinstance(c, (ColumnProperty, CompositeProperty)):
for col in c.columns:
if isinstance(col, Column) and \
- col.table is None:
+ col.table is None:
_undefer_column_name(key, col)
if not isinstance(c, CompositeProperty):
name_to_prop_key[col.name].add(key)
"Class %r does not have a __table__ or __tablename__ "
"specified and does not inherit from an existing "
"table-mapped class." % cls
- )
+ )
elif inherits:
inherited_mapper = _declared_mapping_info(inherits)
inherited_table = inherited_mapper.local_table
raise exc.ArgumentError(
"Can't place __table_args__ on an inherited class "
"with no table."
- )
+ )
# add any columns declared here to the inherited table.
for c in declared_columns:
if c.primary_key:
raise exc.ArgumentError(
"Can't place primary key columns on an inherited "
"class with no table."
- )
+ )
if c.name in inherited_table.c:
if inherited_table.c[c.name] is c:
continue
)
inherited_table.append_column(c)
if inherited_mapped_table is not None and \
- inherited_mapped_table is not inherited_table:
+ inherited_mapped_table is not inherited_table:
inherited_mapped_table._refresh_for_new_column(c)
defer_map = hasattr(cls, '_sa_decl_prepare')
else:
cfg_cls = _MapperConfig
mt = cfg_cls(mapper_cls,
- cls, table,
- inherits,
- declared_columns,
- column_copies,
- our_stuff,
- mapper_args_fn)
+ cls, table,
+ inherits,
+ declared_columns,
+ column_copies,
+ our_stuff,
+ mapper_args_fn)
if not defer_map:
mt.map()
mapped_table = None
def __init__(self, mapper_cls,
- cls,
- table,
- inherits,
- declared_columns,
- column_copies,
- properties, mapper_args_fn):
+ cls,
+ table,
+ inherits,
+ declared_columns,
+ column_copies,
+ properties, mapper_args_fn):
self.mapper_cls = mapper_cls
self.cls = cls
self.local_table = table
self.declared_columns = declared_columns
self.column_copies = column_copies
-
def _prepare_mapper_arguments(self):
properties = self.properties
if self.mapper_args_fn:
set([c.key for c in inherited_table.c
if c not in inherited_mapper._columntoproperty])
exclude_properties.difference_update(
- [c.key for c in self.declared_columns])
+ [c.key for c in self.declared_columns])
# look through columns in the current mapper that
# are keyed to a propname different than the colname
**mapper_args
)
+
class _DeferredMapperConfig(_MapperConfig):
_configs = util.OrderedDict()
def has_cls(cls, class_):
# 2.6 fails on weakref if class_ is an old style class
return isinstance(class_, type) and \
- weakref.ref(class_) in cls._configs
+ weakref.ref(class_) in cls._configs
@classmethod
def config_for_cls(cls, class_):
return cls._configs[weakref.ref(class_)]
-
@classmethod
def classes_for_base(cls, base_cls, sort=True):
classes_for_base = [m for m in cls._configs.values()
- if issubclass(m.cls, base_cls)]
+ if issubclass(m.cls, base_cls)]
if not sort:
return classes_for_base
all_m_by_cls = dict(
- (m.cls, m)
- for m in classes_for_base
- )
+ (m.cls, m)
+ for m in classes_for_base
+ )
tuples = []
for m_cls in all_m_by_cls:
tuples.extend(
- (all_m_by_cls[base_cls], all_m_by_cls[m_cls])
- for base_cls in m_cls.__bases__
- if base_cls in all_m_by_cls
- )
+ (all_m_by_cls[base_cls], all_m_by_cls[m_cls])
+ for base_cls in m_cls.__bases__
+ if base_cls in all_m_by_cls
+ )
return list(
topological.sort(
tuples,
"""
from ...orm.properties import ColumnProperty, RelationshipProperty, \
- SynonymProperty
+ SynonymProperty
from ...schema import _get_table_key
from ...orm import class_mapper, interfaces
from ... import util
def __init__(self, classes, on_remove=None):
self.on_remove = on_remove
self.contents = set([
- weakref.ref(item, self._remove_item) for item in classes])
+ weakref.ref(item, self._remove_item) for item in classes])
_registries.add(self)
def __iter__(self):
_decl_class_registry.
"""
+
def __init__(self, name, parent):
self.parent = parent
self.name = name
existing.add_item(cls)
else:
existing = self.contents[name] = \
- _MultipleClassMarker([cls],
- on_remove=lambda: self._remove_item(name))
+ _MultipleClassMarker([cls],
+ on_remove=lambda: self._remove_item(name))
class _ModNS(object):
assert isinstance(value, _MultipleClassMarker)
return value.attempt_get(self.__parent.path, key)
raise AttributeError("Module %r has no mapped classes "
- "registered under the name %r" % (self.__parent.name, key))
+ "registered under the name %r" % (
+ self.__parent.name, key))
class _GetColumns(object):
if mp:
if key not in mp.all_orm_descriptors:
raise exc.InvalidRequestError(
- "Class %r does not have a mapped column named %r"
- % (self.cls, key))
+ "Class %r does not have a mapped column named %r"
+ % (self.cls, key))
desc = mp.all_orm_descriptors[key]
if desc.extension_type is interfaces.NOT_EXTENSION:
key = prop.name
elif not isinstance(prop, ColumnProperty):
raise exc.InvalidRequestError(
- "Property %r is not an instance of"
- " ColumnProperty (i.e. does not correspond"
- " directly to a Column)." % key)
+ "Property %r is not an instance of"
+ " ColumnProperty (i.e. does not correspond"
+ " directly to a Column)." % key)
return getattr(self.cls, key)
inspection._inspects(_GetColumns)(
- lambda target: inspection.inspect(target.cls))
+ lambda target: inspection.inspect(target.cls))
class _GetTable(object):
def __getattr__(self, key):
return self.metadata.tables[
- _get_table_key(key, self.key)
- ]
+ _get_table_key(key, self.key)
+ ]
def _determine_container(key, value):
elif key in cls.metadata._schemas:
return _GetTable(key, cls.metadata)
elif '_sa_module_registry' in cls._decl_class_registry and \
- key in cls._decl_class_registry['_sa_module_registry']:
+ key in cls._decl_class_registry['_sa_module_registry']:
registry = cls._decl_class_registry['_sa_module_registry']
return registry.resolve_attr(key)
elif self._resolvers:
def iter_for_shard(shard_id):
context.attributes['shard_id'] = shard_id
result = self._connection_from_session(
- mapper=self._mapper_zero(),
- shard_id=shard_id).execute(
- context.statement,
- self._params)
+ mapper=self._mapper_zero(),
+ shard_id=shard_id).execute(
+ context.statement,
+ self._params)
return self.instances(result, context)
if self._shard_id is not None:
if self.transaction is not None:
return self.transaction.connection(mapper, shard_id=shard_id)
else:
- return self.get_bind(mapper,
- shard_id=shard_id,
- instance=instance).contextual_connect(**kwargs)
+ return self.get_bind(
+ mapper,
+ shard_id=shard_id,
+ instance=instance
+ ).contextual_connect(**kwargs)
def get_bind(self, mapper, shard_id=None,
instance=None, clause=None, **kw):
.. seealso::
`Hybrids and Value Agnostic Types
- <http://techspot.zzzeek.org/2011/10/21/hybrids-and-value-agnostic-types/>`_ -
- on the techspot.zzzeek.org blog
+ <http://techspot.zzzeek.org/2011/10/21/hybrids-and-value-agnostic-types/>`_
+ - on the techspot.zzzeek.org blog
`Value Agnostic Types, Part II
<http://techspot.zzzeek.org/2011/10/29/value-agnostic-types-part-ii/>`_ -
"""
+
class hybrid_method(interfaces._InspectionAttr):
"""A decorator which allows definition of a Python object method with both
instance-level and class-level behavior.
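
    A condensed sketch of the usual shape of such a method (the
    ``Interval`` model is illustrative)::

        class Interval(Base):
            __tablename__ = 'interval'
            id = Column(Integer, primary_key=True)
            start = Column(Integer, nullable=False)
            end = Column(Integer, nullable=False)

            @hybrid_method
            def contains(self, point):
                # evaluates in Python against an instance, and renders
                # as a SQL expression against the class
                return (self.start <= point) & (point <= self.end)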
"""
proxy_attr = attributes.\
- create_proxied_attribute(self)
+ create_proxied_attribute(self)
def expr(owner):
return proxy_attr(owner, self.__name__, self, comparator(owner))
def _check_conflicts(self, class_, factory):
existing_factories = self._collect_management_factories_for(class_).\
- difference([factory])
+ difference([factory])
if existing_factories:
raise TypeError(
"multiple instrumentation implementations specified "
orm_instrumentation._instrumentation_factory = \
- _instrumentation_factory = ExtendedInstrumentationRegistry()
+ _instrumentation_factory = ExtendedInstrumentationRegistry()
orm_instrumentation.instrumentation_finders = instrumentation_finders
return delegate(key, state, factory)
else:
return ClassManager.initialize_collection(self, key,
- state, factory)
+ state, factory)
def new_instance(self, state=None):
instance = self.class_.__new__(self.class_)
val._parents[state.obj()] = key
event.listen(parent_cls, 'load', load,
- raw=True, propagate=True)
+ raw=True, propagate=True)
event.listen(parent_cls, 'refresh', load,
- raw=True, propagate=True)
+ raw=True, propagate=True)
event.listen(attribute, 'set', set,
- raw=True, retval=True, propagate=True)
+ raw=True, retval=True, propagate=True)
event.listen(parent_cls, 'pickle', pickle,
- raw=True, propagate=True)
+ raw=True, propagate=True)
event.listen(parent_cls, 'unpickle', unpickle,
- raw=True, propagate=True)
+ raw=True, propagate=True)
class Mutable(MutableBase):
return sqltype
-
class MutableComposite(MutableBase):
"""Mixin that defines transparent propagation of change
events on a SQLAlchemy "composite" object to its
prop = object_mapper(parent).get_property(key)
for value, attr_name in zip(
- self.__composite_values__(),
- prop._attribute_keys):
+ self.__composite_values__(),
+ prop._attribute_keys):
setattr(parent, attr_name, value)
+
def _setup_composite_listener():
def _listen_for_type(mapper, class_):
for prop in mapper.iterate_properties:
if (hasattr(prop, 'composite_class') and
- isinstance(prop.composite_class, type) and
- issubclass(prop.composite_class, MutableComposite)):
+ isinstance(prop.composite_class, type) and
+ issubclass(prop.composite_class, MutableComposite)):
prop.composite_class._listen_on_attribute(
getattr(class_, prop.key), False, class_)
if not event.contains(Mapper, "mapper_configured", _listen_for_type):
dict.__setitem__(self, key, value)
self.changed()
-
def setdefault(self, key, value):
result = dict.setdefault(self, key, value)
self.changed()
>>> s.bullets[2].position
2
-The :class:`.OrderingList` construct only works with **changes** to a collection,
-and not the initial load from the database, and requires that the list be
-sorted when loaded. Therefore, be sure to
-specify ``order_by`` on the :func:`.relationship` against the target ordering
-attribute, so that the ordering is correct when first loaded.
+The :class:`.OrderingList` construct only works with **changes** to a
+collection, and not the initial load from the database, and requires that the
+list be sorted when loaded. Therefore, be sure to specify ``order_by`` on the
+:func:`.relationship` against the target ordering attribute, so that the
+ordering is correct when first loaded.
.. warning::
explicit configuration at the mapper level for sets of columns that
are to be handled in this way.
-:func:`.ordering_list` takes the name of the related object's ordering attribute as
-an argument. By default, the zero-based integer index of the object's
-position in the :func:`.ordering_list` is synchronized with the ordering attribute:
-index 0 will get position 0, index 1 position 1, etc. To start numbering at 1
-or some other integer, provide ``count_from=1``.
+:func:`.ordering_list` takes the name of the related object's ordering
+attribute as an argument. By default, the zero-based integer index of the
+object's position in the :func:`.ordering_list` is synchronized with the
+ordering attribute: index 0 will get position 0, index 1 position 1, etc. To
+start numbering at 1 or some other integer, provide ``count_from=1``.
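A condensed sketch of the standard setup (the ``Slide`` / ``Bullet``
models follow the examples above; ``Bullet`` is assumed to carry an
integer ``position`` column)::

    from sqlalchemy.ext.orderinglist import ordering_list

    class Slide(Base):
        __tablename__ = 'slide'

        id = Column(Integer, primary_key=True)
        bullets = relationship("Bullet",
                               order_by="Bullet.position",
                               collection_class=ordering_list('position'))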
"""
for func_name, func in list(locals().items()):
if (util.callable(func) and func.__name__ == func_name and
- not func.__doc__ and hasattr(list, func_name)):
+ not func.__doc__ and hasattr(list, func_name)):
func.__doc__ = getattr(list, func_name).__doc__
del func_name, func
# ... define mappers
- query = Session.query(MyClass).filter(MyClass.somedata=='foo').order_by(MyClass.sortkey)
+    query = Session.query(MyClass).\
+        filter(MyClass.somedata == 'foo').order_by(MyClass.sortkey)
# pickle the query
serialized = dumps(query)
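# unpickle -- pass the metadata and the scoped session so that
# tables and mappers can be re-associated (continuing the sketch)
query2 = loads(serialized, metadata, Session)
print(query2.all())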
pickler = pickle.Pickler(*args, **kw)
def persistent_id(obj):
- #print "serializing:", repr(obj)
+ # print "serializing:", repr(obj)
if isinstance(obj, QueryableAttribute):
cls = obj.impl.class_
key = obj.impl.key
id = "mapper:" + b64encode(pickle.dumps(obj.class_))
elif isinstance(obj, MapperProperty) and not obj.parent.non_primary:
id = "mapperprop:" + b64encode(pickle.dumps(obj.parent.class_)) + \
- ":" + obj.key
+ ":" + obj.key
elif isinstance(obj, Table):
id = "table:" + text_type(obj.key)
elif isinstance(obj, Column) and isinstance(obj.table, Table):
- id = "column:" + text_type(obj.table.key) + ":" + text_type(obj.key)
+ id = "column:" + \
+ text_type(obj.table.key) + ":" + text_type(obj.key)
elif isinstance(obj, Session):
id = "session:"
elif isinstance(obj, Engine):
return pickler
our_ids = re.compile(
- r'(mapperprop|mapper|table|column|session|attribute|engine):(.*)')
+ r'(mapperprop|mapper|table|column|session|attribute|engine):(.*)')
def Deserializer(file, metadata=None, scoped_session=None, engine=None):
if raiseerr and (
reg is None or ret is None
- ):
+ ):
raise exc.NoInspectionAvailable(
"No inspection system is "
"available for object of type %s" %
for type_ in types:
if type_ in _registrars:
raise AssertionError(
- "Type %s is already "
- "registered" % type_)
+ "Type %s is already "
+ "registered" % type_)
_registrars[type_] = fn_or_cls
return fn_or_cls
return decorate
"""
- listener = util.as_interface(listener, methods=('connect',
- 'first_connect', 'checkout', 'checkin'))
+ listener = util.as_interface(listener,
+ methods=('connect', 'first_connect',
+ 'checkout', 'checkin'))
if hasattr(listener, 'connect'):
event.listen(self, 'connect', listener.connect)
if hasattr(listener, 'first_connect'):
statement,
parameters,
context,
- ):
+ ):
return statement, parameters
return listener.cursor_execute(
_logged_classes.add(cls)
return cls
+
class Identified(object):
logging_name = None
# if echo flag is enabled and no handlers,
# add a handler to the list
if self._echo_map[echo] <= logging.INFO \
- and not self.logger.handlers:
+ and not self.logger.handlers:
_add_default_handler(self.logger)
#
if instance.logging_name:
name = "%s.%s.%s" % (instance.__class__.__module__,
- instance.__class__.__name__, instance.logging_name)
+ instance.__class__.__name__,
+ instance.logging_name)
else:
name = "%s.%s" % (instance.__class__.__module__,
- instance.__class__.__name__)
+ instance.__class__.__name__)
instance._echo = echoflag
reset_commit = util.symbol('reset_commit')
reset_none = util.symbol('reset_none')
+
class _ConnDialect(object):
+
"""partial implementation of :class:`.Dialect`
which provides DBAPI connection methods.
:class:`.Dialect`.
"""
+
def do_rollback(self, dbapi_connection):
dbapi_connection.rollback()
def do_close(self, dbapi_connection):
dbapi_connection.close()
+
class Pool(log.Identified):
+
"""Abstract base class for connection pools."""
_dialect = _ConnDialect()
def __init__(self,
- creator, recycle=-1, echo=None,
- use_threadlocal=False,
- logging_name=None,
- reset_on_return=True,
- listeners=None,
- events=None,
- _dispatch=None,
- _dialect=None):
+ creator, recycle=-1, echo=None,
+ use_threadlocal=False,
+ logging_name=None,
+ reset_on_return=True,
+ listeners=None,
+ events=None,
+ _dispatch=None,
+ _dialect=None):
"""
Construct a Pool.
.. warning:: The :paramref:`.Pool.use_threadlocal` flag
**does not affect the behavior** of :meth:`.Engine.connect`.
- :meth:`.Engine.connect` makes use of the :meth:`.Pool.unique_connection`
- method which **does not use thread local context**.
- To produce a :class:`.Connection` which refers to the
- :meth:`.Pool.connect` method, use
+ :meth:`.Engine.connect` makes use of the
+ :meth:`.Pool.unique_connection` method which **does not use thread
+ local context**. To produce a :class:`.Connection` which refers
+ to the :meth:`.Pool.connect` method, use
:meth:`.Engine.contextual_connect`.
Note that other SQLAlchemy connectivity systems such as
self._reset_on_return = reset_commit
else:
raise exc.ArgumentError(
- "Invalid value for 'reset_on_return': %r"
- % reset_on_return)
+ "Invalid value for 'reset_on_return': %r"
+ % reset_on_return)
self.echo = echo
if _dispatch:
event.listen(self, target, fn)
if listeners:
util.warn_deprecated(
- "The 'listeners' argument to Pool (and "
- "create_engine()) is deprecated. Use event.listen().")
+ "The 'listeners' argument to Pool (and "
+ "create_engine()) is deprecated. Use event.listen().")
for l in listeners:
self.add_listener(l)
raise
except:
self.logger.error("Exception closing connection %r",
- connection, exc_info=True)
+ connection, exc_info=True)
@util.deprecated(
2.7, "Pool.add_listener is deprecated. Use event.listen()")
This method is equivalent to :meth:`.Pool.connect` when the
:paramref:`.Pool.use_threadlocal` flag is not set to True.
- When :paramref:`.Pool.use_threadlocal` is True, the :meth:`.Pool.unique_connection`
- method provides a means of bypassing the threadlocal context.
+ When :paramref:`.Pool.use_threadlocal` is True, the
+ :meth:`.Pool.unique_connection` method provides a means of bypassing
+ the threadlocal context.
"""
return _ConnectionFairy._checkout(self)
if getattr(connection, 'is_valid', False):
connection.invalidate(exception)
-
def recreate(self):
"""Return a new :class:`.Pool`, of the same class as this one
and configured with identical creation arguments.
class _ConnectionRecord(object):
+
"""Internal object which maintains an individual DBAPI connection
referenced by a :class:`.Pool`.
self.finalize_callback = deque()
pool.dispatch.first_connect.\
- for_modify(pool.dispatch).\
- exec_once(self.connection, self)
+ for_modify(pool.dispatch).\
+ exec_once(self.connection, self)
pool.dispatch.connect(self.connection, self)
connection = None
raise
fairy = _ConnectionFairy(dbapi_connection, rec)
rec.fairy_ref = weakref.ref(
- fairy,
- lambda ref: _finalize_fairy and \
- _finalize_fairy(
- dbapi_connection,
- rec, pool, ref, pool._echo)
- )
+ fairy,
+ lambda ref: _finalize_fairy and
+ _finalize_fairy(
+ dbapi_connection,
+ rec, pool, ref, pool._echo)
+ )
_refs.add(rec)
if pool._echo:
pool.logger.debug("Connection %r checked out from pool",
- dbapi_connection)
+ dbapi_connection)
return fairy
def checkin(self):
pool.dispatch.checkin(connection, self)
pool._return_conn(self)
-
def close(self):
if self.connection is not None:
self.__close()
"""Invalidate the DBAPI connection held by this :class:`._ConnectionRecord`.
This method is called for all connection invalidations, including
- when the :meth:`._ConnectionFairy.invalidate` or :meth:`.Connection.invalidate`
- methods are called, as well as when any so-called "automatic invalidation"
- condition occurs.
+ when the :meth:`._ConnectionFairy.invalidate` or
+ :meth:`.Connection.invalidate` methods are called, as well as when any
+ so-called "automatic invalidation" condition occurs.
.. seealso::
elif self.__pool._recycle > -1 and \
time.time() - self.starttime > self.__pool._recycle:
self.__pool.logger.info(
- "Connection %r exceeded timeout; recycling",
- self.connection)
+ "Connection %r exceeded timeout; recycling",
+ self.connection)
recycle = True
elif self.__pool._invalidate_time > self.starttime:
self.__pool.logger.info(
- "Connection %r invalidated due to pool invalidation; recycling",
- self.connection
- )
+                "Connection %r invalidated due to pool invalidation; "
+                "recycling",
+ self.connection
+ )
recycle = True
if recycle:
raise
-def _finalize_fairy(connection, connection_record, pool, ref, echo, fairy=None):
+def _finalize_fairy(connection, connection_record,
+ pool, ref, echo, fairy=None):
"""Cleanup for a :class:`._ConnectionFairy` whether or not it's already
been garbage collected.
_refs.discard(connection_record)
if ref is not None and \
- connection_record.fairy_ref is not ref:
+ connection_record.fairy_ref is not ref:
return
if connection is not None:
if connection_record and echo:
pool.logger.debug("Connection %r being returned to pool",
- connection)
+ connection)
try:
fairy = fairy or _ConnectionFairy(connection, connection_record)
if not connection_record:
pool._close_connection(connection)
except Exception as e:
- pool.logger.error("Exception during reset or similar", exc_info=True)
+ pool.logger.error(
+ "Exception during reset or similar", exc_info=True)
if connection_record:
connection_record.invalidate(e=e)
if isinstance(e, (SystemExit, KeyboardInterrupt)):
class _ConnectionFairy(object):
+
"""Proxies a DBAPI connection and provides return-on-dereference
support.
to provide context management to a DBAPI connection delivered by
that :class:`.Pool`.
- The name "fairy" is inspired by the fact that the :class:`._ConnectionFairy`
- object's lifespan is transitory, as it lasts only for the length of a
- specific DBAPI connection being checked out from the pool, and additionally
- that as a transparent proxy, it is mostly invisible.
+ The name "fairy" is inspired by the fact that the
+ :class:`._ConnectionFairy` object's lifespan is transitory, as it lasts
+ only for the length of a specific DBAPI connection being checked out from
+ the pool, and additionally that as a transparent proxy, it is mostly
+ invisible.
.. seealso::
_reset_agent = None
"""Refer to an object with a ``.commit()`` and ``.rollback()`` method;
if non-None, the "reset-on-return" feature will call upon this object
- rather than directly against the dialect-level do_rollback() and do_commit()
- methods.
+ rather than directly against the dialect-level do_rollback() and
+ do_commit() methods.
In practice, a :class:`.Connection` assigns a :class:`.Transaction` object
to this variable when one is in scope so that the :class:`.Transaction`
while attempts > 0:
try:
pool.dispatch.checkout(fairy.connection,
- fairy._connection_record,
- fairy)
+ fairy._connection_record,
+ fairy)
return fairy
except exc.DisconnectionError as e:
pool.logger.info(
def _checkin(self):
_finalize_fairy(self.connection, self._connection_record,
- self._pool, None, self._echo, fairy=self)
+ self._pool, None, self._echo, fairy=self)
self.connection = None
self._connection_record = None
if pool._reset_on_return is reset_rollback:
if echo:
pool.logger.debug("Connection %s rollback-on-return%s",
- self.connection,
- ", via agent"
- if self._reset_agent else "")
+ self.connection,
+ ", via agent"
+ if self._reset_agent else "")
if self._reset_agent:
self._reset_agent.rollback()
else:
elif pool._reset_on_return is reset_commit:
if echo:
pool.logger.debug("Connection %s commit-on-return%s",
- self.connection,
- ", via agent"
- if self._reset_agent else "")
+ self.connection,
+ ", via agent"
+ if self._reset_agent else "")
if self._reset_agent:
self._reset_agent.commit()
else:
def __getattr__(self, key):
return getattr(self.connection, key)
-
def detach(self):
"""Separate this connection from its Pool.
self._checkin()
-
class SingletonThreadPool(Pool):
+
"""A Pool that maintains one connection per thread.
Maintains one connection per each thread, never moving a connection to a
def recreate(self):
self.logger.info("Pool recreating")
return self.__class__(self._creator,
- pool_size=self.size,
- recycle=self._recycle,
- echo=self.echo,
- logging_name=self._orig_logging_name,
- use_threadlocal=self._use_threadlocal,
- reset_on_return=self._reset_on_return,
- _dispatch=self.dispatch,
- _dialect=self._dialect)
+ pool_size=self.size,
+ recycle=self._recycle,
+ echo=self.echo,
+ logging_name=self._orig_logging_name,
+ use_threadlocal=self._use_threadlocal,
+ reset_on_return=self._reset_on_return,
+ _dispatch=self.dispatch,
+ _dialect=self._dialect)
def dispose(self):
"""Dispose of this pool."""
def status(self):
return "SingletonThreadPool id:%d size: %d" % \
- (id(self), len(self._all_conns))
+ (id(self), len(self._all_conns))
def _do_return_conn(self, conn):
pass
class QueuePool(Pool):
+
"""A :class:`.Pool` that imposes a limit on the number of open connections.
:class:`.QueuePool` is the default pooling implementation used for
:param timeout: The number of seconds to wait before giving up
on returning a connection. Defaults to 30.
- :param \**kw: Other keyword arguments including :paramref:`.Pool.recycle`,
- :paramref:`.Pool.echo`, :paramref:`.Pool.reset_on_return` and others
- are passed to the :class:`.Pool` constructor.
+ :param \**kw: Other keyword arguments including
+ :paramref:`.Pool.recycle`, :paramref:`.Pool.echo`,
+ :paramref:`.Pool.reset_on_return` and others are passed to the
+ :class:`.Pool` constructor.
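
        As :class:`.QueuePool` is the default pool, these parameters are
        most commonly set through :func:`.create_engine`; a sketch (the
        URL is illustrative)::

            engine = create_engine(
                "postgresql://scott:tiger@localhost/test",
                pool_size=5, max_overflow=10, pool_timeout=30)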
"""
Pool.__init__(self, creator, **kw)
return self._do_get()
else:
raise exc.TimeoutError(
- "QueuePool limit of size %d overflow %d reached, "
- "connection timed out, timeout %d" %
- (self.size(), self.overflow(), self._timeout))
+ "QueuePool limit of size %d overflow %d reached, "
+ "connection timed out, timeout %d" %
+ (self.size(), self.overflow(), self._timeout))
if self._inc_overflow():
try:
def recreate(self):
self.logger.info("Pool recreating")
return self.__class__(self._creator, pool_size=self._pool.maxsize,
- max_overflow=self._max_overflow,
- timeout=self._timeout,
- recycle=self._recycle, echo=self.echo,
- logging_name=self._orig_logging_name,
- use_threadlocal=self._use_threadlocal,
- reset_on_return=self._reset_on_return,
- _dispatch=self.dispatch,
- _dialect=self._dialect)
+ max_overflow=self._max_overflow,
+ timeout=self._timeout,
+ recycle=self._recycle, echo=self.echo,
+ logging_name=self._orig_logging_name,
+ use_threadlocal=self._use_threadlocal,
+ reset_on_return=self._reset_on_return,
+ _dispatch=self.dispatch,
+ _dialect=self._dialect)
def dispose(self):
while True:
def status(self):
return "Pool size: %d Connections in pool: %d "\
- "Current Overflow: %d Current Checked out "\
- "connections: %d" % (self.size(),
- self.checkedin(),
- self.overflow(),
- self.checkedout())
+ "Current Overflow: %d Current Checked out "\
+ "connections: %d" % (self.size(),
+ self.checkedin(),
+ self.overflow(),
+ self.checkedout())
def size(self):
return self._pool.maxsize
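# Editor's hedged sketch: direct QueuePool construction with the parameters
# described in the docstring above; sqlite3 stands in as an arbitrary DB-API
# creator for illustration.
import sqlite3
from sqlalchemy.pool import QueuePool

pool = QueuePool(lambda: sqlite3.connect(":memory:"),
                 pool_size=5, max_overflow=10, timeout=30)
conn = pool.connect()  # checks a connection out
conn.close()           # checkin: returned to the pool, not actually closed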
class NullPool(Pool):
+
"""A Pool which does not pool connections.
Instead it literally opens and closes the underlying DB-API connection
self.logger.info("Pool recreating")
return self.__class__(self._creator,
- recycle=self._recycle,
- echo=self.echo,
- logging_name=self._orig_logging_name,
- use_threadlocal=self._use_threadlocal,
- reset_on_return=self._reset_on_return,
- _dispatch=self.dispatch,
- _dialect=self._dialect)
+ recycle=self._recycle,
+ echo=self.echo,
+ logging_name=self._orig_logging_name,
+ use_threadlocal=self._use_threadlocal,
+ reset_on_return=self._reset_on_return,
+ _dispatch=self.dispatch,
+ _dialect=self._dialect)
def dispose(self):
pass
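# Editor's hedged sketch: with NullPool every checkout runs the creator and
# every checkin closes the DB-API connection, so nothing is actually pooled.
import sqlite3
from sqlalchemy.pool import NullPool

pool = NullPool(lambda: sqlite3.connect(":memory:"))
conn = pool.connect()
conn.close()  # really closes the underlying DB-API connection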
class StaticPool(Pool):
+
"""A Pool of exactly one connection, used for all requests.
Reconnect-related functions such as ``recycle`` and connection
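# Editor's hedged sketch: StaticPool serves the same single connection for
# every request, which is why recycle-style options do not apply to it.
import sqlite3
from sqlalchemy.pool import StaticPool

pool = StaticPool(lambda: sqlite3.connect(":memory:"))
c1 = pool.connect()
raw = c1.connection
c1.close()
c2 = pool.connect()
assert c2.connection is raw  # the one connection is reused for all requests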
class AssertionPool(Pool):
+
"""A :class:`.Pool` that allows at most one checked out connection at
any given time.
this in the assertion error raised.
"""
+
def __init__(self, *args, **kw):
self._conn = None
self._checked_out = False
def recreate(self):
self.logger.info("Pool recreating")
return self.__class__(self._creator, echo=self.echo,
- logging_name=self._orig_logging_name,
- _dispatch=self.dispatch,
- _dialect=self._dialect)
+ logging_name=self._orig_logging_name,
+ _dispatch=self.dispatch,
+ _dialect=self._dialect)
def _do_get(self):
if self._checked_out:
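# Editor's hedged sketch: AssertionPool fails fast on a second concurrent
# checkout, which makes it useful for catching connection leaks in tests.
import sqlite3
from sqlalchemy.pool import AssertionPool

pool = AssertionPool(lambda: sqlite3.connect(":memory:"))
c1 = pool.connect()
try:
    pool.connect()  # second checkout raises AssertionError
except AssertionError:
    pass
c1.close()          # after checkin, a new checkout succeeds again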
class _DBProxy(object):
+
"""Layers connection pooling behavior on top of a standard DB-API module.
Proxies a DB-API 2.0 connect() call to a connection pool keyed to the
try:
if key not in self.pools:
kw.pop('sa_pool_key', None)
- pool = self.poolclass(lambda:
- self.module.connect(*args, **kw), **self.kw)
+ pool = self.poolclass(
+ lambda: self.module.connect(*args, **kw), **self.kw)
self.pools[key] = pool
return pool
else:
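# Editor's hedged sketch: _DBProxy is the object behind sqlalchemy.pool.manage()
# in this era of the codebase; connect() calls with identical arguments are
# routed to the same pool instance.
import sqlite3
from sqlalchemy import pool as sa_pool

sqlite3_proxied = sa_pool.manage(sqlite3, poolclass=sa_pool.QueuePool)
conn = sqlite3_proxied.connect(":memory:")  # pooled DB-API-style connect()
conn.close()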
m = rmatch(value)
except TypeError:
raise ValueError("Couldn't parse %s string '%r' "
- "- value is not a string." %
- (type_.__name__, value))
+ "- value is not a string." %
+ (type_.__name__, value))
if m is None:
raise ValueError("Couldn't parse %s string: "
- "'%s'" % (type_.__name__, value))
+ "'%s'" % (type_.__name__, value))
if has_named_groups:
groups = m.groupdict(0)
- return type_(**dict(list(zip(iter(groups.keys()),
- list(map(int, iter(groups.values())))))))
+        return type_(**{key: int(group)
+                        for key, group in groups.items()})
else:
return type_(*list(map(int, m.groups(0))))
return process
return bool(value)
DATETIME_RE = re.compile(
- "(\d+)-(\d+)-(\d+) (\d+):(\d+):(\d+)(?:\.(\d+))?")
+ "(\d+)-(\d+)-(\d+) (\d+):(\d+):(\d+)(?:\.(\d+))?")
TIME_RE = re.compile("(\d+):(\d+):(\d+)(?:\.(\d+))?")
DATE_RE = re.compile("(\d+)-(\d+)-(\d+)")
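# Editor's hedged illustration of the pure-Python fallback above: in the
# positional branch, the int-converted regex groups are fed straight into the
# target type's constructor.  DATETIME_RE is redefined so the sketch stands
# alone.
import datetime
import re

DATETIME_RE = re.compile(r"(\d+)-(\d+)-(\d+) (\d+):(\d+):(\d+)(?:\.(\d+))?")
m = DATETIME_RE.match("2014-01-02 03:04:05.123456")
value = datetime.datetime(*map(int, m.groups(0)))  # missing groups default to 0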
try:
from sqlalchemy.cprocessors import UnicodeResultProcessor, \
- DecimalResultProcessor, \
- to_float, to_str, int_to_boolean, \
- str_to_datetime, str_to_time, \
- str_to_date
+ DecimalResultProcessor, \
+ to_float, to_str, int_to_boolean, \
+ str_to_datetime, str_to_time, \
+ str_to_date
def to_unicode_processor_factory(encoding, errors=None):
if errors is not None:
"""
__all__ = ['TypeEngine', 'TypeDecorator', 'UserDefinedType',
- 'INT', 'CHAR', 'VARCHAR', 'NCHAR', 'NVARCHAR', 'TEXT', 'Text',
- 'FLOAT', 'NUMERIC', 'REAL', 'DECIMAL', 'TIMESTAMP', 'DATETIME',
- 'CLOB', 'BLOB', 'BINARY', 'VARBINARY', 'BOOLEAN', 'BIGINT',
- 'SMALLINT', 'INTEGER', 'DATE', 'TIME', 'String', 'Integer',
- 'SmallInteger', 'BigInteger', 'Numeric', 'Float', 'DateTime',
- 'Date', 'Time', 'LargeBinary', 'Binary', 'Boolean', 'Unicode',
- 'Concatenable', 'UnicodeText', 'PickleType', 'Interval', 'Enum']
+ 'INT', 'CHAR', 'VARCHAR', 'NCHAR', 'NVARCHAR', 'TEXT', 'Text',
+ 'FLOAT', 'NUMERIC', 'REAL', 'DECIMAL', 'TIMESTAMP', 'DATETIME',
+ 'CLOB', 'BLOB', 'BINARY', 'VARBINARY', 'BOOLEAN', 'BIGINT',
+ 'SMALLINT', 'INTEGER', 'DATE', 'TIME', 'String', 'Integer',
+ 'SmallInteger', 'BigInteger', 'Numeric', 'Float', 'DateTime',
+ 'Date', 'Time', 'LargeBinary', 'Binary', 'Boolean', 'Unicode',
+ 'Concatenable', 'UnicodeText', 'PickleType', 'Interval', 'Enum']
from .sql.type_api import (
adapt_type,
VARCHAR,
_type_map
)
-
ext_modules = [
Extension('sqlalchemy.cprocessors',
- sources=['lib/sqlalchemy/cextension/processors.c']),
+ sources=['lib/sqlalchemy/cextension/processors.c']),
Extension('sqlalchemy.cresultproxy',
- sources=['lib/sqlalchemy/cextension/resultproxy.c']),
+ sources=['lib/sqlalchemy/cextension/resultproxy.c']),
Extension('sqlalchemy.cutils',
- sources=['lib/sqlalchemy/cextension/utils.c'])
+ sources=['lib/sqlalchemy/cextension/utils.c'])
]
ext_errors = (CCompilerError, DistutilsExecError, DistutilsPlatformError)
# find the compiler
ext_errors += (IOError,)
+
class BuildFailed(Exception):
def __init__(self):
self.cause = sys.exc_info()[1] # work around py 2/3 different syntax
+
class ve_build_ext(build_ext):
# This class allows C extension building to fail.
cmdclass['build_ext'] = ve_build_ext
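# Editor's hedged sketch of the optional-C-extension pattern: a build_ext
# subclass converts compiler errors into BuildFailed so setup() can be retried
# as a plain-Python build.  Method bodies are inferred from the fragments
# above, not copied verbatim from the patch.
from distutils.command.build_ext import build_ext as _build_ext
from distutils.errors import (CCompilerError, DistutilsExecError,
                              DistutilsPlatformError)

class _ve_build_ext_sketch(_build_ext):
    def run(self):
        try:
            _build_ext.run(self)
        except DistutilsPlatformError:
            raise BuildFailed()  # BuildFailed as defined above

    def build_extension(self, ext):
        try:
            _build_ext.build_extension(self, ext)
        except (CCompilerError, DistutilsExecError,
                DistutilsPlatformError):
            raise BuildFailed()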
+
def status_msgs(*msgs):
print('*' * 75)
for msg in msgs:
print(msg)
print('*' * 75)
+
def find_packages(location):
packages = []
for pkg in ['sqlalchemy']:
return packages
v_file = open(os.path.join(os.path.dirname(__file__),
- 'lib', 'sqlalchemy', '__init__.py'))
+ 'lib', 'sqlalchemy', '__init__.py'))
VERSION = re.compile(r".*__version__ = '(.*?)'",
re.S).match(v_file.read()).group(1)
v_file.close()
if with_cext:
if has_feature:
kwargs['features'] = {'cextensions': Feature(
- "optional C speed-enhancements",
- standard=True,
- ext_modules=ext_modules
- )}
+ "optional C speed-enhancements",
+ standard=True,
+ ext_modules=ext_modules
+ )}
else:
kwargs['ext_modules'] = ext_modules
setup(name="SQLAlchemy",
- version=VERSION,
- description="Database Abstraction Library",
- author="Mike Bayer",
- author_email="mike_mp@zzzcomputing.com",
- url="http://www.sqlalchemy.org",
- packages=find_packages('lib'),
- package_dir={'': 'lib'},
- license="MIT License",
- cmdclass=cmdclass,
- tests_require=['pytest >= 2.5.2', 'mock'],
- test_suite="sqlalchemy.testing.distutils_run",
- long_description=readme,
- classifiers=[
- "Development Status :: 5 - Production/Stable",
- "Intended Audience :: Developers",
- "License :: OSI Approved :: MIT License",
- "Programming Language :: Python",
- "Programming Language :: Python :: 3",
- "Programming Language :: Python :: Implementation :: CPython",
- "Programming Language :: Python :: Implementation :: Jython",
- "Programming Language :: Python :: Implementation :: PyPy",
- "Topic :: Database :: Front-Ends",
- "Operating System :: OS Independent",
- ],
- **kwargs
+ version=VERSION,
+ description="Database Abstraction Library",
+ author="Mike Bayer",
+ author_email="mike_mp@zzzcomputing.com",
+ url="http://www.sqlalchemy.org",
+ packages=find_packages('lib'),
+ package_dir={'': 'lib'},
+ license="MIT License",
+ cmdclass=cmdclass,
+ tests_require=['pytest >= 2.5.2', 'mock'],
+ test_suite="sqlalchemy.testing.distutils_run",
+ long_description=readme,
+ classifiers=[
+ "Development Status :: 5 - Production/Stable",
+ "Intended Audience :: Developers",
+ "License :: OSI Approved :: MIT License",
+ "Programming Language :: Python",
+ "Programming Language :: Python :: 3",
+ "Programming Language :: Python :: Implementation :: CPython",
+ "Programming Language :: Python :: Implementation :: Jython",
+ "Programming Language :: Python :: Implementation :: PyPy",
+ "Topic :: Database :: Front-Ends",
+ "Operating System :: OS Independent",
+ ],
+ **kwargs
)
if not cpython:
run_setup(False)
status_msgs(
"WARNING: C extensions are not supported on " +
- "this Python platform, speedups are not enabled.",
+ "this Python platform, speedups are not enabled.",
"Plain-Python build succeeded."
)
elif os.environ.get('DISABLE_SQLALCHEMY_CEXT'):
run_setup(False)
status_msgs(
- "DISABLE_SQLALCHEMY_CEXT is set; not attempting to build C extensions.",
+ "DISABLE_SQLALCHEMY_CEXT is set; " +
+ "not attempting to build C extensions.",
"Plain-Python build succeeded."
)
status_msgs(
exc.cause,
"WARNING: The C extension could not be compiled, " +
- "speedups are not enabled.",
+ "speedups are not enabled.",
"Failure information, if any, is above.",
"Retrying the build without the C extension now."
)
status_msgs(
"WARNING: The C extension could not be compiled, " +
- "speedups are not enabled.",
+ "speedups are not enabled.",
"Plain-Python build succeeded."
)
# installing without importing SQLAlchemy, so that coverage includes
# SQLAlchemy itself.
path = "lib/sqlalchemy/testing/plugin/noseplugin.py"
-if sys.version_info >= (3,3):
+if sys.version_info >= (3, 3):
from importlib import machinery
noseplugin = machinery.SourceFileLoader("noseplugin", path).load_module()
else:
    # Python 2: imp.load_source is the pre-importlib way to load a module
    # from an explicit path (hedged completion of the truncated branch,
    # inferred from the Python 3 path above).
    import imp
    noseplugin = imp.load_source("noseplugin", path)